parent
781e0b24c8
commit
b3e68cf3fb
@ -0,0 +1,3 @@
|
||||
bea0ff6e05a4de73a5db625d4ae181a015b50855:frontend/components/utilities/attemptLogin.js:stripe-access-token:147
|
||||
bea0ff6e05a4de73a5db625d4ae181a015b50855:backend/src/json/integrations.json:generic-api-key:5
|
||||
1961b92340e5d2613acae528b886c842427ce5d0:frontend/components/utilities/attemptLogin.js:stripe-access-token:148
|
@ -0,0 +1,85 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Allowlist lets a rule be skipped for particular regexes, paths,
// commits, and stop words.
type Allowlist struct {
	// Description is a short, human readable summary of this allowlist.
	Description string

	// Regexes holds content regular expressions whose matches are ignored.
	Regexes []*regexp.Regexp

	// RegexTarget selects what the Regexes are applied against.
	RegexTarget string

	// Paths holds path regular expressions whose matches are ignored.
	Paths []*regexp.Regexp

	// Commits holds commit SHAs that are ignored outright.
	Commits []string

	// StopWords holds substrings checked against the _secret_ itself,
	// unlike Regexes which target the content of the regex match.
	StopWords []string
}

// CommitAllowed reports whether commit c is covered by this allowlist.
// The empty string is never considered allowed.
func (a *Allowlist) CommitAllowed(c string) bool {
	if c == "" {
		return false
	}
	for _, allowed := range a.Commits {
		if allowed == c {
			return true
		}
	}
	return false
}
|
||||
|
||||
// PathAllowed returns true if the path is allowed to be ignored.
|
||||
func (a *Allowlist) PathAllowed(path string) bool {
|
||||
return anyRegexMatch(path, a.Paths)
|
||||
}
|
||||
|
||||
// RegexAllowed returns true if the regex is allowed to be ignored.
|
||||
func (a *Allowlist) RegexAllowed(s string) bool {
|
||||
return anyRegexMatch(s, a.Regexes)
|
||||
}
|
||||
|
||||
func (a *Allowlist) ContainsStopWord(s string) bool {
|
||||
s = strings.ToLower(s)
|
||||
for _, stopWord := range a.StopWords {
|
||||
if strings.Contains(s, strings.ToLower(stopWord)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
@ -0,0 +1,115 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestCommitAllowed(t *testing.T) {
|
||||
tests := []struct {
|
||||
allowlist Allowlist
|
||||
commit string
|
||||
commitAllowed bool
|
||||
}{
|
||||
{
|
||||
allowlist: Allowlist{
|
||||
Commits: []string{"commitA"},
|
||||
},
|
||||
commit: "commitA",
|
||||
commitAllowed: true,
|
||||
},
|
||||
{
|
||||
allowlist: Allowlist{
|
||||
Commits: []string{"commitB"},
|
||||
},
|
||||
commit: "commitA",
|
||||
commitAllowed: false,
|
||||
},
|
||||
{
|
||||
allowlist: Allowlist{
|
||||
Commits: []string{"commitB"},
|
||||
},
|
||||
commit: "",
|
||||
commitAllowed: false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
assert.Equal(t, tt.commitAllowed, tt.allowlist.CommitAllowed(tt.commit))
|
||||
}
|
||||
}
|
||||
|
||||
func TestRegexAllowed(t *testing.T) {
|
||||
tests := []struct {
|
||||
allowlist Allowlist
|
||||
secret string
|
||||
regexAllowed bool
|
||||
}{
|
||||
{
|
||||
allowlist: Allowlist{
|
||||
Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
|
||||
},
|
||||
secret: "a secret: matchthis, done",
|
||||
regexAllowed: true,
|
||||
},
|
||||
{
|
||||
allowlist: Allowlist{
|
||||
Regexes: []*regexp.Regexp{regexp.MustCompile("matchthis")},
|
||||
},
|
||||
secret: "a secret",
|
||||
regexAllowed: false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
assert.Equal(t, tt.regexAllowed, tt.allowlist.RegexAllowed(tt.secret))
|
||||
}
|
||||
}
|
||||
|
||||
func TestPathAllowed(t *testing.T) {
|
||||
tests := []struct {
|
||||
allowlist Allowlist
|
||||
path string
|
||||
pathAllowed bool
|
||||
}{
|
||||
{
|
||||
allowlist: Allowlist{
|
||||
Paths: []*regexp.Regexp{regexp.MustCompile("path")},
|
||||
},
|
||||
path: "a path",
|
||||
pathAllowed: true,
|
||||
},
|
||||
{
|
||||
allowlist: Allowlist{
|
||||
Paths: []*regexp.Regexp{regexp.MustCompile("path")},
|
||||
},
|
||||
path: "a ???",
|
||||
pathAllowed: false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
assert.Equal(t, tt.pathAllowed, tt.allowlist.PathAllowed(tt.path))
|
||||
}
|
||||
}
|
@ -0,0 +1,275 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
// DefaultConfig is the embedded default gitleaks ruleset (TOML).
//
//go:embed gitleaks.toml
var DefaultConfig string

// extendDepth tracks how many levels of config extension have been
// performed. NOTE: this is mutable package-level state shared by all
// Translate calls.
var extendDepth int

// maxExtendDepth caps how deep config extension may recurse.
const maxExtendDepth = 2
|
||||
|
||||
// ViperConfig is the config struct used by the Viper config package
|
||||
// to parse the config file. This struct does not include regular expressions.
|
||||
// It is used as an intermediary to convert the Viper config to the Config struct.
|
||||
type ViperConfig struct {
|
||||
Description string
|
||||
Extend Extend
|
||||
Rules []struct {
|
||||
ID string
|
||||
Description string
|
||||
Entropy float64
|
||||
SecretGroup int
|
||||
Regex string
|
||||
Keywords []string
|
||||
Path string
|
||||
Tags []string
|
||||
|
||||
Allowlist struct {
|
||||
RegexTarget string
|
||||
Regexes []string
|
||||
Paths []string
|
||||
Commits []string
|
||||
StopWords []string
|
||||
}
|
||||
}
|
||||
Allowlist struct {
|
||||
RegexTarget string
|
||||
Regexes []string
|
||||
Paths []string
|
||||
Commits []string
|
||||
StopWords []string
|
||||
}
|
||||
}
|
||||
|
||||
// Config is a configuration struct that contains rules and an allowlist if present.
|
||||
type Config struct {
|
||||
Extend Extend
|
||||
Path string
|
||||
Description string
|
||||
Rules map[string]Rule
|
||||
Allowlist Allowlist
|
||||
Keywords []string
|
||||
|
||||
// used to keep sarif results consistent
|
||||
orderedRules []string
|
||||
}
|
||||
|
||||
// Extend describes how a configuration should be extended by another
// configuration: either the embedded default config (UseDefault) or a
// file at Path. URL-based extension is declared but not implemented.
type Extend struct {
	Path       string
	URL        string
	UseDefault bool
}
|
||||
|
||||
func (vc *ViperConfig) Translate() (Config, error) {
|
||||
var (
|
||||
keywords []string
|
||||
orderedRules []string
|
||||
)
|
||||
rulesMap := make(map[string]Rule)
|
||||
|
||||
for _, r := range vc.Rules {
|
||||
var allowlistRegexes []*regexp.Regexp
|
||||
for _, a := range r.Allowlist.Regexes {
|
||||
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
|
||||
}
|
||||
var allowlistPaths []*regexp.Regexp
|
||||
for _, a := range r.Allowlist.Paths {
|
||||
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
|
||||
}
|
||||
|
||||
if r.Keywords == nil {
|
||||
r.Keywords = []string{}
|
||||
} else {
|
||||
for _, k := range r.Keywords {
|
||||
keywords = append(keywords, strings.ToLower(k))
|
||||
}
|
||||
}
|
||||
|
||||
if r.Tags == nil {
|
||||
r.Tags = []string{}
|
||||
}
|
||||
|
||||
var configRegex *regexp.Regexp
|
||||
var configPathRegex *regexp.Regexp
|
||||
if r.Regex == "" {
|
||||
configRegex = nil
|
||||
} else {
|
||||
configRegex = regexp.MustCompile(r.Regex)
|
||||
}
|
||||
if r.Path == "" {
|
||||
configPathRegex = nil
|
||||
} else {
|
||||
configPathRegex = regexp.MustCompile(r.Path)
|
||||
}
|
||||
r := Rule{
|
||||
Description: r.Description,
|
||||
RuleID: r.ID,
|
||||
Regex: configRegex,
|
||||
Path: configPathRegex,
|
||||
SecretGroup: r.SecretGroup,
|
||||
Entropy: r.Entropy,
|
||||
Tags: r.Tags,
|
||||
Keywords: r.Keywords,
|
||||
Allowlist: Allowlist{
|
||||
RegexTarget: r.Allowlist.RegexTarget,
|
||||
Regexes: allowlistRegexes,
|
||||
Paths: allowlistPaths,
|
||||
Commits: r.Allowlist.Commits,
|
||||
StopWords: r.Allowlist.StopWords,
|
||||
},
|
||||
}
|
||||
orderedRules = append(orderedRules, r.RuleID)
|
||||
|
||||
if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
|
||||
return Config{}, fmt.Errorf("%s invalid regex secret group %d, max regex secret group %d", r.Description, r.SecretGroup, r.Regex.NumSubexp())
|
||||
}
|
||||
rulesMap[r.RuleID] = r
|
||||
}
|
||||
var allowlistRegexes []*regexp.Regexp
|
||||
for _, a := range vc.Allowlist.Regexes {
|
||||
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
|
||||
}
|
||||
var allowlistPaths []*regexp.Regexp
|
||||
for _, a := range vc.Allowlist.Paths {
|
||||
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
|
||||
}
|
||||
c := Config{
|
||||
Description: vc.Description,
|
||||
Extend: vc.Extend,
|
||||
Rules: rulesMap,
|
||||
Allowlist: Allowlist{
|
||||
RegexTarget: vc.Allowlist.RegexTarget,
|
||||
Regexes: allowlistRegexes,
|
||||
Paths: allowlistPaths,
|
||||
Commits: vc.Allowlist.Commits,
|
||||
StopWords: vc.Allowlist.StopWords,
|
||||
},
|
||||
Keywords: keywords,
|
||||
orderedRules: orderedRules,
|
||||
}
|
||||
|
||||
if maxExtendDepth != extendDepth {
|
||||
// disallow both usedefault and path from being set
|
||||
if c.Extend.Path != "" && c.Extend.UseDefault {
|
||||
log.Fatal().Msg("unable to load config due to extend.path and extend.useDefault being set")
|
||||
}
|
||||
if c.Extend.UseDefault {
|
||||
c.extendDefault()
|
||||
} else if c.Extend.Path != "" {
|
||||
c.extendPath()
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return c, nil
|
||||
}
|
||||
|
||||
func (c *Config) OrderedRules() []Rule {
|
||||
var orderedRules []Rule
|
||||
for _, id := range c.orderedRules {
|
||||
if _, ok := c.Rules[id]; ok {
|
||||
orderedRules = append(orderedRules, c.Rules[id])
|
||||
}
|
||||
}
|
||||
return orderedRules
|
||||
}
|
||||
|
||||
func (c *Config) extendDefault() {
|
||||
extendDepth++
|
||||
viper.SetConfigType("toml")
|
||||
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
|
||||
log.Fatal().Msgf("failed to load extended config, err: %s", err)
|
||||
return
|
||||
}
|
||||
defaultViperConfig := ViperConfig{}
|
||||
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
|
||||
log.Fatal().Msgf("failed to load extended config, err: %s", err)
|
||||
return
|
||||
}
|
||||
cfg, err := defaultViperConfig.Translate()
|
||||
if err != nil {
|
||||
log.Fatal().Msgf("failed to load extended config, err: %s", err)
|
||||
return
|
||||
}
|
||||
log.Debug().Msg("extending config with default config")
|
||||
c.extend(cfg)
|
||||
|
||||
}
|
||||
|
||||
func (c *Config) extendPath() {
|
||||
extendDepth++
|
||||
viper.SetConfigFile(c.Extend.Path)
|
||||
if err := viper.ReadInConfig(); err != nil {
|
||||
log.Fatal().Msgf("failed to load extended config, err: %s", err)
|
||||
return
|
||||
}
|
||||
extensionViperConfig := ViperConfig{}
|
||||
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
|
||||
log.Fatal().Msgf("failed to load extended config, err: %s", err)
|
||||
return
|
||||
}
|
||||
cfg, err := extensionViperConfig.Translate()
|
||||
if err != nil {
|
||||
log.Fatal().Msgf("failed to load extended config, err: %s", err)
|
||||
return
|
||||
}
|
||||
log.Debug().Msgf("extending config with %s", c.Extend.Path)
|
||||
c.extend(cfg)
|
||||
}
|
||||
|
||||
func (c *Config) extendURL() {
|
||||
// TODO
|
||||
}
|
||||
|
||||
func (c *Config) extend(extensionConfig Config) {
|
||||
for ruleID, rule := range extensionConfig.Rules {
|
||||
if _, ok := c.Rules[ruleID]; !ok {
|
||||
log.Trace().Msgf("adding %s to base config", ruleID)
|
||||
c.Rules[ruleID] = rule
|
||||
c.Keywords = append(c.Keywords, rule.Keywords...)
|
||||
}
|
||||
}
|
||||
|
||||
// append allowlists, not attempting to merge
|
||||
c.Allowlist.Commits = append(c.Allowlist.Commits,
|
||||
extensionConfig.Allowlist.Commits...)
|
||||
c.Allowlist.Paths = append(c.Allowlist.Paths,
|
||||
extensionConfig.Allowlist.Paths...)
|
||||
c.Allowlist.Regexes = append(c.Allowlist.Regexes,
|
||||
extensionConfig.Allowlist.Regexes...)
|
||||
}
|
@ -0,0 +1,170 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
const configPath = "../testdata/config/"
|
||||
|
||||
func TestTranslate(t *testing.T) {
|
||||
tests := []struct {
|
||||
cfgName string
|
||||
cfg Config
|
||||
wantError error
|
||||
}{
|
||||
{
|
||||
cfgName: "allow_aws_re",
|
||||
cfg: Config{
|
||||
Rules: map[string]Rule{"aws-access-key": {
|
||||
Description: "AWS Access Key",
|
||||
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
|
||||
Tags: []string{"key", "AWS"},
|
||||
Keywords: []string{},
|
||||
RuleID: "aws-access-key",
|
||||
Allowlist: Allowlist{
|
||||
Regexes: []*regexp.Regexp{
|
||||
regexp.MustCompile("AKIALALEMEL33243OLIA"),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
cfgName: "allow_commit",
|
||||
cfg: Config{
|
||||
Rules: map[string]Rule{"aws-access-key": {
|
||||
Description: "AWS Access Key",
|
||||
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
|
||||
Tags: []string{"key", "AWS"},
|
||||
Keywords: []string{},
|
||||
RuleID: "aws-access-key",
|
||||
Allowlist: Allowlist{
|
||||
Commits: []string{"allowthiscommit"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
cfgName: "allow_path",
|
||||
cfg: Config{
|
||||
Rules: map[string]Rule{"aws-access-key": {
|
||||
Description: "AWS Access Key",
|
||||
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
|
||||
Tags: []string{"key", "AWS"},
|
||||
Keywords: []string{},
|
||||
RuleID: "aws-access-key",
|
||||
Allowlist: Allowlist{
|
||||
Paths: []*regexp.Regexp{
|
||||
regexp.MustCompile(".go"),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
cfgName: "entropy_group",
|
||||
cfg: Config{
|
||||
Rules: map[string]Rule{"discord-api-key": {
|
||||
Description: "Discord API key",
|
||||
Regex: regexp.MustCompile(`(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{64})['\"]`),
|
||||
RuleID: "discord-api-key",
|
||||
Allowlist: Allowlist{},
|
||||
Entropy: 3.5,
|
||||
SecretGroup: 3,
|
||||
Tags: []string{},
|
||||
Keywords: []string{},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
cfgName: "bad_entropy_group",
|
||||
cfg: Config{},
|
||||
wantError: fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
|
||||
},
|
||||
{
|
||||
cfgName: "base",
|
||||
cfg: Config{
|
||||
Rules: map[string]Rule{
|
||||
"aws-access-key": {
|
||||
Description: "AWS Access Key",
|
||||
Regex: regexp.MustCompile("(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"),
|
||||
Tags: []string{"key", "AWS"},
|
||||
Keywords: []string{},
|
||||
RuleID: "aws-access-key",
|
||||
},
|
||||
"aws-secret-key": {
|
||||
Description: "AWS Secret Key",
|
||||
Regex: regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
|
||||
Tags: []string{"key", "AWS"},
|
||||
Keywords: []string{},
|
||||
RuleID: "aws-secret-key",
|
||||
},
|
||||
"aws-secret-key-again": {
|
||||
Description: "AWS Secret Key",
|
||||
Regex: regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
|
||||
Tags: []string{"key", "AWS"},
|
||||
Keywords: []string{},
|
||||
RuleID: "aws-secret-key-again",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
viper.Reset()
|
||||
viper.AddConfigPath(configPath)
|
||||
viper.SetConfigName(tt.cfgName)
|
||||
viper.SetConfigType("toml")
|
||||
err := viper.ReadInConfig()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
var vc ViperConfig
|
||||
err = viper.Unmarshal(&vc)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
cfg, err := vc.Translate()
|
||||
if tt.wantError != nil {
|
||||
if err == nil {
|
||||
t.Errorf("expected error")
|
||||
}
|
||||
assert.Equal(t, tt.wantError, err)
|
||||
}
|
||||
|
||||
assert.Equal(t, cfg.Rules, tt.cfg.Rules)
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,43 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
)
|
||||
|
||||
// Rules contain information that define details on how to detect secrets
|
||||
type Rule struct {
|
||||
// Description is the description of the rule.
|
||||
Description string
|
||||
|
||||
// RuleID is a unique identifier for this rule
|
||||
RuleID string
|
||||
|
||||
// Entropy is a float representing the minimum shannon
|
||||
// entropy a regex group must have to be considered a secret.
|
||||
Entropy float64
|
||||
|
||||
// SecretGroup is an int used to extract secret from regex
|
||||
// match and used as the group that will have its entropy
|
||||
// checked if `entropy` is set.
|
||||
SecretGroup int
|
||||
|
||||
// Regex is a golang regular expression used to detect secrets.
|
||||
Regex *regexp.Regexp
|
||||
|
||||
// Path is a golang regular expression used to
|
||||
// filter secrets by path
|
||||
Path *regexp.Regexp
|
||||
|
||||
// Tags is an array of strings used for metadata
|
||||
// and reporting purposes.
|
||||
Tags []string
|
||||
|
||||
// Keywords are used for pre-regex check filtering. Rules that contain
|
||||
// keywords will perform a quick string compare check to make sure the
|
||||
// keyword(s) are in the content being scanned.
|
||||
Keywords []string
|
||||
|
||||
// Allowlist allows a rule to be ignored for specific
|
||||
// regexes, paths, and/or commits
|
||||
Allowlist Allowlist
|
||||
}
|
@ -0,0 +1,24 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
)
|
||||
|
||||
func anyRegexMatch(f string, res []*regexp.Regexp) bool {
|
||||
for _, re := range res {
|
||||
if regexMatched(f, re) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func regexMatched(f string, re *regexp.Regexp) bool {
|
||||
if re == nil {
|
||||
return false
|
||||
}
|
||||
if re.FindString(f) != "" {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
@ -0,0 +1,87 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package detect
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
|
||||
"github.com/rs/zerolog/log"
|
||||
|
||||
"github.com/Infisical/infisical-merge/report"
|
||||
)
|
||||
|
||||
func IsNew(finding report.Finding, baseline []report.Finding) bool {
|
||||
// Explicitly testing each property as it gives significantly better performance in comparison to cmp.Equal(). Drawback is that
|
||||
// the code requires maintanance if/when the Finding struct changes
|
||||
for _, b := range baseline {
|
||||
|
||||
if finding.Author == b.Author &&
|
||||
finding.Commit == b.Commit &&
|
||||
finding.Date == b.Date &&
|
||||
finding.Description == b.Description &&
|
||||
finding.Email == b.Email &&
|
||||
finding.EndColumn == b.EndColumn &&
|
||||
finding.EndLine == b.EndLine &&
|
||||
finding.Entropy == b.Entropy &&
|
||||
finding.File == b.File &&
|
||||
// Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
|
||||
finding.Match == b.Match &&
|
||||
finding.Message == b.Message &&
|
||||
finding.RuleID == b.RuleID &&
|
||||
finding.Secret == b.Secret &&
|
||||
finding.StartColumn == b.StartColumn &&
|
||||
finding.StartLine == b.StartLine {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func LoadBaseline(baselinePath string) ([]report.Finding, error) {
|
||||
var previousFindings []report.Finding
|
||||
jsonFile, err := os.Open(baselinePath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("could not open %s", baselinePath)
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if cerr := jsonFile.Close(); cerr != nil {
|
||||
log.Warn().Err(cerr).Msg("problem closing jsonFile handle")
|
||||
}
|
||||
}()
|
||||
|
||||
bytes, err := io.ReadAll(jsonFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("could not read data from the file %s", baselinePath)
|
||||
}
|
||||
|
||||
err = json.Unmarshal(bytes, &previousFindings)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("the format of the file %s is not supported", baselinePath)
|
||||
}
|
||||
|
||||
return previousFindings, nil
|
||||
}
|
@ -0,0 +1,160 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package detect
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/Infisical/infisical-merge/report"
|
||||
)
|
||||
|
||||
func TestIsNew(t *testing.T) {
|
||||
tests := []struct {
|
||||
findings report.Finding
|
||||
baseline []report.Finding
|
||||
expect bool
|
||||
}{
|
||||
{
|
||||
findings: report.Finding{
|
||||
Author: "a",
|
||||
Commit: "0000",
|
||||
},
|
||||
baseline: []report.Finding{
|
||||
{
|
||||
Author: "a",
|
||||
Commit: "0000",
|
||||
},
|
||||
},
|
||||
expect: false,
|
||||
},
|
||||
{
|
||||
findings: report.Finding{
|
||||
Author: "a",
|
||||
Commit: "0000",
|
||||
},
|
||||
baseline: []report.Finding{
|
||||
{
|
||||
Author: "a",
|
||||
Commit: "0002",
|
||||
},
|
||||
},
|
||||
expect: true,
|
||||
},
|
||||
{
|
||||
findings: report.Finding{
|
||||
Author: "a",
|
||||
Commit: "0000",
|
||||
Tags: []string{"a", "b"},
|
||||
},
|
||||
baseline: []report.Finding{
|
||||
{
|
||||
Author: "a",
|
||||
Commit: "0000",
|
||||
Tags: []string{"a", "c"},
|
||||
},
|
||||
},
|
||||
expect: false, // Updated tags doesn't make it a new finding
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
assert.Equal(t, test.expect, IsNew(test.findings, test.baseline))
|
||||
}
|
||||
}
|
||||
|
||||
func TestFileLoadBaseline(t *testing.T) {
|
||||
tests := []struct {
|
||||
Filename string
|
||||
ExpectedError error
|
||||
}{
|
||||
{
|
||||
Filename: "../testdata/baseline/baseline.csv",
|
||||
ExpectedError: errors.New("the format of the file ../testdata/baseline/baseline.csv is not supported"),
|
||||
},
|
||||
{
|
||||
Filename: "../testdata/baseline/baseline.sarif",
|
||||
ExpectedError: errors.New("the format of the file ../testdata/baseline/baseline.sarif is not supported"),
|
||||
},
|
||||
{
|
||||
Filename: "../testdata/baseline/notfound.json",
|
||||
ExpectedError: errors.New("could not open ../testdata/baseline/notfound.json"),
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
_, err := LoadBaseline(test.Filename)
|
||||
assert.Equal(t, test.ExpectedError.Error(), err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func TestIgnoreIssuesInBaseline(t *testing.T) {
|
||||
tests := []struct {
|
||||
findings []report.Finding
|
||||
baseline []report.Finding
|
||||
expectCount int
|
||||
}{
|
||||
{
|
||||
findings: []report.Finding{
|
||||
{
|
||||
Author: "a",
|
||||
Commit: "5",
|
||||
},
|
||||
},
|
||||
baseline: []report.Finding{
|
||||
{
|
||||
Author: "a",
|
||||
Commit: "5",
|
||||
},
|
||||
},
|
||||
expectCount: 0,
|
||||
},
|
||||
{
|
||||
findings: []report.Finding{
|
||||
{
|
||||
Author: "a",
|
||||
Commit: "5",
|
||||
Fingerprint: "a",
|
||||
},
|
||||
},
|
||||
baseline: []report.Finding{
|
||||
{
|
||||
Author: "a",
|
||||
Commit: "5",
|
||||
Fingerprint: "b",
|
||||
},
|
||||
},
|
||||
expectCount: 0,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
d, _ := NewDetectorDefaultConfig()
|
||||
d.baseline = test.baseline
|
||||
for _, finding := range test.findings {
|
||||
d.addFinding(finding)
|
||||
}
|
||||
assert.Equal(t, test.expectCount, len(d.findings))
|
||||
}
|
||||
}
|
@ -0,0 +1,652 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package detect
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/h2non/filetype"
|
||||
|
||||
"github.com/Infisical/infisical-merge/config"
|
||||
"github.com/Infisical/infisical-merge/detect/git"
|
||||
"github.com/Infisical/infisical-merge/report"
|
||||
|
||||
"github.com/fatih/semgroup"
|
||||
"github.com/gitleaks/go-gitdiff/gitdiff"
|
||||
ahocorasick "github.com/petar-dambovaliev/aho-corasick"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
// Type used to differentiate between git scan types:
// $ gitleaks detect
// $ gitleaks protect
// $ gitleaks protect staged
type GitScanType int

const (
	// DetectType scans a repo's git history (driven by `git log -p`, see DetectGit).
	DetectType GitScanType = iota
	// ProtectType scans uncommitted changes (`git diff`).
	ProtectType
	// ProtectStagedType scans staged changes (`git diff --staged`).
	ProtectStagedType

	// gitleaksAllowSignature is the inline marker (e.g. `// gitleaks:allow`)
	// that exempts the line containing it from detection; see detectRule.
	gitleaksAllowSignature = "gitleaks:allow"
)
|
||||
|
||||
// Detector is the main detector struct
type Detector struct {
	// Config is the configuration for the detector
	Config config.Config

	// Redact is a flag to redact findings. This is exported
	// so users using gitleaks as a library can set this flag
	// without calling `detector.Start(cmd *cobra.Command)`
	Redact bool

	// Verbose is a flag to print findings as they are added
	Verbose bool

	// MaxTargetMegaBytes: files larger than this will be skipped
	// (0 disables the limit; enforced in detectRule)
	MaxTargetMegaBytes int

	// FollowSymlinks is a flag to enable scanning symlink files
	FollowSymlinks bool

	// NoColor is a flag to disable color output
	NoColor bool

	// commitMap is used to keep track of commits that have been scanned.
	// This is only used for logging purposes and git scans.
	commitMap map[string]bool

	// findingMutex is to prevent concurrent access to the
	// findings slice when adding findings.
	findingMutex *sync.Mutex

	// findings is a slice of report.Findings. This is the result
	// of the detector's scan which can then be used to generate a
	// report.
	findings []report.Finding

	// prefilter is a ahocorasick struct used for doing efficient string
	// matching given a set of words (keywords from the rules in the config)
	prefilter ahocorasick.AhoCorasick

	// baseline is a list of known findings that should be ignored
	baseline []report.Finding

	// baselinePath is the path to the baseline report; Detect skips this
	// file so the detector does not flag its own previous output
	baselinePath string

	// gitleaksIgnore holds finding fingerprints loaded from a
	// .gitleaksignore file; matching findings are dropped in addFinding
	gitleaksIgnore map[string]bool
}
|
||||
|
||||
// Fragment contains the data to be scanned
type Fragment struct {
	// Raw is the raw content of the fragment
	Raw string

	// FilePath is the path to the file if applicable
	FilePath string
	// SymlinkFile is the symlink path that pointed at FilePath,
	// set only when scanning followed a symlink (see DetectFiles)
	SymlinkFile string

	// CommitSHA is the SHA of the commit if applicable
	CommitSHA string

	// newlineIndices is a list of indices of newlines in the raw content.
	// This is used to calculate the line location of a finding
	newlineIndices [][]int

	// keywords is a map of all the keywords contain within the contents
	// of this fragment (lowercased; populated by Detect via the prefilter)
	keywords map[string]bool
}
|
||||
|
||||
// NewDetector creates a new detector with the given config
|
||||
func NewDetector(cfg config.Config) *Detector {
|
||||
builder := ahocorasick.NewAhoCorasickBuilder(ahocorasick.Opts{
|
||||
AsciiCaseInsensitive: true,
|
||||
MatchOnlyWholeWords: false,
|
||||
MatchKind: ahocorasick.LeftMostLongestMatch,
|
||||
DFA: true,
|
||||
})
|
||||
|
||||
return &Detector{
|
||||
commitMap: make(map[string]bool),
|
||||
gitleaksIgnore: make(map[string]bool),
|
||||
findingMutex: &sync.Mutex{},
|
||||
findings: make([]report.Finding, 0),
|
||||
Config: cfg,
|
||||
prefilter: builder.Build(cfg.Keywords),
|
||||
}
|
||||
}
|
||||
|
||||
// NewDetectorDefaultConfig creates a new detector with the default config
|
||||
func NewDetectorDefaultConfig() (*Detector, error) {
|
||||
viper.SetConfigType("toml")
|
||||
err := viper.ReadConfig(strings.NewReader(config.DefaultConfig))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var vc config.ViperConfig
|
||||
err = viper.Unmarshal(&vc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cfg, err := vc.Translate()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return NewDetector(cfg), nil
|
||||
}
|
||||
|
||||
func (d *Detector) AddGitleaksIgnore(gitleaksIgnorePath string) error {
|
||||
log.Debug().Msg("found .gitleaksignore file")
|
||||
file, err := os.Open(gitleaksIgnorePath)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// https://github.com/securego/gosec/issues/512
|
||||
defer func() {
|
||||
if err := file.Close(); err != nil {
|
||||
log.Warn().Msgf("Error closing .gitleaksignore file: %s\n", err)
|
||||
}
|
||||
}()
|
||||
scanner := bufio.NewScanner(file)
|
||||
|
||||
for scanner.Scan() {
|
||||
d.gitleaksIgnore[scanner.Text()] = true
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Detector) AddBaseline(baselinePath string, source string) error {
|
||||
if baselinePath != "" {
|
||||
absoluteSource, err := filepath.Abs(source)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
absoluteBaseline, err := filepath.Abs(baselinePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
relativeBaseline, err := filepath.Rel(absoluteSource, absoluteBaseline)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
baseline, err := LoadBaseline(baselinePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
d.baseline = baseline
|
||||
baselinePath = relativeBaseline
|
||||
|
||||
}
|
||||
|
||||
d.baselinePath = baselinePath
|
||||
return nil
|
||||
}
|
||||
|
||||
// DetectBytes scans the given bytes and returns a list of findings
|
||||
func (d *Detector) DetectBytes(content []byte) []report.Finding {
|
||||
return d.DetectString(string(content))
|
||||
}
|
||||
|
||||
// DetectString scans the given string and returns a list of findings
|
||||
func (d *Detector) DetectString(content string) []report.Finding {
|
||||
return d.Detect(Fragment{
|
||||
Raw: content,
|
||||
})
|
||||
}
|
||||
|
||||
// detectRule scans the given fragment for the given rule and returns a list of findings.
// A match survives only if it passes, in order: the rule allowlist, the
// gitleaks:allow marker check, secret-group extraction, rule and global
// regex allowlists, stop words, and the rule's entropy threshold.
func (d *Detector) detectRule(fragment Fragment, rule config.Rule) []report.Finding {
	var findings []report.Finding

	// check if filepath or commit is allowed for this rule
	if rule.Allowlist.CommitAllowed(fragment.CommitSHA) ||
		rule.Allowlist.PathAllowed(fragment.FilePath) {
		return findings
	}

	if rule.Path != nil && rule.Regex == nil {
		// Path _only_ rule: the file path itself is the finding
		if rule.Path.Match([]byte(fragment.FilePath)) {
			finding := report.Finding{
				Description: rule.Description,
				File:        fragment.FilePath,
				SymlinkFile: fragment.SymlinkFile,
				RuleID:      rule.RuleID,
				Match:       fmt.Sprintf("file detected: %s", fragment.FilePath),
				Tags:        rule.Tags,
			}
			return append(findings, finding)
		}
	} else if rule.Path != nil {
		// if path is set _and_ a regex is set, then we need to check both
		// so if the path does not match, then we should return early and not
		// consider the regex
		if !rule.Path.Match([]byte(fragment.FilePath)) {
			return findings
		}
	}

	// if path only rule, skip content checks
	if rule.Regex == nil {
		return findings
	}

	// If flag configure and raw data size bigger then the flag
	if d.MaxTargetMegaBytes > 0 {
		rawLength := len(fragment.Raw) / 1000000
		if rawLength > d.MaxTargetMegaBytes {
			log.Debug().Msgf("skipping file: %s scan due to size: %d", fragment.FilePath, rawLength)
			return findings
		}
	}

	matchIndices := rule.Regex.FindAllStringIndex(fragment.Raw, -1)
	for _, matchIndex := range matchIndices {
		// extract secret from match (trim newlines a multi-line match may include)
		secret := strings.Trim(fragment.Raw[matchIndex[0]:matchIndex[1]], "\n")

		// determine location of match. Note that the location
		// in the finding will be the line/column numbers of the _match_
		// not the _secret_, which will be different if the secretGroup
		// value is set for this rule
		loc := location(fragment, matchIndex)

		// a match can extend past the computed end-of-line index;
		// widen the Line slice below to cover the full match
		if matchIndex[1] > loc.endLineIndex {
			loc.endLineIndex = matchIndex[1]
		}

		finding := report.Finding{
			Description: rule.Description,
			File:        fragment.FilePath,
			SymlinkFile: fragment.SymlinkFile,
			RuleID:      rule.RuleID,
			StartLine:   loc.startLine,
			EndLine:     loc.endLine,
			StartColumn: loc.startColumn,
			EndColumn:   loc.endColumn,
			Secret:      secret,
			Match:       secret,
			Tags:        rule.Tags,
			Line:        fragment.Raw[loc.startLineIndex:loc.endLineIndex],
		}

		// honor an inline `gitleaks:allow` marker anywhere on the matched line(s)
		if strings.Contains(fragment.Raw[loc.startLineIndex:loc.endLineIndex],
			gitleaksAllowSignature) {
			continue
		}

		// extract secret from secret group if set
		if rule.SecretGroup != 0 {
			groups := rule.Regex.FindStringSubmatch(secret)
			if len(groups) <= rule.SecretGroup || len(groups) == 0 {
				// Config validation should prevent this
				continue
			}
			secret = groups[rule.SecretGroup]
			finding.Secret = secret
		}

		// check if the regexTarget is defined in the allowlist "regexes" entry
		allowlistTarget := finding.Secret
		switch rule.Allowlist.RegexTarget {
		case "match":
			allowlistTarget = finding.Match
		case "line":
			allowlistTarget = finding.Line
		}

		// the global allowlist may target a different part of the finding
		globalAllowlistTarget := finding.Secret
		switch d.Config.Allowlist.RegexTarget {
		case "match":
			globalAllowlistTarget = finding.Match
		case "line":
			globalAllowlistTarget = finding.Line
		}
		if rule.Allowlist.RegexAllowed(allowlistTarget) ||
			d.Config.Allowlist.RegexAllowed(globalAllowlistTarget) {
			continue
		}

		// check if the secret is in the list of stopwords
		if rule.Allowlist.ContainsStopWord(finding.Secret) ||
			d.Config.Allowlist.ContainsStopWord(finding.Secret) {
			continue
		}

		// check entropy
		entropy := shannonEntropy(finding.Secret)
		finding.Entropy = float32(entropy)
		if rule.Entropy != 0.0 {
			if entropy <= rule.Entropy {
				// entropy is too low, skip this finding
				continue
			}
			// NOTE: this is a goofy hack to get around the fact there golang's regex engine
			// does not support positive lookaheads. Ideally we would want to add a
			// restriction on generic rules regex that requires the secret match group
			// contains both numbers and alphabetical characters, not just alphabetical characters.
			// What this bit of code does is check if the ruleid is prepended with "generic" and enforces the
			// secret contains both digits and alphabetical characters.
			// TODO: this should be replaced with stop words
			if strings.HasPrefix(rule.RuleID, "generic") {
				if !containsDigit(secret) {
					continue
				}
			}
		}

		findings = append(findings, finding)
	}
	return findings
}
|
||||
|
||||
// DetectGit runs a git-based scan. It obtains a *gitdiff.File channel whose
// contents are generated from `git log -p ...` (DetectType) or `git diff`
// (ProtectType / ProtectStagedType), scans each patch concurrently (up to 4
// workers), and returns the accumulated findings.
func (d *Detector) DetectGit(source string, logOpts string, gitScanType GitScanType) ([]report.Finding, error) {
	var (
		gitdiffFiles <-chan *gitdiff.File
		err          error
	)
	// choose the git command that produces the patch stream
	switch gitScanType {
	case DetectType:
		gitdiffFiles, err = git.GitLog(source, logOpts)
		if err != nil {
			return d.findings, err
		}
	case ProtectType:
		gitdiffFiles, err = git.GitDiff(source, false)
		if err != nil {
			return d.findings, err
		}
	case ProtectStagedType:
		gitdiffFiles, err = git.GitDiff(source, true)
		if err != nil {
			return d.findings, err
		}
	}

	// bounded worker group: at most 4 patches scanned concurrently
	s := semgroup.NewGroup(context.Background(), 4)

	for gitdiffFile := range gitdiffFiles {
		// capture the loop variable for the closure below
		gitdiffFile := gitdiffFile

		// skip binary files
		if gitdiffFile.IsBinary || gitdiffFile.IsDelete {
			continue
		}

		// Check if commit is allowed
		commitSHA := ""
		if gitdiffFile.PatchHeader != nil {
			commitSHA = gitdiffFile.PatchHeader.SHA
			if d.Config.Allowlist.CommitAllowed(gitdiffFile.PatchHeader.SHA) {
				continue
			}
		}
		// recorded before spawning the worker, on the single driving goroutine
		d.addCommit(commitSHA)

		s.Go(func() error {
			for _, textFragment := range gitdiffFile.TextFragments {
				if textFragment == nil {
					return nil
				}

				// only added lines are scanned (gitdiff.OpAdd)
				fragment := Fragment{
					Raw:       textFragment.Raw(gitdiff.OpAdd),
					CommitSHA: commitSHA,
					FilePath:  gitdiffFile.NewName,
				}

				for _, finding := range d.Detect(fragment) {
					// augment with commit metadata and patch line offsets
					d.addFinding(augmentGitFinding(finding, textFragment, gitdiffFile))
				}
			}
			return nil
		})
	}

	if err := s.Wait(); err != nil {
		return d.findings, err
	}
	log.Info().Msgf("%d commits scanned.", len(d.commitMap))
	log.Debug().Msg("Note: this number might be smaller than expected due to commits with no additions")
	if git.ErrEncountered {
		return d.findings, fmt.Errorf("%s", "git error encountered, see logs")
	}
	return d.findings, nil
}
|
||||
|
||||
// scanTarget is a single file queued for scanning by DetectFiles.
type scanTarget struct {
	// Path is the path read from disk (the resolved target when the
	// entry came from a followed symlink).
	Path string
	// Symlink is the original symlink path; empty for regular files.
	Symlink string
}
|
||||
|
||||
// DetectFiles accepts a path to a source directory or file and begins a scan of the
|
||||
// file or directory.
|
||||
func (d *Detector) DetectFiles(source string) ([]report.Finding, error) {
|
||||
s := semgroup.NewGroup(context.Background(), 4)
|
||||
paths := make(chan scanTarget)
|
||||
s.Go(func() error {
|
||||
defer close(paths)
|
||||
return filepath.Walk(source,
|
||||
func(path string, fInfo os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if fInfo.Name() == ".git" && fInfo.IsDir() {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
if fInfo.Size() == 0 {
|
||||
return nil
|
||||
}
|
||||
if fInfo.Mode().IsRegular() {
|
||||
paths <- scanTarget{
|
||||
Path: path,
|
||||
Symlink: "",
|
||||
}
|
||||
}
|
||||
if fInfo.Mode().Type() == fs.ModeSymlink && d.FollowSymlinks {
|
||||
realPath, err := filepath.EvalSymlinks(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
realPathFileInfo, _ := os.Stat(realPath)
|
||||
if realPathFileInfo.IsDir() {
|
||||
log.Debug().Msgf("found symlinked directory: %s -> %s [skipping]", path, realPath)
|
||||
return nil
|
||||
}
|
||||
paths <- scanTarget{
|
||||
Path: realPath,
|
||||
Symlink: path,
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
})
|
||||
for pa := range paths {
|
||||
p := pa
|
||||
s.Go(func() error {
|
||||
b, err := os.ReadFile(p.Path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
mimetype, err := filetype.Match(b)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if mimetype.MIME.Type == "application" {
|
||||
return nil // skip binary files
|
||||
}
|
||||
|
||||
fragment := Fragment{
|
||||
Raw: string(b),
|
||||
FilePath: p.Path,
|
||||
}
|
||||
if p.Symlink != "" {
|
||||
fragment.SymlinkFile = p.Symlink
|
||||
}
|
||||
for _, finding := range d.Detect(fragment) {
|
||||
// need to add 1 since line counting starts at 1
|
||||
finding.EndLine++
|
||||
finding.StartLine++
|
||||
d.addFinding(finding)
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
if err := s.Wait(); err != nil {
|
||||
return d.findings, err
|
||||
}
|
||||
|
||||
return d.findings, nil
|
||||
}
|
||||
|
||||
// DetectReader accepts an io.Reader and a buffer size for the reader in KB
|
||||
func (d *Detector) DetectReader(r io.Reader, bufSize int) ([]report.Finding, error) {
|
||||
reader := bufio.NewReader(r)
|
||||
buf := make([]byte, 0, 1000*bufSize)
|
||||
findings := []report.Finding{}
|
||||
|
||||
for {
|
||||
n, err := reader.Read(buf[:cap(buf)])
|
||||
buf = buf[:n]
|
||||
if err != nil {
|
||||
if err != io.EOF {
|
||||
return findings, err
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
fragment := Fragment{
|
||||
Raw: string(buf),
|
||||
}
|
||||
for _, finding := range d.Detect(fragment) {
|
||||
findings = append(findings, finding)
|
||||
if d.Verbose {
|
||||
printFinding(finding, d.NoColor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return findings, nil
|
||||
}
|
||||
|
||||
// Detect scans the given fragment and returns a list of findings
|
||||
func (d *Detector) Detect(fragment Fragment) []report.Finding {
|
||||
var findings []report.Finding
|
||||
|
||||
// initiate fragment keywords
|
||||
fragment.keywords = make(map[string]bool)
|
||||
|
||||
// check if filepath is allowed
|
||||
if fragment.FilePath != "" && (d.Config.Allowlist.PathAllowed(fragment.FilePath) ||
|
||||
fragment.FilePath == d.Config.Path || (d.baselinePath != "" && fragment.FilePath == d.baselinePath)) {
|
||||
return findings
|
||||
}
|
||||
|
||||
// add newline indices for location calculation in detectRule
|
||||
fragment.newlineIndices = regexp.MustCompile("\n").FindAllStringIndex(fragment.Raw, -1)
|
||||
|
||||
// build keyword map for prefiltering rules
|
||||
normalizedRaw := strings.ToLower(fragment.Raw)
|
||||
matches := d.prefilter.FindAll(normalizedRaw)
|
||||
for _, m := range matches {
|
||||
fragment.keywords[normalizedRaw[m.Start():m.End()]] = true
|
||||
}
|
||||
|
||||
for _, rule := range d.Config.Rules {
|
||||
if len(rule.Keywords) == 0 {
|
||||
// if not keywords are associated with the rule always scan the
|
||||
// fragment using the rule
|
||||
findings = append(findings, d.detectRule(fragment, rule)...)
|
||||
continue
|
||||
}
|
||||
fragmentContainsKeyword := false
|
||||
// check if keywords are in the fragment
|
||||
for _, k := range rule.Keywords {
|
||||
if _, ok := fragment.keywords[strings.ToLower(k)]; ok {
|
||||
fragmentContainsKeyword = true
|
||||
}
|
||||
}
|
||||
if fragmentContainsKeyword {
|
||||
findings = append(findings, d.detectRule(fragment, rule)...)
|
||||
}
|
||||
}
|
||||
return filter(findings, d.Redact)
|
||||
}
|
||||
|
||||
// addFinding synchronously adds a finding to the findings slice
|
||||
func (d *Detector) addFinding(finding report.Finding) {
|
||||
if finding.Commit == "" {
|
||||
finding.Fingerprint = fmt.Sprintf("%s:%s:%d", finding.File, finding.RuleID, finding.StartLine)
|
||||
} else {
|
||||
finding.Fingerprint = fmt.Sprintf("%s:%s:%s:%d", finding.Commit, finding.File, finding.RuleID, finding.StartLine)
|
||||
}
|
||||
// check if we should ignore this finding
|
||||
if _, ok := d.gitleaksIgnore[finding.Fingerprint]; ok {
|
||||
log.Debug().Msgf("ignoring finding with Fingerprint %s",
|
||||
finding.Fingerprint)
|
||||
return
|
||||
}
|
||||
|
||||
if d.baseline != nil && !IsNew(finding, d.baseline) {
|
||||
log.Debug().Msgf("baseline duplicate -- ignoring finding with Fingerprint %s", finding.Fingerprint)
|
||||
return
|
||||
}
|
||||
|
||||
d.findingMutex.Lock()
|
||||
d.findings = append(d.findings, finding)
|
||||
if d.Verbose {
|
||||
printFinding(finding, d.NoColor)
|
||||
}
|
||||
d.findingMutex.Unlock()
|
||||
}
|
||||
|
||||
// addCommit synchronously adds a commit to the commit slice.
// The unguarded map write is safe because DetectGit calls this from its
// single driving goroutine, before spawning the scan worker for the patch.
func (d *Detector) addCommit(commit string) {
	d.commitMap[commit] = true
}
|
@ -0,0 +1,754 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package detect
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/Infisical/infisical-merge/config"
|
||||
"github.com/Infisical/infisical-merge/report"
|
||||
)
|
||||
|
||||
// Test fixture locations, relative to this package directory.
const configPath = "../testdata/config/"
const repoBasePath = "../testdata/repos/"
|
||||
// TestDetect exercises Detector.Detect against in-memory fragments using the
// TOML fixture configs under testdata/config.
func TestDetect(t *testing.T) {
	tests := []struct {
		cfgName      string
		baselinePath string
		fragment     Fragment
		// NOTE: for expected findings, all line numbers will be 0
		// because line deltas are added _after_ the finding is created.
		// I.e, if the finding is from a --no-git file, the line number will be
		// increase by 1 in DetectFromFiles(). If the finding is from git,
		// the line number will be increased by the patch delta.
		expectedFindings []report.Finding
		wantError        error
	}{
		// inline gitleaks:allow marker suppresses the match
		{
			cfgName: "simple",
			fragment: Fragment{
				Raw:      `awsToken := \"AKIALALEMEL33243OKIA\ // gitleaks:allow"`,
				FilePath: "tmp.go",
			},
			expectedFindings: []report.Finding{},
		},
		// gitleaks:allow on the matched (multi-line) span still suppresses
		{
			cfgName: "simple",
			fragment: Fragment{
				Raw: `awsToken := \

\"AKIALALEMEL33243OKIA\ // gitleaks:allow"

`,
				FilePath: "tmp.go",
			},
			expectedFindings: []report.Finding{},
		},
		// gitleaks:allow on a DIFFERENT line does not suppress the match
		{
			cfgName: "simple",
			fragment: Fragment{
				Raw: `awsToken := \"AKIALALEMEL33243OKIA\"

// gitleaks:allow"

`,
				FilePath: "tmp.go",
			},
			expectedFindings: []report.Finding{
				{
					Description: "AWS Access Key",
					Secret:      "AKIALALEMEL33243OKIA",
					Match:       "AKIALALEMEL33243OKIA",
					File:        "tmp.go",
					Line:        `awsToken := \"AKIALALEMEL33243OKIA\"`,
					RuleID:      "aws-access-key",
					Tags:        []string{"key", "AWS"},
					StartLine:   0,
					EndLine:     0,
					StartColumn: 15,
					EndColumn:   34,
					Entropy:     3.1464393,
				},
			},
		},
		// escaped character group in the rule regex
		{
			cfgName: "escaped_character_group",
			fragment: Fragment{
				Raw:      `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
				FilePath: "tmp.go",
			},
			expectedFindings: []report.Finding{
				{
					Description: "PyPI upload token",
					Secret:      "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
					Match:       "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
					Line:        `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
					File:        "tmp.go",
					RuleID:      "pypi-upload-token",
					Tags:        []string{"key", "pypi"},
					StartLine:   0,
					EndLine:     0,
					StartColumn: 1,
					EndColumn:   86,
					Entropy:     1.9606875,
				},
			},
		},
		// plain AWS key, no allow marker -> reported
		{
			cfgName: "simple",
			fragment: Fragment{
				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
				FilePath: "tmp.go",
			},
			expectedFindings: []report.Finding{
				{
					Description: "AWS Access Key",
					Secret:      "AKIALALEMEL33243OLIA",
					Match:       "AKIALALEMEL33243OLIA",
					Line:        `awsToken := \"AKIALALEMEL33243OLIA\"`,
					File:        "tmp.go",
					RuleID:      "aws-access-key",
					Tags:        []string{"key", "AWS"},
					StartLine:   0,
					EndLine:     0,
					StartColumn: 15,
					EndColumn:   34,
					Entropy:     3.0841837,
				},
			},
		},
		// secret group extraction (Secret is a subgroup of Match)
		{
			cfgName: "simple",
			fragment: Fragment{
				Raw:      `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
				FilePath: "tmp.sh",
			},
			expectedFindings: []report.Finding{
				{
					Description: "Sidekiq Secret",
					Match:       "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;",
					Secret:      "cafebabe:deadbeef",
					Line:        `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
					File:        "tmp.sh",
					RuleID:      "sidekiq-secret",
					Tags:        []string{},
					Entropy:     2.6098502,
					StartLine:   0,
					EndLine:     0,
					StartColumn: 8,
					EndColumn:   60,
				},
			},
		},
		// match embedded mid-line between other commands
		{
			cfgName: "simple",
			fragment: Fragment{
				Raw:      `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
				FilePath: "tmp.sh",
			},
			expectedFindings: []report.Finding{
				{
					Description: "Sidekiq Secret",
					Match:       "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=\"cafebabe:deadbeef\"",
					Secret:      "cafebabe:deadbeef",
					File:        "tmp.sh",
					Line:        `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
					RuleID:      "sidekiq-secret",
					Tags:        []string{},
					Entropy:     2.6098502,
					StartLine:   0,
					EndLine:     0,
					StartColumn: 21,
					EndColumn:   74,
				},
			},
		},
		// credentials embedded in a URL
		{
			cfgName: "simple",
			fragment: Fragment{
				Raw:      `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
				FilePath: "tmp.sh",
			},
			expectedFindings: []report.Finding{
				{
					Description: "Sidekiq Sensitive URL",
					Match:       "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:",
					Secret:      "cafeb4b3:d3adb33f",
					File:        "tmp.sh",
					Line:        `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
					RuleID:      "sidekiq-sensitive-url",
					Tags:        []string{},
					Entropy:     2.984234,
					StartLine:   0,
					EndLine:     0,
					StartColumn: 8,
					EndColumn:   58,
				},
			},
		},
		// rule-level regex allowlist suppresses the finding
		{
			cfgName: "allow_aws_re",
			fragment: Fragment{
				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
				FilePath: "tmp.go",
			},
			expectedFindings: []report.Finding{},
		},
		// rule-level path allowlist suppresses the finding
		{
			cfgName: "allow_path",
			fragment: Fragment{
				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
				FilePath: "tmp.go",
			},
			expectedFindings: []report.Finding{},
		},
		// rule-level commit allowlist suppresses the finding
		{
			cfgName: "allow_commit",
			fragment: Fragment{
				Raw:       `awsToken := \"AKIALALEMEL33243OLIA\"`,
				FilePath:  "tmp.go",
				CommitSHA: "allowthiscommit",
			},
			expectedFindings: []report.Finding{},
		},
		// entropy measured on the configured secret group
		{
			cfgName: "entropy_group",
			fragment: Fragment{
				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
				FilePath: "tmp.go",
			},
			expectedFindings: []report.Finding{
				{
					Description: "Discord API key",
					Match:       "Discord_Public_Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
					Secret:      "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
					Line:        `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
					File:        "tmp.go",
					RuleID:      "discord-api-key",
					Tags:        []string{},
					Entropy:     3.7906237,
					StartLine:   0,
					EndLine:     0,
					StartColumn: 7,
					EndColumn:   93,
				},
			},
		},
		// path-restricted generic rule: .go file does not match
		{
			cfgName: "generic_with_py_path",
			fragment: Fragment{
				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
				FilePath: "tmp.go",
			},
			expectedFindings: []report.Finding{},
		},
		// path-restricted generic rule: .py file matches
		{
			cfgName: "generic_with_py_path",
			fragment: Fragment{
				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
				FilePath: "tmp.py",
			},
			expectedFindings: []report.Finding{
				{
					Description: "Generic API Key",
					Match:       "Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
					Secret:      "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
					Line:        `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
					File:        "tmp.py",
					RuleID:      "generic-api-key",
					Tags:        []string{},
					Entropy:     3.7906237,
					StartLine:   0,
					EndLine:     0,
					StartColumn: 22,
					EndColumn:   93,
				},
			},
		},
		// path-only rule: the file path itself is the finding
		{
			cfgName: "path_only",
			fragment: Fragment{
				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
				FilePath: "tmp.py",
			},
			expectedFindings: []report.Finding{
				{
					Description: "Python Files",
					Match:       "file detected: tmp.py",
					File:        "tmp.py",
					RuleID:      "python-files-only",
					Tags:        []string{},
				},
			},
		},
		// invalid secret group index -> Translate() is expected to error
		{
			cfgName: "bad_entropy_group",
			fragment: Fragment{
				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
				FilePath: "tmp.go",
			},
			expectedFindings: []report.Finding{},
			wantError:        fmt.Errorf("Discord API key invalid regex secret group 5, max regex secret group 3"),
		},
		// scanning the config file itself is skipped (FilePath == cfg.Path)
		{
			cfgName: "simple",
			fragment: Fragment{
				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
				FilePath: filepath.Join(configPath, "simple.toml"),
			},
			expectedFindings: []report.Finding{},
		},
		// global (config-level) regex allowlist suppresses the finding
		{
			cfgName: "allow_global_aws_re",
			fragment: Fragment{
				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
				FilePath: "tmp.go",
			},
			expectedFindings: []report.Finding{},
		},
		// global allowlist stopword ("load") suppresses the generic match
		{
			cfgName: "generic_with_py_path",
			fragment: Fragment{
				Raw:      `const Discord_Public_Key = "load2523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
				FilePath: "tmp.py",
			},
			expectedFindings: []report.Finding{},
		},
		// the baseline file itself is never scanned
		{
			cfgName:      "path_only",
			baselinePath: ".baseline.json",
			fragment: Fragment{
				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
				FilePath: ".baseline.json",
			},
			expectedFindings: []report.Finding{},
		},
	}

	for _, tt := range tests {
		viper.Reset()
		viper.AddConfigPath(configPath)
		viper.SetConfigName(tt.cfgName)
		viper.SetConfigType("toml")
		err := viper.ReadInConfig()
		if err != nil {
			t.Error(err)
		}

		var vc config.ViperConfig
		err = viper.Unmarshal(&vc)
		if err != nil {
			t.Error(err)
		}
		// NOTE(review): the Translate() error is only asserted when a test
		// case sets wantError; otherwise it is intentionally ignored here.
		cfg, err := vc.Translate()
		cfg.Path = filepath.Join(configPath, tt.cfgName+".toml")
		if tt.wantError != nil {
			if err == nil {
				t.Errorf("expected error")
			}
			assert.Equal(t, tt.wantError, err)
		}
		d := NewDetector(cfg)
		d.baselinePath = tt.baselinePath

		findings := d.Detect(tt.fragment)
		assert.ElementsMatch(t, tt.expectedFindings, findings)
	}
}
|
||||
|
||||
// TestFromGit tests the FromGit function
|
||||
func TestFromGit(t *testing.T) {
|
||||
tests := []struct {
|
||||
cfgName string
|
||||
source string
|
||||
logOpts string
|
||||
expectedFindings []report.Finding
|
||||
}{
|
||||
{
|
||||
source: filepath.Join(repoBasePath, "small"),
|
||||
cfgName: "simple",
|
||||
expectedFindings: []report.Finding{
|
||||
{
|
||||
Description: "AWS Access Key",
|
||||
StartLine: 20,
|
||||
EndLine: 20,
|
||||
StartColumn: 19,
|
||||
EndColumn: 38,
|
||||
Line: "\n awsToken := \"AKIALALEMEL33243OLIA\"",
|
||||
Secret: "AKIALALEMEL33243OLIA",
|
||||
Match: "AKIALALEMEL33243OLIA",
|
||||
File: "main.go",
|
||||
Date: "2021-11-02T23:37:53Z",
|
||||
Commit: "1b6da43b82b22e4eaa10bcf8ee591e91abbfc587",
|
||||
Author: "Zachary Rice",
|
||||
Email: "zricer@protonmail.com",
|
||||
Message: "Accidentally add a secret",
|
||||
RuleID: "aws-access-key",
|
||||
Tags: []string{"key", "AWS"},
|
||||
Entropy: 3.0841837,
|
||||
Fingerprint: "1b6da43b82b22e4eaa10bcf8ee591e91abbfc587:main.go:aws-access-key:20",
|
||||
},
|
||||
{
|
||||
Description: "AWS Access Key",
|
||||
StartLine: 9,
|
||||
EndLine: 9,
|
||||
StartColumn: 17,
|
||||
EndColumn: 36,
|
||||
Secret: "AKIALALEMEL33243OLIA",
|
||||
Match: "AKIALALEMEL33243OLIA",
|
||||
Line: "\n\taws_token := \"AKIALALEMEL33243OLIA\"",
|
||||
File: "foo/foo.go",
|
||||
Date: "2021-11-02T23:48:06Z",
|
||||
Commit: "491504d5a31946ce75e22554cc34203d8e5ff3ca",
|
||||
Author: "Zach Rice",
|
||||
Email: "zricer@protonmail.com",
|
||||
Message: "adding foo package with secret",
|
||||
RuleID: "aws-access-key",
|
||||
Tags: []string{"key", "AWS"},
|
||||
Entropy: 3.0841837,
|
||||
Fingerprint: "491504d5a31946ce75e22554cc34203d8e5ff3ca:foo/foo.go:aws-access-key:9",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
source: filepath.Join(repoBasePath, "small"),
|
||||
logOpts: "--all foo...",
|
||||
cfgName: "simple",
|
||||
expectedFindings: []report.Finding{
|
||||
{
|
||||
Description: "AWS Access Key",
|
||||
StartLine: 9,
|
||||
EndLine: 9,
|
||||
StartColumn: 17,
|
||||
EndColumn: 36,
|
||||
Secret: "AKIALALEMEL33243OLIA",
|
||||
Line: "\n\taws_token := \"AKIALALEMEL33243OLIA\"",
|
||||
Match: "AKIALALEMEL33243OLIA",
|
||||
Date: "2021-11-02T23:48:06Z",
|
||||
File: "foo/foo.go",
|
||||
Commit: "491504d5a31946ce75e22554cc34203d8e5ff3ca",
|
||||
Author: "Zach Rice",
|
||||
Email: "zricer@protonmail.com",
|
||||
Message: "adding foo package with secret",
|
||||
RuleID: "aws-access-key",
|
||||
Tags: []string{"key", "AWS"},
|
||||
Entropy: 3.0841837,
|
||||
Fingerprint: "491504d5a31946ce75e22554cc34203d8e5ff3ca:foo/foo.go:aws-access-key:9",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
err := moveDotGit("dotGit", ".git")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer func() {
|
||||
if err := moveDotGit(".git", "dotGit"); err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
}()
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
viper.AddConfigPath(configPath)
|
||||
viper.SetConfigName("simple")
|
||||
viper.SetConfigType("toml")
|
||||
err = viper.ReadInConfig()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
var vc config.ViperConfig
|
||||
err = viper.Unmarshal(&vc)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
cfg, err := vc.Translate()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
detector := NewDetector(cfg)
|
||||
findings, err := detector.DetectGit(tt.source, tt.logOpts, DetectType)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
for _, f := range findings {
|
||||
f.Match = "" // remove lines cause copying and pasting them has some wack formatting
|
||||
}
|
||||
assert.ElementsMatch(t, tt.expectedFindings, findings)
|
||||
}
|
||||
}
|
||||
func TestFromGitStaged(t *testing.T) {
|
||||
tests := []struct {
|
||||
cfgName string
|
||||
source string
|
||||
logOpts string
|
||||
expectedFindings []report.Finding
|
||||
}{
|
||||
{
|
||||
source: filepath.Join(repoBasePath, "staged"),
|
||||
cfgName: "simple",
|
||||
expectedFindings: []report.Finding{
|
||||
{
|
||||
Description: "AWS Access Key",
|
||||
StartLine: 7,
|
||||
EndLine: 7,
|
||||
StartColumn: 18,
|
||||
EndColumn: 37,
|
||||
Line: "\n\taws_token2 := \"AKIALALEMEL33243OLIA\" // this one is not",
|
||||
Match: "AKIALALEMEL33243OLIA",
|
||||
Secret: "AKIALALEMEL33243OLIA",
|
||||
File: "api/api.go",
|
||||
SymlinkFile: "",
|
||||
Commit: "",
|
||||
Entropy: 3.0841837,
|
||||
Author: "",
|
||||
Email: "",
|
||||
Date: "0001-01-01T00:00:00Z",
|
||||
Message: "",
|
||||
Tags: []string{
|
||||
"key",
|
||||
"AWS",
|
||||
},
|
||||
RuleID: "aws-access-key",
|
||||
Fingerprint: "api/api.go:aws-access-key:7",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
err := moveDotGit("dotGit", ".git")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer func() {
|
||||
if err := moveDotGit(".git", "dotGit"); err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
}()
|
||||
|
||||
for _, tt := range tests {
|
||||
|
||||
viper.AddConfigPath(configPath)
|
||||
viper.SetConfigName("simple")
|
||||
viper.SetConfigType("toml")
|
||||
err = viper.ReadInConfig()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
var vc config.ViperConfig
|
||||
err = viper.Unmarshal(&vc)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
cfg, err := vc.Translate()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
detector := NewDetector(cfg)
|
||||
detector.AddGitleaksIgnore(filepath.Join(tt.source, ".gitleaksignore"))
|
||||
findings, err := detector.DetectGit(tt.source, tt.logOpts, ProtectStagedType)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
for _, f := range findings {
|
||||
f.Match = "" // remove lines cause copying and pasting them has some wack formatting
|
||||
}
|
||||
assert.ElementsMatch(t, tt.expectedFindings, findings)
|
||||
}
|
||||
}
|
||||
|
||||
// TestFromFiles tests the FromFiles function
|
||||
func TestFromFiles(t *testing.T) {
|
||||
tests := []struct {
|
||||
cfgName string
|
||||
source string
|
||||
expectedFindings []report.Finding
|
||||
}{
|
||||
{
|
||||
source: filepath.Join(repoBasePath, "nogit"),
|
||||
cfgName: "simple",
|
||||
expectedFindings: []report.Finding{
|
||||
{
|
||||
Description: "AWS Access Key",
|
||||
StartLine: 20,
|
||||
EndLine: 20,
|
||||
StartColumn: 16,
|
||||
EndColumn: 35,
|
||||
Match: "AKIALALEMEL33243OLIA",
|
||||
Secret: "AKIALALEMEL33243OLIA",
|
||||
Line: "\n\tawsToken := \"AKIALALEMEL33243OLIA\"",
|
||||
File: "../testdata/repos/nogit/main.go",
|
||||
SymlinkFile: "",
|
||||
RuleID: "aws-access-key",
|
||||
Tags: []string{"key", "AWS"},
|
||||
Entropy: 3.0841837,
|
||||
Fingerprint: "../testdata/repos/nogit/main.go:aws-access-key:20",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
source: filepath.Join(repoBasePath, "nogit", "main.go"),
|
||||
cfgName: "simple",
|
||||
expectedFindings: []report.Finding{
|
||||
{
|
||||
Description: "AWS Access Key",
|
||||
StartLine: 20,
|
||||
EndLine: 20,
|
||||
StartColumn: 16,
|
||||
EndColumn: 35,
|
||||
Match: "AKIALALEMEL33243OLIA",
|
||||
Secret: "AKIALALEMEL33243OLIA",
|
||||
Line: "\n\tawsToken := \"AKIALALEMEL33243OLIA\"",
|
||||
File: "../testdata/repos/nogit/main.go",
|
||||
RuleID: "aws-access-key",
|
||||
Tags: []string{"key", "AWS"},
|
||||
Entropy: 3.0841837,
|
||||
Fingerprint: "../testdata/repos/nogit/main.go:aws-access-key:20",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
viper.AddConfigPath(configPath)
|
||||
viper.SetConfigName("simple")
|
||||
viper.SetConfigType("toml")
|
||||
err := viper.ReadInConfig()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
var vc config.ViperConfig
|
||||
err = viper.Unmarshal(&vc)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
cfg, _ := vc.Translate()
|
||||
detector := NewDetector(cfg)
|
||||
detector.FollowSymlinks = true
|
||||
findings, err := detector.DetectFiles(tt.source)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
assert.ElementsMatch(t, tt.expectedFindings, findings)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDetectWithSymlinks(t *testing.T) {
|
||||
tests := []struct {
|
||||
cfgName string
|
||||
source string
|
||||
expectedFindings []report.Finding
|
||||
}{
|
||||
{
|
||||
source: filepath.Join(repoBasePath, "symlinks/file_symlink"),
|
||||
cfgName: "simple",
|
||||
expectedFindings: []report.Finding{
|
||||
{
|
||||
Description: "Asymmetric Private Key",
|
||||
StartLine: 1,
|
||||
EndLine: 1,
|
||||
StartColumn: 1,
|
||||
EndColumn: 35,
|
||||
Match: "-----BEGIN OPENSSH PRIVATE KEY-----",
|
||||
Secret: "-----BEGIN OPENSSH PRIVATE KEY-----",
|
||||
Line: "-----BEGIN OPENSSH PRIVATE KEY-----",
|
||||
File: "../testdata/repos/symlinks/source_file/id_ed25519",
|
||||
SymlinkFile: "../testdata/repos/symlinks/file_symlink/symlinked_id_ed25519",
|
||||
RuleID: "apkey",
|
||||
Tags: []string{"key", "AsymmetricPrivateKey"},
|
||||
Entropy: 3.587164,
|
||||
Fingerprint: "../testdata/repos/symlinks/source_file/id_ed25519:apkey:1",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
viper.AddConfigPath(configPath)
|
||||
viper.SetConfigName("simple")
|
||||
viper.SetConfigType("toml")
|
||||
err := viper.ReadInConfig()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
var vc config.ViperConfig
|
||||
err = viper.Unmarshal(&vc)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
cfg, _ := vc.Translate()
|
||||
detector := NewDetector(cfg)
|
||||
detector.FollowSymlinks = true
|
||||
findings, err := detector.DetectFiles(tt.source)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
assert.ElementsMatch(t, tt.expectedFindings, findings)
|
||||
}
|
||||
}
|
||||
|
||||
func moveDotGit(from, to string) error {
|
||||
repoDirs, err := os.ReadDir("../testdata/repos")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, dir := range repoDirs {
|
||||
if to == ".git" {
|
||||
_, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), "dotGit"))
|
||||
if os.IsNotExist(err) {
|
||||
// dont want to delete the only copy of .git accidentally
|
||||
continue
|
||||
}
|
||||
os.RemoveAll(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), ".git"))
|
||||
}
|
||||
if !dir.IsDir() {
|
||||
continue
|
||||
}
|
||||
_, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from))
|
||||
if os.IsNotExist(err) {
|
||||
continue
|
||||
}
|
||||
|
||||
err = os.Rename(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from),
|
||||
fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), to))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
@ -0,0 +1,143 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package git
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"io"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gitleaks/go-gitdiff/gitdiff"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
var ErrEncountered bool
|
||||
|
||||
// GitLog returns a channel of gitdiff.File objects from the
|
||||
// git log -p command for the given source.
|
||||
func GitLog(source string, logOpts string) (<-chan *gitdiff.File, error) {
|
||||
sourceClean := filepath.Clean(source)
|
||||
var cmd *exec.Cmd
|
||||
if logOpts != "" {
|
||||
args := []string{"-C", sourceClean, "log", "-p", "-U0"}
|
||||
args = append(args, strings.Split(logOpts, " ")...)
|
||||
cmd = exec.Command("git", args...)
|
||||
} else {
|
||||
cmd = exec.Command("git", "-C", sourceClean, "log", "-p", "-U0",
|
||||
"--full-history", "--all")
|
||||
}
|
||||
|
||||
log.Debug().Msgf("executing: %s", cmd.String())
|
||||
|
||||
stdout, err := cmd.StdoutPipe()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
stderr, err := cmd.StderrPipe()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
go listenForStdErr(stderr)
|
||||
|
||||
if err := cmd.Start(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// HACK: to avoid https://github.com/zricethezav/gitleaks/issues/722
|
||||
time.Sleep(50 * time.Millisecond)
|
||||
|
||||
return gitdiff.Parse(cmd, stdout)
|
||||
}
|
||||
|
||||
// GitDiff returns a channel of gitdiff.File objects from
|
||||
// the git diff command for the given source.
|
||||
func GitDiff(source string, staged bool) (<-chan *gitdiff.File, error) {
|
||||
sourceClean := filepath.Clean(source)
|
||||
var cmd *exec.Cmd
|
||||
cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0", ".")
|
||||
if staged {
|
||||
cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0",
|
||||
"--staged", ".")
|
||||
}
|
||||
log.Debug().Msgf("executing: %s", cmd.String())
|
||||
|
||||
stdout, err := cmd.StdoutPipe()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
stderr, err := cmd.StderrPipe()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
go listenForStdErr(stderr)
|
||||
|
||||
if err := cmd.Start(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// HACK: to avoid https://github.com/zricethezav/gitleaks/issues/722
|
||||
time.Sleep(50 * time.Millisecond)
|
||||
|
||||
return gitdiff.Parse(cmd, stdout)
|
||||
}
|
||||
|
||||
// listenForStdErr listens for stderr output from git and prints it to stdout
|
||||
// then exits with exit code 1
|
||||
func listenForStdErr(stderr io.ReadCloser) {
|
||||
scanner := bufio.NewScanner(stderr)
|
||||
for scanner.Scan() {
|
||||
// if git throws one of the following errors:
|
||||
//
|
||||
// exhaustive rename detection was skipped due to too many files.
|
||||
// you may want to set your diff.renameLimit variable to at least
|
||||
// (some large number) and retry the command.
|
||||
//
|
||||
// inexact rename detection was skipped due to too many files.
|
||||
// you may want to set your diff.renameLimit variable to at least
|
||||
// (some large number) and retry the command.
|
||||
//
|
||||
// we skip exiting the program as git log -p/git diff will continue
|
||||
// to send data to stdout and finish executing. This next bit of
|
||||
// code prevents gitleaks from stopping mid scan if this error is
|
||||
// encountered
|
||||
if strings.Contains(scanner.Text(),
|
||||
"exhaustive rename detection was skipped") ||
|
||||
strings.Contains(scanner.Text(),
|
||||
"inexact rename detection was skipped") ||
|
||||
strings.Contains(scanner.Text(),
|
||||
"you may want to set your diff.renameLimit") {
|
||||
log.Warn().Msg(scanner.Text())
|
||||
} else {
|
||||
log.Error().Msgf("[git] %s", scanner.Text())
|
||||
|
||||
// asynchronously set this error flag to true so that we can
|
||||
// capture a log message and exit with a non-zero exit code
|
||||
// This value should get set before the `git` command exits so it's
|
||||
// safe-ish, although I know I know, bad practice.
|
||||
ErrEncountered = true
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,158 @@
|
||||
package git_test
|
||||
|
||||
// TODO: commenting out this test for now because it's flaky. Alternatives to consider to get this working:
|
||||
// -- use `git stash` instead of `restore()`
|
||||
|
||||
// const repoBasePath = "../../testdata/repos/"
|
||||
|
||||
// const expectPath = "../../testdata/expected/"
|
||||
|
||||
// func TestGitLog(t *testing.T) {
|
||||
// tests := []struct {
|
||||
// source string
|
||||
// logOpts string
|
||||
// expected string
|
||||
// }{
|
||||
// {
|
||||
// source: filepath.Join(repoBasePath, "small"),
|
||||
// expected: filepath.Join(expectPath, "git", "small.txt"),
|
||||
// },
|
||||
// {
|
||||
// source: filepath.Join(repoBasePath, "small"),
|
||||
// expected: filepath.Join(expectPath, "git", "small-branch-foo.txt"),
|
||||
// logOpts: "--all foo...",
|
||||
// },
|
||||
// }
|
||||
|
||||
// err := moveDotGit("dotGit", ".git")
|
||||
// if err != nil {
|
||||
// t.Fatal(err)
|
||||
// }
|
||||
// defer func() {
|
||||
// if err = moveDotGit(".git", "dotGit"); err != nil {
|
||||
// t.Fatal(err)
|
||||
// }
|
||||
// }()
|
||||
|
||||
// for _, tt := range tests {
|
||||
// files, err := git.GitLog(tt.source, tt.logOpts)
|
||||
// if err != nil {
|
||||
// t.Error(err)
|
||||
// }
|
||||
|
||||
// var diffSb strings.Builder
|
||||
// for f := range files {
|
||||
// for _, tf := range f.TextFragments {
|
||||
// diffSb.WriteString(tf.Raw(gitdiff.OpAdd))
|
||||
// }
|
||||
// }
|
||||
|
||||
// expectedBytes, err := os.ReadFile(tt.expected)
|
||||
// if err != nil {
|
||||
// t.Error(err)
|
||||
// }
|
||||
// expected := string(expectedBytes)
|
||||
// if expected != diffSb.String() {
|
||||
// // write string builder to .got file using os.Create
|
||||
// err = os.WriteFile(strings.Replace(tt.expected, ".txt", ".got.txt", 1), []byte(diffSb.String()), 0644)
|
||||
// if err != nil {
|
||||
// t.Error(err)
|
||||
// }
|
||||
// t.Error("expected: ", expected, "got: ", diffSb.String())
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// func TestGitDiff(t *testing.T) {
|
||||
// tests := []struct {
|
||||
// source string
|
||||
// expected string
|
||||
// additions string
|
||||
// target string
|
||||
// }{
|
||||
// {
|
||||
// source: filepath.Join(repoBasePath, "small"),
|
||||
// expected: "this line is added\nand another one",
|
||||
// additions: "this line is added\nand another one",
|
||||
// target: filepath.Join(repoBasePath, "small", "main.go"),
|
||||
// },
|
||||
// }
|
||||
|
||||
// err := moveDotGit("dotGit", ".git")
|
||||
// if err != nil {
|
||||
// t.Fatal(err)
|
||||
// }
|
||||
// defer func() {
|
||||
// if err = moveDotGit(".git", "dotGit"); err != nil {
|
||||
// t.Fatal(err)
|
||||
// }
|
||||
// }()
|
||||
|
||||
// for _, tt := range tests {
|
||||
// noChanges, err := os.ReadFile(tt.target)
|
||||
// if err != nil {
|
||||
// t.Error(err)
|
||||
// }
|
||||
// err = os.WriteFile(tt.target, []byte(tt.additions), 0644)
|
||||
// if err != nil {
|
||||
// restore(tt.target, noChanges, t)
|
||||
// t.Error(err)
|
||||
// }
|
||||
|
||||
// files, err := git.GitDiff(tt.source, false)
|
||||
// if err != nil {
|
||||
// restore(tt.target, noChanges, t)
|
||||
// t.Error(err)
|
||||
// }
|
||||
|
||||
// for f := range files {
|
||||
// sb := strings.Builder{}
|
||||
// for _, tf := range f.TextFragments {
|
||||
// sb.WriteString(tf.Raw(gitdiff.OpAdd))
|
||||
// }
|
||||
// if sb.String() != tt.expected {
|
||||
// restore(tt.target, noChanges, t)
|
||||
// t.Error("expected: ", tt.expected, "got: ", sb.String())
|
||||
// }
|
||||
// }
|
||||
// restore(tt.target, noChanges, t)
|
||||
// }
|
||||
// }
|
||||
|
||||
// func restore(path string, data []byte, t *testing.T) {
|
||||
// err := os.WriteFile(path, data, 0644)
|
||||
// if err != nil {
|
||||
// t.Fatal(err)
|
||||
// }
|
||||
// }
|
||||
|
||||
// func moveDotGit(from, to string) error {
|
||||
// repoDirs, err := os.ReadDir("../../testdata/repos")
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
// for _, dir := range repoDirs {
|
||||
// if to == ".git" {
|
||||
// _, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), "dotGit"))
|
||||
// if os.IsNotExist(err) {
|
||||
// // dont want to delete the only copy of .git accidentally
|
||||
// continue
|
||||
// }
|
||||
// os.RemoveAll(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), ".git"))
|
||||
// }
|
||||
// if !dir.IsDir() {
|
||||
// continue
|
||||
// }
|
||||
// _, err := os.Stat(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from))
|
||||
// if os.IsNotExist(err) {
|
||||
// continue
|
||||
// }
|
||||
|
||||
// err = os.Rename(fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), from),
|
||||
// fmt.Sprintf("%s/%s/%s", repoBasePath, dir.Name(), to))
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
// }
|
||||
// return nil
|
||||
// }
|
@ -0,0 +1,101 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package detect
|
||||
|
||||
// Location represents a location in a file
type Location struct {
	startLine      int // line on which the match starts (derived in location())
	endLine        int // line on which the match ends
	startColumn    int // 1-based column of the first matched byte within its line
	endColumn      int // column of the last matched byte within its line
	startLineIndex int // byte offset in the fragment where the start line begins
	endLineIndex   int // byte offset of the newline (or end of raw text) terminating the end line
}
|
||||
|
||||
// location maps a byte range (matchIndex = [start, end] offsets into
// fragment.Raw) to line/column positions using fragment.newlineIndices,
// the byte positions of the newline sequences in the raw text.
func location(fragment Fragment, matchIndex []int) Location {
	var (
		prevNewLine int // byte offset just past the previous newline, i.e. start of the current line
		location    Location
		lineSet     bool // set once the start offset has been matched to a line
		_lineNum    int  // last line index visited; used by the fallback below
	)

	start := matchIndex[0]
	end := matchIndex[1]

	// default startLineIndex to 0
	location.startLineIndex = 0

	// Fixes: https://github.com/zricethezav/gitleaks/issues/1037
	// When a fragment does NOT have any newlines, a default "newline"
	// will be counted to make the subsequent location calculation logic work
	// for fragments will no newlines.
	if len(fragment.newlineIndices) == 0 {
		fragment.newlineIndices = [][]int{
			{len(fragment.Raw), len(fragment.Raw) + 1},
		}
	}

	for lineNum, pair := range fragment.newlineIndices {
		_lineNum = lineNum
		newLineByteIndex := pair[0]
		// The match start lies on this line: record the start position and
		// provisionally the end position too (updated below if end lies later).
		if prevNewLine <= start && start < newLineByteIndex {
			lineSet = true
			location.startLine = lineNum
			location.endLine = lineNum
			location.startColumn = (start - prevNewLine) + 1 // +1 because counting starts at 1
			location.startLineIndex = prevNewLine
			location.endLineIndex = newLineByteIndex
		}
		// The match end lies on this line (possibly a later line than start).
		if prevNewLine < end && end <= newLineByteIndex {
			location.endLine = lineNum
			location.endColumn = (end - prevNewLine)
			location.endLineIndex = newLineByteIndex
		}
		prevNewLine = pair[0]
	}

	if !lineSet {
		// if lines never get set then that means the secret is most likely
		// on the last line of the diff output and the diff output does not have
		// a newline
		location.startColumn = (start - prevNewLine) + 1 // +1 because counting starts at 1
		location.endColumn = (end - prevNewLine)
		location.startLine = _lineNum + 1
		location.endLine = _lineNum + 1

		// search for new line byte index
		i := 0
		for end+i < len(fragment.Raw) {
			if fragment.Raw[end+i] == '\n' {
				break
			}
			if fragment.Raw[end+i] == '\r' {
				break
			}
			i++
		}
		location.endLineIndex = end + i
	}
	return location
}
|
@ -0,0 +1,82 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package detect
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestGetLocation tests the getLocation function.
|
||||
func TestGetLocation(t *testing.T) {
|
||||
tests := []struct {
|
||||
linePairs [][]int
|
||||
start int
|
||||
end int
|
||||
wantLocation Location
|
||||
}{
|
||||
{
|
||||
linePairs: [][]int{
|
||||
{0, 39},
|
||||
{40, 55},
|
||||
{56, 57},
|
||||
},
|
||||
start: 35,
|
||||
end: 38,
|
||||
wantLocation: Location{
|
||||
startLine: 1,
|
||||
startColumn: 36,
|
||||
endLine: 1,
|
||||
endColumn: 38,
|
||||
startLineIndex: 0,
|
||||
endLineIndex: 40,
|
||||
},
|
||||
},
|
||||
{
|
||||
linePairs: [][]int{
|
||||
{0, 39},
|
||||
{40, 55},
|
||||
{56, 57},
|
||||
},
|
||||
start: 40,
|
||||
end: 44,
|
||||
wantLocation: Location{
|
||||
startLine: 2,
|
||||
startColumn: 1,
|
||||
endLine: 2,
|
||||
endColumn: 4,
|
||||
startLineIndex: 40,
|
||||
endLineIndex: 56,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
loc := location(Fragment{newlineIndices: test.linePairs}, []int{test.start, test.end})
|
||||
if loc != test.wantLocation {
|
||||
t.Errorf("\nstartLine %d\nstartColumn: %d\nendLine: %d\nendColumn: %d\nstartLineIndex: %d\nendlineIndex %d",
|
||||
loc.startLine, loc.startColumn, loc.endLine, loc.endColumn, loc.startLineIndex, loc.endLineIndex)
|
||||
|
||||
t.Error("got", loc, "want", test.wantLocation)
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,211 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package detect
|
||||
|
||||
import (
|
||||
// "encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
|
||||
"github.com/Infisical/infisical-merge/report"
|
||||
|
||||
"github.com/gitleaks/go-gitdiff/gitdiff"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// augmentGitFinding updates the start and end line numbers of a finding to include the
|
||||
// delta from the git diff
|
||||
func augmentGitFinding(finding report.Finding, textFragment *gitdiff.TextFragment, f *gitdiff.File) report.Finding {
|
||||
if !strings.HasPrefix(finding.Match, "file detected") {
|
||||
finding.StartLine += int(textFragment.NewPosition)
|
||||
finding.EndLine += int(textFragment.NewPosition)
|
||||
}
|
||||
|
||||
if f.PatchHeader != nil {
|
||||
finding.Commit = f.PatchHeader.SHA
|
||||
finding.Message = f.PatchHeader.Message()
|
||||
if f.PatchHeader.Author != nil {
|
||||
finding.Author = f.PatchHeader.Author.Name
|
||||
finding.Email = f.PatchHeader.Author.Email
|
||||
}
|
||||
finding.Date = f.PatchHeader.AuthorDate.UTC().Format(time.RFC3339)
|
||||
}
|
||||
return finding
|
||||
}
|
||||
|
||||
// shannonEntropy calculates the entropy of data using the formula defined here:
// https://en.wiktionary.org/wiki/Shannon_entropy
// Another way to think about what this is doing is calculating the number of bits
// needed to on average encode the data. So, the higher the entropy, the more random the data, the
// more bits needed to encode that data.
func shannonEntropy(data string) (entropy float64) {
	if data == "" {
		return 0
	}

	// Tally how many times each rune occurs.
	frequencies := make(map[rune]int)
	for _, r := range data {
		frequencies[r]++
	}

	// Accumulate -p * log2(p) over the observed symbol probabilities.
	invLength := 1.0 / float64(len(data))
	for _, occurrences := range frequencies {
		p := float64(occurrences) * invLength
		entropy -= p * math.Log2(p)
	}

	return entropy
}
|
||||
|
||||
// filter will dedupe and redact findings
|
||||
func filter(findings []report.Finding, redact bool) []report.Finding {
|
||||
var retFindings []report.Finding
|
||||
for _, f := range findings {
|
||||
include := true
|
||||
if strings.Contains(strings.ToLower(f.RuleID), "generic") {
|
||||
for _, fPrime := range findings {
|
||||
if f.StartLine == fPrime.StartLine &&
|
||||
f.Commit == fPrime.Commit &&
|
||||
f.RuleID != fPrime.RuleID &&
|
||||
strings.Contains(fPrime.Secret, f.Secret) &&
|
||||
!strings.Contains(strings.ToLower(fPrime.RuleID), "generic") {
|
||||
|
||||
genericMatch := strings.Replace(f.Match, f.Secret, "REDACTED", -1)
|
||||
betterMatch := strings.Replace(fPrime.Match, fPrime.Secret, "REDACTED", -1)
|
||||
log.Trace().Msgf("skipping %s finding (%s), %s rule takes precendence (%s)", f.RuleID, genericMatch, fPrime.RuleID, betterMatch)
|
||||
include = false
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if redact {
|
||||
f.Redact()
|
||||
}
|
||||
if include {
|
||||
retFindings = append(retFindings, f)
|
||||
}
|
||||
}
|
||||
return retFindings
|
||||
}
|
||||
|
||||
// printFinding pretty-prints a single finding to stdout.
//
// Two layouts exist: a plain one (when noColor is set, when the match
// cannot be located inside the line, or for path-level "file detected:"
// matches) and a colorized one that highlights the match in yellow and
// the secret in bold italic orange via lipgloss. Key/value metadata rows
// (rule, entropy, file, commit info) follow; printing stops early when
// File or Commit is empty.
func printFinding(f report.Finding, noColor bool) {
	// trim all whitespace and tabs
	f.Line = strings.TrimSpace(f.Line)
	f.Secret = strings.TrimSpace(f.Secret)
	f.Match = strings.TrimSpace(f.Match)

	isFileMatch := strings.HasPrefix(f.Match, "file detected:")
	skipColor := noColor
	finding := ""
	var secret lipgloss.Style

	// Matches from filenames do not have a |line| or |secret|
	if !isFileMatch {
		matchInLineIDX := strings.Index(f.Line, f.Match)
		secretInMatchIdx := strings.Index(f.Match, f.Secret)
		// NOTE(review): if the trimmed secret no longer occurs in the
		// trimmed match, secretInMatchIdx is -1 and the f.Match slice
		// expressions below would panic — presumably the detector
		// guarantees containment; confirm.

		skipColor = false

		// Fall back to the plain layout when the match is not a
		// substring of the line (e.g. multi-line matches).
		if matchInLineIDX == -1 || noColor {
			skipColor = true
			matchInLineIDX = 0
		}

		// Show at most 20 characters of leading context, with an
		// ellipsis when truncated.
		start := f.Line[0:matchInLineIDX]
		startMatchIdx := 0
		if matchInLineIDX > 20 {
			startMatchIdx = matchInLineIDX - 20
			start = "..." + f.Line[startMatchIdx:matchInLineIDX]
		}

		// Yellow for the match text around the secret; bold italic
		// orange for the secret itself.
		matchBeginning := lipgloss.NewStyle().SetString(f.Match[0:secretInMatchIdx]).Foreground(lipgloss.Color("#f5d445"))
		secret = lipgloss.NewStyle().SetString(f.Secret).
			Bold(true).
			Italic(true).
			Foreground(lipgloss.Color("#f05c07"))
		matchEnd := lipgloss.NewStyle().SetString(f.Match[secretInMatchIdx+len(f.Secret):]).Foreground(lipgloss.Color("#f5d445"))

		// Clamp the trailing-context start to the last byte of the line.
		// NOTE(review): the clamp uses len(f.Line)-1, so when the match
		// runs to the end of the line the final character is printed by
		// both matchEnd and lineEnd, and an empty f.Line would panic on
		// the slice below — looks like an off-by-one; confirm against
		// upstream output before changing.
		lineEndIdx := matchInLineIDX + len(f.Match)
		if len(f.Line)-1 <= lineEndIdx {
			lineEndIdx = len(f.Line) - 1
		}

		lineEnd := f.Line[lineEndIdx:]

		// Very long secrets are elided to 100 characters.
		if len(f.Secret) > 100 {
			secret = lipgloss.NewStyle().SetString(f.Secret[0:100] + "...").
				Bold(true).
				Italic(true).
				Foreground(lipgloss.Color("#f05c07"))
		}
		// Show at most 20 characters of trailing context.
		if len(lineEnd) > 20 {
			lineEnd = lineEnd[0:20] + "..."
		}

		finding = fmt.Sprintf("%s%s%s%s%s\n", strings.TrimPrefix(strings.TrimLeft(start, " "), "\n"), matchBeginning, secret, matchEnd, lineEnd)
	}

	if skipColor || isFileMatch {
		fmt.Printf("%-12s %s\n", "Finding:", f.Match)
		fmt.Printf("%-12s %s\n", "Secret:", f.Secret)
	} else {
		fmt.Printf("%-12s %s", "Finding:", finding)
		fmt.Printf("%-12s %s\n", "Secret:", secret)
	}

	fmt.Printf("%-12s %s\n", "RuleID:", f.RuleID)
	fmt.Printf("%-12s %f\n", "Entropy:", f.Entropy)
	// No file context — presumably findings read from stdin; confirm.
	if f.File == "" {
		fmt.Println("")
		return
	}
	fmt.Printf("%-12s %s\n", "File:", f.File)
	fmt.Printf("%-12s %d\n", "Line:", f.StartLine)
	// No commit metadata — presumably non-git (directory) scans; confirm.
	if f.Commit == "" {
		fmt.Printf("%-12s %s\n", "Fingerprint:", f.Fingerprint)
		fmt.Println("")
		return
	}
	fmt.Printf("%-12s %s\n", "Commit:", f.Commit)
	fmt.Printf("%-12s %s\n", "Author:", f.Author)
	fmt.Printf("%-12s %s\n", "Email:", f.Email)
	fmt.Printf("%-12s %s\n", "Date:", f.Date)
	fmt.Printf("%-12s %s\n", "Fingerprint:", f.Fingerprint)
	fmt.Println("")
}
|
||||
|
||||
// containsDigit reports whether s contains at least one ASCII digit.
//
// Fix: the original switch listed only '1' through '9', so a string whose
// only digit was zero (e.g. "v0") was reported as containing no digit.
// The range check below covers '0'-'9'. Non-ASCII digits are deliberately
// not matched, preserving the original ASCII-only behavior.
func containsDigit(s string) bool {
	for _, c := range s {
		if c >= '0' && c <= '9' {
			return true
		}
	}
	return false
}
|
@ -1,27 +0,0 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var debugLogging bool
|
||||
|
||||
type PlainFormatter struct {
|
||||
}
|
||||
|
||||
func (f *PlainFormatter) Format(entry *log.Entry) ([]byte, error) {
|
||||
return []byte(fmt.Sprintf("%s\n", entry.Message)), nil
|
||||
}
|
||||
func toggleDebug(cmd *cobra.Command, args []string) {
|
||||
if debugLogging {
|
||||
log.Info("Debug logs enabled")
|
||||
log.SetLevel(log.DebugLevel)
|
||||
log.SetFormatter(&log.TextFormatter{})
|
||||
} else {
|
||||
plainFormatter := new(PlainFormatter)
|
||||
log.SetFormatter(plainFormatter)
|
||||
}
|
||||
}
|
@ -0,0 +1,307 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/viper"
|
||||
|
||||
"github.com/Infisical/infisical-merge/config"
|
||||
"github.com/Infisical/infisical-merge/detect"
|
||||
"github.com/Infisical/infisical-merge/report"
|
||||
)
|
||||
|
||||
// init registers the detect ("scan") command on rootCmd and declares all
// of its flags. The hidden --config flag is mirrored into viper so that
// initScanConfig can resolve the config path later.
func init() {
	rootCmd.AddCommand(detectCmd)
	detectCmd.Flags().String("log-opts", "", "git log options")
	detectCmd.Flags().Bool("no-git", false, "treat git repo as a regular directory and scan those files, --log-opts has no effect on the scan when --no-git is set")
	detectCmd.Flags().Bool("pipe", false, "scan input from stdin, ex: `cat some_file | gitleaks detect --pipe`")
	detectCmd.Flags().Bool("follow-symlinks", false, "scan files that are symlinks to other files")

	detectCmd.Flags().StringP("config", "c", "", configDescription)
	detectCmd.Flags().Int("exit-code", 1, "exit code when leaks have been encountered")
	detectCmd.Flags().StringP("source", "s", ".", "path to source")
	detectCmd.Flags().StringP("report-path", "r", "", "report file")
	detectCmd.Flags().StringP("report-format", "f", "json", "output format (json, csv, sarif)")
	detectCmd.Flags().StringP("baseline-path", "b", "", "path to baseline with issues that can be ignored")
	detectCmd.Flags().BoolP("verbose", "v", false, "show verbose output from scan (which file, where in the file, what secret)")
	detectCmd.Flags().BoolP("no-color", "", false, "turn off color for verbose output")
	detectCmd.Flags().Int("max-target-megabytes", 0, "files larger than this will be skipped")
	detectCmd.Flags().Bool("redact", false, "redact secrets from logs and stdout")

	// Mirror --config into viper so config resolution can read it.
	err := viper.BindPFlag("config", detectCmd.Flags().Lookup("config"))
	if err != nil {
		log.Fatal().Msgf("err binding config %s", err.Error())
	}

	// Hide the flag
	detectCmd.Flags().MarkHidden("config")
}
|
||||
|
||||
// detectCmd is the `scan` subcommand: a full secret scan of a repo's git
// history, a plain directory, or stdin. Its work is done by runDetect.
var detectCmd = &cobra.Command{
	Use:   "scan",
	Short: "scan for secrets in repos, directories, and files",
	Run:   runDetect,
}
|
||||
|
||||
func runDetect(cmd *cobra.Command, args []string) {
|
||||
initScanConfig(cmd)
|
||||
|
||||
var (
|
||||
vc config.ViperConfig
|
||||
findings []report.Finding
|
||||
err error
|
||||
)
|
||||
|
||||
// Load config
|
||||
if err = viper.Unmarshal(&vc); err != nil {
|
||||
log.Fatal().Err(err).Msg("Failed to load config")
|
||||
}
|
||||
cfg, err := vc.Translate()
|
||||
if err != nil {
|
||||
log.Fatal().Err(err).Msg("Failed to load config")
|
||||
}
|
||||
cfg.Path, _ = cmd.Flags().GetString("config")
|
||||
|
||||
// start timer
|
||||
start := time.Now()
|
||||
|
||||
// Setup detector
|
||||
detector := detect.NewDetector(cfg)
|
||||
detector.Config.Path, err = cmd.Flags().GetString("config")
|
||||
if err != nil {
|
||||
log.Fatal().Err(err).Msg("")
|
||||
}
|
||||
source, err := cmd.Flags().GetString("source")
|
||||
if err != nil {
|
||||
log.Fatal().Err(err).Msg("")
|
||||
}
|
||||
// if config path is not set, then use the {source}/.gitleaks.toml path.
|
||||
// note that there may not be a `{source}/.gitleaks.toml` file, this is ok.
|
||||
if detector.Config.Path == "" {
|
||||
detector.Config.Path = filepath.Join(source, ".gitleaks.toml")
|
||||
}
|
||||
// set verbose flag
|
||||
if detector.Verbose, err = cmd.Flags().GetBool("verbose"); err != nil {
|
||||
log.Fatal().Err(err).Msg("")
|
||||
}
|
||||
// set redact flag
|
||||
if detector.Redact, err = cmd.Flags().GetBool("redact"); err != nil {
|
||||
log.Fatal().Err(err).Msg("")
|
||||
}
|
||||
if detector.MaxTargetMegaBytes, err = cmd.Flags().GetInt("max-target-megabytes"); err != nil {
|
||||
log.Fatal().Err(err).Msg("")
|
||||
}
|
||||
// set color flag
|
||||
if detector.NoColor, err = cmd.Flags().GetBool("no-color"); err != nil {
|
||||
log.Fatal().Err(err).Msg("")
|
||||
}
|
||||
|
||||
if fileExists(filepath.Join(source, ".infisicalignore")) {
|
||||
if err = detector.AddGitleaksIgnore(filepath.Join(source, ".infisicalignore")); err != nil {
|
||||
log.Fatal().Err(err).Msg("could not call AddInfisicalIgnore")
|
||||
}
|
||||
}
|
||||
|
||||
// ignore findings from the baseline (an existing report in json format generated earlier)
|
||||
baselinePath, _ := cmd.Flags().GetString("baseline-path")
|
||||
if baselinePath != "" {
|
||||
err = detector.AddBaseline(baselinePath, source)
|
||||
if err != nil {
|
||||
log.Error().Msgf("Could not load baseline. The path must point of a gitleaks report generated using the default format: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
// set follow symlinks flag
|
||||
if detector.FollowSymlinks, err = cmd.Flags().GetBool("follow-symlinks"); err != nil {
|
||||
log.Fatal().Err(err).Msg("")
|
||||
}
|
||||
|
||||
// set exit code
|
||||
exitCode, err := cmd.Flags().GetInt("exit-code")
|
||||
if err != nil {
|
||||
log.Fatal().Err(err).Msg("could not get exit code")
|
||||
}
|
||||
|
||||
// determine what type of scan:
|
||||
// - git: scan the history of the repo
|
||||
// - no-git: scan files by treating the repo as a plain directory
|
||||
noGit, err := cmd.Flags().GetBool("no-git")
|
||||
if err != nil {
|
||||
log.Fatal().Err(err).Msg("could not call GetBool() for no-git")
|
||||
}
|
||||
fromPipe, err := cmd.Flags().GetBool("pipe")
|
||||
if err != nil {
|
||||
log.Fatal().Err(err)
|
||||
}
|
||||
|
||||
// start the detector scan
|
||||
if noGit {
|
||||
findings, err = detector.DetectFiles(source)
|
||||
if err != nil {
|
||||
// don't exit on error, just log it
|
||||
log.Error().Err(err).Msg("")
|
||||
}
|
||||
} else if fromPipe {
|
||||
findings, err = detector.DetectReader(os.Stdin, 10)
|
||||
if err != nil {
|
||||
// log fatal to exit, no need to continue since a report
|
||||
// will not be generated when scanning from a pipe...for now
|
||||
log.Fatal().Err(err).Msg("")
|
||||
}
|
||||
} else {
|
||||
var logOpts string
|
||||
logOpts, err = cmd.Flags().GetString("log-opts")
|
||||
if err != nil {
|
||||
log.Fatal().Err(err).Msg("")
|
||||
}
|
||||
findings, err = detector.DetectGit(source, logOpts, detect.DetectType)
|
||||
if err != nil {
|
||||
// don't exit on error, just log it
|
||||
log.Error().Err(err).Msg("")
|
||||
}
|
||||
}
|
||||
|
||||
// log info about the scan
|
||||
if err == nil {
|
||||
log.Info().Msgf("scan completed in %s", FormatDuration(time.Since(start)))
|
||||
if len(findings) != 0 {
|
||||
log.Warn().Msgf("leaks found: %d", len(findings))
|
||||
} else {
|
||||
log.Info().Msg("no leaks found")
|
||||
}
|
||||
} else {
|
||||
log.Warn().Msgf("partial scan completed in %s", FormatDuration(time.Since(start)))
|
||||
if len(findings) != 0 {
|
||||
log.Warn().Msgf("%d leaks found in partial scan", len(findings))
|
||||
} else {
|
||||
log.Warn().Msg("no leaks found in partial scan")
|
||||
}
|
||||
}
|
||||
|
||||
// write report if desired
|
||||
reportPath, _ := cmd.Flags().GetString("report-path")
|
||||
ext, _ := cmd.Flags().GetString("report-format")
|
||||
if reportPath != "" {
|
||||
if err := report.Write(findings, cfg, ext, reportPath); err != nil {
|
||||
log.Fatal().Err(err).Msg("could not write")
|
||||
}
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if len(findings) != 0 {
|
||||
os.Exit(exitCode)
|
||||
}
|
||||
}
|
||||
|
||||
func fileExists(fileName string) bool {
|
||||
// check for a .gitleaksignore file
|
||||
info, err := os.Stat(fileName)
|
||||
if err != nil && !os.IsNotExist(err) {
|
||||
return false
|
||||
}
|
||||
|
||||
if info != nil && err == nil {
|
||||
if !info.IsDir() {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func FormatDuration(d time.Duration) string {
|
||||
scale := 100 * time.Second
|
||||
// look for the max scale that is smaller than d
|
||||
for scale > d {
|
||||
scale = scale / 10
|
||||
}
|
||||
return d.Round(scale / 100).String()
|
||||
}
|
||||
|
||||
// configDescription is the shared help text for the hidden --config/-c
// flag on the scan and prevent commands; it documents the config file
// resolution order implemented in initScanConfig.
const configDescription = `config file path
order of precedence:
1. --config/-c
2. env var GITLEAKS_CONFIG
3. (--source/-s)/.gitleaks.toml
If none of the three options are used, then gitleaks will use the default config`
|
||||
|
||||
// initScanConfig points viper at the gitleaks config to use for a scan,
// following the precedence documented in configDescription:
//
//  1. the --config/-c flag,
//  2. the GITLEAKS_CONFIG environment variable,
//  3. {--source}/.gitleaks.toml if it exists,
//  4. otherwise the embedded config.DefaultConfig.
//
// In the default-config cases the config is read immediately and the
// function returns early; otherwise viper.ReadInConfig loads whichever
// file was selected. Any failure is fatal.
func initScanConfig(cmd *cobra.Command) {
	cfgPath, err := cmd.Flags().GetString("config")
	if err != nil {
		log.Fatal().Msg(err.Error())
	}
	if cfgPath != "" {
		// Case 1: explicit --config path.
		viper.SetConfigFile(cfgPath)
		log.Debug().Msgf("using gitleaks config %s from `--config`", cfgPath)
	} else if os.Getenv("GITLEAKS_CONFIG") != "" {
		// Case 2: GITLEAKS_CONFIG environment variable.
		envPath := os.Getenv("GITLEAKS_CONFIG")
		viper.SetConfigFile(envPath)
		log.Debug().Msgf("using gitleaks config from GITLEAKS_CONFIG env var: %s", envPath)
	} else {
		// Cases 3/4: look relative to --source.
		source, err := cmd.Flags().GetString("source")
		if err != nil {
			log.Fatal().Msg(err.Error())
		}
		fileInfo, err := os.Stat(source)
		if err != nil {
			log.Fatal().Msg(err.Error())
		}

		// A file source cannot contain a .gitleaks.toml; fall back to
		// the embedded default config and return early.
		if !fileInfo.IsDir() {
			log.Debug().Msgf("unable to load gitleaks config from %s since --source=%s is a file, using default config",
				filepath.Join(source, ".gitleaks.toml"), source)
			viper.SetConfigType("toml")
			if err = viper.ReadConfig(strings.NewReader(config.DefaultConfig)); err != nil {
				log.Fatal().Msgf("err reading toml %s", err.Error())
			}
			return
		}

		// No {source}/.gitleaks.toml: use the embedded default config
		// and return early.
		if _, err := os.Stat(filepath.Join(source, ".gitleaks.toml")); os.IsNotExist(err) {
			log.Debug().Msgf("no gitleaks config found in path %s, using default gitleaks config", filepath.Join(source, ".gitleaks.toml"))
			viper.SetConfigType("toml")
			if err = viper.ReadConfig(strings.NewReader(config.DefaultConfig)); err != nil {
				log.Fatal().Msgf("err reading default config toml %s", err.Error())
			}
			return
		} else {
			log.Debug().Msgf("using existing gitleaks config %s from `(--source)/.gitleaks.toml`", filepath.Join(source, ".gitleaks.toml"))
		}

		// Case 3: let viper find .gitleaks.toml inside the source dir.
		viper.AddConfigPath(source)
		viper.SetConfigName(".gitleaks")
		viper.SetConfigType("toml")
	}
	if err := viper.ReadInConfig(); err != nil {
		log.Fatal().Msgf("unable to load gitleaks config, err: %s", err)
	}
}
|
@ -0,0 +1,160 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/viper"
|
||||
|
||||
"github.com/Infisical/infisical-merge/config"
|
||||
"github.com/Infisical/infisical-merge/detect"
|
||||
"github.com/Infisical/infisical-merge/report"
|
||||
)
|
||||
|
||||
// init declares the protect ("prevent") command's flags, binds the hidden
// --config flag into viper, and registers the command on rootCmd.
func init() {
	protectCmd.Flags().Bool("staged", false, "detect secrets in a --staged state")
	protectCmd.Flags().String("log-opts", "", "git log options")

	protectCmd.Flags().StringP("config", "c", "", configDescription)
	protectCmd.Flags().Int("exit-code", 1, "exit code when leaks have been encountered")
	protectCmd.Flags().StringP("source", "s", ".", "path to source")
	protectCmd.Flags().StringP("report-path", "r", "", "report file")
	protectCmd.Flags().StringP("report-format", "f", "json", "output format (json, csv, sarif)")
	protectCmd.Flags().StringP("baseline-path", "b", "", "path to baseline with issues that can be ignored")
	protectCmd.Flags().BoolP("verbose", "v", false, "show verbose output from scan (which file, where in the file, what secret)")
	protectCmd.Flags().BoolP("no-color", "", false, "turn off color for verbose output")
	protectCmd.Flags().Int("max-target-megabytes", 0, "files larger than this will be skipped")
	protectCmd.Flags().Bool("redact", false, "redact secrets from logs and stdout")

	// Mirror --config into viper so config resolution can read it.
	err := viper.BindPFlag("config", protectCmd.Flags().Lookup("config"))
	if err != nil {
		log.Fatal().Msgf("err binding config %s", err.Error())
	}

	// Hide the flag
	protectCmd.Flags().MarkHidden("config")

	rootCmd.AddCommand(protectCmd)
}
|
||||
|
||||
// protectCmd is the `prevent` subcommand: it scans only uncommitted (or,
// with --staged, staged) changes in a git repo. Its work is done by
// runProtect.
var protectCmd = &cobra.Command{
	Use:   "prevent",
	Short: "scan for secrets in uncommitted changes in a git repo",
	Run:   runProtect,
}
|
||||
|
||||
// runProtect implements the `prevent` command. It loads the config, wires
// CLI flags into a detect.Detector, scans uncommitted changes (staged
// changes only when --staged is set), optionally writes a report, and
// exits with --exit-code when leaks were found.
func runProtect(cmd *cobra.Command, args []string) {
	initScanConfig(cmd)
	var vc config.ViperConfig

	if err := viper.Unmarshal(&vc); err != nil {
		log.Fatal().Err(err).Msg("Failed to load config")
	}
	cfg, err := vc.Translate()
	if err != nil {
		log.Fatal().Err(err).Msg("Failed to load config")
	}

	cfg.Path, _ = cmd.Flags().GetString("config")
	exitCode, _ := cmd.Flags().GetInt("exit-code")
	staged, _ := cmd.Flags().GetBool("staged")
	start := time.Now()

	// Setup detector
	detector := detect.NewDetector(cfg)
	detector.Config.Path, err = cmd.Flags().GetString("config")
	if err != nil {
		log.Fatal().Err(err).Msg("")
	}
	source, err := cmd.Flags().GetString("source")
	if err != nil {
		log.Fatal().Err(err).Msg("")
	}
	// if config path is not set, then use the {source}/.gitleaks.toml path.
	// note that there may not be a `{source}/.gitleaks.toml` file, this is ok.
	if detector.Config.Path == "" {
		detector.Config.Path = filepath.Join(source, ".gitleaks.toml")
	}
	// set verbose flag
	if detector.Verbose, err = cmd.Flags().GetBool("verbose"); err != nil {
		log.Fatal().Err(err).Msg("")
	}
	// set redact flag
	if detector.Redact, err = cmd.Flags().GetBool("redact"); err != nil {
		log.Fatal().Err(err).Msg("")
	}
	if detector.MaxTargetMegaBytes, err = cmd.Flags().GetInt("max-target-megabytes"); err != nil {
		log.Fatal().Err(err).Msg("")
	}
	// set color flag
	if detector.NoColor, err = cmd.Flags().GetBool("no-color"); err != nil {
		log.Fatal().Err(err).Msg("")
	}

	// Honor a .infisicalignore file in the scan root, if present.
	if fileExists(filepath.Join(source, ".infisicalignore")) {
		if err = detector.AddGitleaksIgnore(filepath.Join(source, ".infisicalignore")); err != nil {
			log.Fatal().Err(err).Msg("could not call AddInfisicalIgnore")
		}
	}

	// get log options for git scan
	logOpts, err := cmd.Flags().GetString("log-opts")
	if err != nil {
		log.Fatal().Err(err).Msg("")
	}

	// start git scan
	var findings []report.Finding
	if staged {
		findings, err = detector.DetectGit(source, logOpts, detect.ProtectStagedType)
	} else {
		findings, err = detector.DetectGit(source, logOpts, detect.ProtectType)
	}
	if err != nil {
		// don't exit on error, just log it
		log.Error().Err(err).Msg("")
	}

	// log info about the scan
	log.Info().Msgf("scan completed in %s", FormatDuration(time.Since(start)))
	if len(findings) != 0 {
		log.Warn().Msgf("leaks found: %d", len(findings))
	} else {
		log.Info().Msg("no leaks found")
	}

	// Write a report when --report-path was given.
	reportPath, _ := cmd.Flags().GetString("report-path")
	ext, _ := cmd.Flags().GetString("report-format")
	if reportPath != "" {
		if err = report.Write(findings, cfg, ext, reportPath); err != nil {
			log.Fatal().Err(err).Msg("")
		}
	}
	if len(findings) != 0 {
		os.Exit(exitCode)
	}
}
|
@ -1,17 +0,0 @@
|
||||
package models
|
||||
|
||||
import log "github.com/sirupsen/logrus"
|
||||
|
||||
// Custom error type so that we can give helpful messages in CLI
type Error struct {
	// Err is the underlying error; logged at debug level by printDebuError.
	Err error
	// FriendlyMessage is the human-readable message shown to the user.
	FriendlyMessage string
}

// printFriendlyMessage logs the user-facing message at info level.
func (e *Error) printFriendlyMessage() {
	log.Infoln(e.FriendlyMessage)
}

// printDebuError logs the underlying error at debug level.
// NOTE(review): the name looks like a typo for printDebugError; it is
// unexported, so renaming would only touch this package — confirm call
// sites before changing.
func (e *Error) printDebuError() {
	log.Debugln(e.Err)
}
|
@ -0,0 +1,25 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
package report
|
||||
|
||||
// version identifies the report schema/tool version emitted in reports.
// NOTE(review): presumably tracks the upstream gitleaks release line —
// confirm before bumping.
const version = "v8.0.0"

// driver is the tool name emitted in reports.
const driver = "gitleaks"
|
@ -0,0 +1,81 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package report
|
||||
|
||||
import (
|
||||
"encoding/csv"
|
||||
"io"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// writeCsv writes the list of findings to a writeCloser.
|
||||
func writeCsv(f []Finding, w io.WriteCloser) error {
|
||||
if len(f) == 0 {
|
||||
return nil
|
||||
}
|
||||
defer w.Close()
|
||||
cw := csv.NewWriter(w)
|
||||
err := cw.Write([]string{"RuleID",
|
||||
"Commit",
|
||||
"File",
|
||||
"SymlinkFile",
|
||||
"Secret",
|
||||
"Match",
|
||||
"StartLine",
|
||||
"EndLine",
|
||||
"StartColumn",
|
||||
"EndColumn",
|
||||
"Author",
|
||||
"Message",
|
||||
"Date",
|
||||
"Email",
|
||||
"Fingerprint",
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, f := range f {
|
||||
err = cw.Write([]string{f.RuleID,
|
||||
f.Commit,
|
||||
f.File,
|
||||
f.SymlinkFile,
|
||||
f.Secret,
|
||||
f.Match,
|
||||
strconv.Itoa(f.StartLine),
|
||||
strconv.Itoa(f.EndLine),
|
||||
strconv.Itoa(f.StartColumn),
|
||||
strconv.Itoa(f.EndColumn),
|
||||
f.Author,
|
||||
f.Message,
|
||||
f.Date,
|
||||
f.Email,
|
||||
f.Fingerprint,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
cw.Flush()
|
||||
return cw.Error()
|
||||
}
|
@ -0,0 +1,108 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package report
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestWriteCSV exercises writeCsv against golden files. The "simple" case
// compares generated CSV with the golden file under expectPath; the
// "empty" case asserts that zero findings produce a zero-byte file (no
// header row). expectPath and tmpPath are presumably package-level test
// fixtures declared elsewhere — not visible here.
func TestWriteCSV(t *testing.T) {
	tests := []struct {
		findings []Finding
		testReportName string
		expected string
		wantEmpty bool
	}{
		{
			testReportName: "simple",
			expected: filepath.Join(expectPath, "report", "csv_simple.csv"),
			findings: []Finding{
				{
					RuleID: "test-rule",
					Match: "line containing secret",
					Secret: "a secret",
					StartLine: 1,
					EndLine: 2,
					StartColumn: 1,
					EndColumn: 2,
					Message: "opps",
					File: "auth.py",
					SymlinkFile: "",
					Commit: "0000000000000000",
					Author: "John Doe",
					Email: "johndoe@gmail.com",
					Date: "10-19-2003",
					Fingerprint: "fingerprint",
				},
			}},
		{

			wantEmpty: true,
			testReportName: "empty",
			expected: filepath.Join(expectPath, "report", "this_should_not_exist.csv"),
			findings: []Finding{}},
	}

	for _, test := range tests {
		// Write findings to a temp CSV file, then diff against the golden file.
		tmpfile, err := os.Create(filepath.Join(tmpPath, test.testReportName+".csv"))
		if err != nil {
			// NOTE(review): if Create fails, tmpfile is nil and
			// tmpfile.Name() below panics — presumably never hit in
			// practice; confirm.
			os.Remove(tmpfile.Name())
			t.Error(err)
		}
		err = writeCsv(test.findings, tmpfile)
		if err != nil {
			os.Remove(tmpfile.Name())
			t.Error(err)
		}
		got, err := os.ReadFile(tmpfile.Name())
		if err != nil {
			os.Remove(tmpfile.Name())
			t.Error(err)
		}
		if test.wantEmpty {
			if len(got) > 0 {
				t.Errorf("Expected empty file, got %s", got)
			}
			os.Remove(tmpfile.Name())
			continue
		}
		want, err := os.ReadFile(test.expected)
		if err != nil {
			os.Remove(tmpfile.Name())
			t.Error(err)
		}

		if string(got) != string(want) {
			// Keep the mismatching output on disk (*.got.csv) for debugging.
			err = os.WriteFile(strings.Replace(test.expected, ".csv", ".got.csv", 1), got, 0644)
			if err != nil {
				t.Error(err)
			}
			t.Errorf("got %s, want %s", string(got), string(want))
		}

		os.Remove(tmpfile.Name())
	}
}
|
@ -0,0 +1,72 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package report
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Finding contains information about strings that
// have been captured by a rule match. (Upstream comments refer to
// tree-sitter queries; matching in this package is driven by the
// rule regex/path patterns — see getRules.)
type Finding struct {
	// Description is the human-readable description of the matched rule.
	Description string

	// Start/end coordinates of the match within the file.
	StartLine   int
	EndLine     int
	StartColumn int
	EndColumn   int

	// Line is the full line containing the match. It is excluded from
	// JSON output via the "-" tag; Redact replaces the secret within it.
	Line string `json:"-"`

	// Match is the text captured by the rule; Redact scrubs the secret
	// out of it as well.
	Match string

	// Secret contains the full content of what is matched in
	// the tree-sitter query.
	Secret string

	// File is the name of the file containing the finding
	File string
	// SymlinkFile, when non-empty, is the symlink path through which
	// File was reached (SARIF locations report it instead of File).
	SymlinkFile string
	// Commit is the hash of the commit that introduced the finding.
	Commit string

	// Entropy is the shannon entropy of Value.
	// NOTE(review): there is no Value field — presumably this is the
	// entropy of Secret; confirm against the detector that populates it.
	Entropy float32

	// Git metadata for the offending commit.
	Author  string
	Email   string
	Date    string
	Message string
	Tags    []string

	// RuleID is the identifier of the rule that was matched.
	RuleID string

	// Fingerprint is a unique identifier for this finding.
	Fingerprint string
}
|
||||
|
||||
// Redact removes sensitive information from a finding.
|
||||
func (f *Finding) Redact() {
|
||||
f.Line = strings.Replace(f.Line, f.Secret, "REDACTED", -1)
|
||||
f.Match = strings.Replace(f.Match, f.Secret, "REDACTED", -1)
|
||||
f.Secret = "REDACTED"
|
||||
}
|
@ -0,0 +1,48 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
package report
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestRedact(t *testing.T) {
|
||||
tests := []struct {
|
||||
findings []Finding
|
||||
redact bool
|
||||
}{
|
||||
{
|
||||
redact: true,
|
||||
findings: []Finding{
|
||||
{
|
||||
Secret: "line containing secret",
|
||||
Match: "secret",
|
||||
},
|
||||
}},
|
||||
}
|
||||
for _, test := range tests {
|
||||
for _, f := range test.findings {
|
||||
f.Redact()
|
||||
if f.Secret != "REDACTED" {
|
||||
t.Error("redact not redacting: ", f.Secret)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,37 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package report
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
)
|
||||
|
||||
func writeJson(findings []Finding, w io.WriteCloser) error {
|
||||
if len(findings) == 0 {
|
||||
findings = []Finding{}
|
||||
}
|
||||
encoder := json.NewEncoder(w)
|
||||
encoder.SetIndent("", " ")
|
||||
return encoder.Encode(findings)
|
||||
}
|
@ -0,0 +1,111 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package report
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestWriteJSON(t *testing.T) {
|
||||
tests := []struct {
|
||||
findings []Finding
|
||||
testReportName string
|
||||
expected string
|
||||
wantEmpty bool
|
||||
}{
|
||||
{
|
||||
testReportName: "simple",
|
||||
expected: filepath.Join(expectPath, "report", "json_simple.json"),
|
||||
findings: []Finding{
|
||||
{
|
||||
|
||||
Description: "",
|
||||
RuleID: "test-rule",
|
||||
Match: "line containing secret",
|
||||
Secret: "a secret",
|
||||
StartLine: 1,
|
||||
EndLine: 2,
|
||||
StartColumn: 1,
|
||||
EndColumn: 2,
|
||||
Message: "opps",
|
||||
File: "auth.py",
|
||||
SymlinkFile: "",
|
||||
Commit: "0000000000000000",
|
||||
Author: "John Doe",
|
||||
Email: "johndoe@gmail.com",
|
||||
Date: "10-19-2003",
|
||||
Tags: []string{},
|
||||
},
|
||||
}},
|
||||
{
|
||||
|
||||
testReportName: "empty",
|
||||
expected: filepath.Join(expectPath, "report", "empty.json"),
|
||||
findings: []Finding{}},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
// create tmp file using os.TempDir()
|
||||
tmpfile, err := os.Create(filepath.Join(tmpPath, test.testReportName+".json"))
|
||||
if err != nil {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Error(err)
|
||||
}
|
||||
err = writeJson(test.findings, tmpfile)
|
||||
if err != nil {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Error(err)
|
||||
}
|
||||
got, err := os.ReadFile(tmpfile.Name())
|
||||
if err != nil {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Error(err)
|
||||
}
|
||||
if test.wantEmpty {
|
||||
if len(got) > 0 {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Errorf("Expected empty file, got %s", got)
|
||||
}
|
||||
os.Remove(tmpfile.Name())
|
||||
continue
|
||||
}
|
||||
want, err := os.ReadFile(test.expected)
|
||||
if err != nil {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if string(got) != string(want) {
|
||||
err = os.WriteFile(strings.Replace(test.expected, ".json", ".got.json", 1), got, 0644)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
t.Errorf("got %s, want %s", string(got), string(want))
|
||||
}
|
||||
|
||||
os.Remove(tmpfile.Name())
|
||||
}
|
||||
}
|
@ -0,0 +1,54 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package report
|
||||
|
||||
import (
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/Infisical/infisical-merge/config"
|
||||
)
|
||||
|
||||
const (
	// CWE is the Common Weakness Enumeration identifier attached to
	// findings: https://cwe.mitre.org/data/definitions/798.html
	CWE             = "CWE-798"
	CWE_DESCRIPTION = "Use of Hard-coded Credentials"
)
|
||||
|
||||
func Write(findings []Finding, cfg config.Config, ext string, reportPath string) error {
|
||||
file, err := os.Create(reportPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ext = strings.ToLower(ext)
|
||||
switch ext {
|
||||
case ".json", "json":
|
||||
err = writeJson(findings, file)
|
||||
case ".csv", "csv":
|
||||
err = writeCsv(findings, file)
|
||||
case ".sarif", "sarif":
|
||||
err = writeSarif(cfg, findings, file)
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
@ -0,0 +1,133 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package report
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/Infisical/infisical-merge/config"
|
||||
)
|
||||
|
||||
const (
	// expectPath holds the golden report files compared against test output.
	expectPath = "../testdata/expected/"
	// tmpPath is where tests write their temporary report files.
	tmpPath = "../testdata/tmp"
)
|
||||
|
||||
func TestReport(t *testing.T) {
|
||||
tests := []struct {
|
||||
findings []Finding
|
||||
ext string
|
||||
wantEmpty bool
|
||||
}{
|
||||
{
|
||||
ext: "json",
|
||||
findings: []Finding{
|
||||
{
|
||||
RuleID: "test-rule",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
ext: ".json",
|
||||
findings: []Finding{
|
||||
{
|
||||
RuleID: "test-rule",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
ext: ".jsonj",
|
||||
findings: []Finding{
|
||||
{
|
||||
RuleID: "test-rule",
|
||||
},
|
||||
},
|
||||
wantEmpty: true,
|
||||
},
|
||||
{
|
||||
ext: ".csv",
|
||||
findings: []Finding{
|
||||
{
|
||||
RuleID: "test-rule",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
ext: "csv",
|
||||
findings: []Finding{
|
||||
{
|
||||
RuleID: "test-rule",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
ext: "CSV",
|
||||
findings: []Finding{
|
||||
{
|
||||
RuleID: "test-rule",
|
||||
},
|
||||
},
|
||||
},
|
||||
// {
|
||||
// ext: "SARIF",
|
||||
// findings: []Finding{
|
||||
// {
|
||||
// RuleID: "test-rule",
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
}
|
||||
|
||||
for i, test := range tests {
|
||||
tmpfile, err := os.Create(filepath.Join(tmpPath, strconv.Itoa(i)+test.ext))
|
||||
if err != nil {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Error(err)
|
||||
}
|
||||
err = Write(test.findings, config.Config{}, test.ext, tmpfile.Name())
|
||||
if err != nil {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Error(err)
|
||||
}
|
||||
got, err := os.ReadFile(tmpfile.Name())
|
||||
if err != nil {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Error(err)
|
||||
}
|
||||
os.Remove(tmpfile.Name())
|
||||
|
||||
if len(got) == 0 && !test.wantEmpty {
|
||||
t.Errorf("got empty file with extension " + test.ext)
|
||||
}
|
||||
|
||||
if test.wantEmpty {
|
||||
if len(got) > 0 {
|
||||
t.Errorf("Expected empty file, got %s", got)
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,237 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package report
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/Infisical/infisical-merge/config"
|
||||
)
|
||||
|
||||
func writeSarif(cfg config.Config, findings []Finding, w io.WriteCloser) error {
|
||||
sarif := Sarif{
|
||||
Schema: "https://json.schemastore.org/sarif-2.1.0.json",
|
||||
Version: "2.1.0",
|
||||
Runs: getRuns(cfg, findings),
|
||||
}
|
||||
|
||||
encoder := json.NewEncoder(w)
|
||||
encoder.SetIndent("", " ")
|
||||
return encoder.Encode(sarif)
|
||||
}
|
||||
|
||||
func getRuns(cfg config.Config, findings []Finding) []Runs {
|
||||
return []Runs{
|
||||
{
|
||||
Tool: getTool(cfg),
|
||||
Results: getResults(findings),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func getTool(cfg config.Config) Tool {
|
||||
tool := Tool{
|
||||
Driver: Driver{
|
||||
Name: driver,
|
||||
SemanticVersion: version,
|
||||
InformationUri: "https://github.com/Infisical/infisical",
|
||||
Rules: getRules(cfg),
|
||||
},
|
||||
}
|
||||
|
||||
// if this tool has no rules, ensure that it is represented as [] instead of null/nil
|
||||
if hasEmptyRules(tool) {
|
||||
tool.Driver.Rules = make([]Rules, 0)
|
||||
}
|
||||
|
||||
return tool
|
||||
}
|
||||
|
||||
func hasEmptyRules(tool Tool) bool {
|
||||
return len(tool.Driver.Rules) == 0
|
||||
}
|
||||
|
||||
func getRules(cfg config.Config) []Rules {
|
||||
// TODO for _, rule := range cfg.Rules {
|
||||
var rules []Rules
|
||||
for _, rule := range cfg.OrderedRules() {
|
||||
shortDescription := ShortDescription{
|
||||
Text: rule.Description,
|
||||
}
|
||||
if rule.Regex != nil {
|
||||
shortDescription = ShortDescription{
|
||||
Text: rule.Regex.String(),
|
||||
}
|
||||
} else if rule.Path != nil {
|
||||
shortDescription = ShortDescription{
|
||||
Text: rule.Path.String(),
|
||||
}
|
||||
}
|
||||
rules = append(rules, Rules{
|
||||
ID: rule.RuleID,
|
||||
Name: rule.Description,
|
||||
Description: shortDescription,
|
||||
})
|
||||
}
|
||||
return rules
|
||||
}
|
||||
|
||||
func messageText(f Finding) string {
|
||||
if f.Commit == "" {
|
||||
return fmt.Sprintf("%s has detected secret for file %s.", f.RuleID, f.File)
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s has detected secret for file %s at commit %s.", f.RuleID, f.File, f.Commit)
|
||||
|
||||
}
|
||||
|
||||
func getResults(findings []Finding) []Results {
|
||||
results := []Results{}
|
||||
for _, f := range findings {
|
||||
r := Results{
|
||||
Message: Message{
|
||||
Text: messageText(f),
|
||||
},
|
||||
RuleId: f.RuleID,
|
||||
Locations: getLocation(f),
|
||||
// This information goes in partial fingerprings until revision
|
||||
// data can be added somewhere else
|
||||
PartialFingerPrints: PartialFingerPrints{
|
||||
CommitSha: f.Commit,
|
||||
Email: f.Email,
|
||||
CommitMessage: f.Message,
|
||||
Date: f.Date,
|
||||
Author: f.Author,
|
||||
},
|
||||
}
|
||||
results = append(results, r)
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
||||
func getLocation(f Finding) []Locations {
|
||||
uri := f.File
|
||||
if f.SymlinkFile != "" {
|
||||
uri = f.SymlinkFile
|
||||
}
|
||||
return []Locations{
|
||||
{
|
||||
PhysicalLocation: PhysicalLocation{
|
||||
ArtifactLocation: ArtifactLocation{
|
||||
URI: uri,
|
||||
},
|
||||
Region: Region{
|
||||
StartLine: f.StartLine,
|
||||
EndLine: f.EndLine,
|
||||
StartColumn: f.StartColumn,
|
||||
EndColumn: f.EndColumn,
|
||||
Snippet: Snippet{
|
||||
Text: f.Secret,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// PartialFingerPrints carries commit metadata inside the SARIF
// partialFingerprints property; revision data has no dedicated home in
// the schema, so it rides along here (see getResults).
type PartialFingerPrints struct {
	CommitSha     string `json:"commitSha"`
	Email         string `json:"email"`
	Author        string `json:"author"`
	Date          string `json:"date"`
	CommitMessage string `json:"commitMessage"`
}

// Sarif is the top-level SARIF 2.1.0 report document.
type Sarif struct {
	Schema  string `json:"$schema"`
	Version string `json:"version"`
	Runs    []Runs `json:"runs"`
}

// ShortDescription holds a rule's short description text.
type ShortDescription struct {
	Text string `json:"text"`
}

// FullDescription holds a rule's full description text.
// NOTE(review): not referenced by any code visible in this file.
type FullDescription struct {
	Text string `json:"text"`
}

// Rules describes one detection rule in the SARIF driver.
type Rules struct {
	ID          string           `json:"id"`
	Name        string           `json:"name"`
	Description ShortDescription `json:"shortDescription"`
}

// Driver identifies the scanning tool and the rules it ran with.
type Driver struct {
	Name            string  `json:"name"`
	SemanticVersion string  `json:"semanticVersion"`
	InformationUri  string  `json:"informationUri"`
	Rules           []Rules `json:"rules"`
}

// Tool wraps the SARIF driver object.
type Tool struct {
	Driver Driver `json:"driver"`
}

// Message holds the human-readable text for a result.
type Message struct {
	Text string `json:"text"`
}

// ArtifactLocation points at the file containing a result.
type ArtifactLocation struct {
	URI string `json:"uri"`
}

// Region pinpoints the matched span within the artifact.
type Region struct {
	StartLine   int     `json:"startLine"`
	StartColumn int     `json:"startColumn"`
	EndLine     int     `json:"endLine"`
	EndColumn   int     `json:"endColumn"`
	Snippet     Snippet `json:"snippet"`
}

// Snippet holds the matched text itself.
type Snippet struct {
	Text string `json:"text"`
}

// PhysicalLocation pairs an artifact location with the region inside it.
type PhysicalLocation struct {
	ArtifactLocation ArtifactLocation `json:"artifactLocation"`
	Region           Region           `json:"region"`
}

// Locations wraps a physical location, matching the SARIF schema shape.
type Locations struct {
	PhysicalLocation PhysicalLocation `json:"physicalLocation"`
}

// Results is a single SARIF result (one finding).
type Results struct {
	Message             Message     `json:"message"`
	RuleId              string      `json:"ruleId"`
	Locations           []Locations `json:"locations"`
	PartialFingerPrints `json:"partialFingerprints"`
}

// Runs pairs the tool description with the results it produced.
type Runs struct {
	Tool    Tool      `json:"tool"`
	Results []Results `json:"results"`
}
|
@ -0,0 +1,133 @@
|
||||
// MIT License
|
||||
|
||||
// Copyright (c) 2019 Zachary Rice
|
||||
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package report
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/Infisical/infisical-merge/config"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
// configPath is the directory holding the TOML rule configs used by tests.
const configPath = "../testdata/config/"
|
||||
|
||||
func TestWriteSarif(t *testing.T) {
|
||||
tests := []struct {
|
||||
findings []Finding
|
||||
testReportName string
|
||||
expected string
|
||||
wantEmpty bool
|
||||
cfgName string
|
||||
}{
|
||||
{
|
||||
cfgName: "simple",
|
||||
testReportName: "simple",
|
||||
expected: filepath.Join(expectPath, "report", "sarif_simple.sarif"),
|
||||
findings: []Finding{
|
||||
{
|
||||
|
||||
Description: "A test rule",
|
||||
RuleID: "test-rule",
|
||||
Match: "line containing secret",
|
||||
Secret: "a secret",
|
||||
StartLine: 1,
|
||||
EndLine: 2,
|
||||
StartColumn: 1,
|
||||
EndColumn: 2,
|
||||
Message: "opps",
|
||||
File: "auth.py",
|
||||
Commit: "0000000000000000",
|
||||
Author: "John Doe",
|
||||
Email: "johndoe@gmail.com",
|
||||
Date: "10-19-2003",
|
||||
Tags: []string{},
|
||||
},
|
||||
}},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
// create tmp file using os.TempDir()
|
||||
tmpfile, err := os.Create(filepath.Join(tmpPath, test.testReportName+".json"))
|
||||
if err != nil {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Error(err)
|
||||
}
|
||||
viper.Reset()
|
||||
viper.AddConfigPath(configPath)
|
||||
viper.SetConfigName(test.cfgName)
|
||||
viper.SetConfigType("toml")
|
||||
err = viper.ReadInConfig()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
var vc config.ViperConfig
|
||||
err = viper.Unmarshal(&vc)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
cfg, err := vc.Translate()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
err = writeSarif(cfg, test.findings, tmpfile)
|
||||
fmt.Println(cfg)
|
||||
if err != nil {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Error(err)
|
||||
}
|
||||
got, err := os.ReadFile(tmpfile.Name())
|
||||
if err != nil {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Error(err)
|
||||
}
|
||||
if test.wantEmpty {
|
||||
if len(got) > 0 {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Errorf("Expected empty file, got %s", got)
|
||||
}
|
||||
os.Remove(tmpfile.Name())
|
||||
continue
|
||||
}
|
||||
want, err := os.ReadFile(test.expected)
|
||||
if err != nil {
|
||||
os.Remove(tmpfile.Name())
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if string(got) != string(want) {
|
||||
err = os.WriteFile(strings.Replace(test.expected, ".sarif", ".got.sarif", 1), got, 0644)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
t.Errorf("got %s, want %s", string(got), string(want))
|
||||
}
|
||||
|
||||
os.Remove(tmpfile.Name())
|
||||
}
|
||||
}
|
Loading…
Reference in new issue