parent
49bcd8839f
commit
dfe95ac773
@ -1,3 +0,0 @@
|
|||||||
{
|
|
||||||
"workbench.editor.wrapTabs": true
|
|
||||||
}
|
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,125 @@
|
|||||||
|
import { Octokit } from "@octokit/rest";
import { exec } from "child_process";
import { mkdir, mkdtemp, readFile, rm, writeFile } from "fs";
import { tmpdir } from "os";
import { join } from "path"
import { SecretMatch } from "./types";
|
||||||
|
|
||||||
|
export async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
|
||||||
|
const tempFolder = await createTempFolder();
|
||||||
|
const filePath = join(tempFolder, "content.txt");
|
||||||
|
const findingsPath = join(tempFolder, "findings.json");
|
||||||
|
|
||||||
|
try {
|
||||||
|
await writeTextToFile(filePath, textContent);
|
||||||
|
await runInfisicalScan(filePath, findingsPath);
|
||||||
|
const findingsData = await readFindingsFile(findingsPath);
|
||||||
|
return JSON.parse(findingsData);
|
||||||
|
} finally {
|
||||||
|
await deleteTempFolder(tempFolder);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createTempFolder(): Promise<string> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const tempDir = tmpdir()
|
||||||
|
const tempFolderName = Math.random().toString(36).substring(2);
|
||||||
|
const tempFolderPath = join(tempDir, tempFolderName);
|
||||||
|
|
||||||
|
mkdir(tempFolderPath, (err: any) => {
|
||||||
|
if (err) {
|
||||||
|
reject(err);
|
||||||
|
} else {
|
||||||
|
resolve(tempFolderPath);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function writeTextToFile(filePath: string, content: string): Promise<void> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
writeFile(filePath, content, (err) => {
|
||||||
|
if (err) {
|
||||||
|
reject(err);
|
||||||
|
} else {
|
||||||
|
resolve();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
|
||||||
|
exec(command, (error) => {
|
||||||
|
if (error && error.code != 77) {
|
||||||
|
reject(error);
|
||||||
|
} else {
|
||||||
|
resolve();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function readFindingsFile(filePath: string): Promise<string> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
readFile(filePath, "utf8", (err, data) => {
|
||||||
|
if (err) {
|
||||||
|
reject(err);
|
||||||
|
} else {
|
||||||
|
resolve(data);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function deleteTempFolder(folderPath: string): Promise<void> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
rm(folderPath, { recursive: true }, (err) => {
|
||||||
|
if (err) {
|
||||||
|
reject(err);
|
||||||
|
} else {
|
||||||
|
resolve();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function convertKeysToLowercase<T>(obj: T): T {
|
||||||
|
const convertedObj = {} as T;
|
||||||
|
|
||||||
|
for (const key in obj) {
|
||||||
|
if (Object.prototype.hasOwnProperty.call(obj, key)) {
|
||||||
|
const lowercaseKey = key.charAt(0).toLowerCase() + key.slice(1);
|
||||||
|
convertedObj[lowercaseKey as keyof T] = obj[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return convertedObj;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getCommits(octokit: Octokit, owner: string, repo: string) {
|
||||||
|
let commits: { sha: string }[] = [];
|
||||||
|
let page = 1;
|
||||||
|
while (true) {
|
||||||
|
const response = await octokit.repos.listCommits({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
per_page: 100,
|
||||||
|
page,
|
||||||
|
});
|
||||||
|
|
||||||
|
commits = commits.concat(response.data);
|
||||||
|
if (response.data.length == 0) break;
|
||||||
|
page++;
|
||||||
|
}
|
||||||
|
return commits;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getFilesFromCommit(octokit: any, owner: string, repo: string, sha: string) {
|
||||||
|
const response = await octokit.repos.getCommit({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
ref: sha,
|
||||||
|
});
|
||||||
|
}
|
@ -0,0 +1,21 @@
|
|||||||
|
// One finding from the secret scanner's JSON report. Field names are
// PascalCase because they mirror the scanner's own output keys; they are
// mapped to camelCase (via convertKeysToLowercase) before persistence.
export type SecretMatch = {
  Description: string;
  // Location of the match inside the scanned file.
  StartLine: number;
  EndLine: number;
  StartColumn: number;
  EndColumn: number;
  // The full matched text and the secret substring within it.
  Match: string;
  Secret: string;
  File: string;
  SymlinkFile: string;
  Commit: string;
  // NOTE(review): presumably the entropy score the scanner computed for the
  // secret — confirm against the scanner's report format.
  Entropy: number;
  // Commit metadata attached by the scan queue.
  Author: string;
  Email: string;
  Date: string;
  Message: string;
  Tags: string[];
  RuleID: string;
  // Set by the push-event processor to
  // `${commitId}:${filepath}:${ruleId}:${startLine}` (and the same without
  // the commit id for the field below).
  Fingerprint: string;
  FingerPrintWithoutCommitId: string
};
|
@ -0,0 +1,72 @@
|
|||||||
|
import Queue, { Job } from "bull";
|
||||||
|
import Integration from "../../models/integration";
|
||||||
|
import IntegrationAuth from "../../models/integrationAuth";
|
||||||
|
import { BotService } from "../../services";
|
||||||
|
import { getIntegrationAuthAccessHelper } from "../../helpers";
|
||||||
|
import { syncSecrets } from "../../integrations/sync"
|
||||||
|
|
||||||
|
|
||||||
|
// Payload for a sync-secrets-to-third-party-services queue job.
type TSyncSecretsToThirdPartyServices = {
  workspaceId: string
  // When provided, only integrations for this environment are synced;
  // otherwise all of the workspace's active integrations are processed.
  environment?: string
}
|
||||||
|
|
||||||
|
const syncSecretsToThirdPartyServices = new Queue('sync-secrets-to-third-party-services', process.env.REDIS_URL as string);
|
||||||
|
|
||||||
|
syncSecretsToThirdPartyServices.process(async (job: Job) => {
|
||||||
|
const { workspaceId, environment }: TSyncSecretsToThirdPartyServices = job.data
|
||||||
|
const integrations = await Integration.find({
|
||||||
|
workspace: workspaceId,
|
||||||
|
...(environment
|
||||||
|
? {
|
||||||
|
environment
|
||||||
|
}
|
||||||
|
: {}),
|
||||||
|
isActive: true,
|
||||||
|
app: { $ne: null }
|
||||||
|
});
|
||||||
|
|
||||||
|
// for each workspace integration, sync/push secrets
|
||||||
|
// to that integration
|
||||||
|
for await (const integration of integrations) {
|
||||||
|
// get workspace, environment (shared) secrets
|
||||||
|
const secrets = await BotService.getSecrets({
|
||||||
|
workspaceId: integration.workspace,
|
||||||
|
environment: integration.environment,
|
||||||
|
secretPath: integration.secretPath
|
||||||
|
});
|
||||||
|
|
||||||
|
const integrationAuth = await IntegrationAuth.findById(integration.integrationAuth);
|
||||||
|
|
||||||
|
if (!integrationAuth) throw new Error("Failed to find integration auth");
|
||||||
|
|
||||||
|
// get integration auth access token
|
||||||
|
const access = await getIntegrationAuthAccessHelper({
|
||||||
|
integrationAuthId: integration.integrationAuth
|
||||||
|
});
|
||||||
|
|
||||||
|
// sync secrets to integration
|
||||||
|
return await syncSecrets({
|
||||||
|
integration,
|
||||||
|
integrationAuth,
|
||||||
|
secrets,
|
||||||
|
accessId: access.accessId === undefined ? null : access.accessId,
|
||||||
|
accessToken: access.accessToken
|
||||||
|
});
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
export const syncSecretsToActiveIntegrationsQueue = (jobDetails: TSyncSecretsToThirdPartyServices) => {
|
||||||
|
syncSecretsToThirdPartyServices.add(jobDetails, {
|
||||||
|
attempts: 5,
|
||||||
|
backoff: {
|
||||||
|
type: "exponential",
|
||||||
|
delay: 1000
|
||||||
|
},
|
||||||
|
removeOnComplete: true,
|
||||||
|
removeOnFail: {
|
||||||
|
count: 20 // keep the most recent 20 jobs
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,201 @@
|
|||||||
|
// import Queue, { Job } from "bull";
|
||||||
|
// import { ProbotOctokit } from "probot"
|
||||||
|
// import { Commit, Committer, Repository } from "@octokit/webhooks-types";
|
||||||
|
// import TelemetryService from "../../services/TelemetryService";
|
||||||
|
// import { sendMail } from "../../helpers";
|
||||||
|
// import GitRisks from "../../ee/models/gitRisks";
|
||||||
|
// import { MembershipOrg, User } from "../../models";
|
||||||
|
// import { OWNER, ADMIN } from "../../variables";
|
||||||
|
// import { convertKeysToLowercase, getFilesFromCommit, scanContentAndGetFindings } from "../../ee/services/GithubSecretScanning/helper";
|
||||||
|
// import { getSecretScanningGitAppId, getSecretScanningPrivateKey } from "../../config";
|
||||||
|
|
||||||
|
// const githubFullRepositoryScan = new Queue('github-historical-secret-scanning', 'redis://redis:6379');
|
||||||
|
|
||||||
|
// type TScanFullRepositoryDetails = {
|
||||||
|
// organizationId: string,
|
||||||
|
// repositories: {
|
||||||
|
// id: number;
|
||||||
|
// node_id: string;
|
||||||
|
// name: string;
|
||||||
|
// full_name: string;
|
||||||
|
// private: boolean;
|
||||||
|
// }[] | undefined
|
||||||
|
// installationId: number
|
||||||
|
// }
|
||||||
|
|
||||||
|
// type SecretMatch = {
|
||||||
|
// Description: string;
|
||||||
|
// StartLine: number;
|
||||||
|
// EndLine: number;
|
||||||
|
// StartColumn: number;
|
||||||
|
// EndColumn: number;
|
||||||
|
// Match: string;
|
||||||
|
// Secret: string;
|
||||||
|
// File: string;
|
||||||
|
// SymlinkFile: string;
|
||||||
|
// Commit: string;
|
||||||
|
// Entropy: number;
|
||||||
|
// Author: string;
|
||||||
|
// Email: string;
|
||||||
|
// Date: string;
|
||||||
|
// Message: string;
|
||||||
|
// Tags: string[];
|
||||||
|
// RuleID: string;
|
||||||
|
// Fingerprint: string;
|
||||||
|
// FingerPrintWithoutCommitId: string
|
||||||
|
// };
|
||||||
|
|
||||||
|
// type Hello = {
|
||||||
|
// url: string;
|
||||||
|
// sha: string;
|
||||||
|
// node_id: string;
|
||||||
|
// html_url: string;
|
||||||
|
// comments_url: string;
|
||||||
|
// commit: {
|
||||||
|
// url: string;
|
||||||
|
// author: {
|
||||||
|
// name?: string | undefined;
|
||||||
|
// email?: string | undefined;
|
||||||
|
// date?: string | undefined;
|
||||||
|
// } | null;
|
||||||
|
// verification?: {
|
||||||
|
// } | undefined;
|
||||||
|
// };
|
||||||
|
// files?: {}[] | undefined;
|
||||||
|
// }[]
|
||||||
|
|
||||||
|
|
||||||
|
// githubFullRepositoryScan.process(async (job: Job, done: Queue.DoneCallback) => {
|
||||||
|
// const { organizationId, repositories, installationId }: TScanFullRepositoryDetails = job.data
|
||||||
|
// const repositoryFullNamesList = repositories ? repositories.map(repoDetails => repoDetails.full_name) : []
|
||||||
|
// const octokit = new ProbotOctokit({
|
||||||
|
// auth: {
|
||||||
|
// appId: await getSecretScanningGitAppId(),
|
||||||
|
// privateKey: await getSecretScanningPrivateKey(),
|
||||||
|
// installationId: installationId
|
||||||
|
// },
|
||||||
|
// });
|
||||||
|
|
||||||
|
// for (const repositoryFullName of repositoryFullNamesList) {
|
||||||
|
// const [owner, repo] = repositoryFullName.split("/");
|
||||||
|
|
||||||
|
// let page = 1;
|
||||||
|
// while (true) {
|
||||||
|
// // octokit.repos.getco
|
||||||
|
// const { data } = await octokit.repos.listCommits({
|
||||||
|
// owner,
|
||||||
|
// repo,
|
||||||
|
// per_page: 100,
|
||||||
|
// page
|
||||||
|
// });
|
||||||
|
|
||||||
|
|
||||||
|
// await getFilesFromCommit(octokit, owner, repo, "646b386605177ed0a2cc0a596eeee0cf57666342")
|
||||||
|
|
||||||
|
|
||||||
|
// page++;
|
||||||
|
// }
|
||||||
|
|
||||||
|
// }
|
||||||
|
|
||||||
|
// done()
|
||||||
|
|
||||||
|
// // const allFindingsByFingerprint: { [key: string]: SecretMatch; } = {}
|
||||||
|
// // for (const commit of commits) {
|
||||||
|
// // for (const filepath of [...commit.added, ...commit.modified]) {
|
||||||
|
// // try {
|
||||||
|
// // const fileContentsResponse = await octokit.repos.getContent({
|
||||||
|
// // owner,
|
||||||
|
// // repo,
|
||||||
|
// // path: filepath,
|
||||||
|
// // });
|
||||||
|
|
||||||
|
// // const data: any = fileContentsResponse.data;
|
||||||
|
// // const fileContent = Buffer.from(data.content, "base64").toString();
|
||||||
|
|
||||||
|
// // const findings = await scanContentAndGetFindings(`\n${fileContent}`) // extra line to count lines correctly
|
||||||
|
|
||||||
|
// // for (const finding of findings) {
|
||||||
|
// // const fingerPrintWithCommitId = `${commit.id}:${filepath}:${finding.RuleID}:${finding.StartLine}`
|
||||||
|
// // const fingerPrintWithoutCommitId = `${filepath}:${finding.RuleID}:${finding.StartLine}`
|
||||||
|
// // finding.Fingerprint = fingerPrintWithCommitId
|
||||||
|
// // finding.FingerPrintWithoutCommitId = fingerPrintWithoutCommitId
|
||||||
|
// // finding.Commit = commit.id
|
||||||
|
// // finding.File = filepath
|
||||||
|
// // finding.Author = commit.author.name
|
||||||
|
// // finding.Email = commit?.author?.email ? commit?.author?.email : ""
|
||||||
|
|
||||||
|
// // allFindingsByFingerprint[fingerPrintWithCommitId] = finding
|
||||||
|
// // }
|
||||||
|
|
||||||
|
// // } catch (error) {
|
||||||
|
// // done(new Error(`gitHubHistoricalScanning.process: unable to fetch content for [filepath=${filepath}] because [error=${error}]`), null)
|
||||||
|
// // }
|
||||||
|
// // }
|
||||||
|
// // }
|
||||||
|
|
||||||
|
// // // change to update
|
||||||
|
// // for (const key in allFindingsByFingerprint) {
|
||||||
|
// // await GitRisks.findOneAndUpdate({ fingerprint: allFindingsByFingerprint[key].Fingerprint },
|
||||||
|
// // {
|
||||||
|
// // ...convertKeysToLowercase(allFindingsByFingerprint[key]),
|
||||||
|
// // installationId: installationId,
|
||||||
|
// // organization: organizationId,
|
||||||
|
// // repositoryFullName: repository.fullName,
|
||||||
|
// // repositoryId: repository.id
|
||||||
|
// // }, {
|
||||||
|
// // upsert: true
|
||||||
|
// // }).lean()
|
||||||
|
// // }
|
||||||
|
// // // get emails of admins
|
||||||
|
// // const adminsOfWork = await MembershipOrg.find({
|
||||||
|
// // organization: organizationId,
|
||||||
|
// // $or: [
|
||||||
|
// // { role: OWNER },
|
||||||
|
// // { role: ADMIN }
|
||||||
|
// // ]
|
||||||
|
// // }).lean()
|
||||||
|
|
||||||
|
// // const userEmails = await User.find({
|
||||||
|
// // _id: {
|
||||||
|
// // $in: [adminsOfWork.map(orgMembership => orgMembership.user)]
|
||||||
|
// // }
|
||||||
|
// // }).select("email").lean()
|
||||||
|
|
||||||
|
// // const adminOrOwnerEmails = userEmails.map(userObject => userObject.email)
|
||||||
|
|
||||||
|
// // const usersToNotify = pusher?.email ? [pusher.email, ...adminOrOwnerEmails] : [...adminOrOwnerEmails]
|
||||||
|
// // if (Object.keys(allFindingsByFingerprint).length) {
|
||||||
|
// // await sendMail({
|
||||||
|
// // template: "secretLeakIncident.handlebars",
|
||||||
|
// // subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.fullName}`,
|
||||||
|
// // recipients: usersToNotify,
|
||||||
|
// // substitutions: {
|
||||||
|
// // numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
|
||||||
|
// // pusher_email: pusher.email,
|
||||||
|
// // pusher_name: pusher.name
|
||||||
|
// // }
|
||||||
|
// // });
|
||||||
|
// // }
|
||||||
|
|
||||||
|
// // const postHogClient = await TelemetryService.getPostHogClient();
|
||||||
|
// // if (postHogClient) {
|
||||||
|
// // postHogClient.capture({
|
||||||
|
// // event: "cloud secret scan",
|
||||||
|
// // distinctId: pusher.email,
|
||||||
|
// // properties: {
|
||||||
|
// // numberOfCommitsScanned: commits.length,
|
||||||
|
// // numberOfRisksFound: Object.keys(allFindingsByFingerprint).length,
|
||||||
|
// // }
|
||||||
|
// // });
|
||||||
|
// // }
|
||||||
|
|
||||||
|
// // done(null, allFindingsByFingerprint)
|
||||||
|
|
||||||
|
// })
|
||||||
|
|
||||||
|
// export const scanGithubFullRepositoryForSecretLeaks = (scanFullRepositoryDetails: TScanFullRepositoryDetails) => {
|
||||||
|
// console.log("full repo scan started")
|
||||||
|
// githubFullRepositoryScan.add(scanFullRepositoryDetails)
|
||||||
|
// }
|
||||||
|
|
@ -0,0 +1,148 @@
|
|||||||
|
import Queue, { Job } from "bull";
|
||||||
|
import { ProbotOctokit } from "probot"
|
||||||
|
import { Commit, Committer, Repository } from "@octokit/webhooks-types";
|
||||||
|
import TelemetryService from "../../services/TelemetryService";
|
||||||
|
import { sendMail } from "../../helpers";
|
||||||
|
import GitRisks from "../../ee/models/gitRisks";
|
||||||
|
import { MembershipOrg, User } from "../../models";
|
||||||
|
import { OWNER, ADMIN } from "../../variables";
|
||||||
|
import { convertKeysToLowercase, scanContentAndGetFindings } from "../../ee/services/GithubSecretScanning/helper";
|
||||||
|
import { getSecretScanningGitAppId, getSecretScanningPrivateKey } from "../../config";
|
||||||
|
import { SecretMatch } from "../../ee/services/GithubSecretScanning/types";
|
||||||
|
|
||||||
|
// Prefer the configured Redis instance (as the other background queues do via
// REDIS_URL), falling back to the docker-compose default so existing
// deployments keep working.
const githubPushEventSecretScan = new Queue('github-push-event-secret-scanning', process.env.REDIS_URL ?? 'redis://redis:6379');
|
||||||
|
|
||||||
|
// Payload for a GitHub push-event secret-scanning job.
type TScanPushEventQueueDetails = {
  organizationId: string,
  // Commits contained in the push (webhook payload shape).
  commits: Commit[]
  pusher: {
    name: string,
    // May be null — the webhook does not always include a pusher email.
    email: string | null
  },
  repository: {
    id: number,
    fullName: string,
  },
  // GitHub App installation id used to authenticate the Octokit client.
  installationId: number
}
|
||||||
|
|
||||||
|
githubPushEventSecretScan.process(async (job: Job, done: Queue.DoneCallback) => {
|
||||||
|
const { organizationId, commits, pusher, repository, installationId }: TScanPushEventQueueDetails = job.data
|
||||||
|
const [owner, repo] = repository.fullName.split("/");
|
||||||
|
const octokit = new ProbotOctokit({
|
||||||
|
auth: {
|
||||||
|
appId: await getSecretScanningGitAppId(),
|
||||||
|
privateKey: await getSecretScanningPrivateKey(),
|
||||||
|
installationId: installationId
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const allFindingsByFingerprint: { [key: string]: SecretMatch; } = {}
|
||||||
|
|
||||||
|
for (const commit of commits) {
|
||||||
|
for (const filepath of [...commit.added, ...commit.modified]) {
|
||||||
|
try {
|
||||||
|
const fileContentsResponse = await octokit.repos.getContent({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
path: filepath,
|
||||||
|
});
|
||||||
|
|
||||||
|
const data: any = fileContentsResponse.data;
|
||||||
|
const fileContent = Buffer.from(data.content, "base64").toString();
|
||||||
|
|
||||||
|
const findings = await scanContentAndGetFindings(`\n${fileContent}`) // extra line to count lines correctly
|
||||||
|
|
||||||
|
for (const finding of findings) {
|
||||||
|
const fingerPrintWithCommitId = `${commit.id}:${filepath}:${finding.RuleID}:${finding.StartLine}`
|
||||||
|
const fingerPrintWithoutCommitId = `${filepath}:${finding.RuleID}:${finding.StartLine}`
|
||||||
|
finding.Fingerprint = fingerPrintWithCommitId
|
||||||
|
finding.FingerPrintWithoutCommitId = fingerPrintWithoutCommitId
|
||||||
|
finding.Commit = commit.id
|
||||||
|
finding.File = filepath
|
||||||
|
finding.Author = commit.author.name
|
||||||
|
finding.Email = commit?.author?.email ? commit?.author?.email : ""
|
||||||
|
|
||||||
|
allFindingsByFingerprint[fingerPrintWithCommitId] = finding
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
done(new Error(`gitHubHistoricalScanning.process: unable to fetch content for [filepath=${filepath}] because [error=${error}]`), null)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// change to update
|
||||||
|
for (const key in allFindingsByFingerprint) {
|
||||||
|
await GitRisks.findOneAndUpdate({ fingerprint: allFindingsByFingerprint[key].Fingerprint },
|
||||||
|
{
|
||||||
|
...convertKeysToLowercase(allFindingsByFingerprint[key]),
|
||||||
|
installationId: installationId,
|
||||||
|
organization: organizationId,
|
||||||
|
repositoryFullName: repository.fullName,
|
||||||
|
repositoryId: repository.id
|
||||||
|
}, {
|
||||||
|
upsert: true
|
||||||
|
}).lean()
|
||||||
|
}
|
||||||
|
// get emails of admins
|
||||||
|
const adminsOfWork = await MembershipOrg.find({
|
||||||
|
organization: organizationId,
|
||||||
|
$or: [
|
||||||
|
{ role: OWNER },
|
||||||
|
{ role: ADMIN }
|
||||||
|
]
|
||||||
|
}).lean()
|
||||||
|
|
||||||
|
const userEmails = await User.find({
|
||||||
|
_id: {
|
||||||
|
$in: [adminsOfWork.map(orgMembership => orgMembership.user)]
|
||||||
|
}
|
||||||
|
}).select("email").lean()
|
||||||
|
|
||||||
|
const adminOrOwnerEmails = userEmails.map(userObject => userObject.email)
|
||||||
|
|
||||||
|
const usersToNotify = pusher?.email ? [pusher.email, ...adminOrOwnerEmails] : [...adminOrOwnerEmails]
|
||||||
|
if (Object.keys(allFindingsByFingerprint).length) {
|
||||||
|
await sendMail({
|
||||||
|
template: "secretLeakIncident.handlebars",
|
||||||
|
subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.fullName}`,
|
||||||
|
recipients: usersToNotify,
|
||||||
|
substitutions: {
|
||||||
|
numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
|
||||||
|
pusher_email: pusher.email,
|
||||||
|
pusher_name: pusher.name
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const postHogClient = await TelemetryService.getPostHogClient();
|
||||||
|
if (postHogClient) {
|
||||||
|
postHogClient.capture({
|
||||||
|
event: "cloud secret scan",
|
||||||
|
distinctId: pusher.email,
|
||||||
|
properties: {
|
||||||
|
numberOfCommitsScanned: commits.length,
|
||||||
|
numberOfRisksFound: Object.keys(allFindingsByFingerprint).length,
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
done(null, allFindingsByFingerprint)
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
export const scanGithubPushEventForSecretLeaks = (pushEventPayload: TScanPushEventQueueDetails) => {
|
||||||
|
githubPushEventSecretScan.add(pushEventPayload, {
|
||||||
|
attempts: 3,
|
||||||
|
backoff: {
|
||||||
|
type: "exponential",
|
||||||
|
delay: 5000
|
||||||
|
},
|
||||||
|
removeOnComplete: true,
|
||||||
|
removeOnFail: {
|
||||||
|
count: 20 // keep the most recent 20 jobs
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
Loading…
Reference in new issue