add bull queue

add-bull-queue
Maidul Islam 9 months ago
parent 49bcd8839f
commit dfe95ac773

@ -25,6 +25,9 @@ JWT_PROVIDER_AUTH_LIFETIME=
# Required
MONGO_URL=mongodb://root:example@mongo:27017/?authSource=admin
# Redis
REDIS_URL=redis://redis:6379
# Optional credentials for MongoDB container instance and Mongo-Express
MONGO_USERNAME=root
MONGO_PASSWORD=example
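
A quick way to sanity-check the new REDIS_URL before Bull starts consuming it is a minimal ping sketch like the one below (assuming ioredis, which bull already depends on):

import Redis from "ioredis";

// Connect with the same env var the queues use; a healthy container answers "PONG".
const redis = new Redis(process.env.REDIS_URL as string);
redis.ping().then((reply) => {
  console.log("redis ping:", reply);
  return redis.quit();
});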

@ -1,3 +0,0 @@
{
"workbench.editor.wrapTabs": true
}

File diff suppressed because it is too large

@ -84,6 +84,7 @@
"@posthog/plugin-scaffold": "^1.3.4",
"@types/bcrypt": "^5.0.0",
"@types/bcryptjs": "^2.4.2",
"@types/bull": "^4.10.0",
"@types/cookie-parser": "^1.4.3",
"@types/cors": "^2.8.12",
"@types/express": "^4.17.14",

@ -68,6 +68,8 @@ export const getSecretScanningWebhookSecret = async () => (await client.getSecre
export const getSecretScanningGitAppId = async () => (await client.getSecret("SECRET_SCANNING_GIT_APP_ID")).secretValue;
export const getSecretScanningPrivateKey = async () => (await client.getSecret("SECRET_SCANNING_PRIVATE_KEY")).secretValue;
export const getRedisUrl = async () => (await client.getSecret("REDIS_URL")).secretValue;
export const getLicenseKey = async () => {
const secretValue = (await client.getSecret("LICENSE_KEY")).secretValue;
return secretValue === "" ? undefined : secretValue;
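
Note that getRedisUrl() is async while the Bull queues below are constructed synchronously from process.env or a string literal; a small factory along these lines (illustrative, not part of this commit) would let queue modules use the config layer instead:

import Queue from "bull";
import { getRedisUrl } from "../config";

// Hypothetical helper: resolve the Redis URL once from config, then build the queue.
export const createQueue = async (name: string): Promise<Queue.Queue> =>
  new Queue(name, await getRedisUrl());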

@ -1,58 +1,29 @@
import { Probot } from "probot";
import { exec } from "child_process";
import { mkdir, readFile, rm, writeFile } from "fs";
import { tmpdir } from "os";
import { join } from "path"
import GitRisks from "../../models/gitRisks";
import GitAppOrganizationInstallation from "../../models/gitAppOrganizationInstallation";
import MembershipOrg from "../../../models/membershipOrg";
import { ADMIN, OWNER } from "../../../variables";
import User from "../../../models/user";
import { sendMail } from "../../../helpers";
import TelemetryService from "../../../services/TelemetryService";
type SecretMatch = {
Description: string;
StartLine: number;
EndLine: number;
StartColumn: number;
EndColumn: number;
Match: string;
Secret: string;
File: string;
SymlinkFile: string;
Commit: string;
Entropy: number;
Author: string;
Email: string;
Date: string;
Message: string;
Tags: string[];
RuleID: string;
Fingerprint: string;
FingerPrintWithoutCommitId: string
};
import { scanGithubPushEventForSecretLeaks } from "../../../queues/secret-scanning/githubScanPushEvent";
export default async (app: Probot) => {
app.on("installation.deleted", async (context) => {
const { payload } = context;
const { installation, repositories } = payload;
if (installation.repository_selection == "all") {
await GitRisks.deleteMany({ installationId: installation.id })
await GitAppOrganizationInstallation.deleteOne({ installationId: installation.id })
} else {
if (repositories) {
for (const repository of repositories) {
await GitRisks.deleteMany({ repositoryId: repository.id })
}
}
await GitAppOrganizationInstallation.deleteOne({ installationId: installation.id })
}
})
app.on("installation", async (context) => {
const { payload } = context;
const { installation, repositories } = payload;
// TODO: start full repo scans
})
app.on("push", async (context) => {
const { payload } = context;
const { commits, repository, installation, pusher } = payload;
const [owner, repo] = repository.full_name.split("/");
if (!commits || !repository || !installation || !pusher) {
return
@ -63,188 +34,12 @@ export default async (app: Probot) => {
return
}
const allFindingsByFingerprint: { [key: string]: SecretMatch; } = {}
for (const commit of commits) {
for (const filepath of [...commit.added, ...commit.modified]) {
try {
const fileContentsResponse = await context.octokit.repos.getContent({
owner,
repo,
path: filepath,
});
const data: any = fileContentsResponse.data;
const fileContent = Buffer.from(data.content, "base64").toString();
const findings = await scanContentAndGetFindings(`\n${fileContent}`) // extra line to count lines correctly
for (const finding of findings) {
const fingerPrintWithCommitId = `${commit.id}:${filepath}:${finding.RuleID}:${finding.StartLine}`
const fingerPrintWithoutCommitId = `${filepath}:${finding.RuleID}:${finding.StartLine}`
finding.Fingerprint = fingerPrintWithCommitId
finding.FingerPrintWithoutCommitId = fingerPrintWithoutCommitId
finding.Commit = commit.id
finding.File = filepath
finding.Author = commit.author.name
finding.Email = commit?.author?.email ? commit?.author?.email : ""
allFindingsByFingerprint[fingerPrintWithCommitId] = finding
}
} catch (error) {
console.error(`Error fetching content for ${filepath}`, error); // eslint-disable-line
}
}
}
// change to update
for (const key in allFindingsByFingerprint) {
const risk = await GitRisks.findOneAndUpdate({ fingerprint: allFindingsByFingerprint[key].Fingerprint },
{
...convertKeysToLowercase(allFindingsByFingerprint[key]),
installationId: installation.id,
organization: installationLinkToOrgExists.organizationId,
repositoryFullName: repository.full_name,
repositoryId: repository.id
}, {
upsert: true
}).lean()
}
// get emails of admins
const adminsOfWork = await MembershipOrg.find({
organization: installationLinkToOrgExists.organizationId,
$or: [
{ role: OWNER },
{ role: ADMIN }
]
}).lean()
const userEmails = await User.find({
_id: {
$in: [adminsOfWork.map(orgMembership => orgMembership.user)]
}
}).select("email").lean()
const adminOrOwnerEmails = userEmails.map(userObject => userObject.email)
const usersToNotify = pusher?.email ? [pusher.email, ...adminOrOwnerEmails] : [...adminOrOwnerEmails]
if (Object.keys(allFindingsByFingerprint).length) {
await sendMail({
template: "secretLeakIncident.handlebars",
subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.full_name}`,
recipients: usersToNotify,
substitutions: {
numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
pusher_email: pusher.email,
pusher_name: pusher.name
}
});
}
const postHogClient = await TelemetryService.getPostHogClient();
if (postHogClient) {
postHogClient.capture({
event: "cloud secret scan",
distinctId: pusher.email,
properties: {
numberOfCommitsScanned: commits.length,
numberOfRisksFound: Object.keys(allFindingsByFingerprint).length,
}
});
}
scanGithubPushEventForSecretLeaks({
commits: commits,
pusher: { name: pusher.name, email: pusher.email },
repository: { fullName: repository.full_name, id: repository.id },
organizationId: installationLinkToOrgExists.organizationId,
installationId: installation.id
})
});
};
async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
const tempFolder = await createTempFolder();
const filePath = join(tempFolder, "content.txt");
const findingsPath = join(tempFolder, "findings.json");
try {
await writeTextToFile(filePath, textContent);
await runInfisicalScan(filePath, findingsPath);
const findingsData = await readFindingsFile(findingsPath);
return JSON.parse(findingsData);
} finally {
await deleteTempFolder(tempFolder);
}
}
function createTempFolder(): Promise<string> {
return new Promise((resolve, reject) => {
const tempDir = tmpdir()
const tempFolderName = Math.random().toString(36).substring(2);
const tempFolderPath = join(tempDir, tempFolderName);
mkdir(tempFolderPath, (err: any) => {
if (err) {
reject(err);
} else {
resolve(tempFolderPath);
}
});
});
}
function writeTextToFile(filePath: string, content: string): Promise<void> {
return new Promise((resolve, reject) => {
writeFile(filePath, content, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
exec(command, (error) => {
if (error && error.code != 77) {
reject(error);
} else {
resolve();
}
});
});
}
function readFindingsFile(filePath: string): Promise<string> {
return new Promise((resolve, reject) => {
readFile(filePath, "utf8", (err, data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
});
});
}
function deleteTempFolder(folderPath: string): Promise<void> {
return new Promise((resolve, reject) => {
rm(folderPath, { recursive: true }, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
function convertKeysToLowercase<T>(obj: T): T {
const convertedObj = {} as T;
for (const key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) {
const lowercaseKey = key.charAt(0).toLowerCase() + key.slice(1);
convertedObj[lowercaseKey as keyof T] = obj[key];
}
}
return convertedObj;
}

@ -0,0 +1,125 @@
import { exec } from "child_process";
import { mkdir, readFile, rm, writeFile } from "fs";
import { tmpdir } from "os";
import { join } from "path"
import { SecretMatch } from "./types";
import { Octokit } from "@octokit/rest";
export async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
const tempFolder = await createTempFolder();
const filePath = join(tempFolder, "content.txt");
const findingsPath = join(tempFolder, "findings.json");
try {
await writeTextToFile(filePath, textContent);
await runInfisicalScan(filePath, findingsPath);
const findingsData = await readFindingsFile(findingsPath);
return JSON.parse(findingsData);
} finally {
await deleteTempFolder(tempFolder);
}
}
export function createTempFolder(): Promise<string> {
return new Promise((resolve, reject) => {
const tempDir = tmpdir()
const tempFolderName = Math.random().toString(36).substring(2);
const tempFolderPath = join(tempDir, tempFolderName);
mkdir(tempFolderPath, (err: any) => {
if (err) {
reject(err);
} else {
resolve(tempFolderPath);
}
});
});
}
export function writeTextToFile(filePath: string, content: string): Promise<void> {
return new Promise((resolve, reject) => {
writeFile(filePath, content, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
export function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
exec(command, (error) => {
if (error && error.code != 77) {
reject(error);
} else {
resolve();
}
});
});
}
export function readFindingsFile(filePath: string): Promise<string> {
return new Promise((resolve, reject) => {
readFile(filePath, "utf8", (err, data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
});
});
}
export function deleteTempFolder(folderPath: string): Promise<void> {
return new Promise((resolve, reject) => {
rm(folderPath, { recursive: true }, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
export function convertKeysToLowercase<T>(obj: T): T {
const convertedObj = {} as T;
for (const key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) {
const lowercaseKey = key.charAt(0).toLowerCase() + key.slice(1);
convertedObj[lowercaseKey as keyof T] = obj[key];
}
}
return convertedObj;
}
export async function getCommits(octokit: Octokit, owner: string, repo: string) {
let commits: { sha: string }[] = [];
let page = 1;
while (true) {
const response = await octokit.repos.listCommits({
owner,
repo,
per_page: 100,
page,
});
commits = commits.concat(response.data);
if (response.data.length == 0) break;
page++;
}
return commits;
}
export async function getFilesFromCommit(octokit: any, owner: string, repo: string, sha: string) {
const response = await octokit.repos.getCommit({
owner,
repo,
ref: sha,
});
// surface the commit's changed files to the caller
return response.data.files;
}
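
For reference, an illustrative use of the scanning helper (it assumes the infisical CLI is installed on the host, since runInfisicalScan shells out to it):

import { scanContentAndGetFindings } from "./helper";

async function demo() {
  // The leading newline mirrors the push-event handler's line-count workaround.
  const findings = await scanContentAndGetFindings("\nAWS_SECRET_ACCESS_KEY=...");
  for (const finding of findings) {
    console.log(`${finding.RuleID} at line ${finding.StartLine}: ${finding.Description}`);
  }
}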

@ -0,0 +1,21 @@
export type SecretMatch = {
Description: string;
StartLine: number;
EndLine: number;
StartColumn: number;
EndColumn: number;
Match: string;
Secret: string;
File: string;
SymlinkFile: string;
Commit: string;
Entropy: number;
Author: string;
Email: string;
Date: string;
Message: string;
Tags: string[];
RuleID: string;
Fingerprint: string;
FingerPrintWithoutCommitId: string
};

@ -32,7 +32,7 @@ export const handleEventHelper = async ({ event }: { event: Event }) => {
switch (event.name) {
case EVENT_PUSH_SECRETS:
if (bot) {
await IntegrationService.syncIntegrations({
IntegrationService.syncIntegrations({
workspaceId,
environment
});

@ -1,6 +1,6 @@
import { Types } from "mongoose";
import { Bot, Integration, IntegrationAuth } from "../models";
import { exchangeCode, exchangeRefresh, syncSecrets } from "../integrations";
import { Bot, IntegrationAuth } from "../models";
import { exchangeCode, exchangeRefresh } from "../integrations";
import { BotService } from "../services";
import {
ALGORITHM_AES_256_GCM,
@ -9,7 +9,7 @@ import {
INTEGRATION_VERCEL
} from "../variables";
import { UnauthorizedRequestError } from "../utils/errors";
import * as Sentry from "@sentry/node";
import { syncSecretsToActiveIntegrationsQueue } from "../queues/integrations/syncSecretsToThirdPartyServices"
interface Update {
workspace: string;
@ -102,69 +102,6 @@ export const handleOAuthExchangeHelper = async ({
return integrationAuth;
};
/**
* Sync/push environment variables in workspace with id [workspaceId] to
* all active integrations for that workspace
* @param {Object} obj
* @param {Object} obj.workspaceId - id of workspace
*/
export const syncIntegrationsHelper = async ({
workspaceId,
environment
}: {
workspaceId: Types.ObjectId;
environment?: string;
}) => {
try {
const integrations = await Integration.find({
workspace: workspaceId,
...(environment
? {
environment
}
: {}),
isActive: true,
app: { $ne: null }
});
// for each workspace integration, sync/push secrets
// to that integration
for await (const integration of integrations) {
// get workspace, environment (shared) secrets
const secrets = await BotService.getSecrets({
workspaceId: integration.workspace,
environment: integration.environment,
secretPath: integration.secretPath
});
const integrationAuth = await IntegrationAuth.findById(integration.integrationAuth);
if (!integrationAuth) throw new Error("Failed to find integration auth");
// get integration auth access token
const access = await getIntegrationAuthAccessHelper({
integrationAuthId: integration.integrationAuth
});
// sync secrets to integration
await syncSecrets({
integration,
integrationAuth,
secrets,
accessId: access.accessId === undefined ? null : access.accessId,
accessToken: access.accessToken
});
}
} catch (err) {
Sentry.captureException(err);
// eslint-disable-next-line
console.log(
`syncIntegrationsHelper: failed with [workspaceId=${workspaceId}] [environment=${environment}]`,
err
); // eslint-disable-line no-use-before-define
throw err;
}
};
/**
* Return decrypted refresh token using the bot's copy

@ -1062,6 +1062,7 @@ export const expandSecrets = async (
Object.keys(secrets).forEach((key) => {
if (secrets[key].value.match(INTERPOLATION_SYNTAX_REG)) {
console.log("KEY that matches ====>", key)
interpolatedSec[key] = secrets[key].value;
} else {
expandedSec[key] = secrets[key].value;

@ -2,7 +2,6 @@ import { exchangeCode } from "./exchange";
import { exchangeRefresh } from "./refresh";
import { getApps } from "./apps";
import { getTeams } from "./teams";
import { syncSecrets } from "./sync";
import { revokeAccess } from "./revoke";
export {
@ -10,6 +9,5 @@ export {
exchangeRefresh,
getApps,
getTeams,
syncSecrets,
revokeAccess,
}

@ -0,0 +1,72 @@
import Queue, { Job } from "bull";
import Integration from "../../models/integration";
import IntegrationAuth from "../../models/integrationAuth";
import { BotService } from "../../services";
import { getIntegrationAuthAccessHelper } from "../../helpers";
import { syncSecrets } from "../../integrations/sync"
type TSyncSecretsToThirdPartyServices = {
workspaceId: string
environment?: string
}
const syncSecretsToThirdPartyServices = new Queue('sync-secrets-to-third-party-services', process.env.REDIS_URL as string);
syncSecretsToThirdPartyServices.process(async (job: Job) => {
const { workspaceId, environment }: TSyncSecretsToThirdPartyServices = job.data
const integrations = await Integration.find({
workspace: workspaceId,
...(environment
? {
environment
}
: {}),
isActive: true,
app: { $ne: null }
});
// for each workspace integration, sync/push secrets
// to that integration
for await (const integration of integrations) {
// get workspace, environment (shared) secrets
const secrets = await BotService.getSecrets({
workspaceId: integration.workspace,
environment: integration.environment,
secretPath: integration.secretPath
});
const integrationAuth = await IntegrationAuth.findById(integration.integrationAuth);
if (!integrationAuth) throw new Error("Failed to find integration auth");
// get integration auth access token
const access = await getIntegrationAuthAccessHelper({
integrationAuthId: integration.integrationAuth
});
// sync secrets to integration
await syncSecrets({
integration,
integrationAuth,
secrets,
accessId: access.accessId === undefined ? null : access.accessId,
accessToken: access.accessToken
});
}
})
export const syncSecretsToActiveIntegrationsQueue = (jobDetails: TSyncSecretsToThirdPartyServices) => {
syncSecretsToThirdPartyServices.add(jobDetails, {
attempts: 5,
backoff: {
type: "exponential",
delay: 1000
},
removeOnComplete: true,
removeOnFail: {
count: 20 // keep the most recent 20 jobs
}
})
}
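
Call sites enqueue and move on; for example (the workspace id here is illustrative):

import { syncSecretsToActiveIntegrationsQueue } from "./queues/integrations/syncSecretsToThirdPartyServices";

// Fire-and-forget: Bull retries up to 5 times with exponential backoff on failure.
syncSecretsToActiveIntegrationsQueue({
  workspaceId: "64d2f1c0a1b2c3d4e5f60789",
  environment: "dev"
});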

@ -0,0 +1,201 @@
// import Queue, { Job } from "bull";
// import { ProbotOctokit } from "probot"
// import { Commit, Committer, Repository } from "@octokit/webhooks-types";
// import TelemetryService from "../../services/TelemetryService";
// import { sendMail } from "../../helpers";
// import GitRisks from "../../ee/models/gitRisks";
// import { MembershipOrg, User } from "../../models";
// import { OWNER, ADMIN } from "../../variables";
// import { convertKeysToLowercase, getFilesFromCommit, scanContentAndGetFindings } from "../../ee/services/GithubSecretScanning/helper";
// import { getSecretScanningGitAppId, getSecretScanningPrivateKey } from "../../config";
// const githubFullRepositoryScan = new Queue('github-historical-secret-scanning', 'redis://redis:6379');
// type TScanFullRepositoryDetails = {
// organizationId: string,
// repositories: {
// id: number;
// node_id: string;
// name: string;
// full_name: string;
// private: boolean;
// }[] | undefined
// installationId: number
// }
// type SecretMatch = {
// Description: string;
// StartLine: number;
// EndLine: number;
// StartColumn: number;
// EndColumn: number;
// Match: string;
// Secret: string;
// File: string;
// SymlinkFile: string;
// Commit: string;
// Entropy: number;
// Author: string;
// Email: string;
// Date: string;
// Message: string;
// Tags: string[];
// RuleID: string;
// Fingerprint: string;
// FingerPrintWithoutCommitId: string
// };
// type TCommitList = {
// url: string;
// sha: string;
// node_id: string;
// html_url: string;
// comments_url: string;
// commit: {
// url: string;
// author: {
// name?: string | undefined;
// email?: string | undefined;
// date?: string | undefined;
// } | null;
// verification?: {
// } | undefined;
// };
// files?: {}[] | undefined;
// }[]
// githubFullRepositoryScan.process(async (job: Job, done: Queue.DoneCallback) => {
// const { organizationId, repositories, installationId }: TScanFullRepositoryDetails = job.data
// const repositoryFullNamesList = repositories ? repositories.map(repoDetails => repoDetails.full_name) : []
// const octokit = new ProbotOctokit({
// auth: {
// appId: await getSecretScanningGitAppId(),
// privateKey: await getSecretScanningPrivateKey(),
// installationId: installationId
// },
// });
// for (const repositoryFullName of repositoryFullNamesList) {
// const [owner, repo] = repositoryFullName.split("/");
// let page = 1;
// while (true) {
// // octokit.repos.getco
// const { data } = await octokit.repos.listCommits({
// owner,
// repo,
// per_page: 100,
// page
// });
// await getFilesFromCommit(octokit, owner, repo, "646b386605177ed0a2cc0a596eeee0cf57666342")
// page++;
// }
// }
// done()
// // const allFindingsByFingerprint: { [key: string]: SecretMatch; } = {}
// // for (const commit of commits) {
// // for (const filepath of [...commit.added, ...commit.modified]) {
// // try {
// // const fileContentsResponse = await octokit.repos.getContent({
// // owner,
// // repo,
// // path: filepath,
// // });
// // const data: any = fileContentsResponse.data;
// // const fileContent = Buffer.from(data.content, "base64").toString();
// // const findings = await scanContentAndGetFindings(`\n${fileContent}`) // extra line to count lines correctly
// // for (const finding of findings) {
// // const fingerPrintWithCommitId = `${commit.id}:${filepath}:${finding.RuleID}:${finding.StartLine}`
// // const fingerPrintWithoutCommitId = `${filepath}:${finding.RuleID}:${finding.StartLine}`
// // finding.Fingerprint = fingerPrintWithCommitId
// // finding.FingerPrintWithoutCommitId = fingerPrintWithoutCommitId
// // finding.Commit = commit.id
// // finding.File = filepath
// // finding.Author = commit.author.name
// // finding.Email = commit?.author?.email ? commit?.author?.email : ""
// // allFindingsByFingerprint[fingerPrintWithCommitId] = finding
// // }
// // } catch (error) {
// // done(new Error(`gitHubHistoricalScanning.process: unable to fetch content for [filepath=${filepath}] because [error=${error}]`), null)
// // }
// // }
// // }
// // // change to update
// // for (const key in allFindingsByFingerprint) {
// // await GitRisks.findOneAndUpdate({ fingerprint: allFindingsByFingerprint[key].Fingerprint },
// // {
// // ...convertKeysToLowercase(allFindingsByFingerprint[key]),
// // installationId: installationId,
// // organization: organizationId,
// // repositoryFullName: repository.fullName,
// // repositoryId: repository.id
// // }, {
// // upsert: true
// // }).lean()
// // }
// // // get emails of admins
// // const adminsOfWork = await MembershipOrg.find({
// // organization: organizationId,
// // $or: [
// // { role: OWNER },
// // { role: ADMIN }
// // ]
// // }).lean()
// // const userEmails = await User.find({
// // _id: {
// // $in: [adminsOfWork.map(orgMembership => orgMembership.user)]
// // }
// // }).select("email").lean()
// // const adminOrOwnerEmails = userEmails.map(userObject => userObject.email)
// // const usersToNotify = pusher?.email ? [pusher.email, ...adminOrOwnerEmails] : [...adminOrOwnerEmails]
// // if (Object.keys(allFindingsByFingerprint).length) {
// // await sendMail({
// // template: "secretLeakIncident.handlebars",
// // subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.fullName}`,
// // recipients: usersToNotify,
// // substitutions: {
// // numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
// // pusher_email: pusher.email,
// // pusher_name: pusher.name
// // }
// // });
// // }
// // const postHogClient = await TelemetryService.getPostHogClient();
// // if (postHogClient) {
// // postHogClient.capture({
// // event: "cloud secret scan",
// // distinctId: pusher.email,
// // properties: {
// // numberOfCommitsScanned: commits.length,
// // numberOfRisksFound: Object.keys(allFindingsByFingerprint).length,
// // }
// // });
// // }
// // done(null, allFindingsByFingerprint)
// })
// export const scanGithubFullRepositoryForSecretLeaks = (scanFullRepositoryDetails: TScanFullRepositoryDetails) => {
// console.log("full repo scan started")
// githubFullRepositoryScan.add(scanFullRepositoryDetails)
// }

@ -0,0 +1,148 @@
import Queue, { Job } from "bull";
import { ProbotOctokit } from "probot"
import { Commit, Committer, Repository } from "@octokit/webhooks-types";
import TelemetryService from "../../services/TelemetryService";
import { sendMail } from "../../helpers";
import GitRisks from "../../ee/models/gitRisks";
import { MembershipOrg, User } from "../../models";
import { OWNER, ADMIN } from "../../variables";
import { convertKeysToLowercase, scanContentAndGetFindings } from "../../ee/services/GithubSecretScanning/helper";
import { getSecretScanningGitAppId, getSecretScanningPrivateKey } from "../../config";
import { SecretMatch } from "../../ee/services/GithubSecretScanning/types";
const githubPushEventSecretScan = new Queue('github-push-event-secret-scanning', process.env.REDIS_URL as string);
type TScanPushEventQueueDetails = {
organizationId: string,
commits: Commit[]
pusher: {
name: string,
email: string | null
},
repository: {
id: number,
fullName: string,
},
installationId: number
}
githubPushEventSecretScan.process(async (job: Job, done: Queue.DoneCallback) => {
const { organizationId, commits, pusher, repository, installationId }: TScanPushEventQueueDetails = job.data
const [owner, repo] = repository.fullName.split("/");
const octokit = new ProbotOctokit({
auth: {
appId: await getSecretScanningGitAppId(),
privateKey: await getSecretScanningPrivateKey(),
installationId: installationId
},
});
const allFindingsByFingerprint: { [key: string]: SecretMatch; } = {}
for (const commit of commits) {
for (const filepath of [...commit.added, ...commit.modified]) {
try {
const fileContentsResponse = await octokit.repos.getContent({
owner,
repo,
path: filepath,
});
const data: any = fileContentsResponse.data;
const fileContent = Buffer.from(data.content, "base64").toString();
const findings = await scanContentAndGetFindings(`\n${fileContent}`) // extra line to count lines correctly
for (const finding of findings) {
const fingerPrintWithCommitId = `${commit.id}:${filepath}:${finding.RuleID}:${finding.StartLine}`
const fingerPrintWithoutCommitId = `${filepath}:${finding.RuleID}:${finding.StartLine}`
finding.Fingerprint = fingerPrintWithCommitId
finding.FingerPrintWithoutCommitId = fingerPrintWithoutCommitId
finding.Commit = commit.id
finding.File = filepath
finding.Author = commit.author.name
finding.Email = commit?.author?.email ? commit?.author?.email : ""
allFindingsByFingerprint[fingerPrintWithCommitId] = finding
}
} catch (error) {
done(new Error(`githubPushEventSecretScan.process: unable to fetch content for [filepath=${filepath}] because [error=${error}]`), null)
return
}
}
}
// upsert each finding so existing risks are updated rather than duplicated
for (const key in allFindingsByFingerprint) {
await GitRisks.findOneAndUpdate({ fingerprint: allFindingsByFingerprint[key].Fingerprint },
{
...convertKeysToLowercase(allFindingsByFingerprint[key]),
installationId: installationId,
organization: organizationId,
repositoryFullName: repository.fullName,
repositoryId: repository.id
}, {
upsert: true
}).lean()
}
// get emails of admins
const adminsOfWork = await MembershipOrg.find({
organization: organizationId,
$or: [
{ role: OWNER },
{ role: ADMIN }
]
}).lean()
const userEmails = await User.find({
_id: {
$in: adminsOfWork.map(orgMembership => orgMembership.user)
}
}).select("email").lean()
const adminOrOwnerEmails = userEmails.map(userObject => userObject.email)
const usersToNotify = pusher?.email ? [pusher.email, ...adminOrOwnerEmails] : [...adminOrOwnerEmails]
if (Object.keys(allFindingsByFingerprint).length) {
await sendMail({
template: "secretLeakIncident.handlebars",
subjectLine: `Incident alert: leaked secrets found in Github repository ${repository.fullName}`,
recipients: usersToNotify,
substitutions: {
numberOfSecrets: Object.keys(allFindingsByFingerprint).length,
pusher_email: pusher.email,
pusher_name: pusher.name
}
});
}
const postHogClient = await TelemetryService.getPostHogClient();
if (postHogClient) {
postHogClient.capture({
event: "cloud secret scan",
distinctId: pusher.email,
properties: {
numberOfCommitsScanned: commits.length,
numberOfRisksFound: Object.keys(allFindingsByFingerprint).length,
}
});
}
done(null, allFindingsByFingerprint)
})
export const scanGithubPushEventForSecretLeaks = (pushEventPayload: TScanPushEventQueueDetails) => {
githubPushEventSecretScan.add(pushEventPayload, {
attempts: 3,
backoff: {
type: "exponential",
delay: 5000
},
removeOnComplete: true,
removeOnFail: {
count: 20 // keep the most recent 20 jobs
}
})
}
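
Because scans now run out-of-band, failures surface through Bull's queue events rather than the webhook response; a sketch for observing them from inside this module:

// Standard Bull events; useful during development alongside redis-commander.
githubPushEventSecretScan.on("failed", (job, err) => {
  console.error(`push-event scan job ${job.id} failed: ${err.message}`);
});
githubPushEventSecretScan.on("completed", (job) => {
  console.log(`push-event scan job ${job.id} completed`);
});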

@ -1,4 +1,4 @@
import express from "express";
import express, { Request, Response } from "express";
const router = express.Router();
import { requireAuth, requireWorkspaceAuth, validateRequest } from "../../middleware";
import { body, param, query } from "express-validator";

@ -1,18 +1,18 @@
import { Types } from "mongoose";
import {
getIntegrationAuthAccessHelper,
getIntegrationAuthRefreshHelper,
handleOAuthExchangeHelper,
setIntegrationAuthAccessHelper,
setIntegrationAuthRefreshHelper,
syncIntegrationsHelper,
} from "../helpers/integration";
import { syncSecretsToActiveIntegrationsQueue } from "../queues/integrations/syncSecretsToThirdPartyServices";
/**
* Class to handle integrations
*/
class IntegrationService {
/**
* Perform OAuth2 code-token exchange for workspace with id [workspaceId] and integration
* named [integration]
@ -26,12 +26,12 @@ class IntegrationService {
* @param {String} obj1.code - code
* @returns {IntegrationAuth} integrationAuth - integration authorization after OAuth2 code-token exchange
*/
static async handleOAuthExchange({
workspaceId,
integration,
code,
environment,
}: {
workspaceId: string;
integration: string;
code: string;
@ -44,25 +44,23 @@ class IntegrationService {
environment,
});
}
/**
* Sync/push environment variables in workspace with id [workspaceId] to
* all associated integrations
* @param {Object} obj
* @param {Object} obj.workspaceId - id of workspace
*/
static async syncIntegrations({
static syncIntegrations({
workspaceId,
environment,
}: {
workspaceId: Types.ObjectId;
environment?: string;
}) {
return await syncIntegrationsHelper({
workspaceId,
});
syncSecretsToActiveIntegrationsQueue({ workspaceId: workspaceId.toString(), environment: environment })
}
/**
* Return decrypted refresh token for integration auth
* with id [integrationAuthId]
@ -70,12 +68,12 @@ class IntegrationService {
* @param {String} obj.integrationAuthId - id of integration auth
* @param {String} refreshToken - decrypted refresh token
*/
static async getIntegrationAuthRefresh({ integrationAuthId }: { integrationAuthId: Types.ObjectId}) {
static async getIntegrationAuthRefresh({ integrationAuthId }: { integrationAuthId: Types.ObjectId }) {
return await getIntegrationAuthRefreshHelper({
integrationAuthId,
});
}
/**
* Return decrypted access token for integration auth
* with id [integrationAuthId]
@ -98,11 +96,11 @@ class IntegrationService {
* @param {String} obj.refreshToken - refresh token
* @returns {IntegrationAuth} integrationAuth - updated integration auth
*/
static async setIntegrationAuthRefresh({
integrationAuthId,
refreshToken,
}: {
integrationAuthId: string;
refreshToken: string;
}) {
return await setIntegrationAuthRefreshHelper({
@ -122,12 +120,12 @@ class IntegrationService {
* @param {Date} obj.accessExpiresAt - expiration date of access token
* @returns {IntegrationAuth} - updated integration auth
*/
static async setIntegrationAuthAccess({
integrationAuthId,
accessId,
accessToken,
accessExpiresAt,
}: {
integrationAuthId: string;
accessId: string | null;
accessToken: string;
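
With the queue in place, syncIntegrations is fire-and-forget at the call site; an illustrative invocation (import path assumed):

import { Types } from "mongoose";
import { IntegrationService } from "../services";

// Enqueues the sync and returns immediately; retries are handled by the queue.
IntegrationService.syncIntegrations({
  workspaceId: new Types.ObjectId("64d2f1c0a1b2c3d4e5f60789"),
  environment: "dev"
});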

@ -21,6 +21,7 @@ services:
depends_on:
- mongo
- smtp-server
- redis
build:
context: ./backend
dockerfile: Dockerfile
@ -99,9 +100,36 @@ services:
networks:
- infisical-dev
redis:
image: redis
container_name: infisical-dev-redis
environment:
- ALLOW_EMPTY_PASSWORD=yes
ports:
- 6379:6379
volumes:
- redis_data:/data
networks:
- infisical-dev
redis-commander:
container_name: infisical-dev-redis-commander
image: rediscommander/redis-commander
restart: always
depends_on:
- redis
environment:
- REDIS_HOSTS=local:redis:6379
ports:
- "8085:8081"
networks:
- infisical-dev
volumes:
mongo-data:
driver: local
redis_data:
driver: local
networks:
infisical-dev:

@ -41,19 +41,17 @@ services:
networks:
- infisical
# secret-scanning-git-app:
# container_name: infisical-secret-scanning-git-app
# restart: unless-stopped
# depends_on:
# - backend
# - frontend
# - mongo
# ports:
# - "3000:3001"
# image: infisical/staging_deployment_secret-scanning-git-app
# env_file: .env
# networks:
# - infisical
redis:
image: redis
container_name: infisical-redis
environment:
- ALLOW_EMPTY_PASSWORD=yes
ports:
- 6379:6379
networks:
- infisical
volumes:
- redis_data:/data
mongo:
container_name: infisical-mongo
@ -71,6 +69,8 @@ services:
volumes:
mongo-data:
driver: local
redis_data:
driver: local
networks:
infisical:

@ -41,7 +41,7 @@ export default function SecretScanning() {
const generateNewIntegrationSession = async () => {
const session = await createNewIntegrationSession(String(localStorage.getItem("orgData.id")))
router.push(`https://github.com/apps/infisical-radar/installations/new?state=${session.sessionId}`)
router.push(`https://github.com/apps/infisical-radar-dev/installations/new?state=${session.sessionId}`)
}
return (

@ -5,8 +5,11 @@ dependencies:
- name: mailhog
repository: https://codecentric.github.io/helm-charts
version: 5.2.3
- name: redis
repository: https://charts.bitnami.com/bitnami
version: 17.15.0
- name: ingress-nginx
repository: https://kubernetes.github.io/ingress-nginx
version: 4.0.13
digest: sha256:d1a679e6c30e37da96b7a4b6115e285f61e6ce0dd921ffbe2cf557418c229f33
generated: "2023-04-08T15:59:12.950942-07:00"
digest: sha256:1762132c45000bb6d410c6da2291ac5c65f91331550a473b370374ba042d0744
generated: "2023-08-10T15:03:12.219788-04:00"

@ -7,7 +7,7 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.2.1
version: 0.3.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
@ -24,6 +24,10 @@ dependencies:
version: "~5.2.3"
repository: "https://codecentric.github.io/helm-charts"
condition: mailhog.enabled
- name: redis
version: 17.15.0
repository: https://charts.bitnami.com/bitnami
condition: redis.enabled
- name: ingress-nginx
version: 4.0.13
repository: https://kubernetes.github.io/ingress-nginx

@ -127,4 +127,32 @@ Create the mongodb connection string.
{{- $connectionString = .Values.mongodbConnection.externalMongoDBConnectionString -}}
{{- end -}}
{{- printf "%s" $connectionString -}}
{{- end -}}
{{/*
Create a fully qualified redis name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "infisical.redis.fullname" -}}
{{- if .Values.redis.fullnameOverride -}}
{{- .Values.redis.fullnameOverride | trunc 63 | trimSuffix "-" -}}
{{- else -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- if contains $name .Release.Name -}}
{{- printf "%s-%s" .Release.Name .Values.redis.name | trunc 63 | trimSuffix "-" -}}
{{- else -}}
{{- printf "%s-%s-%s" .Release.Name $name .Values.redis.name | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{- end -}}
{{- end -}}
{{/*
Create the redis connection string.
*/}}
{{- define "infisical.redis.connectionString" -}}
{{- $host := include "infisical.redis.fullname" . -}}
{{- $pass := .Values.redis.auth.password | default "root" -}}
{{- $connectionString := printf "redis://redis:%s@%s:6379" $pass $host -}}
{{- printf "%s" $connectionString -}}
{{- end -}}

@ -83,6 +83,7 @@ stringData:
"JWT_SERVICE_SECRET" (randAlphaNum 32 | lower)
"JWT_MFA_SECRET" (randAlphaNum 32 | lower)
"JWT_PROVIDER_AUTH_SECRET" (randAlphaNum 32 | lower)
"REDIS_URL" (include "infisical.redis.connectionString" .)
"MONGO_URL" (include "infisical.mongodb.connectionString" .) }}
{{- $secretObj := (lookup "v1" "Secret" .Release.Namespace (include "infisical.backend.fullname" .)) | default dict }}
{{- $secretData := (get $secretObj "data") | default dict }}

@ -169,6 +169,9 @@ backendEnvironmentVariables:
##
MONGO_URL: ""
## @param backendEnvironmentVariables.REDIS_URL Redis connection string used by the Bull queues
REDIS_URL: ""
## @section MongoDB(&reg;) parameters
## Documentation : https://github.com/bitnami/charts/blob/main/bitnami/mongodb/values.yaml
##
@ -419,3 +422,12 @@ mailhog:
paths:
- path: "/"
pathType: Prefix
redis:
name: "redis"
fullnameOverride: "redis"
usePassword: true
enabled: true
cluster:
enabled: false
auth:
password: "mysecretpassword"
