feat: add collection vars, folder vars, request vars support to cli
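Collection-level vars (collection.bru), folder-level vars (folder.bru files along the request's path), and request-level vars are now merged and interpolated by the CLI runner. A minimal sketch of the resulting merge order; the variable names here are illustrative, and the actual spread lives in the interpolateVars changes below:

// Later spreads win: runtime > request > folder > environment > collection.
const combinedVars = {
  ...collectionVariables, // enabled vars declared in collection.bru
  ...envVariables,        // the selected environment
  ...folderVariables,     // vars from folder.bru files on the request's path
  ...requestVariables,    // vars declared in the request's own .bru file
  ...runtimeVariables     // set at runtime, e.g. from scripts
};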

lohxt1 2024-09-24 19:09:47 +05:30
parent 858afdbf03
commit 9c2c86baf6
7 changed files with 370 additions and 27 deletions

package-lock.json generated
View File

@ -18621,7 +18621,7 @@
},
"packages/bruno-electron": {
"name": "bruno",
"version": "v1.28.0",
"version": "v1.30.1",
"dependencies": {
"@aws-sdk/credential-providers": "3.525.0",
"@usebruno/common": "0.1.0",
@ -18862,4 +18862,4 @@
}
}
}
}
}

View File

@ -7,7 +7,7 @@ const { runSingleRequest } = require('../runner/run-single-request');
const { bruToEnvJson, getEnvVars } = require('../utils/bru');
const makeJUnitOutput = require('../reporters/junit');
const makeHtmlOutput = require('../reporters/html');
const { rpad } = require('../utils/common');
const { rpad, uuid } = require('../utils/common');
const { bruToJson, getOptions, collectionBruToJson } = require('../utils/bru');
const { dotenvToJson } = require('@usebruno/lang');
const constants = require('../constants');
@ -93,8 +93,87 @@ const printRunSummary = (results) => {
};
};
const getCollection = (dir, testsOnly) => {
const environmentsPath = `${dir}/environments`;
const getFilesInOrder = (dir) => {
let collection = {
pathname: dir
};
const traverse = (currentPath) => {
const filesInCurrentDir = fs.readdirSync(currentPath);
if (currentPath.includes('node_modules')) {
return;
}
const currentDirBruJsons = [];
for (const file of filesInCurrentDir) {
const filePath = path.join(currentPath, file);
const stats = fs.lstatSync(filePath);
if (
stats.isDirectory() &&
filePath !== environmentsPath &&
!filePath.startsWith('.git') &&
!filePath.startsWith('node_modules')
) {
let folderItem = { uid: uuid(), name: file, pathname: filePath, type: 'folder', items: traverse(filePath) };
const folderBruFilePath = path.join(filePath, 'folder.bru');
const folderBruFileExists = fs.existsSync(folderBruFilePath);
if (folderBruFileExists) {
const folderBruContent = fs.readFileSync(folderBruFilePath, 'utf8');
let folderBruJson = collectionBruToJson(folderBruContent);
folderItem.root = folderBruJson;
}
currentDirBruJsons.push(folderItem);
}
}
for (const file of filesInCurrentDir) {
if (['collection.bru', 'folder.bru'].includes(file)) {
continue;
}
const filePath = path.join(currentPath, file);
const stats = fs.lstatSync(filePath);
if (!stats.isDirectory() && path.extname(filePath) === '.bru') {
const bruContent = fs.readFileSync(filePath, 'utf8');
const bruJson = bruToJson(bruContent);
const requestHasTests = bruJson.request?.tests;
const requestHasActiveAsserts = bruJson.request?.assertions.some((x) => x.enabled) || false;
if (testsOnly) {
if (requestHasTests || requestHasActiveAsserts) {
currentDirBruJsons.push({
bruFilepath: filePath,
bruJson
});
}
} else {
currentDirBruJsons.push({
uid: uuid(),
name: file,
pathname: filePath,
...bruJson
});
}
}
}
return currentDirBruJsons;
};
collection.items = traverse(dir);
return collection;
};
return getFilesInOrder(dir);
};
const getBruFilesRecursively = (dir, testsOnly) => {
const environmentsPath = 'environments';
const collection = {};
const getFilesInOrder = (dir) => {
let bruJsons = [];
@ -131,6 +210,7 @@ const getBruFilesRecursively = (dir, testsOnly) => {
if (!stats.isDirectory() && path.extname(filePath) === '.bru') {
const bruContent = fs.readFileSync(filePath, 'utf8');
// console.log('filePath 2', filePath);
const bruJson = bruToJson(bruContent);
const requestHasTests = bruJson.request?.tests;
const requestHasActiveAsserts = bruJson.request?.assertions.some((x) => x.enabled) || false;
@ -329,6 +409,7 @@ const handler = async function (argv) {
const brunoConfigFile = fs.readFileSync(brunoJsonPath, 'utf8');
const brunoConfig = JSON.parse(brunoConfigFile);
const collectionRoot = getCollectionRoot(collectionPath);
const collection = getCollection(collectionPath);
if (filename && filename.length) {
const pathExists = await exists(filename);
@ -516,7 +597,8 @@ const handler = async function (argv) {
processEnvVars,
brunoConfig,
collectionRoot,
runtime
runtime,
collection
);
results.push({

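For reference, a rough sketch of the collection object that getCollection() above builds; the paths, names, and uids below are illustrative, not taken from the diff:

// Illustrative shape only; uids come from the new uuid() helper added to utils/common below.
const collection = {
  pathname: '/path/to/collection',
  items: [
    {
      uid: 'aQ3kXw9TJACKVERYbfghj', // 21-char nanoid
      name: 'users',
      pathname: '/path/to/collection/users',
      type: 'folder',
      root: { /* parsed folder.bru, when one exists */ },
      items: [
        {
          uid: 'bX7mPZ5WOLFGQZbfghjkl',
          name: 'get-user.bru',
          pathname: '/path/to/collection/users/get-user.bru'
          // ...plus the parsed request from bruToJson(content)
        }
      ]
    }
  ]
};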
View File

@ -12,14 +12,17 @@ const getContentType = (headers = {}) => {
return contentType;
};
const interpolateVars = (request, envVars = {}, runtimeVariables = {}, processEnvVars = {}) => {
const interpolateVars = (request, envVariables = {}, runtimeVariables = {}, processEnvVars = {}) => {
const collectionVariables = request?.collectionVariables || {};
const folderVariables = request?.folderVariables || {};
const requestVariables = request?.requestVariables || {};
// we clone envVars because we don't want to modify the original object
envVars = cloneDeep(envVars);
envVariables = cloneDeep(envVariables);
// envVars can in turn have values like {{process.env.VAR_NAME}}
// so we need to interpolate envVars first with processEnvVars
forOwn(envVars, (value, key) => {
envVars[key] = interpolate(value, {
forOwn(envVariables, (value, key) => {
envVariables[key] = interpolate(value, {
process: {
env: {
...processEnvVars
@ -35,7 +38,10 @@ const interpolateVars = (request, envVars = {}, runtimeVariables = {}, processEn
// runtimeVariables take precedence over envVars
const combinedVars = {
...envVars,
...collectionVariables,
...envVariables,
...folderVariables,
...requestVariables,
...runtimeVariables,
process: {
env: {

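A hedged usage sketch with hypothetical values: by the time interpolateVars runs, prepareRequest (changed further below) has already attached the three new maps to the request, so interpolation only needs the spread order shown above:

// Hypothetical values; the three maps are attached by prepareRequest.
request.collectionVariables = { host: 'https://collection.example' };
request.folderVariables = { host: 'https://folder.example' };
request.requestVariables = { userId: '42' };
interpolateVars(request, envVariables, runtimeVariables, processEnvVars);
// {{host}} resolves to the folder-level value, because folder vars are spread
// after collection and environment vars; {{userId}} comes from the request-level map.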
View File

@ -1,23 +1,255 @@
const { get, each, filter } = require('lodash');
const { get, each, filter, find, compact } = require('lodash');
const os = require('os');
const fs = require('fs');
var JSONbig = require('json-bigint');
const decomment = require('decomment');
const prepareRequest = (request, collectionRoot) => {
const headers = {};
let contentTypeDefined = false;
const mergeHeaders = (collection, request, requestTreePath) => {
let headers = new Map();
// collection headers
each(get(collectionRoot, 'request.headers', []), (h) => {
if (h.enabled) {
headers[h.name] = h.value;
if (h.name.toLowerCase() === 'content-type') {
contentTypeDefined = true;
}
let collectionHeaders = get(collection, 'root.request.headers', []);
collectionHeaders.forEach((header) => {
if (header.enabled) {
headers.set(header.name, header.value);
}
});
each(request.headers, (h) => {
for (let i of requestTreePath) {
if (i.type === 'folder') {
let _headers = get(i, 'root.request.headers', []);
_headers.forEach((header) => {
if (header.enabled) {
headers.set(header.name, header.value);
}
});
} else {
const _headers = i?.draft ? get(i, 'draft.request.headers', []) : get(i, 'request.headers', []);
_headers.forEach((header) => {
if (header.enabled) {
headers.set(header.name, header.value);
}
});
}
}
request.headers = Array.from(headers, ([name, value]) => ({ name, value, enabled: true }));
};
const mergeVars = (collection, request, requestTreePath) => {
let reqVars = new Map();
let collectionRequestVars = get(collection, 'root.request.vars.req', []);
let collectionVariables = {};
collectionRequestVars.forEach((_var) => {
if (_var.enabled) {
reqVars.set(_var.name, _var.value);
collectionVariables[_var.name] = _var.value;
}
});
let folderVariables = {};
let requestVariables = {};
for (let i of requestTreePath) {
if (i.type === 'folder') {
let vars = get(i, 'root.request.vars.req', []);
vars.forEach((_var) => {
if (_var.enabled) {
reqVars.set(_var.name, _var.value);
folderVariables[_var.name] = _var.value;
}
});
} else {
const vars = i?.draft ? get(i, 'draft.request.vars.req', []) : get(i, 'request.vars.req', []);
vars.forEach((_var) => {
if (_var.enabled) {
reqVars.set(_var.name, _var.value);
requestVariables[_var.name] = _var.value;
}
});
}
}
request.collectionVariables = collectionVariables;
request.folderVariables = folderVariables;
request.requestVariables = requestVariables;
if (request?.vars) {
request.vars.req = Array.from(reqVars, ([name, value]) => ({
name,
value,
enabled: true,
type: 'request'
}));
}
let resVars = new Map();
let collectionResponseVars = get(collection, 'root.request.vars.res', []);
collectionResponseVars.forEach((_var) => {
if (_var.enabled) {
resVars.set(_var.name, _var.value);
}
});
for (let i of requestTreePath) {
if (i.type === 'folder') {
let vars = get(i, 'root.request.vars.res', []);
vars.forEach((_var) => {
if (_var.enabled) {
resVars.set(_var.name, _var.value);
}
});
} else {
const vars = i?.draft ? get(i, 'draft.request.vars.res', []) : get(i, 'request.vars.res', []);
vars.forEach((_var) => {
if (_var.enabled) {
resVars.set(_var.name, _var.value);
}
});
}
}
if (request?.vars) {
request.vars.res = Array.from(resVars, ([name, value]) => ({
name,
value,
enabled: true,
type: 'response'
}));
}
};
const mergeFolderLevelScripts = (request, requestTreePath, scriptFlow) => {
let folderCombinedPreReqScript = [];
let folderCombinedPostResScript = [];
let folderCombinedTests = [];
for (let i of requestTreePath) {
if (i.type === 'folder') {
let preReqScript = get(i, 'root.request.script.req', '');
if (preReqScript && preReqScript.trim() !== '') {
folderCombinedPreReqScript.push(preReqScript);
}
let postResScript = get(i, 'root.request.script.res', '');
if (postResScript && postResScript.trim() !== '') {
folderCombinedPostResScript.push(postResScript);
}
let tests = get(i, 'root.request.tests', '');
if (tests && tests?.trim?.() !== '') {
folderCombinedTests.push(tests);
}
}
}
if (folderCombinedPreReqScript.length) {
request.script.req = compact([...folderCombinedPreReqScript, request?.script?.req || '']).join(os.EOL);
}
if (folderCombinedPostResScript.length) {
if (scriptFlow === 'sequential') {
request.script.res = compact([...folderCombinedPostResScript, request?.script?.res || '']).join(os.EOL);
} else {
request.script.res = compact([request?.script?.res || '', ...folderCombinedPostResScript.reverse()]).join(os.EOL);
}
}
if (folderCombinedTests.length) {
if (scriptFlow === 'sequential') {
request.tests = compact([...folderCombinedTests, request?.tests || '']).join(os.EOL);
} else {
request.tests = compact([request?.tests || '', ...folderCombinedTests.reverse()]).join(os.EOL);
}
}
};
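// Summary of the ordering mergeFolderLevelScripts produces, assuming
// requestTreePath = [outer folder, inner folder, request]:
//   script.req -> outer, inner, request   (always)
//   script.res -> outer, inner, request   when flow is 'sequential'
//   script.res -> request, inner, outer   when flow is 'sandwich' (the default)
//   tests follow the same rule as script.res.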
const findItemInCollection = (collection, itemId) => {
let item = null;
if (collection.uid === itemId) {
return collection;
}
if (collection.items && collection.items.length) {
collection.items.forEach((_item) => {
  if (_item.uid === itemId) {
    item = _item;
  } else if (_item.type === 'folder') {
    item = findItemInCollection(_item, itemId) || item;
  }
});
}
return item;
};
const findItemInCollectionByPath = (collection, pathname) => {
let item = null;
if (collection.pathname === pathname) {
return collection;
}
if (collection.items && collection.items.length) {
collection.items.forEach((_item) => {
if (_item.pathname === pathname) {
item = _item;
} else if (_item.type === 'folder') {
item = findItemInCollectionByPath(_item, pathname) || item;
}
});
}
return item;
};
const flattenItems = (items = []) => {
const flattenedItems = [];
const flatten = (itms, flattened) => {
each(itms, (i) => {
flattened.push(i);
if (i.items && i.items.length) {
flatten(i.items, flattened);
}
});
};
flatten(items, flattenedItems);
return flattenedItems;
};
const findParentItemInCollectionByPath = (collection, pathname) => {
let flattenedItems = flattenItems(collection.items);
return find(flattenedItems, (item) => {
return item.items && find(item.items, (i) => i.pathname === pathname);
});
};
const getTreePathFromCollectionToItem = (collection, _item) => {
let path = [];
let item = findItemInCollectionByPath(collection, _item.pathname);
while (item) {
path.unshift(item);
item = findParentItemInCollectionByPath(collection, item.pathname);
}
return path;
};
const prepareRequest = (item = {}, collection = {}) => {
const request = item?.request;
const brunoConfig = get(collection, 'brunoConfig', {});
const headers = {};
let contentTypeDefined = false;
const scriptFlow = brunoConfig?.scripts?.flow ?? 'sandwich';
const requestTreePath = getTreePathFromCollectionToItem(collection, item);
if (requestTreePath && requestTreePath.length > 0) {
mergeHeaders(collection, request, requestTreePath);
mergeFolderLevelScripts(request, requestTreePath, scriptFlow);
mergeVars(collection, request, requestTreePath);
}
each(get(request, 'headers', []), (h) => {
if (h.enabled) {
headers[h.name] = h.value;
if (h.name.toLowerCase() === 'content-type') {
@ -33,7 +265,7 @@ const prepareRequest = (request, collectionRoot) => {
pathParams: request?.params?.filter((param) => param.type === 'path')
};
const collectionAuth = get(collectionRoot, 'request.auth');
const collectionAuth = get(collection, 'root.request.auth');
if (collectionAuth && request.auth.mode === 'inherit') {
if (collectionAuth.mode === 'basic') {
axiosRequest.auth = {
@ -148,6 +380,11 @@ const prepareRequest = (request, collectionRoot) => {
axiosRequest.script = request.script;
}
axiosRequest.vars = request.vars;
axiosRequest.collectionVariables = request.collectionVariables;
axiosRequest.folderVariables = request.folderVariables;
axiosRequest.requestVariables = request.requestVariables;
return axiosRequest;
};

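To make the new flow concrete, a small sketch under an assumed collection layout; the folder and file names are hypothetical, while the function names are the ones added above:

// Hypothetical collection with one folder and one request.
const item = findItemInCollectionByPath(collection, '/col/users/get-user.bru');
const requestTreePath = getTreePathFromCollectionToItem(collection, item);
// => [ <folder 'users'>, <request 'get-user.bru'> ]  (the collection root is read separately)
mergeVars(collection, item.request, requestTreePath);
// item.request.collectionVariables -> enabled vars from collection.bru (root.request.vars.req)
// item.request.folderVariables     -> enabled vars from users/folder.bru
// item.request.requestVariables    -> enabled vars from get-user.bru itself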
View File

@ -34,13 +34,20 @@ const runSingleRequest = async function (
processEnvVars,
brunoConfig,
collectionRoot,
runtime
runtime,
collection
) {
try {
let request;
let nextRequestName;
request = prepareRequest(bruJson.request, collectionRoot);
collection = {
root: collectionRoot,
brunoConfig,
...collection
};
request = prepareRequest({ pathname: `${collectionPath}/${filename}`, ...bruJson }, collection);
const scriptingConfig = get(brunoConfig, 'scripts', {});
scriptingConfig.runtime = runtime;

View File

@ -1,3 +1,5 @@
const { customAlphabet } = require('nanoid');
const lpad = (str, width) => {
let paddedStr = str;
while (paddedStr.length < width) {
@ -14,7 +16,16 @@ const rpad = (str, width) => {
return paddedStr;
};
const uuid = () => {
// https://github.com/ai/nanoid/blob/main/url-alphabet/index.js
const urlAlphabet = 'useandom26T198340PX75pxJACKVERYMINDBUSHWOLFGQZbfghjklqvwyzrict';
const customNanoId = customAlphabet(urlAlphabet, 21);
return customNanoId();
};
module.exports = {
lpad,
rpad
rpad,
uuid
};
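For completeness, a quick usage sketch of the new uuid helper; the id value shown is illustrative:

const { uuid } = require('../utils/common');
const uid = uuid(); // 21-character id over the custom alphabet above, e.g. 'pX7JACKb3fgh9kTqvwyzr'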

View File

@ -7,14 +7,14 @@ describe('prepare-request: prepareRequest', () => {
it('If request body is valid JSON', async () => {
const body = { mode: 'json', json: '{\n"test": "{{someVar}}" // comment\n}' };
const expected = { test: '{{someVar}}' };
const result = prepareRequest({ body });
const result = prepareRequest({ body }, {});
expect(result.data).toEqual(expected);
});
it('If request body is not valid JSON', async () => {
const body = { mode: 'json', json: '{\n"test": {{someVar}} // comment\n}' };
const expected = '{\n"test": {{someVar}} \n}';
const result = prepareRequest({ body });
const result = prepareRequest({ body }, {});
expect(result.data).toEqual(expected);
});
});