Add @usebruno/filestore package (#5130)

Author: naman-bruno
Date: 2025-07-24 18:48:25 +05:30
Committed by: GitHub
Parent: a36f33746d
Commit: 4badee903a
33 changed files with 1366 additions and 555 deletions
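The change replaces the BRU parse/serialize helpers that previously lived inside bruno-cli (src/utils/bru.js) and bruno-electron (src/bru plus its worker scripts) with a single workspace package, @usebruno/filestore, which wraps @usebruno/lang. The sketch below summarizes the old-to-new function mapping as it appears in the hunks that follow; it is an inferred overview rather than the package's documented API, and the file name used in the round-trip example is hypothetical.

// Mapping inferred from this diff (illustrative, not an official API reference):
//   bruToJson / bruToJsonV2          -> parseRequest
//   jsonToBru / jsonToBruV2          -> stringifyRequest
//   bruToJsonViaWorker               -> parseRequestViaWorker
//   jsonToBruViaWorker               -> stringifyRequestViaWorker
//   collectionBruToJson              -> parseCollection (collection.bru) / parseFolder (folder.bru)
//   jsonToCollectionBru              -> stringifyCollection / stringifyFolder
//   bruToEnvJson / bruToEnvJsonV2    -> parseEnvironment
//   envJsonToBru / envJsonToBruV2    -> stringifyEnvironment
//   dotenvToJson (@usebruno/lang)    -> parseDotEnv
const { parseRequest, stringifyRequest } = require('@usebruno/filestore');
const fs = require('fs');

// Hypothetical round-trip of a request file ('ping.bru' is an assumed name),
// mirroring the synchronous usage in the rename handlers later in this diff:
const json = parseRequest(fs.readFileSync('ping.bru', 'utf8'));
json.name = 'ping (renamed)';
fs.writeFileSync('ping.bru', stringifyRequest(json));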


@@ -30,6 +30,7 @@ jobs:
   npm run sandbox:bundle-libraries --workspace=packages/bruno-js
   npm run build --workspace=packages/bruno-converters
   npm run build --workspace=packages/bruno-requests
+  npm run build --workspace=packages/bruno-filestore
 - name: Lint Check
   run: npm run lint
@@ -80,6 +81,7 @@ jobs:
   npm run sandbox:bundle-libraries --workspace=packages/bruno-js
   npm run build --workspace=packages/bruno-converters
   npm run build --workspace=packages/bruno-requests
+  npm run build --workspace=packages/bruno-filestore
 - name: Run tests
   run: |
@@ -125,6 +127,7 @@ jobs:
   npm run sandbox:bundle-libraries --workspace=packages/bruno-js
   npm run build:bruno-converters
   npm run build:bruno-requests
+  npm run build:bruno-filestore
 - name: Run Playwright tests
   run: |

package-lock.json (generated)

@@ -18,7 +18,8 @@
     "packages/bruno-tests",
     "packages/bruno-toml",
     "packages/bruno-graphql-docs",
-    "packages/bruno-requests"
+    "packages/bruno-requests",
+    "packages/bruno-filestore"
   ],
   "devDependencies": {
     "@faker-js/faker": "^7.6.0",
@@ -8725,6 +8726,10 @@
       "integrity": "sha512-khvEnRF6/UVDw4df06j+6lFWGNDYWlcWnxfmEgU2o/CdsGY291NC1Cexz99ud7sbGBQP2d8JUXZe4zXPkGNJpQ==",
       "license": "MIT"
     },
+    "node_modules/@usebruno/filestore": {
+      "resolved": "packages/bruno-filestore",
+      "link": true
+    },
     "node_modules/@usebruno/graphql-docs": {
       "resolved": "packages/bruno-graphql-docs",
       "link": true
@@ -29922,6 +29927,7 @@
         "@aws-sdk/credential-providers": "3.750.0",
         "@usebruno/common": "0.1.0",
         "@usebruno/converters": "^0.1.0",
+        "@usebruno/filestore": "^0.1.0",
         "@usebruno/js": "0.12.0",
         "@usebruno/lang": "0.12.0",
         "@usebruno/requests": "^0.1.0",
@@ -31688,6 +31694,7 @@
         "@aws-sdk/credential-providers": "3.750.0",
         "@usebruno/common": "0.1.0",
         "@usebruno/converters": "^0.1.0",
+        "@usebruno/filestore": "^0.1.0",
         "@usebruno/js": "0.12.0",
         "@usebruno/lang": "0.12.0",
         "@usebruno/node-machine-id": "^2.0.0",
@@ -32803,6 +32810,95 @@
         "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
       }
     },
"packages/bruno-filestore": {
"name": "@usebruno/filestore",
"version": "0.1.0",
"license": "MIT",
"dependencies": {
"@usebruno/lang": "0.12.0",
"lodash": "^4.17.21"
},
"devDependencies": {
"@babel/preset-env": "^7.22.0",
"@babel/preset-typescript": "^7.22.0",
"@rollup/plugin-commonjs": "^23.0.2",
"@rollup/plugin-node-resolve": "^15.0.1",
"@rollup/plugin-typescript": "^9.0.2",
"@types/jest": "^29.5.11",
"@types/lodash": "^4.14.191",
"babel-jest": "^29.7.0",
"jest": "^29.2.0",
"rimraf": "^3.0.2",
"rollup": "3.29.5",
"rollup-plugin-dts": "^5.0.0",
"rollup-plugin-peer-deps-external": "^2.2.4",
"rollup-plugin-terser": "^7.0.2",
"typescript": "^4.8.4"
}
},
"packages/bruno-filestore/node_modules/brace-expansion": {
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"dev": true,
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"packages/bruno-filestore/node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"deprecated": "Glob versions prior to v9 are no longer supported",
"dev": true,
"license": "ISC",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"packages/bruno-filestore/node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dev": true,
"license": "ISC",
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"packages/bruno-filestore/node_modules/rimraf": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
"integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
"deprecated": "Rimraf versions prior to v4 are no longer supported",
"dev": true,
"license": "ISC",
"dependencies": {
"glob": "^7.1.3"
},
"bin": {
"rimraf": "bin.js"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"packages/bruno-graphql-docs": { "packages/bruno-graphql-docs": {
"name": "@usebruno/graphql-docs", "name": "@usebruno/graphql-docs",
"version": "0.1.0", "version": "0.1.0",


@@ -14,7 +14,8 @@
    "packages/bruno-tests",
    "packages/bruno-toml",
    "packages/bruno-graphql-docs",
-   "packages/bruno-requests"
+   "packages/bruno-requests",
+   "packages/bruno-filestore"
  ],
  "homepage": "https://usebruno.com",
  "devDependencies": {
@@ -48,6 +49,7 @@
    "dev:electron:debug": "npm run debug --workspace=packages/bruno-electron",
    "build:bruno-common": "npm run build --workspace=packages/bruno-common",
    "build:bruno-requests": "npm run build --workspace=packages/bruno-requests",
+   "build:bruno-filestore": "npm run build --workspace=packages/bruno-filestore",
    "build:bruno-converters": "npm run build --workspace=packages/bruno-converters",
    "build:bruno-query": "npm run build --workspace=packages/bruno-query",
    "build:graphql-docs": "npm run build --workspace=packages/bruno-graphql-docs",


@@ -53,6 +53,7 @@
    "@usebruno/vm2": "^3.9.13",
    "@usebruno/requests": "^0.1.0",
    "@usebruno/converters": "^0.1.0",
+   "@usebruno/filestore": "^0.1.0",
    "aws4-axios": "^3.3.0",
    "axios": "^1.8.3",
    "axios-ntlm": "^1.4.2",


@@ -5,15 +5,15 @@ const { forOwn, cloneDeep } = require('lodash');
 const { getRunnerSummary } = require('@usebruno/common/runner');
 const { exists, isFile, isDirectory } = require('../utils/filesystem');
 const { runSingleRequest } = require('../runner/run-single-request');
-const { bruToEnvJson, getEnvVars } = require('../utils/bru');
+const { getEnvVars } = require('../utils/bru');
 const { isRequestTagsIncluded } = require("@usebruno/common")
 const makeJUnitOutput = require('../reporters/junit');
 const makeHtmlOutput = require('../reporters/html');
 const { rpad } = require('../utils/common');
-const { bruToJson, getOptions, collectionBruToJson } = require('../utils/bru');
+const { getOptions } = require('../utils/bru');
-const { dotenvToJson } = require('@usebruno/lang');
+const { parseDotEnv, parseEnvironment } = require('@usebruno/filestore');
 const constants = require('../constants');
-const { findItemInCollection, getAllRequestsInFolder, createCollectionJsonFromPathname, getCallStack } = require('../utils/collection');
+const { findItemInCollection, createCollectionJsonFromPathname, getCallStack } = require('../utils/collection');
 const command = 'run [paths...]';
 const desc = 'Run one or more requests/folders';
@@ -346,7 +346,7 @@ const handler = async function (argv) {
     }
     const envBruContent = fs.readFileSync(envFilePath, 'utf8').replace(/\r\n/g, '\n');
-    const envJson = bruToEnvJson(envBruContent);
+    const envJson = parseEnvironment(envBruContent);
     envVars = getEnvVars(envJson);
     envVars.__name__ = envFile ? path.basename(envFilePath, '.bru') : env;
   }
@@ -439,7 +439,7 @@ const handler = async function (argv) {
   };
   if (dotEnvExists) {
     const content = fs.readFileSync(dotEnvPath, 'utf8');
-    const jsonData = dotenvToJson(content);
+    const jsonData = parseDotEnv(content);
     forOwn(jsonData, (value, key) => {
       processEnvVars[key] = value;


@@ -1,9 +1,12 @@
 const _ = require('lodash');
-const { bruToEnvJsonV2, bruToJsonV2, collectionBruToJson: _collectionBruToJson } = require('@usebruno/lang');
+const {
+  parseRequest: _parseRequest,
+  parseCollection: _parseCollection
+} = require('@usebruno/filestore');
 const collectionBruToJson = (bru) => {
   try {
-    const json = _collectionBruToJson(bru);
+    const json = _parseCollection(bru);
     const transformedJson = {
       request: {
@@ -46,7 +49,7 @@ const collectionBruToJson = (bru) => {
  */
 const bruToJson = (bru) => {
   try {
-    const json = bruToJsonV2(bru);
+    const json = _parseRequest(bru);
     let requestType = _.get(json, 'meta.type');
     if (requestType === 'http') {
@@ -88,14 +91,6 @@ const bruToJson = (bru) => {
   }
 };
-const bruToEnvJson = (bru) => {
-  try {
-    return bruToEnvJsonV2(bru);
-  } catch (err) {
-    return Promise.reject(err);
-  }
-};
 const getEnvVars = (environment = {}) => {
   const variables = environment.variables;
   if (!variables || !variables.length) {
@@ -119,7 +114,6 @@ const getOptions = () => {
 module.exports = {
   bruToJson,
-  bruToEnvJson,
   getEnvVars,
   getOptions,
   collectionBruToJson


@@ -2,9 +2,8 @@ const { get, each, find, compact } = require('lodash');
 const os = require('os');
 const fs = require('fs');
 const path = require('path');
-const { jsonToBruV2, envJsonToBruV2, jsonToCollectionBru } = require('@usebruno/lang');
 const { sanitizeName } = require('./filesystem');
-const { bruToJson, collectionBruToJson } = require('./bru');
+const { parseRequest, parseCollection, parseFolder, stringifyCollection, stringifyFolder, stringifyEnvironment } = require('@usebruno/filestore');
 const constants = require('../constants');
 const chalk = require('chalk');
@@ -46,7 +45,7 @@ const createCollectionJsonFromPathname = (collectionPath) => {
       // get the request item
       const bruContent = fs.readFileSync(filePath, 'utf8');
-      const requestItem = bruToJson(bruContent);
+      const requestItem = parseRequest(bruContent);
       currentDirItems.push({
         name: file,
         pathname: filePath,
@@ -97,7 +96,7 @@ const getCollectionRoot = (dir) => {
   }
   const content = fs.readFileSync(collectionRootPath, 'utf8');
-  return collectionBruToJson(content);
+  return parseCollection(content);
 };
 const getFolderRoot = (dir) => {
@@ -108,7 +107,7 @@ const getFolderRoot = (dir) => {
   }
   const content = fs.readFileSync(folderRootPath, 'utf8');
-  return collectionBruToJson(content);
+  return parseFolder(content);
 };
 const mergeHeaders = (collection, request, requestTreePath) => {
@@ -417,7 +416,7 @@ const createCollectionFromBrunoObject = async (collection, dirPath) => {
   // Create collection.bru if root exists
   if (collection.root) {
-    const collectionContent = await jsonToCollectionBru(collection.root);
+    const collectionContent = await stringifyCollection(collection.root);
     fs.writeFileSync(path.join(dirPath, 'collection.bru'), collectionContent);
   }
@@ -427,7 +426,7 @@ const createCollectionFromBrunoObject = async (collection, dirPath) => {
     fs.mkdirSync(envDirPath, { recursive: true });
     for (const env of collection.environments) {
-      const content = await envJsonToBruV2(env);
+      const content = await stringifyEnvironment(env);
       const filename = sanitizeName(`${env.name}.bru`);
       fs.writeFileSync(path.join(envDirPath, filename), content);
     }
@@ -459,10 +458,7 @@ const processCollectionItems = async (items = [], currentPath) => {
       if (item.seq) {
         item.root.meta.seq = item.seq;
       }
-      const folderContent = await jsonToCollectionBru(
-        item.root,
-        true
-      );
+      const folderContent = await stringifyFolder(item.root);
       safeWriteFileSync(folderBruFilePath, folderContent);
     }
@@ -506,7 +502,7 @@ const processCollectionItems = async (items = [], currentPath) => {
       };
       // Convert to BRU format and write to file
-      const content = await jsonToBruV2(bruJson);
+      const content = await stringifyRequest(bruJson);
       safeWriteFileSync(path.join(currentPath, sanitizedFilename), content);
     }
   }


@@ -38,6 +38,7 @@
    "@usebruno/schema": "0.7.0",
    "@usebruno/vm2": "^3.9.13",
    "@usebruno/requests": "^0.1.0",
+   "@usebruno/filestore": "^0.1.0",
    "about-window": "^1.15.2",
    "aws4-axios": "^3.3.0",
    "axios": "^1.8.3",


@@ -3,8 +3,14 @@ const fs = require('fs');
 const path = require('path');
 const chokidar = require('chokidar');
 const { hasBruExtension, isWSLPath, normalizeAndResolvePath, sizeInMB } = require('../utils/filesystem');
-const { bruToEnvJson, bruToJson, bruToJsonViaWorker, collectionBruToJson } = require('../bru');
-const { dotenvToJson } = require('@usebruno/lang');
+const {
+  parseEnvironment,
+  parseRequest,
+  parseRequestViaWorker,
+  parseCollection,
+  parseFolder
+} = require('@usebruno/filestore');
+const { parseDotEnv } = require('@usebruno/filestore');
 const { uuid } = require('../utils/common');
 const { getRequestUid } = require('../cache/requestUids');
@@ -80,7 +86,7 @@ const addEnvironmentFile = async (win, pathname, collectionUid, collectionPath)
   let bruContent = fs.readFileSync(pathname, 'utf8');
-  file.data = await bruToEnvJson(bruContent);
+  file.data = await parseEnvironment(bruContent);
   file.data.name = basename.substring(0, basename.length - 4);
   file.data.uid = getRequestUid(pathname);
@@ -115,7 +121,7 @@ const changeEnvironmentFile = async (win, pathname, collectionUid, collectionPath
   };
   const bruContent = fs.readFileSync(pathname, 'utf8');
-  file.data = await bruToEnvJson(bruContent);
+  file.data = await parseEnvironment(bruContent);
   file.data.name = basename.substring(0, basename.length - 4);
   file.data.uid = getRequestUid(pathname);
   _.each(_.get(file, 'data.variables', []), (variable) => (variable.uid = uuid()));
@@ -177,7 +183,7 @@ const add = async (win, pathname, collectionUid, collectionPath, useWorkerThread
   if (isDotEnvFile(pathname, collectionPath)) {
     try {
       const content = fs.readFileSync(pathname, 'utf8');
-      const jsonData = dotenvToJson(content);
+      const jsonData = parseDotEnv(content);
       setDotEnvVars(collectionUid, jsonData);
       const payload = {
@@ -209,7 +215,7 @@ const add = async (win, pathname, collectionUid, collectionPath, useWorkerThread
     try {
       let bruContent = fs.readFileSync(pathname, 'utf8');
-      file.data = await collectionBruToJson(bruContent);
+      file.data = await parseCollection(bruContent);
       hydrateBruCollectionFileWithUuid(file.data);
       win.webContents.send('main:collection-tree-updated', 'addFile', file);
@@ -233,7 +239,7 @@ const add = async (win, pathname, collectionUid, collectionPath, useWorkerThread
     try {
       let bruContent = fs.readFileSync(pathname, 'utf8');
-      file.data = await collectionBruToJson(bruContent);
+      file.data = await parseCollection(bruContent);
       hydrateBruCollectionFileWithUuid(file.data);
       win.webContents.send('main:collection-tree-updated', 'addFile', file);
@@ -258,7 +264,7 @@ const add = async (win, pathname, collectionUid, collectionPath, useWorkerThread
     // If worker thread is not used, we can directly parse the file
     if (!useWorkerThread) {
       try {
-        file.data = await bruToJson(bruContent);
+        file.data = await parseRequest(bruContent);
         file.partial = false;
         file.loading = false;
         file.size = sizeInMB(fileStats?.size);
@@ -278,7 +284,7 @@ const add = async (win, pathname, collectionUid, collectionPath, useWorkerThread
           type: 'http-request'
         };
-        const metaJson = await bruToJson(parseBruFileMeta(bruContent), true);
+        const metaJson = parseBruFileMeta(bruContent);
        file.data = metaJson;
         file.partial = true;
         file.loading = false;
@@ -295,7 +301,7 @@ const add = async (win, pathname, collectionUid, collectionPath, useWorkerThread
         win.webContents.send('main:collection-tree-updated', 'addFile', file);
         // This is to update the file info in the UI
-        file.data = await bruToJsonViaWorker(bruContent);
+        file.data = await parseRequestViaWorker(bruContent);
         file.partial = false;
         file.loading = false;
         hydrateRequestWithUuid(file.data, pathname);
@@ -331,7 +337,7 @@ const addDirectory = async (win, pathname, collectionUid, collectionPath) => {
   if (fs.existsSync(folderBruFilePath)) {
     let folderBruFileContent = fs.readFileSync(folderBruFilePath, 'utf8');
-    let folderBruData = await collectionBruToJson(folderBruFileContent);
+    let folderBruData = await parseFolder(folderBruFileContent);
     name = folderBruData?.meta?.name || name;
     seq = folderBruData?.meta?.seq;
   }
@@ -370,7 +376,7 @@ const change = async (win, pathname, collectionUid, collectionPath) => {
   if (isDotEnvFile(pathname, collectionPath)) {
     try {
       const content = fs.readFileSync(pathname, 'utf8');
-      const jsonData = dotenvToJson(content);
+      const jsonData = parseDotEnv(content);
       setDotEnvVars(collectionUid, jsonData);
       const payload = {
@@ -402,7 +408,7 @@ const change = async (win, pathname, collectionUid, collectionPath) => {
     try {
       let bruContent = fs.readFileSync(pathname, 'utf8');
-      file.data = await collectionBruToJson(bruContent);
+      file.data = await parseCollection(bruContent);
       hydrateBruCollectionFileWithUuid(file.data);
       win.webContents.send('main:collection-tree-updated', 'change', file);
       return;
@@ -425,7 +431,7 @@ const change = async (win, pathname, collectionUid, collectionPath) => {
     try {
       let bruContent = fs.readFileSync(pathname, 'utf8');
-      file.data = await collectionBruToJson(bruContent);
+      file.data = await parseCollection(bruContent);
       hydrateBruCollectionFileWithUuid(file.data);
       win.webContents.send('main:collection-tree-updated', 'change', file);
@@ -447,7 +453,7 @@ const change = async (win, pathname, collectionUid, collectionPath) => {
     };
     const bru = fs.readFileSync(pathname, 'utf8');
-    file.data = await bruToJson(bru);
+    file.data = await parseRequest(bru);
     hydrateRequestWithUuid(file.data, pathname);
     win.webContents.send('main:collection-tree-updated', 'change', file);
@@ -490,7 +496,7 @@ const unlinkDir = async (win, pathname, collectionUid, collectionPath) => {
   if (fs.existsSync(folderBruFilePath)) {
     let folderBruFileContent = fs.readFileSync(folderBruFilePath, 'utf8');
-    let folderBruData = await collectionBruToJson(folderBruFileContent);
+    let folderBruData = await parseFolder(folderBruFileContent);
     name = folderBruData?.meta?.name || name;
   }


@@ -1,279 +0,0 @@
const _ = require('lodash');
const {
bruToJsonV2,
jsonToBruV2,
bruToEnvJsonV2,
envJsonToBruV2,
collectionBruToJson: _collectionBruToJson,
jsonToCollectionBru: _jsonToCollectionBru
} = require('@usebruno/lang');
const BruParserWorker = require('./workers');
const bruParserWorker = new BruParserWorker();
const collectionBruToJson = async (data, parsed = false) => {
try {
const json = parsed ? data : _collectionBruToJson(data);
const transformedJson = {
request: {
headers: _.get(json, 'headers', []),
auth: _.get(json, 'auth', {}),
script: _.get(json, 'script', {}),
vars: _.get(json, 'vars', {}),
tests: _.get(json, 'tests', '')
},
settings: _.get(json, 'settings', {}),
docs: _.get(json, 'docs', '')
};
// add meta if it exists
// this is only for folder bru file
// in the future, all of this will be replaced by standard bru lang
const sequence = _.get(json, 'meta.seq');
if (json?.meta) {
transformedJson.meta = {
name: json.meta.name,
};
if (sequence) {
transformedJson.meta.seq = Number(sequence);
}
}
return transformedJson;
} catch (error) {
return Promise.reject(error);
}
};
const jsonToCollectionBru = async (json, isFolder) => {
try {
const collectionBruJson = {
headers: _.get(json, 'request.headers', []),
script: {
req: _.get(json, 'request.script.req', ''),
res: _.get(json, 'request.script.res', '')
},
vars: {
req: _.get(json, 'request.vars.req', []),
res: _.get(json, 'request.vars.res', [])
},
tests: _.get(json, 'request.tests', ''),
auth: _.get(json, 'request.auth', {}),
docs: _.get(json, 'docs', '')
};
// add meta if it exists
// this is only for folder bru file
// in the future, all of this will be replaced by standard bru lang
const sequence = _.get(json, 'meta.seq');
if (json?.meta) {
collectionBruJson.meta = {
name: json.meta.name,
};
if (sequence) {
collectionBruJson.meta.seq = Number(sequence);
}
}
return _jsonToCollectionBru(collectionBruJson);
} catch (error) {
return Promise.reject(error);
}
};
const bruToEnvJson = async (bru) => {
try {
const json = bruToEnvJsonV2(bru);
// the app env format requires each variable to have a type
// this need to be evaluated and safely removed
// i don't see it being used in schema validation
if (json && json.variables && json.variables.length) {
_.each(json.variables, (v) => (v.type = 'text'));
}
return json;
} catch (error) {
return Promise.reject(error);
}
};
const envJsonToBru = async (json) => {
try {
const bru = envJsonToBruV2(json);
return bru;
} catch (error) {
return Promise.reject(error);
}
};
/**
* The transformer function for converting a BRU file to JSON.
*
* We map the json response from the bru lang and transform it into the DSL
* format that the app uses
*
* @param {string} data The BRU file content.
* @returns {object} The JSON representation of the BRU file.
*/
const bruToJson = (data, parsed = false) => {
try {
const json = parsed ? data : bruToJsonV2(data);
let requestType = _.get(json, 'meta.type');
if (requestType === 'http') {
requestType = 'http-request';
} else if (requestType === 'graphql') {
requestType = 'graphql-request';
} else {
requestType = 'http-request';
}
const sequence = _.get(json, 'meta.seq');
const transformedJson = {
type: requestType,
name: _.get(json, 'meta.name'),
seq: !_.isNaN(sequence) ? Number(sequence) : 1,
settings: _.get(json, 'settings', {}),
tags: _.get(json, 'meta.tags', []),
request: {
method: _.upperCase(_.get(json, 'http.method')),
url: _.get(json, 'http.url'),
params: _.get(json, 'params', []),
headers: _.get(json, 'headers', []),
auth: _.get(json, 'auth', {}),
body: _.get(json, 'body', {}),
script: _.get(json, 'script', {}),
vars: _.get(json, 'vars', {}),
assertions: _.get(json, 'assertions', []),
tests: _.get(json, 'tests', ''),
docs: _.get(json, 'docs', '')
}
};
transformedJson.request.auth.mode = _.get(json, 'http.auth', 'none');
transformedJson.request.body.mode = _.get(json, 'http.body', 'none');
return transformedJson;
} catch (e) {
return Promise.reject(e);
}
};
const bruToJsonViaWorker = async (data) => {
try {
const json = await bruParserWorker?.bruToJson(data);
return bruToJson(json, true);
} catch (e) {
return Promise.reject(e);
}
};
/**
* The transformer function for converting a JSON to BRU file.
*
* We map the json response from the app and transform it into the DSL
* format that the bru lang understands
*
* @param {object} json The JSON representation of the BRU file.
* @returns {string} The BRU file content.
*/
const jsonToBru = async (json) => {
let type = _.get(json, 'type');
if (type === 'http-request') {
type = 'http';
} else if (type === 'graphql-request') {
type = 'graphql';
} else {
type = 'http';
}
const sequence = _.get(json, 'seq');
const bruJson = {
meta: {
name: _.get(json, 'name'),
type: type,
seq: !_.isNaN(sequence) ? Number(sequence) : 1,
tags: _.get(json, 'tags', []),
},
http: {
method: _.lowerCase(_.get(json, 'request.method')),
url: _.get(json, 'request.url'),
auth: _.get(json, 'request.auth.mode', 'none'),
body: _.get(json, 'request.body.mode', 'none')
},
params: _.get(json, 'request.params', []),
headers: _.get(json, 'request.headers', []),
auth: _.get(json, 'request.auth', {}),
body: _.get(json, 'request.body', {}),
script: _.get(json, 'request.script', {}),
vars: {
req: _.get(json, 'request.vars.req', []),
res: _.get(json, 'request.vars.res', [])
},
assertions: _.get(json, 'request.assertions', []),
tests: _.get(json, 'request.tests', ''),
settings: _.get(json, 'settings', {}),
docs: _.get(json, 'request.docs', '')
};
const bru = jsonToBruV2(bruJson);
return bru;
};
const jsonToBruViaWorker = async (json) => {
let type = _.get(json, 'type');
if (type === 'http-request') {
type = 'http';
} else if (type === 'graphql-request') {
type = 'graphql';
} else {
type = 'http';
}
const sequence = _.get(json, 'seq');
const bruJson = {
meta: {
name: _.get(json, 'name'),
type: type,
seq: !_.isNaN(sequence) ? Number(sequence) : 1,
tags: _.get(json, 'tags', [])
},
http: {
method: _.lowerCase(_.get(json, 'request.method')),
url: _.get(json, 'request.url'),
auth: _.get(json, 'request.auth.mode', 'none'),
body: _.get(json, 'request.body.mode', 'none')
},
params: _.get(json, 'request.params', []),
headers: _.get(json, 'request.headers', []),
auth: _.get(json, 'request.auth', {}),
body: _.get(json, 'request.body', {}),
script: _.get(json, 'request.script', {}),
vars: {
req: _.get(json, 'request.vars.req', []),
res: _.get(json, 'request.vars.res', [])
},
assertions: _.get(json, 'request.assertions', []),
tests: _.get(json, 'request.tests', ''),
settings: _.get(json, 'settings', {}),
docs: _.get(json, 'request.docs', '')
};
const bru = await bruParserWorker?.jsonToBru(bruJson)
return bru;
};
module.exports = {
bruToJson,
bruToJsonViaWorker,
jsonToBru,
bruToEnvJson,
envJsonToBru,
collectionBruToJson,
jsonToCollectionBru,
jsonToBruViaWorker
};


@@ -1,64 +0,0 @@
const { sizeInMB } = require("../../utils/filesystem");
const WorkerQueue = require("../../workers");
const path = require("path");
const getSize = (data) => {
return sizeInMB(typeof data === 'string' ? Buffer.byteLength(data, 'utf8') : Buffer.byteLength(JSON.stringify(data), 'utf8'));
}
/**
* Lanes are used to determine which worker queue to use based on the size of the data.
*
* The first lane is for smaller files (<0.1MB), the second lane is for larger files (>=0.1MB).
* This helps with parsing performance.
*/
const LANES = [{
maxSize: 0.005
},{
maxSize: 0.1
},{
maxSize: 1
},{
maxSize: 10
},{
maxSize: 100
}];
class BruParserWorker {
constructor() {
this.workerQueues = LANES?.map(lane => ({
maxSize: lane?.maxSize,
workerQueue: new WorkerQueue()
}));
}
getWorkerQueue(size) {
// Find the first queue that can handle the given size
// or fallback to the last queue for largest files
const queueForSize = this.workerQueues.find((queue) =>
queue.maxSize >= size
);
return queueForSize?.workerQueue ?? this.workerQueues.at(-1).workerQueue;
}
async enqueueTask({data, scriptFile }) {
const size = getSize(data);
const workerQueue = this.getWorkerQueue(size);
return workerQueue.enqueue({
data,
priority: size,
scriptPath: path.join(__dirname, `./scripts/${scriptFile}.js`)
});
}
async bruToJson(data) {
return this.enqueueTask({ data, scriptFile: `bru-to-json` });
}
async jsonToBru(data) {
return this.enqueueTask({ data, scriptFile: `json-to-bru` });
}
}
module.exports = BruParserWorker;


@@ -1,16 +0,0 @@
const { parentPort } = require('worker_threads');
const {
bruToJsonV2,
} = require('@usebruno/lang');
parentPort.on('message', (workerData) => {
try {
const bru = workerData;
const json = bruToJsonV2(bru);
parentPort.postMessage(json);
}
catch(error) {
console.error(error);
parentPort.postMessage({ error: error?.message });
}
});


@@ -1,16 +0,0 @@
const { parentPort } = require('worker_threads');
const {
jsonToBruV2,
} = require('@usebruno/lang');
parentPort.on('message', (workerData) => {
try {
const json = workerData;
const bru = jsonToBruV2(json);
parentPort.postMessage(bru);
}
catch(error) {
console.error(error);
parentPort.postMessage({ error: error?.message });
}
});


@@ -5,7 +5,18 @@ const fsExtra = require('fs-extra');
 const os = require('os');
 const path = require('path');
 const { ipcMain, shell, dialog, app } = require('electron');
-const { envJsonToBru, bruToJson, jsonToBru, jsonToBruViaWorker, collectionBruToJson, jsonToCollectionBru, bruToJsonViaWorker } = require('../bru');
+const {
+  parseRequest,
+  stringifyRequest,
+  parseRequestViaWorker,
+  stringifyRequestViaWorker,
+  parseCollection,
+  stringifyCollection,
+  parseFolder,
+  stringifyFolder,
+  parseEnvironment,
+  stringifyEnvironment
+} = require('@usebruno/filestore');
 const brunoConverters = require('@usebruno/converters');
 const { postmanToBruno } = brunoConverters;
@@ -225,10 +236,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
       };
     }
-    const content = await jsonToCollectionBru(
-      folderRoot,
-      true // isFolder
-    );
+    const content = await stringifyFolder(folderRoot);
     await writeFile(folderBruFilePath, content);
   } catch (error) {
     return Promise.reject(error);
@@ -238,7 +246,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
   try {
     const collectionBruFilePath = path.join(collectionPathname, 'collection.bru');
-    const content = await jsonToCollectionBru(collectionRoot);
+    const content = await stringifyCollection(collectionRoot);
     await writeFile(collectionBruFilePath, content);
   } catch (error) {
     return Promise.reject(error);
@@ -256,7 +264,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
       throw new Error(`${request.filename}.bru is not a valid filename`);
     }
     validatePathIsInsideCollection(pathname, lastOpenedCollections);
-    const content = await jsonToBruViaWorker(request);
+    const content = await stringifyRequestViaWorker(request);
     await writeFile(pathname, content);
   } catch (error) {
     return Promise.reject(error);
@@ -270,7 +278,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
       throw new Error(`path: ${pathname} does not exist`);
     }
-    const content = await jsonToBruViaWorker(request);
+    const content = await stringifyRequestViaWorker(request);
     await writeFile(pathname, content);
   } catch (error) {
     return Promise.reject(error);
@@ -288,7 +296,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
       throw new Error(`path: ${pathname} does not exist`);
     }
-      const content = await jsonToBruViaWorker(request);
+      const content = await stringifyRequestViaWorker(request);
       await writeFile(pathname, content);
     }
   } catch (error) {
@@ -318,7 +326,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
       environmentSecretsStore.storeEnvSecrets(collectionPathname, environment);
     }
-    const content = await envJsonToBru(environment);
+    const content = await stringifyEnvironment(environment);
     await writeFile(envFilePath, content);
   } catch (error) {
@@ -343,7 +351,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
       environmentSecretsStore.storeEnvSecrets(collectionPathname, environment);
     }
-    const content = await envJsonToBru(environment);
+    const content = await stringifyEnvironment(environment);
     await writeFile(envFilePath, content);
   } catch (error) {
     return Promise.reject(error);
@@ -402,7 +410,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     let folderBruFileJsonContent;
     if (fs.existsSync(folderBruFilePath)) {
       const oldFolderBruFileContent = await fs.promises.readFile(folderBruFilePath, 'utf8');
-      folderBruFileJsonContent = await collectionBruToJson(oldFolderBruFileContent);
+      folderBruFileJsonContent = await parseFolder(oldFolderBruFileContent);
       folderBruFileJsonContent.meta.name = newName;
     } else {
       folderBruFileJsonContent = {
@@ -412,7 +420,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
       };
     }
-    const folderBruFileContent = await jsonToCollectionBru(folderBruFileJsonContent, true);
+    const folderBruFileContent = await stringifyFolder(folderBruFileJsonContent);
     await writeFile(folderBruFilePath, folderBruFileContent);
     return;
@@ -424,9 +432,9 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     }
     const data = fs.readFileSync(itemPath, 'utf8');
-    const jsonData = await bruToJson(data);
+    const jsonData = parseRequest(data);
     jsonData.name = newName;
-    const content = await jsonToBru(jsonData);
+    const content = stringifyRequest(jsonData);
     await writeFile(itemPath, content);
   } catch (error) {
     return Promise.reject(error);
@@ -452,7 +460,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     let folderBruFileJsonContent;
     if (fs.existsSync(folderBruFilePath)) {
       const oldFolderBruFileContent = await fs.promises.readFile(folderBruFilePath, 'utf8');
-      folderBruFileJsonContent = await collectionBruToJson(oldFolderBruFileContent);
+      folderBruFileJsonContent = await parseFolder(oldFolderBruFileContent);
       folderBruFileJsonContent.meta.name = newName;
     } else {
       folderBruFileJsonContent = {
@@ -462,7 +470,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
       };
     }
-    const folderBruFileContent = await jsonToCollectionBru(folderBruFileJsonContent, true);
+    const folderBruFileContent = await stringifyFolder(folderBruFileJsonContent);
     await writeFile(folderBruFilePath, folderBruFileContent);
     const bruFilesAtSource = await searchForBruFiles(oldPath);
@@ -503,11 +511,11 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     // update name in file and save new copy, then delete old copy
     const data = await fs.promises.readFile(oldPath, 'utf8'); // Use async read
-    const jsonData = await bruToJsonViaWorker(data);
+    const jsonData = parseRequest(data);
     jsonData.name = newName;
     moveRequestUid(oldPath, newPath);
-    const content = await jsonToBruViaWorker(jsonData);
+    const content = stringifyRequest(jsonData);
     await fs.promises.unlink(oldPath);
     await writeFile(newPath, content);
@@ -538,7 +546,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     if (!fs.existsSync(pathname)) {
       fs.mkdirSync(pathname);
       const folderBruFilePath = path.join(pathname, 'folder.bru');
-      const content = await jsonToCollectionBru(folderBruJsonData, true); // isFolder flag
+      const content = await stringifyFolder(folderBruJsonData);
       await writeFile(folderBruFilePath, content);
     } else {
       return Promise.reject(new Error('The directory already exists'));
@@ -611,7 +619,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     items.forEach(async (item) => {
       if (['http-request', 'graphql-request'].includes(item.type)) {
         let sanitizedFilename = sanitizeName(item?.filename || `${item.name}.bru`);
-        const content = await jsonToBruViaWorker(item);
+        const content = await stringifyRequestViaWorker(item);
         const filePath = path.join(currentPath, sanitizedFilename);
         safeWriteFileSync(filePath, content);
       }
@@ -623,10 +631,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
       if (item?.root?.meta?.name) {
         const folderBruFilePath = path.join(folderPath, 'folder.bru');
         item.root.meta.seq = item.seq;
-        const folderContent = await jsonToCollectionBru(
-          item.root,
-          true // isFolder
-        );
+        const folderContent = await stringifyFolder(item.root);
         safeWriteFileSync(folderBruFilePath, folderContent);
       }
@@ -650,7 +655,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     }
     environments.forEach(async (env) => {
-      const content = await envJsonToBru(env);
+      const content = await stringifyEnvironment(env);
       let sanitizedEnvFilename = sanitizeName(`${env.name}.bru`);
       const filePath = path.join(envDirPath, sanitizedEnvFilename);
       safeWriteFileSync(filePath, content);
@@ -681,7 +686,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     // Write the Bruno configuration to a file
     await writeFile(path.join(collectionPath, 'bruno.json'), stringifiedBrunoConfig);
-    const collectionContent = await jsonToCollectionBru(collection.root);
+    const collectionContent = await stringifyCollection(collection.root);
     await writeFile(path.join(collectionPath, 'collection.bru'), collectionContent);
     const { size, filesCount } = await getCollectionStats(collectionPath);
@@ -711,7 +716,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
   const parseCollectionItems = (items = [], currentPath) => {
     items.forEach(async (item) => {
       if (['http-request', 'graphql-request'].includes(item.type)) {
-        const content = await jsonToBruViaWorker(item);
+        const content = await stringifyRequestViaWorker(item);
         const filePath = path.join(currentPath, item.filename);
         safeWriteFileSync(filePath, content);
       }
@@ -721,7 +726,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
       // If folder has a root element, then I should write its folder.bru file
       if (item.root) {
-        const folderContent = await jsonToCollectionBru(item.root, true);
+        const folderContent = await stringifyFolder(item.root);
         folderContent.name = item.name;
         if (folderContent) {
           const bruFolderPath = path.join(folderPath, `folder.bru`);
@@ -740,7 +745,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     // If initial folder has a root element, then I should write its folder.bru file
     if (itemFolder.root) {
-      const folderContent = await jsonToCollectionBru(itemFolder.root, true);
+      const folderContent = await stringifyFolder(itemFolder.root);
       if (folderContent) {
         const bruFolderPath = path.join(collectionPath, `folder.bru`);
         safeWriteFileSync(bruFolderPath, folderContent);
@@ -767,7 +772,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     };
     if (fs.existsSync(folderRootPath)) {
       const bru = fs.readFileSync(folderRootPath, 'utf8');
-      folderBruJsonData = await collectionBruToJson(bru);
+      folderBruJsonData = await parseCollection(bru);
       if (!folderBruJsonData?.meta) {
         folderBruJsonData.meta = {
           name: path.basename(item.pathname),
@@ -779,12 +784,12 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
       }
       folderBruJsonData.meta.seq = item.seq;
     }
-    const content = await jsonToCollectionBru(folderBruJsonData);
+    const content = await stringifyFolder(folderBruJsonData);
     await writeFile(folderRootPath, content);
   } else {
     if (fs.existsSync(item.pathname)) {
       const itemToSave = transformRequestToSaveToFilesystem(item);
-      const content = await jsonToBruViaWorker(itemToSave);
+      const content = await stringifyRequestViaWorker(itemToSave);
       await writeFile(item.pathname, content);
     }
   }
@@ -1065,14 +1070,14 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     }
   };
   let bruContent = fs.readFileSync(pathname, 'utf8');
-  const metaJson = await bruToJson(parseBruFileMeta(bruContent), true);
+  const metaJson = parseBruFileMeta(bruContent);
   file.data = metaJson;
   file.loading = true;
   file.partial = true;
   file.size = sizeInMB(fileStats?.size);
   hydrateRequestWithUuid(file.data, pathname);
   mainWindow.webContents.send('main:collection-tree-updated', 'addFile', file);
-  file.data = await bruToJsonViaWorker(bruContent);
+  file.data = await parseRequestViaWorker(bruContent);
   file.partial = false;
   file.loading = true;
   file.size = sizeInMB(fileStats?.size);
@@ -1089,7 +1094,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     }
   };
   let bruContent = fs.readFileSync(pathname, 'utf8');
-  const metaJson = await bruToJson(parseBruFileMeta(bruContent), true);
+  const metaJson = parseRequest(parseBruFileMeta(bruContent));
   file.data = metaJson;
   file.partial = true;
   file.loading = false;
@@ -1140,14 +1145,14 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     }
   };
   let bruContent = fs.readFileSync(pathname, 'utf8');
-  const metaJson = await bruToJson(parseBruFileMeta(bruContent), true);
+  const metaJson = parseRequest(parseBruFileMeta(bruContent));
   file.data = metaJson;
   file.loading = true;
   file.partial = true;
   file.size = sizeInMB(fileStats?.size);
   hydrateRequestWithUuid(file.data, pathname);
   mainWindow.webContents.send('main:collection-tree-updated', 'addFile', file);
-  file.data = bruToJson(bruContent);
+  file.data = parseRequest(bruContent);
   file.partial = false;
   file.loading = true;
   file.size = sizeInMB(fileStats?.size);
@@ -1164,7 +1169,7 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
     }
   };
   let bruContent = fs.readFileSync(pathname, 'utf8');
-  const metaJson = await bruToJson(parseBruFileMeta(bruContent), true);
+  const metaJson = parseRequest(parseBruFileMeta(bruContent));
   file.data = metaJson;
   file.partial = true;
   file.loading = false;


@@ -237,12 +237,47 @@ const parseBruFileMeta = (data) => {
        metaJson[key] = isNaN(value) ? value : Number(value);
      }
    });
-   return { meta: metaJson };
// Transform to the format expected by bruno-app
let requestType = metaJson.type;
if (requestType === 'http') {
requestType = 'http-request';
} else if (requestType === 'graphql') {
requestType = 'graphql-request';
} else {
requestType = 'http-request';
}
const sequence = metaJson.seq;
const transformedJson = {
type: requestType,
name: metaJson.name,
seq: !isNaN(sequence) ? Number(sequence) : 1,
settings: {},
tags: metaJson.tags || [],
request: {
method: '',
url: '',
params: [],
headers: [],
auth: { mode: 'none' },
body: { mode: 'none' },
script: {},
vars: {},
assertions: [],
tests: '',
docs: ''
}
};
return transformedJson;
    } else {
      console.log('No "meta" block found in the file.');
+     return null;
    }
  } catch (err) {
    console.error('Error reading file:', err);
+   return null;
  }
}
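For context, parseBruFileMeta now returns a partial request item in the shape bruno-app expects (and null when no meta block is found) instead of the old { meta } wrapper. A minimal illustration follows, mirroring the updated tests further down; the require path is assumed, since the diff does not show this module's location.

// Assumed path: parseBruFileMeta lives in bruno-electron's filesystem utils.
const { parseBruFileMeta } = require('./utils/filesystem');

const stub = parseBruFileMeta('meta {\n  name: ping\n  type: http\n  seq: 1\n}');
// stub => {
//   type: 'http-request', name: 'ping', seq: 1, settings: {}, tags: [],
//   request: { method: '', url: '', params: [], headers: [], auth: { mode: 'none' },
//              body: { mode: 'none' }, script: {}, vars: {}, assertions: [],
//              tests: '', docs: '' }
// }
// Files without a meta block now yield null rather than undefined.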


@@ -1,68 +0,0 @@
const { Worker } = require('worker_threads');
class WorkerQueue {
constructor() {
this.queue = [];
this.isProcessing = false;
this.workers = {};
}
async getWorkerForScriptPath(scriptPath) {
if (!this.workers) this.workers = {};
let worker = this.workers[scriptPath];
if (!worker || worker.threadId === -1) {
this.workers[scriptPath] = worker = new Worker(scriptPath);
}
return worker;
}
async enqueue(task) {
const { priority, scriptPath, data } = task;
return new Promise((resolve, reject) => {
this.queue.push({ priority, scriptPath, data, resolve, reject });
this.queue?.sort((taskX, taskY) => taskX?.priority - taskY?.priority);
this.processQueue();
});
}
async processQueue() {
if (this.isProcessing || this.queue.length === 0){
return;
}
this.isProcessing = true;
const { scriptPath, data, resolve, reject } = this.queue.shift();
try {
const result = await this.runWorker({ scriptPath, data });
resolve(result);
} catch (error) {
reject(error);
} finally {
this.isProcessing = false;
this.processQueue();
}
}
async runWorker({ scriptPath, data }) {
return new Promise(async (resolve, reject) => {
let worker = await this.getWorkerForScriptPath(scriptPath);
worker.postMessage(data);
worker.on('message', (data) => {
if (data?.error) {
reject(new Error(data?.error));
}
resolve(data);
});
worker.on('error', (error) => {
reject(error);
});
worker.on('exit', (code) => {
reject(new Error(`stopped with ${code} exit code`));
});
});
}
}
module.exports = WorkerQueue;


@@ -11,22 +11,35 @@ describe('parseBruFileMeta', () => {
    const result = parseBruFileMeta(data);
    expect(result).toEqual({
-     meta: {
-       name: '0.2_mb',
-       type: 'http',
-       seq: 1,
-     },
+     type: 'http-request',
+     name: '0.2_mb',
+     seq: 1,
+     settings: {},
+     tags: [],
request: {
method: '',
url: '',
params: [],
headers: [],
auth: { mode: 'none' },
body: { mode: 'none' },
script: {},
vars: {},
assertions: [],
tests: '',
docs: ''
}
    });
  });
-  test('returns undefined for missing meta block', () => {
+  test('returns null for missing meta block', () => {
    const data = `someOtherBlock {
      key: value
    }`;
    const result = parseBruFileMeta(data);
-   expect(result).toBeUndefined();
+   expect(result).toBeNull();
  });
  test('handles empty meta block gracefully', () => {
@@ -34,7 +47,26 @@ describe('parseBruFileMeta', () => {
    const result = parseBruFileMeta(data);
-   expect(result).toEqual({ meta: {} });
+   expect(result).toEqual({
type: 'http-request',
name: undefined,
seq: 1,
settings: {},
tags: [],
request: {
method: '',
url: '',
params: [],
headers: [],
auth: { mode: 'none' },
body: { mode: 'none' },
script: {},
vars: {},
assertions: [],
tests: '',
docs: ''
}
});
  });
  test('ignores invalid lines in meta block', () => {
@@ -47,10 +79,24 @@ describe('parseBruFileMeta', () => {
    const result = parseBruFileMeta(data);
    expect(result).toEqual({
-     meta: {
-       name: '0.2_mb',
-       seq: 1,
-     },
+     type: 'http-request',
+     name: '0.2_mb',
+     seq: 1,
+     settings: {},
tags: [],
request: {
method: '',
url: '',
params: [],
headers: [],
auth: { mode: 'none' },
body: { mode: 'none' },
script: {},
vars: {},
assertions: [],
tests: '',
docs: ''
}
    });
  });
@@ -59,7 +105,7 @@ describe('parseBruFileMeta', () => {
    const result = parseBruFileMeta(data);
-   expect(result).toBeUndefined();
+   expect(result).toBeNull();
  });
  test('handles missing colon gracefully', () => {
@@ -71,9 +117,24 @@ describe('parseBruFileMeta', () => {
    const result = parseBruFileMeta(data);
    expect(result).toEqual({
-     meta: {
-       seq: 1,
-     },
+     type: 'http-request',
+     name: undefined,
+     seq: 1,
settings: {},
tags: [],
request: {
method: '',
url: '',
params: [],
headers: [],
auth: { mode: 'none' },
body: { mode: 'none' },
script: {},
vars: {},
assertions: [],
tests: '',
docs: ''
}
}); });
}); });
@ -82,16 +143,30 @@ describe('parseBruFileMeta', () => {
numValue: 1234 numValue: 1234
floatValue: 12.34 floatValue: 12.34
strValue: some_text strValue: some_text
seq: 5
}`; }`;
const result = parseBruFileMeta(data); const result = parseBruFileMeta(data);
expect(result).toEqual({ expect(result).toEqual({
meta: { type: 'http-request',
numValue: 1234, name: undefined,
floatValue: 12.34, seq: 5,
strValue: 'some_text', settings: {},
}, tags: [],
request: {
method: '',
url: '',
params: [],
headers: [],
auth: { mode: 'none' },
body: { mode: 'none' },
script: {},
vars: {},
assertions: [],
tests: '',
docs: ''
}
}); });
}); });
@ -104,7 +179,7 @@ describe('parseBruFileMeta', () => {
const result = parseBruFileMeta(data); const result = parseBruFileMeta(data);
expect(result).toBeUndefined(); expect(result).toBeNull();
}); });
test('handles syntax error in meta block 2', () => { test('handles syntax error in meta block 2', () => {
@ -116,6 +191,98 @@ describe('parseBruFileMeta', () => {
const result = parseBruFileMeta(data); const result = parseBruFileMeta(data);
expect(result).toBeUndefined(); expect(result).toBeNull();
});
test('handles graphql type correctly', () => {
const data = `meta {
name: graphql_query
type: graphql
seq: 2
}`;
const result = parseBruFileMeta(data);
expect(result).toEqual({
type: 'graphql-request',
name: 'graphql_query',
seq: 2,
settings: {},
tags: [],
request: {
method: '',
url: '',
params: [],
headers: [],
auth: { mode: 'none' },
body: { mode: 'none' },
script: {},
vars: {},
assertions: [],
tests: '',
docs: ''
}
});
});
test('handles unknown type correctly', () => {
const data = `meta {
name: unknown_request
type: unknown
seq: 3
}`;
const result = parseBruFileMeta(data);
expect(result).toEqual({
type: 'http-request',
name: 'unknown_request',
seq: 3,
settings: {},
tags: [],
request: {
method: '',
url: '',
params: [],
headers: [],
auth: { mode: 'none' },
body: { mode: 'none' },
script: {},
vars: {},
assertions: [],
tests: '',
docs: ''
}
});
});
test('handles missing seq gracefully', () => {
const data = `meta {
name: no_seq_request
type: http
}`;
const result = parseBruFileMeta(data);
expect(result).toEqual({
type: 'http-request',
name: 'no_seq_request',
seq: 1, // Default fallback
settings: {},
tags: [],
request: {
method: '',
url: '',
params: [],
headers: [],
auth: { mode: 'none' },
body: { mode: 'none' },
script: {},
vars: {},
assertions: [],
tests: '',
docs: ''
}
});
}); });
}); });

5
packages/bruno-filestore/.gitignore vendored Normal file
View File

@ -0,0 +1,5 @@
node_modules
.DS_Store
*.log
dist
coverage

View File

@ -0,0 +1,22 @@
MIT License
Copyright (c) 2022 Anoop M D, Anusree P S and Contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,50 @@
# Bruno Filestore
A generic file storage and parsing package for the Bruno API client.
## Purpose
This package abstracts the file format operations for Bruno, providing a clean interface for parsing and stringifying Bruno requests, collections, folders, and environments.
## Features
- Format-agnostic APIs for file operations
- Currently supports Bruno's custom `.bru` format
- Designed for future extensibility to support YAML and other formats
## Usage
```javascript
const {
parseRequest,
stringifyRequest,
parseCollection,
stringifyCollection,
parseEnvironment,
stringifyEnvironment,
parseDotEnv
} = require('@usebruno/filestore');
// Parse a .bru request file
const requestData = parseRequest(bruContent);
// Stringify request data back to .bru format
const serializedBruContent = stringifyRequest(requestData);
// Example with future format support (not yet implemented)
const yamlRequestData = parseRequest(yamlContent, { format: 'yaml' });
```
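The package also exports worker-thread-backed variants, `parseRequestViaWorker` and `stringifyRequestViaWorker`, which offload parsing and stringifying of large request files to a worker pool. A minimal sketch of how they might be used; `largeBruContent` here is a placeholder for the contents of a sizeable `.bru` file:
```javascript
const { parseRequestViaWorker, stringifyRequestViaWorker } = require('@usebruno/filestore');

async function roundTrip(largeBruContent) {
  // Parsing is delegated to a worker queue selected by payload size
  const requestData = await parseRequestViaWorker(largeBruContent);

  // Stringifying goes through the same worker infrastructure
  return stringifyRequestViaWorker(requestData);
}
```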
## API
The package provides the following functions:
- `parseRequest(content, options = { format: 'bru' })`: Parse request file content
- `stringifyRequest(requestObj, options = { format: 'bru' })`: Convert request object to file content
- `parseCollection(content, options = { format: 'bru' })`: Parse collection file content
- `stringifyCollection(collectionObj, options = { format: 'bru' })`: Convert collection object to file content
- `parseFolder(content, options = { format: 'bru' })`: Parse folder file content
- `stringifyFolder(folderObj, options = { format: 'bru' })`: Convert folder object to file content
- `parseEnvironment(content, options = { format: 'bru' })`: Parse environment file content
- `stringifyEnvironment(envObj, options = { format: 'bru' })`: Convert environment object to file content
- `parseDotEnv(content)`: Parse .env file content
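Apart from `parseDotEnv`, all of these currently accept only `format: 'bru'`; passing any other format throws an `Unsupported format` error. A short illustrative sketch, where `envBruContent` and `yamlContent` are placeholders for file contents:
```javascript
const { parseEnvironment, parseDotEnv, parseRequest } = require('@usebruno/filestore');

// Environments use the same .bru syntax as requests
const environment = parseEnvironment(envBruContent);

// .env files have a dedicated parser that returns a plain key/value object
const dotEnvVars = parseDotEnv('API_KEY=secret\nBASE_URL=https://example.com');

// Requesting an unimplemented format throws
try {
  parseRequest(yamlContent, { format: 'yaml' });
} catch (err) {
  console.error(err.message); // "Unsupported format: yaml"
}
```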

View File

@ -0,0 +1,6 @@
module.exports = {
presets: [
['@babel/preset-env', { targets: { node: 'current' } }],
'@babel/preset-typescript',
],
};

View File

@ -0,0 +1,13 @@
module.exports = {
testEnvironment: 'node',
transform: {
'^.+\\.(js|ts)$': 'babel-jest',
},
moduleFileExtensions: ['js', 'ts'],
testMatch: ['**/__tests__/**/*.(js|ts)', '**/*.(test|spec).(js|ts)'],
collectCoverageFrom: [
'src/**/*.(js|ts)',
'!src/**/*.d.ts',
],
setupFilesAfterEnv: [],
};

View File

@ -0,0 +1,46 @@
{
"name": "@usebruno/filestore",
"version": "0.1.0",
"license": "MIT",
"main": "dist/cjs/index.js",
"module": "dist/esm/index.js",
"types": "dist/index.d.ts",
"files": [
"dist",
"src",
"package.json"
],
"scripts": {
"clean": "rimraf dist",
"prebuild": "npm run clean",
"build": "rollup -c",
"watch": "rollup -c -w",
"test": "jest",
"test:watch": "jest --watch",
"prepack": "npm run test && npm run build"
},
"devDependencies": {
"@babel/preset-env": "^7.22.0",
"@babel/preset-typescript": "^7.22.0",
"@rollup/plugin-commonjs": "^23.0.2",
"@rollup/plugin-node-resolve": "^15.0.1",
"@rollup/plugin-typescript": "^9.0.2",
"@types/jest": "^29.5.11",
"@types/lodash": "^4.14.191",
"babel-jest": "^29.7.0",
"jest": "^29.2.0",
"rimraf": "^3.0.2",
"rollup": "3.29.5",
"rollup-plugin-dts": "^5.0.0",
"rollup-plugin-peer-deps-external": "^2.2.4",
"rollup-plugin-terser": "^7.0.2",
"typescript": "^4.8.4"
},
"overrides": {
"rollup": "3.29.5"
},
"dependencies": {
"@usebruno/lang": "0.12.0",
"lodash": "^4.17.21"
}
}

View File

@ -0,0 +1,63 @@
const { nodeResolve } = require('@rollup/plugin-node-resolve');
const commonjs = require('@rollup/plugin-commonjs');
const typescript = require('@rollup/plugin-typescript');
const dts = require('rollup-plugin-dts');
const { terser } = require('rollup-plugin-terser');
const peerDepsExternal = require('rollup-plugin-peer-deps-external');
const packageJson = require('./package.json');
module.exports = [
{
input: 'src/index.ts',
output: [
{
file: packageJson.main,
format: 'cjs',
sourcemap: true,
exports: 'named'
},
{
file: packageJson.module,
format: 'esm',
sourcemap: true,
exports: 'named'
}
],
plugins: [
peerDepsExternal(),
nodeResolve({
extensions: ['.js', '.ts', '.tsx', '.json', '.css']
}),
commonjs(),
typescript({ tsconfig: './tsconfig.json' }),
terser(),
],
external: ['@usebruno/lang', 'lodash', 'worker_threads', 'path']
},
{
input: 'src/workers/worker-script.ts',
output: [
{
file: 'dist/cjs/workers/worker-script.js',
format: 'cjs',
sourcemap: true
},
{
file: 'dist/esm/workers/worker-script.js',
format: 'cjs',
sourcemap: true
}
],
plugins: [
peerDepsExternal(),
nodeResolve({
extensions: ['.js', '.ts', '.tsx', '.json', '.css']
}),
commonjs(),
typescript({ tsconfig: './tsconfig.json' }),
terser(),
],
external: ['@usebruno/lang', 'lodash', 'worker_threads', 'path']
}
];

View File

@ -0,0 +1,203 @@
import * as _ from 'lodash';
import {
bruToJsonV2,
jsonToBruV2,
bruToEnvJsonV2,
envJsonToBruV2,
collectionBruToJson as _collectionBruToJson,
jsonToCollectionBru as _jsonToCollectionBru
} from '@usebruno/lang';
export const bruRequestToJson = (data: string | any, parsed: boolean = false): any => {
try {
const json = parsed ? data : bruToJsonV2(data);
let requestType = _.get(json, 'meta.type');
if (requestType === 'http') {
requestType = 'http-request';
} else if (requestType === 'graphql') {
requestType = 'graphql-request';
} else {
requestType = 'http-request';
}
const sequence = _.get(json, 'meta.seq');
const transformedJson = {
type: requestType,
name: _.get(json, 'meta.name'),
seq: !_.isNaN(sequence) ? Number(sequence) : 1,
settings: _.get(json, 'settings', {}),
tags: _.get(json, 'meta.tags', []),
request: {
method: _.upperCase(_.get(json, 'http.method')),
url: _.get(json, 'http.url'),
params: _.get(json, 'params', []),
headers: _.get(json, 'headers', []),
auth: _.get(json, 'auth', {}),
body: _.get(json, 'body', {}),
script: _.get(json, 'script', {}),
vars: _.get(json, 'vars', {}),
assertions: _.get(json, 'assertions', []),
tests: _.get(json, 'tests', ''),
docs: _.get(json, 'docs', '')
}
};
transformedJson.request.auth.mode = _.get(json, 'http.auth', 'none');
transformedJson.request.body.mode = _.get(json, 'http.body', 'none');
return transformedJson;
} catch (e) {
return Promise.reject(e);
}
};
export const jsonRequestToBru = (json: any): string => {
try {
let type = _.get(json, 'type');
if (type === 'http-request') {
type = 'http';
} else if (type === 'graphql-request') {
type = 'graphql';
} else {
type = 'http';
}
const sequence = _.get(json, 'seq');
const bruJson = {
meta: {
name: _.get(json, 'name'),
type: type,
seq: !_.isNaN(sequence) ? Number(sequence) : 1,
tags: _.get(json, 'tags', []),
},
http: {
method: _.lowerCase(_.get(json, 'request.method')),
url: _.get(json, 'request.url'),
auth: _.get(json, 'request.auth.mode', 'none'),
body: _.get(json, 'request.body.mode', 'none')
},
params: _.get(json, 'request.params', []),
headers: _.get(json, 'request.headers', []),
auth: _.get(json, 'request.auth', {}),
body: _.get(json, 'request.body', {}),
script: _.get(json, 'request.script', {}),
vars: {
req: _.get(json, 'request.vars.req', []),
res: _.get(json, 'request.vars.res', [])
},
assertions: _.get(json, 'request.assertions', []),
tests: _.get(json, 'request.tests', ''),
settings: _.get(json, 'settings', {}),
docs: _.get(json, 'request.docs', '')
};
const bru = jsonToBruV2(bruJson);
return bru;
} catch (error) {
throw error;
}
};
export const bruCollectionToJson = (data: string | any, parsed: boolean = false): any => {
try {
const json = parsed ? data : _collectionBruToJson(data);
const transformedJson: any = {
request: {
headers: _.get(json, 'headers', []),
auth: _.get(json, 'auth', {}),
script: _.get(json, 'script', {}),
vars: _.get(json, 'vars', {}),
tests: _.get(json, 'tests', '')
},
settings: _.get(json, 'settings', {}),
docs: _.get(json, 'docs', '')
};
// add meta if it exists
// this is only for folder bru file
if (json.meta) {
transformedJson.meta = {
name: json.meta.name
};
// Include seq if it exists
if (json.meta.seq !== undefined) {
const sequence = json.meta.seq;
transformedJson.meta.seq = !isNaN(sequence) ? Number(sequence) : 1;
}
}
return transformedJson;
} catch (error) {
return Promise.reject(error);
}
};
export const jsonCollectionToBru = (json: any, isFolder?: boolean): string => {
try {
const collectionBruJson: any = {
headers: _.get(json, 'request.headers', []),
script: {
req: _.get(json, 'request.script.req', ''),
res: _.get(json, 'request.script.res', '')
},
vars: {
req: _.get(json, 'request.vars.req', []),
res: _.get(json, 'request.vars.res', [])
},
tests: _.get(json, 'request.tests', ''),
auth: _.get(json, 'request.auth', {}),
docs: _.get(json, 'docs', '')
};
// add meta if it exists
// this is only for folder bru file
if (json?.meta) {
collectionBruJson.meta = {
name: json.meta.name
};
// Include seq if it exists
if (json.meta.seq !== undefined) {
const sequence = json.meta.seq;
collectionBruJson.meta.seq = !isNaN(sequence) ? Number(sequence) : 1;
}
}
if (!isFolder) {
collectionBruJson.auth = _.get(json, 'request.auth', {});
}
return _jsonToCollectionBru(collectionBruJson);
} catch (error) {
throw error;
}
};
export const bruEnvironmentToJson = (bru: string): any => {
try {
const json = bruToEnvJsonV2(bru);
// the app env format requires each variable to have a type
// this needs to be evaluated and safely removed
// it does not appear to be used in schema validation
if (json && json.variables && json.variables.length) {
_.each(json.variables, (v: any) => (v.type = 'text'));
}
return json;
} catch (error) {
return Promise.reject(error);
}
};
export const jsonEnvironmentToBru = (json: any): string => {
try {
const bru = envJsonToBruV2(json);
return bru;
} catch (error) {
throw error;
}
};

View File

@ -0,0 +1,140 @@
import {
bruRequestToJson,
jsonRequestToBru,
bruCollectionToJson,
jsonCollectionToBru,
bruEnvironmentToJson,
jsonEnvironmentToBru
} from './formats/bru';
import { dotenvToJson } from '@usebruno/lang';
import BruParserWorker from './workers';
import {
ParseOptions,
StringifyOptions,
ParsedRequest,
ParsedCollection,
ParsedEnvironment
} from './types';
export const parseRequest = (content: string, options: ParseOptions = { format: 'bru' }): any => {
if (options.format === 'bru') {
return bruRequestToJson(content);
}
throw new Error(`Unsupported format: ${options.format}`);
};
export const stringifyRequest = (requestObj: ParsedRequest, options: StringifyOptions = { format: 'bru' }): string => {
if (options.format === 'bru') {
return jsonRequestToBru(requestObj);
}
throw new Error(`Unsupported format: ${options.format}`);
};
let globalWorkerInstance: BruParserWorker | null = null;
let cleanupHandlersRegistered = false;
const getWorkerInstance = (): BruParserWorker => {
if (!globalWorkerInstance) {
globalWorkerInstance = new BruParserWorker();
if (!cleanupHandlersRegistered) {
const cleanup = async () => {
if (globalWorkerInstance) {
await globalWorkerInstance.cleanup();
globalWorkerInstance = null;
}
};
// Handle various exit scenarios
process.on('exit', () => {
// Note: async operations won't work in 'exit' event
// We handle termination in other events
});
process.on('SIGINT', async () => {
await cleanup();
process.exit(0);
});
process.on('SIGTERM', async () => {
await cleanup();
process.exit(0);
});
process.on('uncaughtException', async (error) => {
console.error('Uncaught Exception:', error);
await cleanup();
process.exit(1);
});
process.on('unhandledRejection', async (reason) => {
console.error('Unhandled Rejection:', reason);
await cleanup();
process.exit(1);
});
cleanupHandlersRegistered = true;
}
}
return globalWorkerInstance;
};
export const parseRequestViaWorker = async (content: string): Promise<any> => {
const fileParserWorker = getWorkerInstance();
return await fileParserWorker.parseRequest(content);
};
export const stringifyRequestViaWorker = async (requestObj: any): Promise<string> => {
const fileParserWorker = getWorkerInstance();
return await fileParserWorker.stringifyRequest(requestObj);
};
export const parseCollection = (content: string, options: ParseOptions = { format: 'bru' }): any => {
if (options.format === 'bru') {
return bruCollectionToJson(content);
}
throw new Error(`Unsupported format: ${options.format}`);
};
export const stringifyCollection = (collectionObj: ParsedCollection, options: StringifyOptions = { format: 'bru' }): string => {
if (options.format === 'bru') {
return jsonCollectionToBru(collectionObj, false);
}
throw new Error(`Unsupported format: ${options.format}`);
};
export const parseFolder = (content: string, options: ParseOptions = { format: 'bru' }): any => {
if (options.format === 'bru') {
return bruCollectionToJson(content);
}
throw new Error(`Unsupported format: ${options.format}`);
};
export const stringifyFolder = (folderObj: any, options: StringifyOptions = { format: 'bru' }): string => {
if (options.format === 'bru') {
return jsonCollectionToBru(folderObj, true);
}
throw new Error(`Unsupported format: ${options.format}`);
};
export const parseEnvironment = (content: string, options: ParseOptions = { format: 'bru' }): any => {
if (options.format === 'bru') {
return bruEnvironmentToJson(content);
}
throw new Error(`Unsupported format: ${options.format}`);
};
export const stringifyEnvironment = (envObj: ParsedEnvironment, options: StringifyOptions = { format: 'bru' }): string => {
if (options.format === 'bru') {
return jsonEnvironmentToBru(envObj);
}
throw new Error(`Unsupported format: ${options.format}`);
};
export const parseDotEnv = (content: string): Record<string, string> => {
return dotenvToJson(content);
};
export { BruParserWorker };
export * from './types';

View File

@ -0,0 +1,141 @@
export interface ParseOptions {
format?: 'bru' | 'yaml';
}
export interface StringifyOptions {
format?: 'bru' | 'yaml';
}
export interface RequestBody {
mode?: string;
raw?: string;
formUrlEncoded?: Array<{ name: string; value: string; enabled: boolean }>;
multipartForm?: Array<{ name: string; value: string; type: string; enabled: boolean }>;
json?: string;
xml?: string;
sparql?: string;
graphql?: {
query?: string;
variables?: string;
};
}
export interface AuthConfig {
mode?: string;
basic?: {
username?: string;
password?: string;
};
bearer?: {
token?: string;
};
apikey?: {
key?: string;
value?: string;
placement?: string;
};
awsv4?: {
accessKeyId?: string;
secretAccessKey?: string;
sessionToken?: string;
service?: string;
region?: string;
profileName?: string;
};
oauth2?: {
grantType?: string;
callbackUrl?: string;
authorizationUrl?: string;
accessTokenUrl?: string;
clientId?: string;
clientSecret?: string;
scope?: string;
state?: string;
pkce?: boolean;
};
}
export interface RequestParam {
name: string;
value: string;
enabled: boolean;
}
export interface RequestHeader {
name: string;
value: string;
enabled: boolean;
}
export interface RequestAssertion {
name: string;
value: string;
enabled: boolean;
}
export interface RequestVars {
req?: Array<{ name: string; value: string; enabled: boolean }>;
res?: Array<{ name: string; value: string; enabled: boolean }>;
}
export interface RequestScript {
req?: string;
res?: string;
}
export interface RequestSettings {
[key: string]: any;
}
export interface RequestData {
method: string;
url: string;
params: RequestParam[];
headers: RequestHeader[];
auth: AuthConfig;
body: RequestBody;
script: RequestScript;
vars: RequestVars;
assertions: RequestAssertion[];
tests: string;
docs: string;
}
export interface ParsedRequest {
type: 'http-request' | 'graphql-request';
name: string;
seq: number;
settings: RequestSettings;
tags: string[];
request: RequestData;
}
export interface ParsedCollection {
name: string;
type?: string;
version?: string;
[key: string]: any;
}
export interface EnvironmentVariable {
name: string;
value: string;
enabled: boolean;
}
export interface ParsedEnvironment {
variables: EnvironmentVariable[];
}
export interface WorkerTask {
data: any;
priority: number;
scriptPath: string;
taskType?: 'parse' | 'stringify';
resolve?: (value: any) => void;
reject?: (reason?: any) => void;
}
export interface Lane {
maxSize: number;
}

View File

@ -0,0 +1,9 @@
declare module '@usebruno/lang' {
export function bruToJsonV2(bruContent: string): any;
export function jsonToBruV2(jsonData: any): string;
export function bruToEnvJsonV2(bruContent: string): any;
export function envJsonToBruV2(jsonData: any): string;
export function collectionBruToJson(bruContent: string): any;
export function jsonToCollectionBru(jsonData: any): string;
export function dotenvToJson(envContent: string): Record<string, string>;
}

View File

@ -0,0 +1,114 @@
import { Worker } from 'worker_threads';
interface QueuedTask {
priority: number;
scriptPath: string;
data: any;
taskType: 'parse' | 'stringify';
resolve?: (value: any) => void;
reject?: (reason?: any) => void;
}
class WorkerQueue {
private queue: QueuedTask[];
private isProcessing: boolean;
private workers: Record<string, Worker>;
constructor() {
this.queue = [];
this.isProcessing = false;
this.workers = {};
}
async getWorkerForScriptPath(scriptPath: string) {
if (!this.workers) this.workers = {};
let worker = this.workers[scriptPath];
if (!worker || worker.threadId === -1) {
this.workers[scriptPath] = worker = new Worker(scriptPath);
}
return worker;
}
async enqueue(task: QueuedTask) {
const { priority, scriptPath, data, taskType } = task;
return new Promise((resolve, reject) => {
this.queue.push({ priority, scriptPath, data, taskType, resolve, reject });
this.queue?.sort((taskX, taskY) => taskX?.priority - taskY?.priority);
this.processQueue();
});
}
async processQueue() {
if (this.isProcessing || this.queue.length === 0){
return;
}
this.isProcessing = true;
const { scriptPath, data, taskType, resolve, reject } = this.queue.shift() as QueuedTask;
try {
const result = await this.runWorker({ scriptPath, data, taskType });
resolve?.(result);
} catch (error) {
reject?.(error);
} finally {
this.isProcessing = false;
this.processQueue();
}
}
async runWorker({ scriptPath, data, taskType }: { scriptPath: string; data: any; taskType: 'parse' | 'stringify' }) {
return new Promise(async (resolve, reject) => {
let worker = await this.getWorkerForScriptPath(scriptPath);
const messageHandler = (data: any) => {
worker.off('message', messageHandler);
worker.off('error', errorHandler);
worker.off('exit', exitHandler);
if (data?.error) {
reject(new Error(data?.error));
} else {
resolve(data);
}
};
const errorHandler = (error: Error) => {
worker.off('message', messageHandler);
worker.off('error', errorHandler);
worker.off('exit', exitHandler);
reject(error);
};
const exitHandler = (code: number) => {
worker.off('message', messageHandler);
worker.off('error', errorHandler);
worker.off('exit', exitHandler);
// Remove dead worker from cache
delete this.workers[scriptPath];
reject(new Error(`Worker stopped with exit code ${code}`));
};
worker.on('message', messageHandler);
worker.on('error', errorHandler);
worker.on('exit', exitHandler);
worker.postMessage({ taskType, data });
});
}
async cleanup() {
const promises = Object.values(this.workers).map(worker => {
if (worker.threadId !== -1) {
return worker.terminate();
}
return Promise.resolve();
});
await Promise.allSettled(promises);
this.workers = {};
}
}
export default WorkerQueue;

View File

@ -0,0 +1,86 @@
import WorkerQueue from "./WorkerQueue";
import { Lane } from "../types";
import path from "path";
const sizeInMB = (size: number): number => {
return size / (1024 * 1024);
}
const getSize = (data: any): number => {
return sizeInMB(typeof data === 'string' ? Buffer.byteLength(data, 'utf8') : Buffer.byteLength(JSON.stringify(data), 'utf8'));
}
/**
 * Lanes are used to determine which worker queue to use based on the size of the data.
 *
 * Lanes are ordered by maximum payload size, from 0.005 MB up to 100 MB; a task is routed
 * to the first lane whose maxSize can accommodate it (see the illustrative note after the
 * LANES array). Keeping small and large payloads in separate queues helps parsing performance.
 */
const LANES: Lane[] = [{
maxSize: 0.005
},{
maxSize: 0.1
},{
maxSize: 1
},{
maxSize: 10
},{
maxSize: 100
}];
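// Illustrative routing (not part of the original source): a ~0.05 MB payload is served
// by the 0.1 MB lane, a ~50 MB payload by the 100 MB lane, and anything larger falls
// back to the last (largest) lane; see getWorkerQueue below.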
interface WorkerQueueWithSize {
maxSize: number;
workerQueue: WorkerQueue;
}
class BruParserWorker {
private workerQueues: WorkerQueueWithSize[];
constructor() {
this.workerQueues = LANES?.map(lane => ({
maxSize: lane?.maxSize,
workerQueue: new WorkerQueue()
}));
}
private getWorkerQueue(size: number): WorkerQueue {
// Find the first queue that can handle the given size
// or fallback to the last queue for largest files
const queueForSize = this.workerQueues.find((queue) =>
queue.maxSize >= size
);
return queueForSize?.workerQueue ?? this.workerQueues[this.workerQueues.length - 1].workerQueue;
}
private async enqueueTask({ data, taskType }: { data: any; taskType: 'parse' | 'stringify' }): Promise<any> {
const size = getSize(data);
const workerQueue = this.getWorkerQueue(size);
const workerScriptPath = path.join(__dirname, './workers/worker-script.js');
return workerQueue.enqueue({
data,
priority: size,
scriptPath: workerScriptPath,
taskType,
});
}
async parseRequest(data: any): Promise<any> {
return this.enqueueTask({ data, taskType: 'parse' });
}
async stringifyRequest(data: any): Promise<any> {
return this.enqueueTask({ data, taskType: 'stringify' });
}
async cleanup(): Promise<void> {
const cleanupPromises = this.workerQueues.map(({ workerQueue }) =>
workerQueue.cleanup()
);
await Promise.allSettled(cleanupPromises);
}
}
export default BruParserWorker;

View File

@ -0,0 +1,27 @@
import { parentPort } from 'worker_threads';
import { bruRequestToJson, jsonRequestToBru } from '../formats/bru';
interface WorkerMessage {
taskType: 'parse' | 'stringify';
data: any;
}
parentPort?.on('message', async (message: WorkerMessage) => {
try {
const { taskType, data } = message;
let result: any;
if (taskType === 'parse') {
result = bruRequestToJson(data);
} else if (taskType === 'stringify') {
result = jsonRequestToBru(data);
} else {
throw new Error(`Unknown task type: ${taskType}`);
}
parentPort?.postMessage(result);
} catch (error: any) {
console.error('Worker error:', error);
parentPort?.postMessage({ error: error?.message });
}
});

View File

@ -0,0 +1,22 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "ESNext",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"outDir": "./dist",
"rootDir": "./src",
"resolveJsonModule": true,
"allowSyntheticDefaultImports": true,
"moduleResolution": "node",
"declaration": true,
"declarationDir": "./dist/types",
"allowJs": true,
"checkJs": false,
"typeRoots": ["./node_modules/@types", "./src/types"]
},
"include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.js", "src/**/*.d.ts"],
"exclude": ["node_modules", "dist"]
}

View File

@ -76,6 +76,7 @@ async function setup() {
execCommand('npm run build:bruno-common', 'Building bruno-common'); execCommand('npm run build:bruno-common', 'Building bruno-common');
execCommand('npm run build:bruno-converters', 'Building bruno-converters'); execCommand('npm run build:bruno-converters', 'Building bruno-converters');
execCommand('npm run build:bruno-requests', 'Building bruno-requests'); execCommand('npm run build:bruno-requests', 'Building bruno-requests');
execCommand('npm run build:bruno-filestore', 'Building bruno-filestore');
// Bundle JS sandbox libraries // Bundle JS sandbox libraries
execCommand( execCommand(