Merge branch 'main' into chore/remove-crypto-js-3.1.9-1

Yash 2024-12-15 11:28:14 +05:30
commit ad2bbfd129
55 changed files with 6138 additions and 3361 deletions

View File

@ -15,7 +15,10 @@ permissions:
jobs:
test:
name: CLI Tests
runs-on: ubuntu-latest
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v3

package-lock.json (generated, 8396 lines changed)

File diff suppressed because it is too large.

View File

@ -31,6 +31,6 @@ yarn-error.log*
# next.js
.next/
out/
dist/
.env

View File

@ -1,22 +0,0 @@
module.exports = {
output: 'export',
reactStrictMode: false,
publicRuntimeConfig: {
CI: process.env.CI,
PLAYWRIGHT: process.env.PLAYWRIGHT,
ENV: process.env.ENV
},
webpack: (config, { isServer }) => {
// Fixes npm packages that depend on `fs` module
if (!isServer) {
config.resolve.fallback.fs = false;
}
Object.defineProperty(config, 'devtool', {
get() {
return 'source-map';
},
set() {},
});
return config;
},
};

View File

@ -3,10 +3,9 @@
"version": "0.3.0",
"private": true,
"scripts": {
"dev": "cross-env ENV=dev next dev -p 3000",
"build": "next build",
"start": "next start",
"lint": "next lint",
"dev": "rsbuild dev",
"build": "rsbuild build -m production",
"preview": "rsbuild preview",
"test": "jest",
"test:prettier": "prettier --check \"./src/**/*.{js,jsx,json,ts,tsx}\"",
"prettier": "prettier --write \"./src/**/*.{js,jsx,json,ts,tsx}\""
@ -49,7 +48,6 @@
"markdown-it-replace-link": "^1.2.0",
"mousetrap": "^1.6.5",
"nanoid": "3.3.4",
"next": "14.2.16",
"path": "^0.12.7",
"pdfjs-dist": "4.4.168",
"platform": "^1.3.6",
@ -57,17 +55,17 @@
"prettier": "^2.7.1",
"qs": "^6.11.0",
"query-string": "^7.0.1",
"react": "18.2.0",
"react": "19.0.0",
"react-copy-to-clipboard": "^5.1.0",
"react-dnd": "^16.0.1",
"react-dnd-html5-backend": "^16.0.1",
"react-dom": "18.2.0",
"react-dom": "19.0.0",
"react-hot-toast": "^2.4.0",
"react-i18next": "^15.0.1",
"react-inspector": "^6.0.2",
"react-pdf": "9.1.1",
"react-player": "^2.16.0",
"react-redux": "^7.2.6",
"react-redux": "^7.2.9",
"react-tooltip": "^5.5.2",
"sass": "^1.46.0",
"strip-json-comments": "^5.0.1",
@ -79,13 +77,14 @@
"yup": "^0.32.11"
},
"devDependencies": {
"@babel/core": "^7.16.0",
"@babel/plugin-transform-spread": "^7.16.7",
"@babel/preset-env": "^7.16.4",
"@babel/preset-react": "^7.16.0",
"@babel/runtime": "^7.16.3",
"@rsbuild/core": "^1.1.2",
"@rsbuild/plugin-babel": "^1.0.3",
"@rsbuild/plugin-node-polyfill": "^1.2.0",
"@rsbuild/plugin-react": "^1.0.7",
"@rsbuild/plugin-sass": "^1.1.0",
"@rsbuild/plugin-styled-components": "1.1.0",
"autoprefixer": "10.4.20",
"babel-loader": "^8.2.3",
"babel-plugin-react-compiler": "19.0.0-beta-a7bf2bd-20241110",
"cross-env": "^7.0.3",
"css-loader": "7.1.2",
"file-loader": "^6.2.0",

View File

@ -0,0 +1,27 @@
import { defineConfig } from '@rsbuild/core';
import { pluginReact } from '@rsbuild/plugin-react';
import { pluginBabel } from '@rsbuild/plugin-babel';
import { pluginStyledComponents } from '@rsbuild/plugin-styled-components';
import { pluginSass } from '@rsbuild/plugin-sass';
import { pluginNodePolyfill } from '@rsbuild/plugin-node-polyfill'
export default defineConfig({
plugins: [
pluginNodePolyfill(),
pluginReact(),
pluginStyledComponents(),
pluginSass(),
pluginBabel({
include: /\.(?:js|jsx|tsx)$/,
babelLoaderOptions(opts) {
opts.plugins?.unshift('babel-plugin-react-compiler');
}
})
],
source: {
tsconfigPath: './jsconfig.json', // Specifies the path to the JavaScript/TypeScript configuration file
},
html: {
title: 'Bruno'
},
});

View File

@ -10,6 +10,12 @@ import Modal from 'components/Modal';
const CreateEnvironment = ({ collection, onClose }) => {
const dispatch = useDispatch();
const inputRef = useRef();
// todo: Add this to global env too.
const validateEnvironmentName = (name) => {
return !collection?.environments?.some((env) => env?.name?.toLowerCase().trim() === name?.toLowerCase().trim());
};
const formik = useFormik({
enableReinitialize: true,
initialValues: {
@ -17,9 +23,10 @@ const CreateEnvironment = ({ collection, onClose }) => {
},
validationSchema: Yup.object({
name: Yup.string()
.min(1, 'must be at least 1 character')
.max(50, 'must be 50 characters or less')
.required('name is required')
.min(1, 'Must be at least 1 character')
.max(50, 'Must be 50 characters or less')
.required('Name is required')
.test('duplicate-name', 'Environment already exists', validateEnvironmentName)
}),
onSubmit: (values) => {
dispatch(addEnvironment(values.name, collection.uid))
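
For illustration, a minimal sketch of how a Yup .test predicate like the duplicate-name check above behaves (the schema and values here are invented); returning false fails validation with the supplied message:

const Yup = require('yup');

const schema = Yup.object({
  name: Yup.string()
    .required('Name is required')
    .test('duplicate-name', 'Environment already exists', (name) => name !== 'taken')
});

schema.validate({ name: 'taken' }).catch((err) => {
  console.log(err.errors); // [ 'Environment already exists' ]
});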

View File

@ -1,8 +1,6 @@
import { createPortal } from 'react-dom';
function Portal({ children, wrapperId }) {
wrapperId = wrapperId || 'bruno-app-body';
return createPortal(children, document.getElementById(wrapperId));
function Portal({ children }) {
return createPortal(children, document.body);
}
export default Portal;

View File

@ -70,7 +70,7 @@ const QueryUrl = ({ item, collection, handleRun }) => {
const handleGenerateCode = (e) => {
e.stopPropagation();
if (item.request.url !== '' || (item.draft?.request.url !== undefined && item.draft?.request.url !== '')) {
if (item?.request?.url !== '' || (item.draft?.request?.url !== undefined && item.draft?.request?.url !== '')) {
setGenerateCodeItemModalOpen(true);
} else {
toast.error('URL is required');

View File

@ -10,6 +10,7 @@ const StyledWrapper = styled.div`
align-items: center;
justify-content: center;
width: 10px;
min-width: 10px;
padding: 0;
cursor: col-resize;
background: transparent;

View File

@ -183,7 +183,7 @@ const CollectionItem = ({ item, collection, searchText }) => {
const handleGenerateCode = (e) => {
e.stopPropagation();
dropdownTippyRef.current.hide();
if (item.request.url !== '' || (item.draft?.request.url !== undefined && item.draft?.request.url !== '')) {
if (item?.request?.url !== '' || (item?.draft?.request?.url !== undefined && item?.draft?.request?.url !== '')) {
setGenerateCodeItemModalOpen(true);
} else {
toast.error('URL is required');

View File

@ -17,7 +17,6 @@ const RenameCollection = ({ collection, onClose }) => {
validationSchema: Yup.object({
name: Yup.string()
.min(1, 'must be at least 1 character')
.max(50, 'must be 50 characters or less')
.required('name is required')
}),
onSubmit: (values) => {

View File

@ -184,7 +184,7 @@ const Sidebar = () => {
Star
</GitHubButton> */}
</div>
<div className="flex flex-grow items-center justify-end text-xs mr-2">v1.34.2</div>
<div className="flex flex-grow items-center justify-end text-xs mr-2">v1.36.0</div>
</div>
</div>
</div>

View File

@ -63,16 +63,16 @@ const Table = ({ minColumnWidth = 1, headers = [], children }) => {
[activeColumnIndex, columns, minColumnWidth]
);
const handleMouseUp = useCallback(() => {
setActiveColumnIndex(null);
removeListeners();
}, [removeListeners]);
const removeListeners = useCallback(() => {
window.removeEventListener('mousemove', handleMouseMove);
window.removeEventListener('mouseup', removeListeners);
}, [handleMouseMove]);
const handleMouseUp = useCallback(() => {
setActiveColumnIndex(null);
removeListeners?.();
}, [removeListeners]);
useEffect(() => {
if (activeColumnIndex !== null) {
window.addEventListener('mousemove', handleMouseMove);

View File

@ -0,0 +1,14 @@
import React from 'react';
import ReactDOM from 'react-dom/client';
import App from './pages/index';
const rootElement = document.getElementById('root');
if (rootElement) {
const root = ReactDOM.createRoot(rootElement);
root.render(
<React.StrictMode>
<App />
</React.StrictMode>
);
}

View File

@ -25,31 +25,7 @@ import '@fontsource/inter/900.css';
import { setupPolyfills } from 'utils/common/setupPolyfills';
setupPolyfills();
function SafeHydrate({ children }) {
return <div suppressHydrationWarning>{typeof window === 'undefined' ? null : children}</div>;
}
function NoSsr({ children }) {
const SERVER_RENDERED = typeof window === 'undefined';
if (SERVER_RENDERED) {
return null;
}
return <>{children}</>;
}
function MyApp({ Component, pageProps }) {
const [domLoaded, setDomLoaded] = useState(false);
useEffect(() => {
setDomLoaded(true);
}, []);
if (!domLoaded) {
return null;
}
function Main({ children }) {
if (!window.ipcRenderer) {
return (
<div class="bg-red-100 border border-red-400 text-red-700 px-4 py-3 mx-10 my-10 rounded relative" role="alert">
@ -65,23 +41,21 @@ function MyApp({ Component, pageProps }) {
return (
<ErrorBoundary>
<SafeHydrate>
<NoSsr>
<Provider store={ReduxStore}>
<ThemeProvider>
<ToastProvider>
<AppProvider>
<HotkeysProvider>
<Component {...pageProps} />
</HotkeysProvider>
</AppProvider>
</ToastProvider>
</ThemeProvider>
</Provider>
</NoSsr>
</SafeHydrate>
<Provider store={ReduxStore}>
<ThemeProvider>
<ToastProvider>
<AppProvider>
<HotkeysProvider>
{children}
</HotkeysProvider>
</AppProvider>
</ToastProvider>
</ThemeProvider>
</Provider>
</ErrorBoundary>
);
}
export default MyApp;
export default Main;

View File

@ -1,41 +0,0 @@
import Document, { Html, Head, Main, NextScript } from 'next/document';
import { ServerStyleSheet } from 'styled-components';
export default class MyDocument extends Document {
static async getInitialProps(ctx) {
const sheet = new ServerStyleSheet();
const originalRenderPage = ctx.renderPage;
try {
ctx.renderPage = () =>
originalRenderPage({
enhanceApp: (App) => (props) => sheet.collectStyles(<App {...props} />)
});
const initialProps = await Document.getInitialProps(ctx);
return {
...initialProps,
styles: (
<>
{initialProps.styles}
{sheet.getStyleElement()}
</>
)
};
} finally {
sheet.seal();
}
}
render() {
return (
<Html>
<Head />
<body id="bruno-app-body">
<Main />
<NextScript />
</body>
</Html>
);
}
}

View File

@ -1,20 +1,16 @@
import Head from 'next/head';
import Bruno from './Bruno';
import GlobalStyle from '../globalStyles';
import '../i18n';
import Main from './Main';
export default function Home() {
export default function App() {
return (
<div>
<Head>
<title>Bruno</title>
<link rel="icon" href="/favicon.ico" />
</Head>
<GlobalStyle />
<main>
<Bruno />
<Main>
<GlobalStyle />
<Bruno />
</Main>
</main>
</div>
);

View File

@ -7,21 +7,19 @@
*/
import { useEffect } from 'react';
import getConfig from 'next/config';
import { PostHog } from 'posthog-node';
import platformLib from 'platform';
import { uuid } from 'utils/common';
const { publicRuntimeConfig } = getConfig();
const posthogApiKey = process.env.NEXT_PUBLIC_POSTHOG_API_KEY;
let posthogClient = null;
const isPlaywrightTestRunning = () => {
return publicRuntimeConfig.PLAYWRIGHT ? true : false;
return process.env.PLAYWRIGHT ? true : false;
};
const isDevEnv = () => {
return publicRuntimeConfig.ENV === 'dev';
return import.meta.env.MODE === 'development';
};
const getPosthogClient = () => {
@ -60,7 +58,7 @@ const trackStart = () => {
event: 'start',
properties: {
os: platformLib.os.family,
version: '1.34.2'
version: '1.36.0'
}
});
};

View File

@ -1,4 +1,3 @@
import getConfig from 'next/config';
import { configureStore } from '@reduxjs/toolkit';
import tasksMiddleware from './middlewares/tasks/middleware';
import debugMiddleware from './middlewares/debug/middleware';
@ -8,9 +7,8 @@ import tabsReducer from './slices/tabs';
import notificationsReducer from './slices/notifications';
import globalEnvironmentsReducer from './slices/global-environments';
const { publicRuntimeConfig } = getConfig();
const isDevEnv = () => {
return publicRuntimeConfig.ENV === 'dev';
return import.meta.env.MODE === 'development';
};
let middleware = [tasksMiddleware.middleware];

View File

@ -9,6 +9,7 @@ const getReadNotificationIds = () => {
return readNotificationIds;
} catch (err) {
toast.error('An error occurred while fetching read notifications');
return [];
}
};
@ -34,7 +35,6 @@ export const notificationSlice = createSlice({
state.loading = action.payload.fetching;
},
setNotifications: (state, action) => {
console.log('notifications', notifications);
let notifications = action.payload.notifications || [];
let readNotificationIds = state.readNotificationIds;
@ -58,14 +58,16 @@ export const notificationSlice = createSlice({
});
},
markNotificationAsRead: (state, action) => {
if (state.readNotificationIds.includes(action.payload.notificationId)) return;
const { notificationId } = action.payload;
if (state.readNotificationIds.includes(notificationId)) return;
const notification = state.notifications.find(
(notification) => notification.id === action.payload.notificationId
(notification) => notification.id === notificationId
);
if (!notification) return;
state.readNotificationIds.push(action.payload.notificationId);
state.readNotificationIds.push(notificationId);
setReadNotificationsIds(state.readNotificationIds);
notification.read = true;
},

View File

@ -303,7 +303,8 @@ export const transformCollectionToSaveToExportAsFile = (collection, options = {}
script: si.request.script,
vars: si.request.vars,
assertions: si.request.assertions,
tests: si.request.tests
tests: si.request.tests,
docs: si.request.docs
};
// Handle auth object dynamically

View File

@ -151,7 +151,15 @@ export const relativeDate = (dateString) => {
export const humanizeDate = (dateString) => {
// See this discussion for why .split is necessary
// https://stackoverflow.com/questions/7556591/is-the-javascript-date-object-always-one-day-off
const date = new Date(dateString.split('-'));
if (!dateString || typeof dateString !== 'string') {
return 'Invalid Date';
}
const date = new Date(dateString);
if (isNaN(date.getTime())) {
return 'Invalid Date';
}
return date.toLocaleDateString('en-US', {
year: 'numeric',
month: 'long',

View File

@ -58,6 +58,18 @@ describe('common utils', () => {
it('should return invalid date if the date is invalid', () => {
expect(humanizeDate('9999-99-99')).toBe('Invalid Date');
});
it('should return "Invalid Date" if the date is null', () => {
expect(humanizeDate(null)).toBe('Invalid Date');
});
it('should return a humanized date for a valid date in ISO format', () => {
expect(humanizeDate('2024-11-28T00:00:00Z')).toBe('November 28, 2024');
});
it('should return "Invalid Date" for a non-date string', () => {
expect(humanizeDate('some random text')).toBe('Invalid Date');
});
});
describe('relativeDate', () => {

View File

@ -57,6 +57,8 @@ function getDataString(request) {
console.error('Failed to parse JSON data:', error);
return { data: request.data.toString() };
}
} else if (contentType && contentType.includes('application/xml')) {
return { data: request.data };
}
const parsedQueryString = querystring.parse(request.data, { sort: false });

View File

@ -60,7 +60,7 @@ export const getRequestFromCurlCommand = (curlCommand, requestType = 'http-reque
} else if (contentType.includes('application/json')) {
body.mode = 'json';
body.json = convertToCodeMirrorJson(parsedBody);
} else if (contentType.includes('text/xml')) {
} else if (contentType.includes('xml')) {
body.mode = 'xml';
body.xml = parsedBody;
} else if (contentType.includes('application/x-www-form-urlencoded')) {
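
A small standalone check (content types invented) showing why the broadened substring test matters; before this change only content types containing 'text/xml' selected the xml body mode, now any content type containing 'xml' does:

const contentTypes = ['text/xml', 'application/xml', 'application/soap+xml; charset=utf-8'];
contentTypes.forEach((ct) => {
  console.log(ct, ct.includes('text/xml'), ct.includes('xml'));
});
// text/xml                             -> true  true
// application/xml                      -> false true
// application/soap+xml; charset=utf-8  -> false true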

View File

@ -93,8 +93,68 @@ const printRunSummary = (results) => {
};
};
const createCollectionFromPath = (collectionPath) => {
const environmentsPath = path.join(collectionPath, `environments`);
const getFilesInOrder = (collectionPath) => {
let collection = {
pathname: collectionPath
};
const traverse = (currentPath) => {
const filesInCurrentDir = fs.readdirSync(currentPath);
if (currentPath.includes('node_modules')) {
return;
}
const currentDirItems = [];
for (const file of filesInCurrentDir) {
const filePath = path.join(currentPath, file);
const stats = fs.lstatSync(filePath);
if (
stats.isDirectory() &&
filePath !== environmentsPath &&
!filePath.startsWith('.git') &&
!filePath.startsWith('node_modules')
) {
let folderItem = { name: file, pathname: filePath, type: 'folder', items: traverse(filePath) }
const folderBruFilePath = path.join(filePath, 'folder.bru');
const folderBruFileExists = fs.existsSync(folderBruFilePath);
if(folderBruFileExists) {
const folderBruContent = fs.readFileSync(folderBruFilePath, 'utf8');
let folderBruJson = collectionBruToJson(folderBruContent);
folderItem.root = folderBruJson;
}
currentDirItems.push(folderItem);
}
}
for (const file of filesInCurrentDir) {
if (['collection.bru', 'folder.bru'].includes(file)) {
continue;
}
const filePath = path.join(currentPath, file);
const stats = fs.lstatSync(filePath);
if (!stats.isDirectory() && path.extname(filePath) === '.bru') {
const bruContent = fs.readFileSync(filePath, 'utf8');
const bruJson = bruToJson(bruContent);
currentDirItems.push({
name: file,
pathname: filePath,
...bruJson
});
}
}
return currentDirItems
};
collection.items = traverse(collectionPath);
return collection;
};
return getFilesInOrder(collectionPath);
};
const getBruFilesRecursively = (dir, testsOnly) => {
const environmentsPath = 'environments';
const collection = {};
const getFilesInOrder = (dir) => {
let bruJsons = [];
@ -359,6 +419,12 @@ const handler = async function (argv) {
const brunoConfigFile = fs.readFileSync(brunoJsonPath, 'utf8');
const brunoConfig = JSON.parse(brunoConfigFile);
const collectionRoot = getCollectionRoot(collectionPath);
let collection = createCollectionFromPath(collectionPath);
collection = {
brunoConfig,
root: collectionRoot,
...collection
}
if (clientCertConfig) {
try {
@ -584,7 +650,8 @@ const handler = async function (argv) {
processEnvVars,
brunoConfig,
collectionRoot,
runtime
runtime,
collection
);
results.push({
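
For orientation, a sketch (directory layout and names invented) of the tree that createCollectionFromPath builds: directories become folder items with nested items and an optional root parsed from folder.bru, while .bru files other than collection.bru and folder.bru are parsed with bruToJson; the run handler then spreads brunoConfig and the collection root onto this object.

// Given /demo containing folder/ (with folder.bru and get.bru) and ping.bru:
// {
//   pathname: '/demo',
//   brunoConfig: { ... },            // added by the run handler
//   root: { ... },                   // parsed collection.bru, added by the run handler
//   items: [
//     {
//       name: 'folder', pathname: '/demo/folder', type: 'folder',
//       root: { ... },               // parsed folder.bru, when present
//       items: [{ name: 'get.bru', pathname: '/demo/folder/get.bru', /* bruToJson output */ }]
//     },
//     { name: 'ping.bru', pathname: '/demo/ping.bru', /* bruToJson output */ }
//   ]
// }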

View File

@ -13,14 +13,17 @@ const getContentType = (headers = {}) => {
return contentType;
};
const interpolateVars = (request, envVars = {}, runtimeVariables = {}, processEnvVars = {}) => {
const interpolateVars = (request, envVariables = {}, runtimeVariables = {}, processEnvVars = {}) => {
const collectionVariables = request?.collectionVariables || {};
const folderVariables = request?.folderVariables || {};
const requestVariables = request?.requestVariables || {};
// we clone envVars because we don't want to modify the original object
envVars = cloneDeep(envVars);
envVariables = cloneDeep(envVariables);
// envVars can in turn have values like {{process.env.VAR_NAME}}
// so we need to interpolate envVars first with processEnvVars
forOwn(envVars, (value, key) => {
envVars[key] = interpolate(value, {
forOwn(envVariables, (value, key) => {
envVariables[key] = interpolate(value, {
process: {
env: {
...processEnvVars
@ -36,7 +39,10 @@ const interpolateVars = (request, envVars = {}, runtimeVariables = {}, processEn
// runtimeVariables take precedence over envVars
const combinedVars = {
...envVars,
...collectionVariables,
...envVariables,
...folderVariables,
...requestVariables,
...runtimeVariables,
process: {
env: {
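
A tiny standalone sketch (values invented) of the precedence implied by the spread order in combinedVars; later spreads overwrite earlier ones:

// Lowest to highest precedence: collection < environment < folder < request < runtime
const combined = {
  ...{ host: 'from-collection' }, // collectionVariables
  ...{ host: 'from-env' },        // envVariables
  ...{ host: 'from-folder' },     // folderVariables
  ...{ host: 'from-request' },    // requestVariables
  ...{ host: 'from-runtime' }     // runtimeVariables
};
console.log(combined.host); // 'from-runtime'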

View File

@ -1,24 +1,224 @@
const { get, each, filter } = require('lodash');
const { get, each, filter, find, compact } = require('lodash');
const fs = require('fs');
const os = require('os');
var JSONbig = require('json-bigint');
const decomment = require('decomment');
const crypto = require('node:crypto');
const prepareRequest = (request, collectionRoot) => {
const headers = {};
let contentTypeDefined = false;
const mergeHeaders = (collection, request, requestTreePath) => {
let headers = new Map();
// collection headers
each(get(collectionRoot, 'request.headers', []), (h) => {
if (h.enabled) {
headers[h.name] = h.value;
if (h.name.toLowerCase() === 'content-type') {
contentTypeDefined = true;
}
let collectionHeaders = get(collection, 'root.request.headers', []);
collectionHeaders.forEach((header) => {
if (header.enabled) {
headers.set(header.name, header.value);
}
});
each(request.headers, (h) => {
for (let i of requestTreePath) {
if (i.type === 'folder') {
let _headers = get(i, 'root.request.headers', []);
_headers.forEach((header) => {
if (header.enabled) {
headers.set(header.name, header.value);
}
});
} else {
const _headers = i?.draft ? get(i, 'draft.request.headers', []) : get(i, 'request.headers', []);
_headers.forEach((header) => {
if (header.enabled) {
headers.set(header.name, header.value);
}
});
}
}
request.headers = Array.from(headers, ([name, value]) => ({ name, value, enabled: true }));
};
const mergeVars = (collection, request, requestTreePath) => {
let reqVars = new Map();
let collectionRequestVars = get(collection, 'root.request.vars.req', []);
let collectionVariables = {};
collectionRequestVars.forEach((_var) => {
if (_var.enabled) {
reqVars.set(_var.name, _var.value);
collectionVariables[_var.name] = _var.value;
}
});
let folderVariables = {};
let requestVariables = {};
for (let i of requestTreePath) {
if (i.type === 'folder') {
let vars = get(i, 'root.request.vars.req', []);
vars.forEach((_var) => {
if (_var.enabled) {
reqVars.set(_var.name, _var.value);
folderVariables[_var.name] = _var.value;
}
});
} else {
const vars = i?.draft ? get(i, 'draft.request.vars.req', []) : get(i, 'request.vars.req', []);
vars.forEach((_var) => {
if (_var.enabled) {
reqVars.set(_var.name, _var.value);
requestVariables[_var.name] = _var.value;
}
});
}
}
request.collectionVariables = collectionVariables;
request.folderVariables = folderVariables;
request.requestVariables = requestVariables;
if(request?.vars) {
request.vars.req = Array.from(reqVars, ([name, value]) => ({
name,
value,
enabled: true,
type: 'request'
}));
}
let resVars = new Map();
let collectionResponseVars = get(collection, 'root.request.vars.res', []);
collectionResponseVars.forEach((_var) => {
if (_var.enabled) {
resVars.set(_var.name, _var.value);
}
});
for (let i of requestTreePath) {
if (i.type === 'folder') {
let vars = get(i, 'root.request.vars.res', []);
vars.forEach((_var) => {
if (_var.enabled) {
resVars.set(_var.name, _var.value);
}
});
} else {
const vars = i?.draft ? get(i, 'draft.request.vars.res', []) : get(i, 'request.vars.res', []);
vars.forEach((_var) => {
if (_var.enabled) {
resVars.set(_var.name, _var.value);
}
});
}
}
if(request?.vars) {
request.vars.res = Array.from(resVars, ([name, value]) => ({
name,
value,
enabled: true,
type: 'response'
}));
}
};
const mergeScripts = (collection, request, requestTreePath, scriptFlow) => {
let collectionPreReqScript = get(collection, 'root.request.script.req', '');
let collectionPostResScript = get(collection, 'root.request.script.res', '');
let collectionTests = get(collection, 'root.request.tests', '');
let combinedPreReqScript = [];
let combinedPostResScript = [];
let combinedTests = [];
for (let i of requestTreePath) {
if (i.type === 'folder') {
let preReqScript = get(i, 'root.request.script.req', '');
if (preReqScript && preReqScript.trim() !== '') {
combinedPreReqScript.push(preReqScript);
}
let postResScript = get(i, 'root.request.script.res', '');
if (postResScript && postResScript.trim() !== '') {
combinedPostResScript.push(postResScript);
}
let tests = get(i, 'root.request.tests', '');
if (tests && tests?.trim?.() !== '') {
combinedTests.push(tests);
}
}
}
request.script.req = compact([collectionPreReqScript, ...combinedPreReqScript, request?.script?.req || '']).join(os.EOL);
if (scriptFlow === 'sequential') {
request.script.res = compact([collectionPostResScript, ...combinedPostResScript, request?.script?.res || '']).join(os.EOL);
} else {
request.script.res = compact([request?.script?.res || '', ...combinedPostResScript.reverse(), collectionPostResScript]).join(os.EOL);
}
if (scriptFlow === 'sequential') {
request.tests = compact([collectionTests, ...combinedTests, request?.tests || '']).join(os.EOL);
} else {
request.tests = compact([request?.tests || '', ...combinedTests.reverse(), collectionTests]).join(os.EOL);
}
};
const findItem = (items = [], pathname) => {
return find(items, (i) => i.pathname === pathname);
};
const findItemInCollection = (collection, pathname) => {
let flattenedItems = flattenItems(collection.items);
return findItem(flattenedItems, pathname);
};
const findParentItemInCollection = (collection, pathname) => {
let flattenedItems = flattenItems(collection.items);
return find(flattenedItems, (item) => {
return item.items && find(item.items, (i) => i.pathname === pathname);
});
};
const flattenItems = (items = []) => {
const flattenedItems = [];
const flatten = (itms, flattened) => {
each(itms, (i) => {
flattened.push(i);
if (i.items && i.items.length) {
flatten(i.items, flattened);
}
});
};
flatten(items, flattenedItems);
return flattenedItems;
};
const getTreePathFromCollectionToItem = (collection, _item) => {
let path = [];
let item = findItemInCollection(collection, _item.pathname);
while (item) {
path.unshift(item);
item = findParentItemInCollection(collection, item.pathname);
}
return path;
};
const prepareRequest = (item = {}, collection = {}) => {
const request = item?.request;
const brunoConfig = get(collection, 'brunoConfig', {});
const headers = {};
let contentTypeDefined = false;
const scriptFlow = brunoConfig?.scripts?.flow ?? 'sandwich';
const requestTreePath = getTreePathFromCollectionToItem(collection, item);
if (requestTreePath && requestTreePath.length > 0) {
mergeHeaders(collection, request, requestTreePath);
mergeScripts(collection, request, requestTreePath, scriptFlow);
mergeVars(collection, request, requestTreePath);
}
each(get(request, 'headers', []), (h) => {
if (h.enabled) {
headers[h.name] = h.value;
if (h.name.toLowerCase() === 'content-type') {
@ -34,7 +234,7 @@ const prepareRequest = (request, collectionRoot) => {
pathParams: request?.params?.filter((param) => param.type === 'path')
};
const collectionAuth = get(collectionRoot, 'request.auth');
const collectionAuth = get(collection, 'root.request.auth');
if (collectionAuth && request.auth.mode === 'inherit') {
if (collectionAuth.mode === 'basic') {
axiosRequest.auth = {
@ -157,10 +357,19 @@ const prepareRequest = (request, collectionRoot) => {
axiosRequest.data = graphqlQuery;
}
if (request.script && request.script.length) {
if (request.script) {
axiosRequest.script = request.script;
}
if (request.tests) {
axiosRequest.tests = request.tests;
}
axiosRequest.vars = request.vars;
axiosRequest.collectionVariables = request.collectionVariables;
axiosRequest.folderVariables = request.folderVariables;
axiosRequest.requestVariables = request.requestVariables;
return axiosRequest;
};
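
A usage sketch of the new signature (the require path, pathnames, and values are invented); prepareRequest now takes the item and the whole collection so it can walk the tree path and merge headers, vars, and scripts from the collection and folder levels:

const prepareRequest = require('./prepare-request'); // assumed module path and export shape

const item = {
  pathname: '/demo/folder/ping.bru',
  request: {
    method: 'GET',
    url: 'https://example.com/ping',
    headers: [{ name: 'x-request', value: 'req', enabled: true }],
    auth: { mode: 'none' },
    body: { mode: 'none' },
    script: {},
    vars: { req: [], res: [] }
  }
};

const collection = {
  pathname: '/demo',
  brunoConfig: { scripts: { flow: 'sandwich' } },
  root: { request: { headers: [{ name: 'x-collection', value: 'col', enabled: true }] } },
  items: [
    {
      type: 'folder',
      pathname: '/demo/folder',
      root: { request: { headers: [{ name: 'x-folder', value: 'fld', enabled: true }] } },
      items: [item]
    }
  ]
};

const axiosRequest = prepareRequest(item, collection);
// Headers merge collection -> folder -> request, later levels winning on name clashes:
// axiosRequest.headers ~ { 'x-collection': 'col', 'x-folder': 'fld', 'x-request': 'req' }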

View File

@ -36,13 +36,17 @@ const runSingleRequest = async function (
processEnvVars,
brunoConfig,
collectionRoot,
runtime
runtime,
collection
) {
try {
let request;
let nextRequestName;
request = prepareRequest(bruJson.request, collectionRoot);
let item = {
pathname: path.join(collectionPath, filename),
...bruJson
}
request = prepareRequest(item, collection);
request.__bruno__executionMode = 'cli';
@ -50,10 +54,7 @@ const runSingleRequest = async function (
scriptingConfig.runtime = runtime;
// run pre request script
const requestScriptFile = compact([
get(collectionRoot, 'request.script.req'),
get(bruJson, 'request.script.req')
]).join(os.EOL);
const requestScriptFile = get(request, 'script.req');
if (requestScriptFile?.length) {
const scriptRuntime = new ScriptRuntime({ runtime: scriptingConfig?.runtime });
const result = await scriptRuntime.runRequestScript(
@ -291,10 +292,7 @@ const runSingleRequest = async function (
}
// run post response script
const responseScriptFile = compact([
get(collectionRoot, 'request.script.res'),
get(bruJson, 'request.script.res')
]).join(os.EOL);
const responseScriptFile = get(request, 'script.res');
if (responseScriptFile?.length) {
const scriptRuntime = new ScriptRuntime({ runtime: scriptingConfig?.runtime });
const result = await scriptRuntime.runResponseScript(
@ -339,7 +337,7 @@ const runSingleRequest = async function (
// run tests
let testResults = [];
const testFile = compact([get(collectionRoot, 'request.tests'), get(bruJson, 'request.tests')]).join(os.EOL);
const testFile = get(request, 'tests');
if (typeof testFile === 'string') {
const testRuntime = new TestRuntime({ runtime: scriptingConfig?.runtime });
const result = await testRuntime.runTests(

View File

@ -58,7 +58,7 @@ const bruToJson = (bru) => {
body: _.get(json, 'body', {}),
vars: _.get(json, 'vars', []),
assertions: _.get(json, 'assertions', []),
script: _.get(json, 'script', ''),
script: _.get(json, 'script', {}),
tests: _.get(json, 'tests', '')
}
};

View File

@ -7,14 +7,14 @@ describe('prepare-request: prepareRequest', () => {
it('If request body is valid JSON', async () => {
const body = { mode: 'json', json: '{\n"test": "{{someVar}}" // comment\n}' };
const expected = { test: '{{someVar}}' };
const result = prepareRequest({ body });
const result = prepareRequest({ request: { body } });
expect(result.data).toEqual(expected);
});
it('If request body is not valid JSON', async () => {
const body = { mode: 'json', json: '{\n"test": {{someVar}} // comment\n}' };
const expected = '{\n"test": {{someVar}} \n}';
const result = prepareRequest({ body });
const result = prepareRequest({ request: { body } });
expect(result.data).toEqual(expected);
});
});

View File

@ -1,5 +1,5 @@
{
"version": "v1.34.2",
"version": "v1.36.0",
"name": "bruno",
"description": "Opensource API Client for Exploring and Testing APIs",
"homepage": "https://www.usebruno.com",

View File

@ -389,6 +389,8 @@ const change = async (win, pathname, collectionUid, collectionPath) => {
};
const unlink = (win, pathname, collectionUid, collectionPath) => {
console.log(`watcher unlink: ${pathname}`);
if (isBruEnvironmentConfig(pathname, collectionPath)) {
return unlinkEnvironmentFile(win, pathname, collectionUid);
}
@ -506,6 +508,33 @@ class Watcher {
this.watchers[watchPath] = null;
}
}
getWatcherByItemPath(itemPath) {
const paths = Object.keys(this.watchers);
const watcherPath = paths?.find(collectionPath => {
const absCollectionPath = path.resolve(collectionPath);
const absItemPath = path.resolve(itemPath);
return absItemPath.startsWith(absCollectionPath);
});
return watcherPath ? this.watchers[watcherPath] : null;
}
unlinkItemPathInWatcher(itemPath) {
const watcher = this.getWatcherByItemPath(itemPath);
if (watcher) {
watcher.unwatch(itemPath);
}
}
addItemPathInWatcher(itemPath) {
const watcher = this.getWatcherByItemPath(itemPath);
if (watcher && !watcher?.has?.(itemPath)) {
watcher?.add?.(itemPath);
}
}
}
module.exports = Watcher;
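
A short sketch (paths invented) of how the two new helpers are meant to bracket a rename, mirroring the usage added to the renderer:rename-item handler further below:

// Stop watching the parent directory so chokidar does not react to the
// intermediate filesystem events produced by the rename, then re-add it.
watcher.unlinkItemPathInWatcher('/collections/demo');
// ...rename an item that lives under /collections/demo...
watcher.addItemPathInWatcher('/collections/demo');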

View File

@ -1,5 +1,7 @@
const _ = require('lodash');
const fs = require('fs');
const fsExtra = require('fs-extra');
const os = require('os');
const path = require('path');
const { ipcMain, shell, dialog, app } = require('electron');
const { envJsonToBru, bruToJson, jsonToBru, jsonToCollectionBru } = require('../bru');
@ -17,7 +19,10 @@ const {
isWSLPath,
normalizeWslPath,
normalizeAndResolvePath,
safeToRename
safeToRename,
isWindowsOS,
isValidFilename,
hasSubDirectories,
} = require('../utils/filesystem');
const { openCollectionDialog } = require('../app/collections');
const { generateUidBasedOnHash, stringifyJson, safeParseJSON, safeStringifyJSON } = require('../utils/common');
@ -201,7 +206,9 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
if (fs.existsSync(pathname)) {
throw new Error(`path: ${pathname} already exists`);
}
if (!isValidFilename(request.name)) {
throw new Error(`path: ${request.name}.bru is not a valid filename`);
}
const content = jsonToBru(request);
await writeFile(pathname, content);
} catch (error) {
@ -337,6 +344,12 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
// rename item
ipcMain.handle('renderer:rename-item', async (event, oldPath, newPath, newName) => {
const tempDir = path.join(os.tmpdir(), `temp-folder-${Date.now()}`);
const parentDir = path.dirname(oldPath);
const isWindowsOSAndNotWSLAndItemHasSubDirectories = isWindowsOS() && !isWSLPath(oldPath) && hasSubDirectories(oldPath);
let parentDirUnwatched = false;
let parentDirRewatched = false;
try {
// Normalize paths if they are WSL paths
oldPath = isWSLPath(oldPath) ? normalizeWslPath(oldPath) : normalizeAndResolvePath(oldPath);
@ -358,27 +371,72 @@ const registerRendererEventHandlers = (mainWindow, watcher, lastOpenedCollection
const newBruFilePath = bruFile.replace(oldPath, newPath);
moveRequestUid(bruFile, newBruFilePath);
}
return fs.renameSync(oldPath, newPath);
watcher.unlinkItemPathInWatcher(parentDir);
parentDirUnwatched = true;
/**
* Only when it is Windows OS,
* and the path is not a WSL path (i.e., not Linux running on Windows via WSL),
* and the folder has subdirectories,
* do we need to use the temp-dir approach to rename the folder.
*
* Windows would sometimes throw an error when renaming a folder with subdirectories;
* this is an alternative approach to avoid that error.
*/
if (isWindowsOSAndNotWSLAndItemHasSubDirectories) {
await fsExtra.copy(oldPath, tempDir);
await fsExtra.remove(oldPath);
await fsExtra.move(tempDir, newPath, { overwrite: true });
await fsExtra.remove(tempDir);
} else {
await fs.renameSync(oldPath, newPath);
}
watcher.addItemPathInWatcher(parentDir);
parentDirRewatched = true;
return newPath;
}
const isBru = hasBruExtension(oldPath);
if (!isBru) {
if (!hasBruExtension(oldPath)) {
throw new Error(`path: ${oldPath} is not a bru file`);
}
// update name in file and save new copy, then delete old copy
const data = fs.readFileSync(oldPath, 'utf8');
const jsonData = bruToJson(data);
if (!isValidFilename(newName)) {
throw new Error(`path: ${newName} is not a valid filename`);
}
// update name in file and save new copy, then delete old copy
const data = await fs.promises.readFile(oldPath, 'utf8'); // Use async read
const jsonData = bruToJson(data);
jsonData.name = newName;
moveRequestUid(oldPath, newPath);
const content = jsonToBru(jsonData);
await fs.unlinkSync(oldPath);
await fs.promises.unlink(oldPath);
await writeFile(newPath, content);
return newPath;
} catch (error) {
// in case an error occurs during the rename operations after the parent dir was unwatched
// and the rewatch fails, we need to add it back to the watcher
if (parentDirUnwatched && !parentDirRewatched) {
watcher.addItemPathInWatcher(parentDir);
}
// in case the rename operation fails and we see that the temp dir exists
// while the old path does not, we need to restore the data from the temp dir to the old path
if (isWindowsOSAndNotWSLAndItemHasSubDirectories) {
if (fsExtra.pathExistsSync(tempDir) && !fsExtra.pathExistsSync(oldPath)) {
try {
await fsExtra.copy(tempDir, oldPath);
await fsExtra.remove(tempDir);
} catch (err) {
console.error("Failed to restore data to the old path:", err);
}
}
}
return Promise.reject(error);
}
});

View File

@ -38,6 +38,11 @@ const isDirectory = (dirPath) => {
}
};
const hasSubDirectories = (dir) => {
const files = fs.readdirSync(dir);
return files.some(file => fs.statSync(path.join(dir, file)).isDirectory());
};
const normalizeAndResolvePath = (pathname) => {
if (isSymbolicLink(pathname)) {
const absPath = path.dirname(pathname);
@ -160,6 +165,24 @@ const sanitizeDirectoryName = (name) => {
return name.replace(/[<>:"/\\|?*\x00-\x1F]+/g, '-');
};
const isWindowsOS = () => {
return os.platform() === 'win32';
}
const isValidFilename = (fileName) => {
const inValidChars = /[\\/:*?"<>|]/;
if (!fileName || inValidChars.test(fileName)) {
return false;
}
if (fileName.endsWith(' ') || fileName.endsWith('.') || fileName.startsWith('.')) {
return false;
}
return true;
};
const safeToRename = (oldPath, newPath) => {
try {
// If the new path doesn't exist, it's safe to rename
@ -170,7 +193,7 @@ const safeToRename = (oldPath, newPath) => {
const oldStat = fs.statSync(oldPath);
const newStat = fs.statSync(newPath);
if (os.platform() === 'win32') {
if (isWindowsOS()) {
// Windows-specific comparison:
// Check if both files have the same birth time, size (Since, Win FAT-32 doesn't use inodes)
@ -204,5 +227,8 @@ module.exports = {
searchForFiles,
searchForBruFiles,
sanitizeDirectoryName,
safeToRename
isWindowsOS,
safeToRename,
isValidFilename,
hasSubDirectories
};
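
A few illustrative calls (filenames invented) against the rules isValidFilename enforces above:

isValidFilename('echo request');  // true
isValidFilename('bad:name');      // false (contains a reserved character)
isValidFilename('.hidden');       // false (leading dot)
isValidFilename('trailing.');     // false (trailing dot)
isValidFilename('trailing ');     // false (trailing space)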

View File

@ -20,7 +20,7 @@
"graphql": "^16.6.0",
"markdown-it": "^13.0.1",
"postcss": "8.4.47",
"react": "18.2.0",
"react": "19.0.0",
"react-dom": "18.2.0",
"rollup":"3.29.5",
"rollup-plugin-dts": "^5.0.0",

View File

@ -1,6 +1,7 @@
headers {
check: again
token: {{collection_pre_var_token}}
collection-header: collection-header-value
}
auth {
@ -14,6 +15,28 @@ auth:bearer {
vars:pre-request {
collection_pre_var: collection_pre_var_value
collection_pre_var_token: {{request_pre_var_token}}
collection-var: collection-var-value
}
script:pre-request {
// used by `scripting/js/folder-collection script-tests`
const shouldTestCollectionScripts = bru.getVar('should-test-collection-scripts');
if(shouldTestCollectionScripts) {
bru.setVar('collection-var-set-by-collection-script', 'collection-var-value-set-by-collection-script');
}
}
tests {
// used by `scripting/js/folder-collection script-tests`
const shouldTestCollectionScripts = bru.getVar('should-test-collection-scripts');
const collectionVar = bru.getVar("collection-var-set-by-collection-script");
if (shouldTestCollectionScripts && collectionVar) {
test("collection level test - should get the var that was set by the collection script", function() {
expect(collectionVar).to.equal("collection-var-value-set-by-collection-script");
});
bru.setVar('collection-var-set-by-collection-script', null);
bru.setVar('should-test-collection-scripts', null);
}
}
docs {

View File

@ -0,0 +1,7 @@
meta {
name: bru
}
vars:pre-request {
folder-var: folder-var-value
}

View File

@ -0,0 +1,18 @@
meta {
name: getCollectionVar
type: http
seq: 9
}
get {
url: {{host}}/ping
body: none
auth: none
}
tests {
test("should get collection var in scripts", function() {
const testVar = bru.getCollectionVar("collection-var");
expect(testVar).to.equal("collection-var-value");
});
}

View File

@ -0,0 +1,18 @@
meta {
name: getFolderVar
type: http
seq: 8
}
get {
url: {{host}}/ping
body: none
auth: none
}
tests {
test("should get folder var in scripts", function() {
const testVar = bru.getFolderVar("folder-var");
expect(testVar).to.equal("folder-var-value");
});
}

View File

@ -0,0 +1,22 @@
meta {
name: getRequestVar
type: http
seq: 7
}
get {
url: {{host}}/ping
body: none
auth: none
}
vars:pre-request {
request-var: request-var-value
}
tests {
test("should get request var in scripts", function() {
const testVar = bru.getRequestVar("request-var");
expect(testVar).to.equal("request-var-value");
});
}

View File

@ -0,0 +1,16 @@
meta {
name: folder-collection script-tests pre
type: http
seq: 4
}
post {
url: {{echo-host}}
body: none
auth: none
}
script:pre-request {
bru.setVar('should-test-collection-scripts', true);
bru.setVar('should-test-folder-scripts', true);
}

View File

@ -0,0 +1,28 @@
meta {
name: folder-collection script-tests
type: http
seq: 5
}
post {
url: {{echo-host}}
body: none
auth: none
}
script:pre-request {
// do not delete - the collection/folder scripts/tests run during this request execution
}
tests {
const collectionHeader = req.getHeader("collection-header");
const folderHeader = req.getHeader("folder-header");
test("should get the header value set at collection level", function() {
expect(collectionHeader).to.equal("collection-header-value");
});
test("should get the header value set at folder level", function() {
expect(folderHeader).to.equal("folder-header-value");
});
}

View File

@ -0,0 +1,28 @@
meta {
name: js
}
headers {
folder-header: folder-header-value
}
script:pre-request {
// used by `scripting/js/folder-collection script-tests`
const shouldTestFolderScripts = bru.getVar('should-test-folder-scripts');
if(shouldTestFolderScripts) {
bru.setVar('folder-var-set-by-folder-script', 'folder-var-value-set-by-folder-script');
}
}
tests {
// used by `scripting/js/folder-collection script-tests`
const shouldTestFolderScripts = bru.getVar('should-test-folder-scripts');
const folderVar = bru.getVar("folder-var-set-by-folder-script");
if (shouldTestFolderScripts && folderVar) {
test("folder level test - should get the var that was set by the folder script", function() {
expect(folderVar).to.equal("folder-var-value-set-by-folder-script");
});
bru.setVar('folder-var-set-by-folder-script', null);
bru.setVar('should-test-folder-scripts', null);
}
}

View File

@ -0,0 +1,12 @@
{
"version": "1",
"name": "sandwich_exec",
"type": "collection",
"ignore": [
"node_modules",
".git"
],
"scripts": {
"flow": "sandwich"
}
}

View File

@ -0,0 +1,13 @@
script:pre-request {
console.log("collection pre");
}
script:post-response {
{
console.log("collection post");
const sequence = bru.getVar('sequence') || [];
sequence.push(1);
bru.setVar('sequence', sequence);
console.log("sequence", bru.getVar('sequence'));
}
}

View File

@ -0,0 +1,16 @@
meta {
name: folder
}
script:pre-request {
console.log("folder pre");
}
script:post-response {
{
const sequence = bru.getVar('sequence') || [];
sequence.push(2);
bru.setVar('sequence', sequence);
}
console.log("folder post");
}

View File

@ -0,0 +1,33 @@
meta {
name: request
type: http
seq: 1
}
get {
url: https://www.example.com
body: none
auth: none
}
script:pre-request {
console.log("request pre");
}
script:post-response {
{
console.log("request post");
const sequence = bru.getVar('sequence') || [];
sequence.push(3);
bru.setVar('sequence', sequence);
}
}
tests {
test("sandwich script execution is proper", function() {
const sequence = bru.getVar('sequence');
bru.setVar('sequence', null);
expect(sequence.toString()).to.equal([3,2,1].toString());
});
}

View File

@ -0,0 +1,12 @@
{
"version": "1",
"name": "sequential_exec",
"type": "collection",
"ignore": [
"node_modules",
".git"
],
"scripts": {
"flow": "sequential"
}
}

View File

@ -0,0 +1,12 @@
script:pre-request {
console.log("collection pre");
}
script:post-response {
{
console.log("collection post");
const sequence = bru.getVar('sequence') || [];
sequence.push(1);
bru.setVar('sequence', sequence);
}
}

View File

@ -0,0 +1,16 @@
meta {
name: folder
}
script:pre-request {
console.log("folder pre");
}
script:post-response {
{
console.log("folder post");
const sequence = bru.getVar('sequence') || [];
sequence.push(2);
bru.setVar('sequence', sequence);
}
}

View File

@ -0,0 +1,34 @@
meta {
name: request
type: http
seq: 1
}
get {
url: https://www.example.com
body: none
auth: none
}
script:pre-request {
console.log("request pre");
}
script:post-response {
{
console.log("request post");
const sequence = bru.getVar('sequence') || [];
sequence.push(3);
bru.setVar('sequence', sequence);
console.log("sequence", bru.getVar('sequence'));
}
}
tests {
test("sequential script execution is proper", function() {
const sequence = bru.getVar('sequence');
bru.setVar('sequence', null);
expect(sequence.toString()).to.equal([1,2,3].toString());
});
}

View File

@ -44,12 +44,12 @@ Bruno is offline-only. There are no plans to add cloud-sync to Bruno, ever. We v
![bruno](assets/images/landing-2.png) <br /><br />
## Golden Edition
## Commercial Versions
The majority of our features are free and open source.
We strive to strike a harmonious balance between [open-source principles and sustainability](https://github.com/usebruno/bruno/discussions/269).
You can buy the [Golden Edition](https://www.usebruno.com/pricing) for a one-time payment of **$19**! <br/>
You can explore our [paid versions](https://www.usebruno.com/pricing) to see if there are additional features that you or your team may find useful! <br/>
## Table of Contents
- [Installation](#installation)

View File

@ -10,11 +10,11 @@ rm -rf packages/bruno-electron/web
mkdir packages/bruno-electron/web
# Copy build
cp -r packages/bruno-app/out/* packages/bruno-electron/web
cp -r packages/bruno-app/dist/* packages/bruno-electron/web
# Change paths in next
sed -i'' -e 's@/_next/@_next/@g' packages/bruno-electron/web/**.html
sed -i'' -e 's@/static/@static/@g' packages/bruno-electron/web/**.html
# Remove sourcemaps
find packages/bruno-electron/web -name '*.map' -type f -delete