ARTICLE AD BOX
In my project I am using this babel.config.js file to convert from browser-compatible JavaScript into Node.js-compatible JavaScript before running my Jest tests:
module.exports = { presets: [['@babel/preset-env', {targets: {node: 'current'}}]], plugins: ['babel-plugin-rewire', 'export-toplevel'], }; It works if I try to run npm test. However, if I run bun test, I get the following screen:
Bun appears, for some reason, to ignore the content of the babel.config.js file. How do I make it call Babel (and the export-toplevel module) before trying to run the tests?
Here is the tokenizer.js file:
"use strict"; function tokenize(input) { let tokenized = []; let areWeInAString = false; let areWeInAComment = false; let currentLine = 1; // Don't care about columns, lines in assembly language // are always short. let currentToken = ""; for (let i = 0; i < input.length; i++) { if (areWeInAComment && areWeInAString) { alert( "Tokenizer got into a forbidden state because of some bug in it! Line #" + currentLine); return []; } if (input[i] == ";" && !areWeInAString) { areWeInAComment = true; tokenized.push(new TreeNode(currentToken, currentLine)); tokenized.push(new TreeNode("\n", currentLine)); continue; } if (areWeInAComment && input[i] != "\n") continue; if (areWeInAComment && input[i] == "\n") { areWeInAComment = false; currentLine++; currentToken = ""; continue; } if (input[i] == '"' && !areWeInAString) { areWeInAString = true; tokenized.push(new TreeNode(currentToken, currentLine)); currentToken = '"'; continue; } if (input[i] == "\n" && areWeInAString) { alert("Unterminated string literal on line " + currentLine); return []; } if (input[i] == '"') { areWeInAString = false; currentToken += '"'; tokenized.push(new TreeNode(currentToken, currentLine)); currentToken = ""; continue; } if (input[i] == "\n") { tokenized.push(new TreeNode(currentToken, currentLine)); currentToken = ""; tokenized.push(new TreeNode( "\n", currentLine++)); // Because assembly language is a // whitespace-sensitive language, the new-line // characters are tokens visible to the parser. 
continue; } if ( (input[i] == " " || input[i] == "\t") && !areWeInAString // https://github.com/FlatAssembler/PicoBlaze_Simulator_in_JS/issues/5 ) { tokenized.push(new TreeNode(currentToken, currentLine)); currentToken = ""; continue; } if ((input[i] == "(" || input[i] == ")" || input[i] == "[" || input[i] == "]" || input[i] == "{" || input[i] == "}" || input[i] == "," || input[i] == "/" || input[i] == "*" || input[i] == "-" || input[i] == "+" || input[i] == "^" || input[i] == "<" || input[i] == ">" || input[i] == "=" || input[i] == "&" || input[i] == "|" || input[i] == "?" || input[i] == ':') && !areWeInAString) { tokenized.push(new TreeNode(currentToken, currentLine)); tokenized.push(new TreeNode(input[i], currentLine)); currentToken = ""; continue; } if (input[i] == ":" && !areWeInAString) { // https://github.com/FlatAssembler/PicoBlaze_Simulator_in_JS/issues/39 tokenized.push(new TreeNode(currentToken + ":", currentLine)); currentToken = ""; continue; } currentToken += input[i]; } if (currentToken.length) { tokenized.push(new TreeNode(currentToken, currentLine)); tokenized.push(new TreeNode("\n", currentLine)); } if (tokenized[tokenized.length - 1].text != "\n") tokenized.push(new TreeNode("\n", currentLine)); for (let i = 0; i < tokenized.length; i++) { if (!(tokenized[i] instanceof TreeNode)) { alert("Internal compiler error in tokenizer, the token #" + i + " is not of type TreeNode!"); return []; } if (tokenized[i].text == "") { tokenized.splice(i, 1); i--; } } // Labels are single tokens. for (let i = 0; i < tokenized.length; i++) if (tokenized[i].text == ':' && (tokenized[i + 1].text == '\n' || (i < 2 // https://github.com/FlatAssembler/PicoBlaze_Simulator_in_JS/issues/32 || tokenized[i - 2].text == '\n'))) { // https://github.com/FlatAssembler/PicoBlaze_Simulator_in_JS/issues/31 tokenized[i - 1].text += ':'; tokenized.splice(i, 1); i--; } // Functions in the preprocessor. 
for (let i = 0; i < tokenized.length - 1; i++) if ([ "invertbits", "bitand", "bitor", "mod" ].includes( tokenized[i].text.toLowerCase()) && tokenized[i + 1].text == '(') { tokenized[i].text += "("; tokenized.splice(i + 1, 1); } return tokenized; }Notice it does not export anything; instead, it relies on Babel's export-toplevel module to export the tokenize function. And here is the tokenizer.test.js file:
/* Hackish way to import TreeNode using a babel plugin to export all
   without modifying the actual code. Ideally the project should be using
   ES modules or a bundler like webpack. */
const tree = require("../TreeNode.js");
global.TreeNode = tree.TreeNode; // referenced by tokenizer
const tokenizer = require("../tokenizer.js");

describe("PicoBlaze Tokenizer", () => {
  // A ';' starts a comment running to end-of-line; none of the comment's
  // text may appear among the tokens.
  test("ignores comments", () => {
    const tokens = tokenizer.tokenize(`
load s0, 123 ;this is a comment
    `);
    expect(tokens.map((t) => t.text)).toEqual([
      "\n",
      "load",
      "s0",
      ",",
      "123",
      "\n",
      "\n",
    ]);
  });
  // The apostrophe of the 'b suffix must stay glued to the digits.
  test("includes binary literals", () => {
    const tokens = tokenizer.tokenize("load s0, 10100000'b");
    expect(tokens.map((t) => t.text)).toEqual([
      "load",
      "s0",
      ",",
      "10100000'b",
      "\n",
    ]);
  });
  test("is whitespace insensitive", () => {
    const tokens = tokenizer.tokenize("load s0, 0");
    expect(tokens.map((t) => t.text)).toEqual(["load", "s0", ",", "0", "\n"]);
  });
  // New-line characters are tokens in their own right, so a name split
  // across lines becomes two tokens with a "\n" between them.
  test("is newline sensitive", () => {
    const tokens = tokenizer.tokenize("addr\ness 0");
    expect(tokens.map((t) => t.text)).toEqual(["addr", "\n", "ess", "0", "\n"]);
  });
  // Spaces inside a string literal must not split it.
  test('" " is a single token', () => {
    const tokens = tokenizer.tokenize(
      'load s9, " " ; https://github.com/FlatAssembler/PicoBlaze_Simulator_in_JS/issues/5',
    );
    expect(tokens.map((t) => t.text)).toEqual(["load", "s9", ",", '" "', "\n"]);
  });
  // A ':' ending a line fuses with the preceding name into one "label:"
  // token, while the ':' inside the ternary expression stays separate.
  test("Labels are tokenized correctly 1", () => {
    const tokens = tokenizer.tokenize(
      `inst 2+2<5?1:0
label:
jump label`,
    );
    expect(tokens.map((t) => t.text)).toEqual([
      "inst",
      "2",
      "+",
      "2",
      "<",
      "5",
      "?",
      "1",
      ":",
      "0",
      "\n",
      "label:",
      "\n",
      "jump",
      "label",
      "\n",
    ]);
  });
  // A label followed by an instruction on the same line ("label2: load ...")
  // must still fuse into "label2:" with no "\n" emitted after it.
  test("Labels are tokenized correctly 2", () => {
    // https://github.com/FlatAssembler/PicoBlaze_Simulator_in_JS/issues/31
    const tokens = tokenizer.tokenize(
      `address 0
label1:
load s0, s1
label2: load s1, 1`
    );
    expect(tokens.map((t) => t.text)).toEqual(
      [
        "address",
        "0",
        "\n",
        "label1:",
        "\n",
        "load",
        "s0",
        ",",
        "s1",
        "\n",
        "label2:",
        "load",
        "s1",
        ",",
        "1",
        "\n",
      ]
    )
  })
});
So it invokes the tokenize function repeatedly expecting it to have been exported by Babel, and, of course, fails.

