fix: 🐛 fix script parsing
jr-codes committed Aug 21, 2021
1 parent 8966c46 commit bc5e4be
Showing 4 changed files with 148 additions and 1 deletion.
40 changes: 40 additions & 0 deletions lib/__tests__/parse-script.test.js
@@ -54,6 +54,46 @@ describe('parse-script', () => {
options: { env: { NODE_ENV: 'development' } },
},
],
[
['nodemon --help'],
{ nodemon: 'path/to/nodemon' },
{
name: 'nodemon',
path: 'path/to/nodemon',
args: ['--help'],
options: {},
},
],
[
'lerna --message "This is a message"',
{ lerna: 'path/to/lerna' },
{
name: 'lerna',
path: 'path/to/lerna',
args: ['--message', 'This is a message'],
options: {},
},
],
[
"lerna --message 'This is a message'",
{ lerna: 'path/to/lerna' },
{
name: 'lerna',
path: 'path/to/lerna',
args: ['--message', 'This is a message'],
options: {},
},
],
[
'lerna --message \'This is a "message"\'',
{ lerna: 'path/to/lerna' },
{
name: 'lerna',
path: 'path/to/lerna',
args: ['--message', 'This is a "message"'],
options: {},
},
],
])('parses %j', (script, scripts, expected) => {
const result = parseScript(script, scripts)
expect(result).toEqual(expected)
47 changes: 47 additions & 0 deletions lib/__tests__/tokenize.test.js
@@ -0,0 +1,47 @@
'use strict'

const tokenize = require('../tokenize')

test.each([
['eslint --fix .', ['eslint', '--fix', '.']],
['eslint --fix . ', ['eslint', '--fix', '.']],
[' nodemon --help', ['nodemon', '--help']],
[
'eslint --ext .jsx --ext .js lib/',
['eslint', '--ext', '.jsx', '--ext', '.js', 'lib/'],
],
[
'git commit -m "remove unused dependencies"',
['git', 'commit', '-m', 'remove unused dependencies'],
],
[
"git commit -m 'remove unused dependencies'",
['git', 'commit', '-m', 'remove unused dependencies'],
],
['echo "1 2 3" "4 5 6" 7 8 9', ['echo', '1 2 3', '4 5 6', '7', '8', '9']],
[
'lerna version -m "chore(release): publish" --force-publish',
['lerna', 'version', '-m', 'chore(release): publish', '--force-publish'],
],
[
"lerna version -m 'chore(release): publish' --force-publish",
['lerna', 'version', '-m', 'chore(release): publish', '--force-publish'],
],
['', []],
])('parses %p', (input, expected) => {
const result = tokenize(input)
expect(result).toEqual(expected)
})

test.each([
[null],
[undefined],
[3],
[[1, 2, 3]],
[{ foo: 'bar' }],
[['array', 'of', 'strings']],
])('throws on %p', (input) => {
expect(() => {
tokenize(input)
}).toThrow()
})
3 changes: 2 additions & 1 deletion lib/parse-script.js
@@ -1,6 +1,7 @@
'use strict'

const debug = require('debug')('cli-rewire:parse-script')
const tokenize = require('./tokenize')

/**
* @typedef {Object} ScriptOptions Options used when running a script
@@ -43,7 +44,7 @@ function parseScript(rawScript, scriptMap) {
const script = typeof rawScript === 'string' ? [rawScript] : rawScript

// Split the script command string into its name and arg parts
const [name, ...args] = script[0].split(' ')
const [name, ...args] = tokenize(script[0])

/** @type {Script} */
const parsedScript = {
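The change in lib/parse-script.js swaps the plain split(' ') for the new tokenize helper, so quoted arguments are kept as single tokens instead of being broken apart on every space. A rough before/after sketch (the command string and require path are illustrative, not taken from the repo):

const tokenize = require('./lib/tokenize')

const command = 'lerna --message "This is a message"'

// Old behavior: splitting on spaces breaks the quoted argument into pieces.
command.split(' ')
// => ['lerna', '--message', '"This', 'is', 'a', 'message"']

// New behavior: the quoted argument stays together and the quotes are stripped.
tokenize(command)
// => ['lerna', '--message', 'This is a message']
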
59 changes: 59 additions & 0 deletions lib/tokenize.js
@@ -0,0 +1,59 @@
'use strict'

/**
 * Splits a string into an array of tokens based on spaces and quotes.
*
* @param {string} string String to tokenize
* @returns {string[]} array of strings
*
* @example
* tokenize('eslint --ext .jsx --ext .js lib/')
* // Returns ['eslint', '--ext', '.jsx', '--ext', '.js', 'lib/']
*
* @example
* tokenize('git commit -m "remove unused dependencies"')
* // Returns ['git', 'commit', '-m', 'remove unused dependencies']
*/
function tokenize(string) {
if (typeof string !== 'string') {
throw new TypeError('string should be a string')
}

const tokens = []
let index = 0
let quote = ''

for (const character of string) {
// Add character to token
// if inside quotes and character's not a close quote OR
// if outside quotes and character's not a space or open quote
if (
(quote && character !== quote) ||
(!quote && character !== ' ' && character !== "'" && character !== '"')
) {
// Initialize token if empty
if (!tokens[index]) tokens[index] = ''
tokens[index] += character
}

// Move to next token if token's not empty AND
// closing quotes OR outside quotes and character's a space
if (
tokens[index] &&
((quote && character === quote) || (!quote && character === ' '))
) {
index += 1
}

// Close quote if character equals quote
// Open quote if character is a quote
if (quote && character === quote) {
quote = ''
} else if (!quote && (character === '"' || character === "'"))
quote = character
}

return tokens
}

module.exports = tokenize
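
The tokenizer is a small character-by-character state machine: the single quote variable tracks whether the scanner is inside '...' or "...", spaces only end a token outside quotes, and the surrounding quote characters are dropped from the output. It does not handle backslash escapes, so a literal quote can only be embedded by nesting the other quote style. A quick usage sketch (illustrative, not part of the commit; the require path assumes the repository root):

const tokenize = require('./lib/tokenize')

tokenize('eslint --ext .jsx --ext .js lib/')
// => ['eslint', '--ext', '.jsx', '--ext', '.js', 'lib/']

tokenize('lerna --message \'This is a "message"\'')
// => ['lerna', '--message', 'This is a "message"']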
