
Commit

first commit
fcavallarin committed Oct 16, 2019
1 parent 1f6898b commit 507c528
Showing 6 changed files with 471 additions and 2 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -0,0 +1,4 @@
node_modules/
.*
!/.gitignore
package-lock.json
44 changes: 42 additions & 2 deletions README.md
@@ -1,2 +1,42 @@
# domdig
DOM XSS scanner for Single Page Applications
## DOMDig
DOMDig is a DOM XSS scanner that runs inside the Chromium web browser and can recursively scan single-page applications (SPAs).
It is based on [htcrawl](https://htcrawl.org), a Node.js library powerful enough to easily crawl a Gmail account.
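
For reference, here is a minimal sketch of how htcrawl is driven, using only the calls that also appear in `domdig.js` below (`launch`, `on("fillinput", ...)`, `load`, `start`, `browser`); the target URL and the empty options object are placeholders, not part of DOMDig itself:

```
const htcrawl = require('htcrawl');

(async () => {
    // launch a Chromium-backed crawler against a target page (options left empty here)
    const crawler = await htcrawl.launch("https://example.com", {});
    // inspect how inputs get filled; returning false disables the default random value
    crawler.on("fillinput", async (e, crawler) => false);
    await crawler.load();   // load the page
    await crawler.start();  // crawl it recursively
    crawler.browser().close();
})();
```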


## KEY FEATURES
- Runs inside a real browser (Chromium)
- Recursive DOM crawling engine
- Handles XHR, fetch, JSONP and WebSocket requests
- Supports cookies, proxies, custom headers, HTTP auth and more
- Scriptable login sequences

## GETTING STARTED
### Installation
```
git clone https://github.com/fcavallarin/domdig.git
cd domdig && npm i && cd ..
node domdig/domdig.js
```

### Example
```
node domdig.js -c 'foo=bar' -p http:127.0.0.1:8080 https://htcap.org/scanme/domxss.php
```

### Login Sequence
A login sequence (or initial sequence) is a JSON array of actions to perform before the scan starts.
Each element of the list is itself an array whose first element is the name of the action to take and whose remaining elements are the parameters for that action.
Actions are:
- `write <selector> <text>`
- `click <selector>`
- `clickToNavigate <selector>`
- `sleep <seconds>`

#### Example
```
[
   ["write", "#username", "demo"],
   ["write", "#password", "demo"],
   ["clickToNavigate", "#btn-login"]
]
```
151 changes: 151 additions & 0 deletions domdig.js
@@ -0,0 +1,151 @@
const htcrawl = require('htcrawl');
const utils = require('./utils');
const payloads = require('./payloads');

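// PAYLOADMAP maps each random key embedded in a payload back to its template and target element;
// VULNSJAR collects the vulnerabilities found across all crawls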
const PAYLOADMAP = {};
const VULNSJAR = [];
var VERBOSE = true;

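// Replace the {0} placeholder with a random numeric key and record which payload/element
// it belongs to, so the ___xssSink callback can map a fired key back to its source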
function getNewPayload(payload, element){
    const k = "" + Math.floor(Math.random()*4000000000);
    const p = payload.replace("{0}", k);
    PAYLOADMAP[k] = {payload:payload, element:element};
    return p;
}

async function crawlAndFuzz(targetUrl, payload, options){
    var hashSet = false;

    // instantiate htcrawl
    const crawler = await htcrawl.launch(targetUrl, options);

    // set a sink on page scope
    crawler.page().exposeFunction("___xssSink", function(key) {
        utils.addVulnerability(PAYLOADMAP[key], VULNSJAR, VERBOSE);
    });

    // fill all inputs with a payload
    crawler.on("fillinput", async function(e, crawler){
        const p = getNewPayload(payload, e.params.element);
        try{
            await crawler.page().$eval(e.params.element, (i, p) => i.value = p, p);
        }catch(e){}
        // return false to prevent the element from being automatically filled with a random value
        return false;
    });


    // change the page hash before the first event is triggered
    crawler.on("triggerevent", async function(e, crawler){
        if(!hashSet){
            const p = getNewPayload(payload, "hash");
            await crawler.page().evaluate(p => document.location.hash = p, p);
            hashSet = true;
        }
    });

    try{
        await crawler.load();
    } catch(e){
        console.log(`Error ${e}`);
        process.exit(-3);
    }

    if(options.initSequence){
        let seqline = 1;
        for(let seq of options.initSequence){
            switch(seq[0]){
                case "sleep":
                    await crawler.page().waitFor(seq[1]);
                    break;
                case "write":
                    try{
                        await crawler.page().type(seq[1], seq[2]);
                    } catch(e){
                        utils.sequenceError("element not found", seqline);
                    }
                    break;
                case "click":
                    try{
                        await crawler.page().click(seq[1]);
                    } catch(e){
                        utils.sequenceError("element not found", seqline);
                    }
                    await crawler.waitForRequestsCompletion();
                    break;
                case "clickToNavigate":
                    try{
                        await crawler.clickToNavigate(seq[1], seq[2]);
                    } catch(err){
                        utils.sequenceError(err, seqline);
                    }
                    break;
                default:
                    utils.sequenceError("action not found", seqline);
            }
            seqline++;
        }
    }


    try{
        await crawler.start();
    } catch(e){
        console.log(`Error ${e}`);
        process.exit(-4);
    }
    try{
        await crawler.reload();
    }catch(e){
        utils.error(e);
    }
    await crawler.page().waitFor(200);
    crawler.browser().close();
}

function ps(message){
    if(VERBOSE)utils.printStatus(message);
}

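// CLI entry point. Flags used directly here: -q quiet, -J print findings as JSON,
// -o <file> save findings to a file, -h show usage; remaining options are handled by utils.parseArgs()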
(async () => {
    const argv = require('minimist')(process.argv.slice(2), {boolean:["l", "J", "q"]});
    if(argv.q)VERBOSE = false;
    if(VERBOSE)utils.banner();
    if('h' in argv){
        utils.usage();
        process.exit(0);
    }

    const targetUrl = argv._[0];
    const options = utils.parseArgs(argv);

    if(!targetUrl){
        utils.usage();
        process.exit(1);
    }
    ps("starting scan");

    let cnt = 1;
    for(let payload of payloads.all){
        ps("crawling page");
        await crawlAndFuzz(targetUrl, payload, options);
        ps(cnt + "/" + payloads.all.length + " payloads checked");
        cnt++;
    }

    if(VERBOSE)console.log("");
    ps("scan finished, tot vulnerabilities: " + VULNSJAR.length);

    if(argv.J){
        console.log(utils.prettifyJson(VULNSJAR));
    } else if(VERBOSE){
        for(let v of VULNSJAR){
            utils.printVulnerability(v[0], v[1]);
        }
    }

    if(argv.o){
        let fn = utils.writeJSON(argv.o, VULNSJAR);
        ps("findings saved to " + fn);
    }
})();
39 changes: 39 additions & 0 deletions package.json
@@ -0,0 +1,39 @@
{
  "name": "domdig",
  "version": "1.0.0",
  "description": "DOM XSS scanner for Single Page Applications",
  "main": "domdig.js",
  "dependencies": {
    "htcrawl": "^1.0.1",
    "minimist": "^1.2.0",
    "chalk": "^2.4.2"
  },
  "devDependencies": {
    "htcrawl": "^1.0.1",
    "minimist": "^1.2.0",
    "chalk": "^2.4.2"
  },
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "Filippo Cavallarin",
  "license": "GPL-3.0",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/fcavallarin/domdig.git"
  },
  "keywords": [
    "DOM XSS",
    "XSS",
    "Cross Site Scripting",
    "scanner",
    "vulnerability",
    "htcrawl",
    "puppeteer",
    "SPA",
    "headless-chrome"
  ],
  "bugs": {
    "url": "https://github.com/fcavallarin/domdig/issues"
  }
}
14 changes: 14 additions & 0 deletions payloads.js
@@ -0,0 +1,14 @@
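// XSS payload templates: at scan time the {0} placeholder is replaced with a random key
// (see getNewPayload in domdig.js) and window.___xssSink reports that key back to the scanner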
exports.all = [
    ';window.___xssSink({0});',
    '<img src="a" onerror="window.___xssSink({0})">',
    'javascript:window.___xssSink({0})',
    "'><img src=a onerror='window.___xssSink({0})'>",
    '"><img src=a onerror="window.___xssSink({0})">',
    "'><img src=a onerror='window.___xssSink({0})'>",
    '1 --><img src="a" onerror="window.___xssSink({0})">',
    ']]><img src="a" onerror="window.___xssSink({0})">',
    ' onerror="window.___xssSink({0})"',
    '" onerror="window.___xssSink({0})" a="',
    "' onerror='window.___xssSink({0})' a='",
    'data:text/javascript;,window.___xssSink({0})'
];