Skip to content

Commit

Permalink
Merge pull request #92 from msftrncs/LineEndRuleCnt
Browse files Browse the repository at this point in the history
Fix line end rule count and debug
  • Loading branch information
alexdima authored Jul 12, 2019
2 parents c349c70 + f4980ee commit 3174cb7
Show file tree
Hide file tree
Showing 5 changed files with 98 additions and 86 deletions.
69 changes: 1 addition & 68 deletions scripts/inspect.js
Original file line number Diff line number Diff line change
@@ -1,68 +1 @@
// Legacy debug driver: tokenizes a file with one or more TextMate grammars
// and dumps per-line tokens plus the end-of-line rule stack.
// usage: node inspect.js <mainGrammarPath> [<additionalGrammarPath1> ...] <filePath>
if (process.argv.length < 4) {
console.log('usage: node index.js <mainGrammarPath> [<additionalGrammarPath1> ...] <filePath>');
process.exit(0);
}

// All arguments but the last are grammar files; the last is the file to tokenize.
var GRAMMAR_PATHS = process.argv.slice(2, process.argv.length - 1);
var FILE_PATH = process.argv[process.argv.length - 1];

// Must be set BEFORE requiring ../out/main: the library reads this env var
// at module load time to enable its debug logging.
process.env['VSCODE_TEXTMATE_DEBUG'] = true;

var fs = require('fs');
var main = require('../out/main');

var Registry = main.Registry;
var registry = new Registry();
var grammarPromise = null;
for (let path of GRAMMAR_PATHS) {
console.log('LOADING GRAMMAR: ' + path);
var content = fs.readFileSync(path).toString();
var rawGrammar = main.parseRawGrammar(content, path);
var g = registry.addGrammar(rawGrammar);
// Keep only the FIRST grammar's promise; later grammars are still registered
// above, but tokenization below uses the first one only.
grammarPromise = grammarPromise || g;
}
grammarPromise.then(grammar => {
var fileContents = fs.readFileSync(FILE_PATH).toString();
// Split on any line-ending convention (CRLF, CR, or LF).
var lines = fileContents.split(/\r\n|\r|\n/);
var ruleStack = null; // carried across lines so tokenization is stateful
var lastElementId = 0; // monotonically increasing label for stack elements
for (var i = 0; i < lines.length; i++) {
var line = lines[i];

console.log('');
console.log('');
console.log('===========================================');
console.log('TOKENIZING LINE ' + (i + 1) + ': |' + line + '|');

var r = grammar.tokenizeLine(line, ruleStack);

console.log('');

// First walk: count the depth of the end-of-line rule stack.
// NOTE(review): walks the private `_parent` field of the library's stack
// element — fragile against library changes; confirm before reusing.
var stackElement = r.ruleStack;
var cnt = 0;
while (stackElement) {
cnt++;
stackElement = stackElement._parent;
}

console.log('@@LINE END RULE STACK CONTAINS ' + cnt + ' RULES:');
// Second walk: render each stack element, tagging previously unseen elements
// with a fresh instance id. Ids start at 1 (pre-increment from 0), so the
// `!_instanceId` falsiness test is a safe "unseen" check.
stackElement = r.ruleStack;
var list = [];
while (stackElement) {
if (!stackElement._instanceId) {
stackElement._instanceId = (++lastElementId);
}
var ruleDesc = grammar._ruleId2desc[stackElement._ruleId]
if (!ruleDesc) {
list.push(' * no rule description found for rule id: ' + stackElement._ruleId);
} else {
list.push(' * ' + ruleDesc.debugName + ' -- [' + ruleDesc.id + ',' + stackElement._instanceId + '] "' + stackElement._scopeName + '"');
}
stackElement = stackElement._parent;
}
// The stack was walked innermost-first; reverse so output reads outermost-first.
list.reverse();
console.log(list.join('\n'));

ruleStack = r.ruleStack;
}
});
require('../out/inspect');
5 changes: 3 additions & 2 deletions src/debug.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,6 @@
*--------------------------------------------------------*/
'use strict';

export const CAPTURE_METADATA = (typeof process !== 'undefined') && process.env['VSCODE_TEXTMATE_DEBUG'];
export const IN_DEBUG_MODE = (typeof process !== 'undefined') && process.env['VSCODE_TEXTMATE_DEBUG'];
/**
 * Run-time debug switches for the library.
 *
 * Exported as a `const` object (rather than a mutable `let` binding) so the
 * reference itself can never be rebound by importers; the flag property stays
 * writable so tooling (e.g. the inspect script) can flip it at startup.
 */
export const DebugFlags = {
	// True when tracing is requested via the VSCODE_TEXTMATE_DEBUG env var.
	// `!!` normalizes the env value (string | undefined) to a real boolean;
	// the `typeof` guard keeps this safe in hosts without a `process` global.
	InDebugMode: (typeof process !== 'undefined') && !!process.env['VSCODE_TEXTMATE_DEBUG']
};
26 changes: 13 additions & 13 deletions src/grammar.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import { IRawGrammar, IRawRepository, IRawRule, IOnigLib, IOnigCaptureIndex, Oni
import { IRuleRegistry, IRuleFactoryHelper, RuleFactory, Rule, CaptureRule, BeginEndRule, BeginWhileRule, MatchRule, ICompiledRule } from './rule';
import { createMatchers, Matcher } from './matcher';
import { MetadataConsts, IGrammar, ITokenizeLineResult, ITokenizeLineResult2, IToken, IEmbeddedLanguagesMap, StandardTokenType, StackElement as StackElementDef, ITokenTypeMap } from './main';
import { IN_DEBUG_MODE } from './debug';
import { DebugFlags } from './debug';
import { FontStyle, ThemeTrieElementRule } from './theme';

export const enum TemporaryStandardTokenType {
Expand Down Expand Up @@ -590,7 +590,7 @@ function matchInjections(injections: Injection[], grammar: Grammar, lineText: On
}
let ruleScanner = grammar.getRule(injection.ruleId).compile(grammar, null, isFirstLine, linePos === anchorPosition);
let matchResult = ruleScanner.scanner.findNextMatchSync(lineText, linePos);
if (IN_DEBUG_MODE) {
if (DebugFlags.InDebugMode) {
console.log(' scanning for injections');
console.log(debugCompiledRuleToString(ruleScanner));
}
Expand Down Expand Up @@ -636,7 +636,7 @@ function matchRule(grammar: Grammar, lineText: OnigString, isFirstLine: boolean,
let rule = stack.getRule(grammar);
let ruleScanner = rule.compile(grammar, stack.endRule, isFirstLine, linePos === anchorPosition);
let r = ruleScanner.scanner.findNextMatchSync(lineText, linePos);
if (IN_DEBUG_MODE) {
if (DebugFlags.InDebugMode) {
//console.log(' scanning for');
//console.log(debugCompiledRuleToString(ruleScanner));
if (r) {
Expand Down Expand Up @@ -719,7 +719,7 @@ function _checkWhileConditions(grammar: Grammar, lineText: OnigString, isFirstLi
for (let whileRule = whileRules.pop(); whileRule; whileRule = whileRules.pop()) {
let ruleScanner = whileRule.rule.compileWhile(grammar, whileRule.stack.endRule, isFirstLine, anchorPosition === linePos);
let r = ruleScanner.scanner.findNextMatchSync(lineText, linePos);
if (IN_DEBUG_MODE) {
if (DebugFlags.InDebugMode) {
console.log(' scanning for while rule');
console.log(debugCompiledRuleToString(ruleScanner));
}
Expand Down Expand Up @@ -769,14 +769,14 @@ function _tokenizeString(grammar: Grammar, lineText: OnigString, isFirstLine: bo
}

function scanNext(): void {
if (IN_DEBUG_MODE) {
if (DebugFlags.InDebugMode) {
console.log('');
console.log(`@@scanNext ${linePos}: |${lineText.content.substr(linePos).replace(/\n$/, '\\n')}|`);
}
let r = matchRuleOrInjections(grammar, lineText, isFirstLine, linePos, stack, anchorPosition);

if (!r) {
if (IN_DEBUG_MODE) {
if (DebugFlags.InDebugMode) {
console.log(' no more matches.');
}
// No match
Expand All @@ -794,7 +794,7 @@ function _tokenizeString(grammar: Grammar, lineText: OnigString, isFirstLine: bo
// We matched the `end` for this rule => pop it
let poppedRule = <BeginEndRule>stack.getRule(grammar);

if (IN_DEBUG_MODE) {
if (DebugFlags.InDebugMode) {
console.log(' popping ' + poppedRule.debugName + ' - ' + poppedRule.debugEndRegExp);
}

Expand Down Expand Up @@ -834,7 +834,7 @@ function _tokenizeString(grammar: Grammar, lineText: OnigString, isFirstLine: bo

if (_rule instanceof BeginEndRule) {
let pushedRule = <BeginEndRule>_rule;
if (IN_DEBUG_MODE) {
if (DebugFlags.InDebugMode) {
console.log(' pushing ' + pushedRule.debugName + ' - ' + pushedRule.debugBeginRegExp);
}

Expand All @@ -860,7 +860,7 @@ function _tokenizeString(grammar: Grammar, lineText: OnigString, isFirstLine: bo
}
} else if (_rule instanceof BeginWhileRule) {
let pushedRule = <BeginWhileRule>_rule;
if (IN_DEBUG_MODE) {
if (DebugFlags.InDebugMode) {
console.log(' pushing ' + pushedRule.debugName);
}

Expand All @@ -885,7 +885,7 @@ function _tokenizeString(grammar: Grammar, lineText: OnigString, isFirstLine: bo
}
} else {
let matchingRule = <MatchRule>_rule;
if (IN_DEBUG_MODE) {
if (DebugFlags.InDebugMode) {
console.log(' matched ' + matchingRule.debugName + ' - ' + matchingRule.debugMatchRegExp);
}

Expand Down Expand Up @@ -1336,7 +1336,7 @@ class LineTokens {

private readonly _emitBinaryTokens: boolean;
/**
* defined only if `IN_DEBUG_MODE`.
* defined only if `DebugFlags.InDebugMode`.
*/
private readonly _lineText: string;
/**
Expand All @@ -1355,7 +1355,7 @@ class LineTokens {
constructor(emitBinaryTokens: boolean, lineText: string, tokenTypeOverrides: TokenTypeMatcher[]) {
this._emitBinaryTokens = emitBinaryTokens;
this._tokenTypeOverrides = tokenTypeOverrides;
if (IN_DEBUG_MODE) {
if (DebugFlags.InDebugMode) {
this._lineText = lineText;
}
if (this._emitBinaryTokens) {
Expand Down Expand Up @@ -1399,7 +1399,7 @@ class LineTokens {

let scopes = scopesList.generateScopes();

if (IN_DEBUG_MODE) {
if (DebugFlags.InDebugMode) {
console.log(' token: |' + this._lineText.substring(this._lastTokenEndIndex, endIndex).replace(/\n$/, '\\n') + '|');
for (var k = 0; k < scopes.length; k++) {
console.log(' * ' + scopes[k]);
Expand Down
6 changes: 3 additions & 3 deletions src/grammarReader.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

import { IRawGrammar } from './types';
import * as plist from './plist';
import { CAPTURE_METADATA } from './debug';
import { DebugFlags } from './debug';
import { parse as manualParseJSON } from './json';

export function parseRawGrammar(content: string, filePath: string): IRawGrammar {
Expand All @@ -16,14 +16,14 @@ export function parseRawGrammar(content: string, filePath: string): IRawGrammar
}

function parseJSONGrammar(contents: string, filename: string): IRawGrammar {
if (CAPTURE_METADATA) {
if (DebugFlags.InDebugMode) {
return <IRawGrammar>manualParseJSON(contents, filename, true);
}
return <IRawGrammar>JSON.parse(contents);
}

function parsePLISTGrammar(contents: string, filename: string): IRawGrammar {
if (CAPTURE_METADATA) {
if (DebugFlags.InDebugMode) {
return <IRawGrammar>plist.parseWithLocation(contents, filename, '$vscodeTextmateLocation');
}
return <IRawGrammar>plist.parse(contents);
Expand Down
78 changes: 78 additions & 0 deletions src/inspect.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
/*---------------------------------------------------------
* Copyright (C) Microsoft Corporation. All rights reserved.
*--------------------------------------------------------*/

import * as fs from 'fs';
import { Registry, IGrammar, parseRawGrammar, Thenable } from './main';
import { StackElement as StackElementImpl, Grammar as GrammarImpl } from './grammar';
import * as debug from './debug';

// Cast target that widens the library's StackElement with a debug-only
// `_instanceId` label. Never instantiated through this class — the script
// below only casts existing stack elements to it.
class ExtendedStackElement extends StackElementImpl {
// Lazily assigned (starting at 1) the first time an element is rendered.
_instanceId?: number;
}

debug.DebugFlags.InDebugMode = true;

if (process.argv.length < 4) {
console.log('usage: node index.js <mainGrammarPath> [<additionalGrammarPath1> ...] <filePath>');
process.exit(0);
}

const GRAMMAR_PATHS = process.argv.slice(2, process.argv.length - 1);
const FILE_PATH = process.argv[process.argv.length - 1];

const registry = new Registry();
let grammarPromises: Thenable<IGrammar>[] = [];
for (let path of GRAMMAR_PATHS) {
console.log('LOADING GRAMMAR: ' + path);
const content = fs.readFileSync(path).toString();
const rawGrammar = parseRawGrammar(content, path);
grammarPromises.push(registry.addGrammar(rawGrammar));
}

// Tokenize the target file line by line with the first registered grammar,
// dumping the tokens (via the library's debug tracing) and the end-of-line
// rule stack for each line.
Promise.all(grammarPromises).then(allGrammars => {
	const grammar = allGrammars[0];
	const fileContents = fs.readFileSync(FILE_PATH).toString();
	// Split on any line-ending convention (CRLF, CR, or LF).
	const sourceLines = fileContents.split(/\r\n|\r|\n/);
	let ruleStack = null; // carried across lines so tokenization is stateful
	let lastElementId = 0; // monotonically increasing label for stack elements
	for (const [lineIndex, lineText] of sourceLines.entries()) {
		console.log('');
		console.log('');
		console.log('===========================================');
		console.log('TOKENIZING LINE ' + (lineIndex + 1) + ': |' + lineText + '|');

		const result = grammar.tokenizeLine(lineText, ruleStack);

		console.log('');

		// Flatten the end-of-line rule stack by following parent links, so we
		// can both count it and render it.
		const stackElements: ExtendedStackElement[] = [];
		for (let el = <ExtendedStackElement>result.ruleStack; el; el = el.parent) {
			stackElements.push(el);
		}

		console.log('@@LINE END RULE STACK CONTAINS ' + stackElements.length + ' RULES:');
		// Render each element, tagging previously unseen ones with a fresh
		// instance id (ids start at 1, so falsiness means "unseen").
		const rendered: string[] = [];
		for (const el of stackElements) {
			if (!el._instanceId) {
				el._instanceId = (++lastElementId);
			}
			const ruleDesc = (<GrammarImpl>grammar).getRule(el.ruleId);
			if (!ruleDesc) {
				rendered.push(' * no rule description found for rule id: ' + el.ruleId);
			} else {
				rendered.push(' * ' + ruleDesc.debugName + ' -- [' + ruleDesc.id + ',' + el._instanceId + '] "' + el.nameScopesList.generateScopes() + '", "' + el.contentNameScopesList.generateScopes() + '"');
			}
		}
		// The walk was innermost-first; reverse so output reads outermost-first.
		rendered.reverse();
		console.log(rendered.join('\n'));

		ruleStack = result.ruleStack;
	}
});

0 comments on commit 3174cb7

Please sign in to comment.