Commit
Support for paragraph
arobase-che authored and kouhei-fuji committed Apr 8, 2020
1 parent 339905c commit 734c2a3
Showing 3 changed files with 164 additions and 4 deletions.
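In practice, the feature added here (judging from the tests below) lets a {...} attribute line written directly under a paragraph be attached to that paragraph's <p> element. A minimal sketch of how this could be exercised follows; the remark + remark-html pipeline is my assumption, not part of the commit.

// Minimal sketch, not part of this commit. Assumes remark and remark-html
// are installed; the expected output is copied from the new 'paragraph' test.
const remark = require('remark');
const html = require('remark-html');
const attr = require('remark-attr'); // this package

const input = [
  'This is a nice little paragraph.',
  '{.with .some .extra .classes}',
].join('\n');

remark()
  .use(attr)
  .use(html)
  .process(input)
  .then(file => console.log(String(file)));
// Expected:
// <p class="with some extra classes">This is a nice little paragraph.</p>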
21 changes: 21 additions & 0 deletions __tests__/index.js
@@ -169,6 +169,17 @@ test('footnote', t => {
</div>`));
});

test('paragraph', t => {
  const footnotes = `This is a nice little paragraph.
{.with .some .extra .classes}
Another without.
`;
  const {contents} = renderFootnotes(footnotes);
  t.deepEqual(parse(contents), parse(`<p class="with some extra classes">This is a nice little paragraph.</p>
<p>Another without.</p>`));
});

/* Readme tests
*
* Should act according to the README.md
@@ -316,3 +327,13 @@ This is a test image : <img src="img.jpg" alt="test" data-i=2></p>`));
This is a test image : <img src="img.jpg" alt="test"></p>`));
});

test('empty paragraph', t => {
  const footnotes = `{#p .extra .classes}
Normal paragraph.
`;
  const {contents} = renderFootnotes(footnotes);
  t.deepEqual(parse(contents), parse(`<p id="p" class="extra classes"></p>
<p>Normal paragraph.</p>`));
});
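These tests call a renderFootnotes helper that is defined earlier in __tests__/index.js and does not appear in this diff. Purely as a hypothetical sketch, assuming the helper runs the plugin through a remark-to-rehype pipeline and returns the resulting vfile (whose .contents the tests read):

// Hypothetical helper, shown only for context; the real implementation lives
// earlier in __tests__/index.js and may differ.
const unified = require('unified');
const remarkParse = require('remark-parse');
const remark2rehype = require('remark-rehype');
const rehypeStringify = require('rehype-stringify');
const remarkAttr = require('../src');

function renderFootnotes(markdown) {
  return unified()
    .use(remarkParse)
    .use(remarkAttr)
    .use(remark2rehype)
    .use(rehypeStringify)
    .processSync(markdown); // returns a vfile; the tests destructure its .contents
}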

72 changes: 70 additions & 2 deletions dist/index.js
@@ -10,9 +10,9 @@ var parseAttr = require('md-attr-parser');

var htmlElemAttr = require('html-element-attributes');

var supportedElements = ['link', 'atxHeading', 'strong', 'emphasis', 'deletion', 'code', 'setextHeading', 'fencedCode', 'reference'];
var supportedElements = ['link', 'atxHeading', 'strong', 'emphasis', 'deletion', 'code', 'setextHeading', 'fencedCode', 'reference', 'paragraph'];
var blockElements = ['atxHeading', 'setextHeading'];
var particularElements = ['fencedCode'];
var particularElements = ['fencedCode', 'paragraph'];
var particularTokenize = {};

var DOMEventHandler = require('./dom-event-handler.js');
@@ -29,6 +29,7 @@ var convTypeTag = {
  inlineCode: 'code',
  code: 'code',
  linkReference: 'a',
  paragraph: 'p',
  '*': '*'
};
/* This function is a generic function that transform
@@ -258,10 +259,77 @@ function tokenizeFencedCode(oldParser, config) {
  } // Return the new tokenizer function


  return token;
}
/* This is a special modification of the function tokenizeGenerator
 * to parse paragraphs and their trailing customAttr line.
 */


function tokenizeParagraph(oldParser, config) {
  function token(eat, value, silent) {
    // Here we call the old tokenizer
    var self = this;
    var eaten = oldParser.call(self, eat, value, silent);

    // Guard before reading eaten.type: a failed parse would otherwise throw.
    if (!eaten || !eaten.position || !eaten.children || eaten.children.length === 0) {
      return undefined;
    }

    var type = convTypeTag[eaten.type];

    // Looking for the last line of the last child.
    // The last child must be of type text.
    var lastChild = eaten.children[eaten.children.length - 1];

    if (!lastChild || !lastChild.type || lastChild.type !== 'text') {
      return undefined;
    }

    var lcLines = lastChild.value.split('\n');

    if (lcLines.length === 0) {
      return undefined;
    }

    var attrs = lcLines[lcLines.length - 1];
    var parsedAttr = parseAttr(attrs, 0, config.mdAttrConfig);

    if (parsedAttr) {
      // The attribute group must span the entire last line, braces included.
      if (!parsedAttr.eaten || parsedAttr.eaten !== attrs.trimEnd()) {
        return undefined;
      }

      if (parsedAttr.eaten.trim()[0] !== '{' || parsedAttr.eaten.trim().slice(-1) !== '}') {
        return undefined;
      }

      if (config.scope && config.scope !== 'none') {
        var filtredProp = filterAttributes(parsedAttr.prop, config, type);

        // Only attach properties when at least one attribute survived the filter.
        if (Object.keys(filtredProp).length !== 0) {
          if (eaten.data) {
            eaten.data.hProperties = filtredProp;
          } else {
            eaten.data = {
              hProperties: filtredProp
            };
          }
        }
      }

      // Drop the attribute line from the paragraph text.
      lastChild.value = lcLines.slice(0, -1).join('\n');
    }

    return eaten;
  } // Return the new tokenizer function


  return token;
}
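From the checks above, my reading (not stated explicitly in the commit) is that the attribute group has to occupy the entire last line of the paragraph, braces included; otherwise the tokenizer bails out before any properties are attached. Two hypothetical inputs for illustration:

// Hypothetical inputs, for illustration only.
//
// Attributes are attached: the whole last line is one {...} group.
//   'A paragraph.\n{.note #intro}'
//
// Attributes are not attached: extra text shares the line, so parsedAttr.eaten
// no longer equals the whole (trimmed) line and the early returns above fire.
//   'A paragraph.\n{.note} trailing words'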

particularTokenize.fencedCode = tokenizeFencedCode;
particularTokenize.paragraph = tokenizeParagraph;
remarkAttr.SUPPORTED_ELEMENTS = supportedElements;
module.exports = remarkAttr;
/* Function that is exported */
75 changes: 73 additions & 2 deletions src/index.js
@@ -3,9 +3,9 @@
const parseAttr = require('md-attr-parser');
const htmlElemAttr = require('html-element-attributes');

const supportedElements = ['link', 'atxHeading', 'strong', 'emphasis', 'deletion', 'code', 'setextHeading', 'fencedCode', 'reference'];
const supportedElements = ['link', 'atxHeading', 'strong', 'emphasis', 'deletion', 'code', 'setextHeading', 'fencedCode', 'reference', 'paragraph'];
const blockElements = ['atxHeading', 'setextHeading'];
const particularElements = ['fencedCode'];
const particularElements = ['fencedCode', 'paragraph'];

const particularTokenize = {};

@@ -22,6 +22,7 @@ const convTypeTag = {
  inlineCode: 'code',
  code: 'code',
  linkReference: 'a',
  paragraph: 'p',
  '*': '*',
};

@@ -224,7 +225,77 @@ function tokenizeFencedCode(oldParser, config) {
  return token;
}

/* This is a special modification of the function tokenizeGenerator
 * to parse paragraphs and their trailing customAttr line.
 */

function tokenizeParagraph(oldParser, config) {
  function token(eat, value, silent) {
    // Here we call the old tokenizer
    const self = this;
    const eaten = oldParser.call(self, eat, value, silent);

    // Guard before reading eaten.type: a failed parse would otherwise throw.
    if (!eaten || !eaten.position ||
        !eaten.children || eaten.children.length === 0) {
      return undefined;
    }

    const type = convTypeTag[eaten.type];

    // Looking for the last line of the last child.
    // The last child must be of type text.
    const lastChild = eaten.children[eaten.children.length - 1];

    if (!lastChild || !lastChild.type || lastChild.type !== 'text') {
      return undefined;
    }

    const lcLines = lastChild.value.split('\n');

    if (lcLines.length === 0) {
      return undefined;
    }

    const attrs = lcLines[lcLines.length - 1];

    const parsedAttr = parseAttr(attrs, 0, config.mdAttrConfig);

    if (parsedAttr) {
      // The attribute group must span the entire last line, braces included.
      if (!parsedAttr.eaten || parsedAttr.eaten !== attrs.trimEnd()) {
        return undefined;
      }

      if (parsedAttr.eaten.trim()[0] !== '{' || parsedAttr.eaten.trim().slice(-1) !== '}') {
        return undefined;
      }

      if (config.scope && config.scope !== 'none') {
        const filtredProp = filterAttributes(parsedAttr.prop, config, type);

        // Only attach properties when at least one attribute survived the filter.
        if (Object.keys(filtredProp).length !== 0) {
          if (eaten.data) {
            eaten.data.hProperties = filtredProp;
          } else {
            eaten.data = {
              hProperties: filtredProp,
            };
          }
        }
      }

      // Drop the attribute line from the paragraph text.
      lastChild.value = lcLines.slice(0, -1).join('\n');
    }

    return eaten;
  } // Return the new tokenizer function

  return token;
}
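For reference, a rough sketch of the node transformation this tokenizer performs on a matching paragraph. The exact hProperties shape depends on md-attr-parser and filterAttributes, so treat the property values below as an assumption rather than documented output:

// Before (sketch): the attribute line is still part of the last text child.
// {
//   type: 'paragraph',
//   children: [{type: 'text', value: 'This is a nice little paragraph.\n{.with .some}'}]
// }
//
// After (sketch): the attribute line is stripped and its parsed properties are
// attached as data.hProperties for the HTML compiler to pick up.
// {
//   type: 'paragraph',
//   data: {hProperties: {class: 'with some'}}, // exact shape is an assumption
//   children: [{type: 'text', value: 'This is a nice little paragraph.'}]
// }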

particularTokenize.fencedCode = tokenizeFencedCode;
particularTokenize.paragraph = tokenizeParagraph;

remarkAttr.SUPPORTED_ELEMENTS = supportedElements;

