[MFM] Fix hashtag parsing

parent ef30f36f55
commit 8f5f3985f4

2 changed files with 41 additions and 20 deletions
@@ -110,7 +110,7 @@ const mfm = P.createLanguage({
 		const text = input.substr(i);
 		const match = text.match(/^#([^\s\.,!\?#]+)/i);
 		if (!match) return P.makeFailure(i, 'not a hashtag');
-		if (input[i - 1] != ' ' && input[i - 1] != null) return P.makeFailure(i, 'require space before "#"');
+		if (input[i - 1] != '\n' && input[i - 1] != ' ' && input[i - 1] != null) return P.makeFailure(i, 'require space before "#"');
 		return P.makeSuccess(i + match[0].length, makeNode('hashtag', { hashtag: match[1] }));
 	}),
 	//#endregion
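The one-line change above relaxes the boundary check before "#": a hashtag used to be recognized only at the start of the input or after a space, and is now also recognized after a line break. A minimal standalone sketch of the new condition (the isHashtagBoundary helper is illustrative only, not part of the Misskey source):

	// Illustrative sketch, not the actual parser code.
	// After this commit, '\n' is accepted in addition to ' ' and start-of-input.
	function isHashtagBoundary(input: string, i: number): boolean {
		const prev = i > 0 ? input[i - 1] : null; // character preceding the '#'
		return prev == null || prev === ' ' || prev === '\n';
	}

	isHashtagBoundary('#alice', 0);      // true  (start of input)
	isHashtagBoundary('foo #alice', 4);  // true  (after a space)
	isHashtagBoundary('foo\n#alice', 4); // true  (after a line break, the new behavior)
	isHashtagBoundary('foo#alice', 3);   // false (glued to preceding text)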
test/mfm.ts (59 lines changed)
@@ -162,27 +162,48 @@ describe('Text', () => {
 		});
 	});
 
-	it('hashtag', () => {
-		const tokens1 = analyze('Strawberry Pasta #alice');
-		assert.deepEqual([
-			text('Strawberry Pasta '),
-			node('hashtag', { hashtag: 'alice' })
-		], tokens1);
+	describe('hashtag', () => {
+		it('simple', () => {
+			const tokens = analyze('#alice');
+			assert.deepEqual([
+				node('hashtag', { hashtag: 'alice' })
+			], tokens);
+		});
 
-		const tokens2 = analyze('Foo #bar, baz #piyo.');
-		assert.deepEqual([
-			text('Foo '),
-			node('hashtag', { hashtag: 'bar' }),
-			text(', baz '),
-			node('hashtag', { hashtag: 'piyo' }),
-			text('.'),
-		], tokens2);
+		it('after line break', () => {
+			const tokens = analyze('foo\n#alice');
+			assert.deepEqual([
+				text('foo\n'),
+				node('hashtag', { hashtag: 'alice' })
+			], tokens);
+		});
 
-		const tokens3 = analyze('#Foo!');
-		assert.deepEqual([
-			node('hashtag', { hashtag: 'Foo' }),
-			text('!'),
-		], tokens3);
+		it('with text', () => {
+			const tokens = analyze('Strawberry Pasta #alice');
+			assert.deepEqual([
+				text('Strawberry Pasta '),
+				node('hashtag', { hashtag: 'alice' })
+			], tokens);
+		});
+
+		it('ignore comma and period', () => {
+			const tokens = analyze('Foo #bar, baz #piyo.');
+			assert.deepEqual([
+				text('Foo '),
+				node('hashtag', { hashtag: 'bar' }),
+				text(', baz '),
+				node('hashtag', { hashtag: 'piyo' }),
+				text('.'),
+			], tokens);
+		});
+
+		it('ignore exclamation mark', () => {
+			const tokens = analyze('#Foo!');
+			assert.deepEqual([
+				node('hashtag', { hashtag: 'Foo' }),
+				text('!'),
+			], tokens);
+		});
 	});
 
 	describe('quote', () => {
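The trailing-punctuation behavior exercised by the last two tests ('ignore comma and period', 'ignore exclamation mark') follows from the character class in the hashtag regex shown in the first hunk: whitespace and the characters . , ! ? # are excluded from the capture, so that punctuation stays in the surrounding text node. A quick standalone check of just that regex, outside the parser:

	const HASHTAG_RE = /^#([^\s\.,!\?#]+)/i;
	'#Foo!'.match(HASHTAG_RE)?.[1];     // 'Foo'  (the '!' is left for a text node)
	'#bar, baz'.match(HASHTAG_RE)?.[1]; // 'bar'  (the ',' ends the tag)
	'#piyo.'.match(HASHTAG_RE)?.[1];    // 'piyo' (the '.' ends the tag)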