boris/node_modules/micromark-core-commonmark/lib/content.js

/**
 * @import {
 *   Construct,
 *   Resolver,
 *   State,
 *   TokenizeContext,
 *   Tokenizer,
 *   Token
 * } from 'micromark-util-types'
 */
import { factorySpace } from 'micromark-factory-space';
import { markdownLineEnding } from 'micromark-util-character';
import { subtokenize } from 'micromark-util-subtokenize';
/**
 * No name because it must not be turned off.
 * @type {Construct}
 */
export const content = {
  resolve: resolveContent,
  tokenize: tokenizeContent
};
/** @type {Construct} */
const continuationConstruct = {
  partial: true,
  tokenize: tokenizeContinuation
};
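// Context: constructs with a name (such as `codeIndented`) can be turned off
// through an extension's `disable: {null: [...]}` list, which is the same
// mechanism `prefixed` checks below; `content` has no name, so it cannot be
// disabled that way.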
/**
 * Content is transparent: it's parsed right now. That way, definitions are
 * also parsed right now: before text in paragraphs (specifically, media) is
 * parsed.
 *
 * @type {Resolver}
 */
function resolveContent(events) {
  subtokenize(events);
  return events;
}
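// For example (a sketch using plain `micromark`, not part of this module):
// because definitions are parsed before paragraph text, a reference resolves
// even when its definition appears later in the document.
//
//   import { micromark } from 'micromark';
//   micromark('[x]\n\n[x]: https://example.com');
//   // => '<p><a href="https://example.com">x</a></p>'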
/**
 * @this {TokenizeContext}
 *   Context.
 * @type {Tokenizer}
 */
function tokenizeContent(effects, ok) {
  /** @type {Token | undefined} */
  let previous;
  return chunkStart;
  /**
   * Before a content chunk.
   *
   * ```markdown
   * > | abc
   *     ^
   * ```
   *
   * @type {State}
   */
  function chunkStart(code) {
    effects.enter("content");
    previous = effects.enter("chunkContent", {
      contentType: "content"
    });
    return chunkInside(code);
  }
  /**
   * In a content chunk.
   *
   * ```markdown
   * > | abc
   *     ^^^
   * ```
   *
   * @type {State}
   */
  function chunkInside(code) {
    if (code === null) {
      return contentEnd(code);
    }
    // To do: in `markdown-rs`, each line is parsed on its own, and everything
    // is stitched together resolving.
    if (markdownLineEnding(code)) {
      return effects.check(continuationConstruct, contentContinue, contentEnd)(code);
    }
    // Data.
    effects.consume(code);
    return chunkInside;
  }
  /**
   * At eof, or at an eol that is not followed by more content: close the
   * current chunk and the content itself.
   *
   * @type {State}
   */
  function contentEnd(code) {
    effects.exit("chunkContent");
    effects.exit("content");
    return ok(code);
  }
  /**
   * At an eol that is followed by more content: consume it and start a new
   * chunk, linked to the previous one.
   *
   * @type {State}
   */
  function contentContinue(code) {
    effects.consume(code);
    effects.exit("chunkContent");
    previous.next = effects.enter("chunkContent", {
      contentType: "content",
      previous
    });
    previous = previous.next;
    return chunkInside;
  }
}
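// Sketch of the observable effect (not part of this module): each line of
// content becomes one linked `chunkContent` token, so consecutive lines end
// up in a single paragraph.
//
//   import { micromark } from 'micromark';
//   micromark('abc\ndef');
//   // => '<p>abc\ndef</p>'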
/**
 * @this {TokenizeContext}
 *   Context.
 * @type {Tokenizer}
 */
function tokenizeContinuation(effects, ok, nok) {
  const self = this;
  return startLookahead;
  /**
   * At an eol: consume it and any following line prefix, then look at what
   * starts the next line.
   *
   * @type {State}
   */
  function startLookahead(code) {
    effects.exit("chunkContent");
    effects.enter("lineEnding");
    effects.consume(code);
    effects.exit("lineEnding");
    return factorySpace(effects, prefixed, "linePrefix");
  }
  /**
   * After an eol and an optional line prefix: decide whether the next line
   * continues the content.
   *
   * @type {State}
   */
  function prefixed(code) {
    if (code === null || markdownLineEnding(code)) {
      return nok(code);
    }
    // Always populated by defaults.
    const tail = self.events[self.events.length - 1];
    if (
      !self.parser.constructs.disable.null.includes('codeIndented') &&
      tail &&
      tail[1].type === "linePrefix" &&
      tail[2].sliceSerialize(tail[1], true).length >= 4
    ) {
      return ok(code);
    }
    return effects.interrupt(self.parser.constructs.flow, nok, ok)(code);
  }
}
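// Sketch of what this lookahead decides (not part of this module):
//
//   import { micromark } from 'micromark';
//   micromark('abc\n# def');
//   // => '<p>abc</p>\n<h1>def</h1>' (a heading can interrupt content)
//   micromark('abc\n    def');
//   // => '<p>abc\ndef</p>' (indented code cannot interrupt content)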