Mirror of https://github.com/dergigi/boris.git (synced 2025-12-19 07:34:28 +01:00)
feat: integrate applesauce-content for proper content parsing and rendering
- Install applesauce-content package for content parsing
- Use getParsedContent() to parse nostr content according to applesauce patterns
- Create proper TypeScript interfaces for ParsedNode and ParsedContent
- Add renderParsedContent() component to render parsed content with proper styling
- Handle mentions, links, and text content with appropriate styling
- Add CSS styles for nostr-mention and nostr-link classes
- Follow applesauce-content documentation patterns for content rendering
- Maintain type safety with proper interfaces instead of 'any' types

This follows the applesauce-content documentation exactly as shown in the examples, providing proper content parsing and rendering.
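As a rough illustration of the rendering side described above, here is a minimal sketch. The ParsedNode/ParsedContent shapes and node types are assumptions made for this example only; the actual interfaces and the applesauce-content getParsedContent() helper are not visible in this diff, which touches vendored @scure/bip39 files only.

```tsx
import React from 'react'

// Shapes assumed for this sketch; in the real code the nodes would come from
// parsing the nostr event content with applesauce-content's getParsedContent().
interface ParsedNode {
  type: 'text' | 'mention' | 'link'
  value: string // plain text, bech32 pointer, or URL depending on `type`
}
interface ParsedContent {
  nodes: ParsedNode[]
}

// Render the parsed nodes, applying the nostr-mention / nostr-link CSS classes
// that the commit introduces for styling.
export function renderParsedContent(content: ParsedContent): React.ReactNode {
  return content.nodes.map((node, i) => {
    switch (node.type) {
      case 'mention':
        return <span key={i} className="nostr-mention">@{node.value}</span>
      case 'link':
        return <a key={i} className="nostr-link" href={node.value}>{node.value}</a>
      default:
        return <React.Fragment key={i}>{node.value}</React.Fragment>
    }
  })
}
```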
90 node_modules/@scure/bip39/README.md generated vendored

@@ -1,41 +1,38 @@
# scure-bip39

Audited & minimal JS implementation of [BIP39 mnemonic phrases](https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki).
Secure, [audited](#security) & minimal implementation of BIP39 mnemonic phrases.

- 🔒 [**Audited**](#security) by an independent security firm
- 🔻 Tree-shakeable: unused code is excluded from your builds
- 📦 ESM and common.js
- ➰ Only 2 audited dependencies by the same author:
[noble-hashes](https://github.com/paulmillr/noble-hashes) and [scure-base](https://github.com/paulmillr/scure-base)
- 🪶 14KB gzipped with one wordlist, 79KB with all of them: much smaller than similar libraries

Compared to popular `bip39` package, scure-bip39:

- Supports ESM and common.js
- Supports tree-shaking: only actually used wordlists are bundled
- Is 491KB all-bundled instead of 1.3MB
- Uses 2 dependencies instead of 15
- Wordlists are 157KB instead of 315KB
- Had an external security [audit](#security) by Cure53

Check out [scure-bip32](https://github.com/paulmillr/scure-bip32) if you need
hierarchical deterministic wallets ("HD Wallets").

### This library belongs to _scure_
### This library belongs to *scure*

> **scure** — audited micro-libraries.
> **scure** — secure, independently audited packages for every use case.

- Zero or minimal dependencies
- Highly readable TypeScript / JS code
- PGP-signed releases and transparent NPM builds
- Check out [homepage](https://paulmillr.com/noble/#scure) & all libraries:
- All releases are signed with PGP keys
- As minimal as possible
- Check out all libraries:
[base](https://github.com/paulmillr/scure-base),
[bip32](https://github.com/paulmillr/scure-bip32),
[bip39](https://github.com/paulmillr/scure-bip39),
[btc-signer](https://github.com/paulmillr/scure-btc-signer),
[starknet](https://github.com/paulmillr/scure-starknet)
[btc-signer](https://github.com/paulmillr/scure-btc-signer)

## Usage

> `npm install @scure/bip39`
> npm install @scure/bip39

> `deno add jsr:@scure/bip39`
Or

> `deno doc jsr:@scure/bip39` # command-line documentation

We don't provide source maps.
Wordlists are large, including source maps would double package size.
> yarn add @scure/bip39

```js
import * as bip39 from '@scure/bip39';
@@ -46,7 +43,7 @@ const mn = bip39.generateMnemonic(wordlist);
console.log(mn);

// Reversible: Converts mnemonic string to raw entropy in form of byte array.
const ent = bip39.mnemonicToEntropy(mn, wordlist);
const ent = bip39.mnemonicToEntropy(mn, wordlist)

// Reversible: Converts raw entropy in form of byte array to mnemonic string.
bip39.entropyToMnemonic(ent, wordlist);
@@ -59,7 +56,7 @@ await bip39.mnemonicToSeed(mn, 'password');
bip39.mnemonicToSeedSync(mn, 'password');
```

This submodule contains the word lists defined by BIP39 for Czech, English, French, Italian, Japanese, Korean, Portuguese, Simplified and Traditional Chinese, and Spanish. These are not imported by default, as that would increase bundle sizes too much. Instead, you should import and use them explicitly.
This submodule contains the word lists defined by BIP39 for Czech, English, French, Italian, Japanese, Korean, Simplified and Traditional Chinese, and Spanish. These are not imported by default, as that would increase bundle sizes too much. Instead, you should import and use them explicitly.

```typescript
function generateMnemonic(wordlist: string[], strength?: number): string;
@@ -79,52 +76,19 @@ import { wordlist as french } from '@scure/bip39/wordlists/french';
import { wordlist as italian } from '@scure/bip39/wordlists/italian';
import { wordlist as japanese } from '@scure/bip39/wordlists/japanese';
import { wordlist as korean } from '@scure/bip39/wordlists/korean';
import { wordlist as portuguese } from '@scure/bip39/wordlists/portuguese';
import { wordlist as simplifiedChinese } from '@scure/bip39/wordlists/simplified-chinese';
import { wordlist as simp } from '@scure/bip39/wordlists/simplified-chinese';
import { wordlist as spanish } from '@scure/bip39/wordlists/spanish';
import { wordlist as traditionalChinese } from '@scure/bip39/wordlists/traditional-chinese';
import { wordlist as trad } from '@scure/bip39/wordlists/traditional-chinese';
```

## Security

To audit wordlist content, run `node scripts/fetch-wordlist.js`.
The library has been audited by Cure53 on Jan 5, 2022. Check out the audit [PDF](./audit/2022-01-05-cure53-audit-nbl2.pdf) & [URL](https://cure53.de/pentest-report_hashing-libs.pdf). See [changes since audit](https://github.com/paulmillr/scure-bip39/compare/1.0.0..main).

The library has been independently audited:

- at version 1.0.0, in Jan 2022, by [cure53](https://cure53.de)
- PDFs: [online](https://cure53.de/pentest-report_hashing-libs.pdf), [offline](./audit/2022-01-05-cure53-audit-nbl2.pdf)
- [Changes since audit](https://github.com/paulmillr/scure-bip39/compare/1.0.0..main).
- The audit has been funded by [Ethereum Foundation](https://ethereum.org/en/) with help of [Nomic Labs](https://nomiclabs.io)

The library was initially developed for [js-ethereum-cryptography](https://github.com/ethereum/js-ethereum-cryptography).
At commit [ae00e6d7](https://github.com/ethereum/js-ethereum-cryptography/commit/ae00e6d7d24fb3c76a1c7fe10039f6ecd120b77e),
it was extracted to a separate package called `micro-bip39`.
After the audit we've decided to use `@scure` NPM namespace for security.

### Supply chain security

- **Commits** are signed with PGP keys, to prevent forgery. Make sure to verify commit signatures
- **Releases** are transparent and built on GitHub CI. Make sure to verify [provenance](https://docs.npmjs.com/generating-provenance-statements) logs
- Use GitHub CLI to verify single-file builds:
`gh attestation verify --owner paulmillr scure-bip39.js`
- **Rare releasing** is followed to ensure less re-audit need for end-users
- **Dependencies** are minimized and locked-down: any dependency could get hacked and users will be downloading malware with every install.
- We make sure to use as few dependencies as possible
- Automatic dep updates are prevented by locking-down version ranges; diffs are checked with `npm-diff`
- **Dev Dependencies** are disabled for end-users; they are only used to develop / build the source code

For this package, there are 2 dependencies; and a few dev dependencies:

- [noble-hashes](https://github.com/paulmillr/noble-hashes) provides cryptographic hashing functionality
- [scure-base](https://github.com/paulmillr/scure-base) provides low-level wordlist utilities
- micro-bmark, micro-should and jsbt are used for benchmarking / testing / build tooling and developed by the same author
- prettier, fast-check and typescript are used for code quality / test generation / ts compilation. It's hard to audit their source code thoroughly and fully because of their size

## Contributing & testing

- `npm install && npm run build && npm test` will build the code and run tests.
- `npm run lint` / `npm run format` will run linter / fix linter issues.
- `npm run build:release` will build single file
1. The library was initially developed for [js-ethereum-cryptography](https://github.com/ethereum/js-ethereum-cryptography)
2. At commit [ae00e6d7](https://github.com/ethereum/js-ethereum-cryptography/commit/ae00e6d7d24fb3c76a1c7fe10039f6ecd120b77e), it
was extracted to a separate package called `micro-bip39`
3. After the audit we've decided to use NPM namespace for security. Since `@micro` namespace was taken, we've renamed the package to `@scure/bip39`

## License
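For orientation, the mnemonic API documented in the README diff above composes as follows. This is a minimal sketch that only uses the functions and import paths shown there.

```ts
// Minimal sketch using only the @scure/bip39 API documented in the README above.
import * as bip39 from '@scure/bip39';
import { wordlist } from '@scure/bip39/wordlists/english';

async function demo(): Promise<void> {
  // 12-word mnemonic from 128 bits of CSPRNG entropy.
  const mn = bip39.generateMnemonic(wordlist);

  // Reversible round-trip between the mnemonic and its raw entropy bytes.
  const entropy = bip39.mnemonicToEntropy(mn, wordlist);
  const roundTripped = bip39.entropyToMnemonic(entropy, wordlist);
  console.log(bip39.validateMnemonic(roundTripped, wordlist)); // true

  // Irreversible: derive a 64-byte seed; the passphrase adds extra protection.
  const seed = await bip39.mnemonicToSeed(mn, 'password');
  console.log(seed.length); // 64
}

demo().catch(console.error);
```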
92 node_modules/@scure/bip39/esm/index.d.ts generated vendored

@@ -1,92 +0,0 @@
/**
* Audited & minimal JS implementation of
* [BIP39 mnemonic phrases](https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki).
* @module
* @example
```js
import * as bip39 from '@scure/bip39';
import { wordlist } from '@scure/bip39/wordlists/english';
const mn = bip39.generateMnemonic(wordlist);
console.log(mn);
const ent = bip39.mnemonicToEntropy(mn, wordlist)
bip39.entropyToMnemonic(ent, wordlist);
bip39.validateMnemonic(mn, wordlist);
await bip39.mnemonicToSeed(mn, 'password');
bip39.mnemonicToSeedSync(mn, 'password');

// Wordlists
import { wordlist as czech } from '@scure/bip39/wordlists/czech';
import { wordlist as english } from '@scure/bip39/wordlists/english';
import { wordlist as french } from '@scure/bip39/wordlists/french';
import { wordlist as italian } from '@scure/bip39/wordlists/italian';
import { wordlist as japanese } from '@scure/bip39/wordlists/japanese';
import { wordlist as korean } from '@scure/bip39/wordlists/korean';
import { wordlist as portuguese } from '@scure/bip39/wordlists/portuguese';
import { wordlist as simplifiedChinese } from '@scure/bip39/wordlists/simplified-chinese';
import { wordlist as spanish } from '@scure/bip39/wordlists/spanish';
import { wordlist as traditionalChinese } from '@scure/bip39/wordlists/traditional-chinese';
```
*/
/**
* Generate x random words. Uses Cryptographically-Secure Random Number Generator.
* @param wordlist imported wordlist for specific language
* @param strength mnemonic strength 128-256 bits
* @example
* generateMnemonic(wordlist, 128)
* // 'legal winner thank year wave sausage worth useful legal winner thank yellow'
*/
export declare function generateMnemonic(wordlist: string[], strength?: number): string;
/**
* Reversible: Converts mnemonic string to raw entropy in form of byte array.
* @param mnemonic 12-24 words
* @param wordlist imported wordlist for specific language
* @example
* const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';
* mnemonicToEntropy(mnem, wordlist)
* // Produces
* new Uint8Array([
* 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
* 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f
* ])
*/
export declare function mnemonicToEntropy(mnemonic: string, wordlist: string[]): Uint8Array;
/**
* Reversible: Converts raw entropy in form of byte array to mnemonic string.
* @param entropy byte array
* @param wordlist imported wordlist for specific language
* @returns 12-24 words
* @example
* const ent = new Uint8Array([
* 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
* 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f
* ]);
* entropyToMnemonic(ent, wordlist);
* // 'legal winner thank year wave sausage worth useful legal winner thank yellow'
*/
export declare function entropyToMnemonic(entropy: Uint8Array, wordlist: string[]): string;
/**
* Validates mnemonic for being 12-24 words contained in `wordlist`.
*/
export declare function validateMnemonic(mnemonic: string, wordlist: string[]): boolean;
/**
* Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.
* @param mnemonic 12-24 words
* @param passphrase string that will additionally protect the key
* @returns 64 bytes of key data
* @example
* const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';
* await mnemonicToSeed(mnem, 'password');
* // new Uint8Array([...64 bytes])
*/
export declare function mnemonicToSeed(mnemonic: string, passphrase?: string): Promise<Uint8Array>;
/**
* Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.
* @param mnemonic 12-24 words
* @param passphrase string that will additionally protect the key
* @returns 64 bytes of key data
* @example
* const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';
* mnemonicToSeedSync(mnem, 'password');
* // new Uint8Array([...64 bytes])
*/
export declare function mnemonicToSeedSync(mnemonic: string, passphrase?: string): Uint8Array;
124 node_modules/@scure/bip39/esm/index.js generated vendored

@@ -1,46 +1,13 @@
/**
* Audited & minimal JS implementation of
* [BIP39 mnemonic phrases](https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki).
* @module
* @example
```js
import * as bip39 from '@scure/bip39';
import { wordlist } from '@scure/bip39/wordlists/english';
const mn = bip39.generateMnemonic(wordlist);
console.log(mn);
const ent = bip39.mnemonicToEntropy(mn, wordlist)
bip39.entropyToMnemonic(ent, wordlist);
bip39.validateMnemonic(mn, wordlist);
await bip39.mnemonicToSeed(mn, 'password');
bip39.mnemonicToSeedSync(mn, 'password');

// Wordlists
import { wordlist as czech } from '@scure/bip39/wordlists/czech';
import { wordlist as english } from '@scure/bip39/wordlists/english';
import { wordlist as french } from '@scure/bip39/wordlists/french';
import { wordlist as italian } from '@scure/bip39/wordlists/italian';
import { wordlist as japanese } from '@scure/bip39/wordlists/japanese';
import { wordlist as korean } from '@scure/bip39/wordlists/korean';
import { wordlist as portuguese } from '@scure/bip39/wordlists/portuguese';
import { wordlist as simplifiedChinese } from '@scure/bip39/wordlists/simplified-chinese';
import { wordlist as spanish } from '@scure/bip39/wordlists/spanish';
import { wordlist as traditionalChinese } from '@scure/bip39/wordlists/traditional-chinese';
```
*/
/*! scure-bip39 - MIT License (c) 2022 Patricio Palladino, Paul Miller (paulmillr.com) */
import assert from '@noble/hashes/_assert';
import { pbkdf2, pbkdf2Async } from '@noble/hashes/pbkdf2';
import { sha256, sha512 } from '@noble/hashes/sha2';
import { abytes, anumber, randomBytes } from '@noble/hashes/utils';
import { sha256 } from '@noble/hashes/sha256';
import { sha512 } from '@noble/hashes/sha512';
import { randomBytes } from '@noble/hashes/utils';
import { utils as baseUtils } from '@scure/base';
// Japanese wordlist
const isJapanese = (wordlist) => wordlist[0] === '\u3042\u3044\u3053\u304f\u3057\u3093';
// Normalization replaces equivalent sequences of characters
// so that any two texts that are equivalent will be reduced
// to the same sequence of code points, called the normal form of the original text.
// https://tonsky.me/blog/unicode/#why-is-a----
function nfkd(str) {
if (typeof str !== 'string')
throw new TypeError('invalid mnemonic type: ' + typeof str);
throw new TypeError(`Invalid mnemonic type: ${typeof str}`);
return str.normalize('NFKD');
}
function normalize(str) {
@@ -50,79 +17,39 @@ function normalize(str) {
throw new Error('Invalid mnemonic');
return { nfkd: norm, words };
}
function aentropy(ent) {
abytes(ent, 16, 20, 24, 28, 32);
function assertEntropy(entropy) {
assert.bytes(entropy, 16, 20, 24, 28, 32);
}
/**
* Generate x random words. Uses Cryptographically-Secure Random Number Generator.
* @param wordlist imported wordlist for specific language
* @param strength mnemonic strength 128-256 bits
* @example
* generateMnemonic(wordlist, 128)
* // 'legal winner thank year wave sausage worth useful legal winner thank yellow'
*/
export function generateMnemonic(wordlist, strength = 128) {
anumber(strength);
assert.number(strength);
if (strength % 32 !== 0 || strength > 256)
throw new TypeError('Invalid entropy');
return entropyToMnemonic(randomBytes(strength / 8), wordlist);
}
const calcChecksum = (entropy) => {
// Checksum is ent.length/4 bits long
const bitsLeft = 8 - entropy.length / 4;
// Zero rightmost "bitsLeft" bits in byte
// For example: bitsLeft=4 val=10111101 -> 10110000
return new Uint8Array([(sha256(entropy)[0] >> bitsLeft) << bitsLeft]);
};
function getCoder(wordlist) {
if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== 'string')
throw new Error('Wordlist: expected array of 2048 strings');
throw new Error('Worlist: expected array of 2048 strings');
wordlist.forEach((i) => {
if (typeof i !== 'string')
throw new Error('wordlist: non-string element: ' + i);
throw new Error(`Wordlist: non-string element: ${i}`);
});
return baseUtils.chain(baseUtils.checksum(1, calcChecksum), baseUtils.radix2(11, true), baseUtils.alphabet(wordlist));
}
/**
* Reversible: Converts mnemonic string to raw entropy in form of byte array.
* @param mnemonic 12-24 words
* @param wordlist imported wordlist for specific language
* @example
* const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';
* mnemonicToEntropy(mnem, wordlist)
* // Produces
* new Uint8Array([
* 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
* 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f
* ])
*/
export function mnemonicToEntropy(mnemonic, wordlist) {
const { words } = normalize(mnemonic);
const entropy = getCoder(wordlist).decode(words);
aentropy(entropy);
assertEntropy(entropy);
return entropy;
}
/**
* Reversible: Converts raw entropy in form of byte array to mnemonic string.
* @param entropy byte array
* @param wordlist imported wordlist for specific language
* @returns 12-24 words
* @example
* const ent = new Uint8Array([
* 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
* 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f
* ]);
* entropyToMnemonic(ent, wordlist);
* // 'legal winner thank year wave sausage worth useful legal winner thank yellow'
*/
export function entropyToMnemonic(entropy, wordlist) {
aentropy(entropy);
assertEntropy(entropy);
const words = getCoder(wordlist).encode(entropy);
return words.join(isJapanese(wordlist) ? '\u3000' : ' ');
}
/**
* Validates mnemonic for being 12-24 words contained in `wordlist`.
*/
export function validateMnemonic(mnemonic, wordlist) {
try {
mnemonicToEntropy(mnemonic, wordlist);
@@ -132,30 +59,11 @@ export function validateMnemonic(mnemonic, wordlist) {
}
return true;
}
const psalt = (passphrase) => nfkd('mnemonic' + passphrase);
/**
* Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.
* @param mnemonic 12-24 words
* @param passphrase string that will additionally protect the key
* @returns 64 bytes of key data
* @example
* const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';
* await mnemonicToSeed(mnem, 'password');
* // new Uint8Array([...64 bytes])
*/
const salt = (passphrase) => nfkd(`mnemonic${passphrase}`);
export function mnemonicToSeed(mnemonic, passphrase = '') {
return pbkdf2Async(sha512, normalize(mnemonic).nfkd, psalt(passphrase), { c: 2048, dkLen: 64 });
return pbkdf2Async(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
}
/**
* Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.
* @param mnemonic 12-24 words
* @param passphrase string that will additionally protect the key
* @returns 64 bytes of key data
* @example
* const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';
* mnemonicToSeedSync(mnem, 'password');
* // new Uint8Array([...64 bytes])
*/
export function mnemonicToSeedSync(mnemonic, passphrase = '') {
return pbkdf2(sha512, normalize(mnemonic).nfkd, psalt(passphrase), { c: 2048, dkLen: 64 });
return pbkdf2(sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
}
//# sourceMappingURL=index.js.map
1 node_modules/@scure/bip39/esm/wordlists/czech.d.ts generated vendored

@@ -1 +0,0 @@
export declare const wordlist: string[];

1 node_modules/@scure/bip39/esm/wordlists/czech.js generated vendored

@@ -2046,3 +2046,4 @@ zvon
zvrat
zvukovod
zvyk`.split('\n');
//# sourceMappingURL=czech.js.map

1 node_modules/@scure/bip39/esm/wordlists/english.d.ts generated vendored

@@ -1 +0,0 @@
export declare const wordlist: string[];

1 node_modules/@scure/bip39/esm/wordlists/english.js generated vendored

@@ -2046,3 +2046,4 @@ zebra
zero
zone
zoo`.split('\n');
//# sourceMappingURL=english.js.map

1 node_modules/@scure/bip39/esm/wordlists/french.d.ts generated vendored

@@ -1 +0,0 @@
export declare const wordlist: string[];

1 node_modules/@scure/bip39/esm/wordlists/french.js generated vendored

@@ -2046,3 +2046,4 @@ zèbre
zénith
zeste
zoologie`.split('\n');
//# sourceMappingURL=french.js.map

1 node_modules/@scure/bip39/esm/wordlists/italian.d.ts generated vendored

@@ -1 +0,0 @@
export declare const wordlist: string[];

1 node_modules/@scure/bip39/esm/wordlists/italian.js generated vendored

@@ -2046,3 +2046,4 @@ zucchero
zufolo
zulu
zuppa`.split('\n');
//# sourceMappingURL=italian.js.map

1 node_modules/@scure/bip39/esm/wordlists/japanese.d.ts generated vendored

@@ -1 +0,0 @@
export declare const wordlist: string[];

1 node_modules/@scure/bip39/esm/wordlists/japanese.js generated vendored

@@ -2046,3 +2046,4 @@ export const wordlist = `あいこくしん
わすれもの
わらう
われる`.split('\n');
//# sourceMappingURL=japanese.js.map

1 node_modules/@scure/bip39/esm/wordlists/korean.d.ts generated vendored

@@ -1 +0,0 @@
export declare const wordlist: string[];

1 node_modules/@scure/bip39/esm/wordlists/korean.js generated vendored

@@ -2046,3 +2046,4 @@ export const wordlist = `가격
희생
흰색
힘껏`.split('\n');
//# sourceMappingURL=korean.js.map

1 node_modules/@scure/bip39/esm/wordlists/portuguese.d.ts generated vendored

@@ -1 +0,0 @@
export declare const wordlist: string[];

2048 node_modules/@scure/bip39/esm/wordlists/portuguese.js generated vendored

File diff suppressed because it is too large

1 node_modules/@scure/bip39/esm/wordlists/simplified-chinese.d.ts generated vendored

@@ -1 +0,0 @@
export declare const wordlist: string[];

1 node_modules/@scure/bip39/esm/wordlists/simplified-chinese.js generated vendored

@@ -2046,3 +2046,4 @@ export const wordlist = `的
怨
矮
歇`.split('\n');
//# sourceMappingURL=simplified-chinese.js.map

1 node_modules/@scure/bip39/esm/wordlists/spanish.d.ts generated vendored

@@ -1 +0,0 @@
export declare const wordlist: string[];

1 node_modules/@scure/bip39/esm/wordlists/spanish.js generated vendored

@@ -2046,3 +2046,4 @@ zona
zorro
zumo
zurdo`.split('\n');
//# sourceMappingURL=spanish.js.map

1 node_modules/@scure/bip39/esm/wordlists/traditional-chinese.d.ts generated vendored

@@ -1 +0,0 @@
export declare const wordlist: string[];

1 node_modules/@scure/bip39/esm/wordlists/traditional-chinese.js generated vendored

@@ -2046,3 +2046,4 @@ export const wordlist = `的
怨
矮
歇`.split('\n');
//# sourceMappingURL=traditional-chinese.js.map
29 node_modules/@scure/bip39/index.d.ts generated vendored

@@ -1,32 +1,3 @@
/**
* Audited & minimal JS implementation of
* [BIP39 mnemonic phrases](https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki).
* @module
* @example
```js
import * as bip39 from '@scure/bip39';
import { wordlist } from '@scure/bip39/wordlists/english';
const mn = bip39.generateMnemonic(wordlist);
console.log(mn);
const ent = bip39.mnemonicToEntropy(mn, wordlist)
bip39.entropyToMnemonic(ent, wordlist);
bip39.validateMnemonic(mn, wordlist);
await bip39.mnemonicToSeed(mn, 'password');
bip39.mnemonicToSeedSync(mn, 'password');

// Wordlists
import { wordlist as czech } from '@scure/bip39/wordlists/czech';
import { wordlist as english } from '@scure/bip39/wordlists/english';
import { wordlist as french } from '@scure/bip39/wordlists/french';
import { wordlist as italian } from '@scure/bip39/wordlists/italian';
import { wordlist as japanese } from '@scure/bip39/wordlists/japanese';
import { wordlist as korean } from '@scure/bip39/wordlists/korean';
import { wordlist as portuguese } from '@scure/bip39/wordlists/portuguese';
import { wordlist as simplifiedChinese } from '@scure/bip39/wordlists/simplified-chinese';
import { wordlist as spanish } from '@scure/bip39/wordlists/spanish';
import { wordlist as traditionalChinese } from '@scure/bip39/wordlists/traditional-chinese';
```
*/
/**
* Generate x random words. Uses Cryptographically-Secure Random Number Generator.
* @param wordlist imported wordlist for specific language
71 node_modules/@scure/bip39/index.js generated vendored

@@ -1,43 +1,11 @@
"use strict";
/**
* Audited & minimal JS implementation of
* [BIP39 mnemonic phrases](https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki).
* @module
* @example
```js
import * as bip39 from '@scure/bip39';
import { wordlist } from '@scure/bip39/wordlists/english';
const mn = bip39.generateMnemonic(wordlist);
console.log(mn);
const ent = bip39.mnemonicToEntropy(mn, wordlist)
bip39.entropyToMnemonic(ent, wordlist);
bip39.validateMnemonic(mn, wordlist);
await bip39.mnemonicToSeed(mn, 'password');
bip39.mnemonicToSeedSync(mn, 'password');

// Wordlists
import { wordlist as czech } from '@scure/bip39/wordlists/czech';
import { wordlist as english } from '@scure/bip39/wordlists/english';
import { wordlist as french } from '@scure/bip39/wordlists/french';
import { wordlist as italian } from '@scure/bip39/wordlists/italian';
import { wordlist as japanese } from '@scure/bip39/wordlists/japanese';
import { wordlist as korean } from '@scure/bip39/wordlists/korean';
import { wordlist as portuguese } from '@scure/bip39/wordlists/portuguese';
import { wordlist as simplifiedChinese } from '@scure/bip39/wordlists/simplified-chinese';
import { wordlist as spanish } from '@scure/bip39/wordlists/spanish';
import { wordlist as traditionalChinese } from '@scure/bip39/wordlists/traditional-chinese';
```
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.generateMnemonic = generateMnemonic;
exports.mnemonicToEntropy = mnemonicToEntropy;
exports.entropyToMnemonic = entropyToMnemonic;
exports.validateMnemonic = validateMnemonic;
exports.mnemonicToSeed = mnemonicToSeed;
exports.mnemonicToSeedSync = mnemonicToSeedSync;
exports.mnemonicToSeedSync = exports.mnemonicToSeed = exports.validateMnemonic = exports.entropyToMnemonic = exports.mnemonicToEntropy = exports.generateMnemonic = void 0;
/*! scure-bip39 - MIT License (c) 2022 Patricio Palladino, Paul Miller (paulmillr.com) */
const _assert_1 = require("@noble/hashes/_assert");
const pbkdf2_1 = require("@noble/hashes/pbkdf2");
const sha2_1 = require("@noble/hashes/sha2");
const sha256_1 = require("@noble/hashes/sha256");
const sha512_1 = require("@noble/hashes/sha512");
const utils_1 = require("@noble/hashes/utils");
const base_1 = require("@scure/base");
// Japanese wordlist
@@ -45,10 +13,9 @@ const isJapanese = (wordlist) => wordlist[0] === '\u3042\u3044\u3053\u304f\u3057
// Normalization replaces equivalent sequences of characters
// so that any two texts that are equivalent will be reduced
// to the same sequence of code points, called the normal form of the original text.
// https://tonsky.me/blog/unicode/#why-is-a----
function nfkd(str) {
if (typeof str !== 'string')
throw new TypeError('invalid mnemonic type: ' + typeof str);
throw new TypeError(`Invalid mnemonic type: ${typeof str}`);
return str.normalize('NFKD');
}
function normalize(str) {
@@ -58,8 +25,8 @@ function normalize(str) {
throw new Error('Invalid mnemonic');
return { nfkd: norm, words };
}
function aentropy(ent) {
(0, utils_1.abytes)(ent, 16, 20, 24, 28, 32);
function assertEntropy(entropy) {
_assert_1.default.bytes(entropy, 16, 20, 24, 28, 32);
}
/**
* Generate x random words. Uses Cryptographically-Secure Random Number Generator.
@@ -70,24 +37,25 @@ function aentropy(ent) {
* // 'legal winner thank year wave sausage worth useful legal winner thank yellow'
*/
function generateMnemonic(wordlist, strength = 128) {
(0, utils_1.anumber)(strength);
_assert_1.default.number(strength);
if (strength % 32 !== 0 || strength > 256)
throw new TypeError('Invalid entropy');
return entropyToMnemonic((0, utils_1.randomBytes)(strength / 8), wordlist);
}
exports.generateMnemonic = generateMnemonic;
const calcChecksum = (entropy) => {
// Checksum is ent.length/4 bits long
const bitsLeft = 8 - entropy.length / 4;
// Zero rightmost "bitsLeft" bits in byte
// For example: bitsLeft=4 val=10111101 -> 10110000
return new Uint8Array([((0, sha2_1.sha256)(entropy)[0] >> bitsLeft) << bitsLeft]);
return new Uint8Array([((0, sha256_1.sha256)(entropy)[0] >> bitsLeft) << bitsLeft]);
};
function getCoder(wordlist) {
if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== 'string')
throw new Error('Wordlist: expected array of 2048 strings');
throw new Error('Worlist: expected array of 2048 strings');
wordlist.forEach((i) => {
if (typeof i !== 'string')
throw new Error('wordlist: non-string element: ' + i);
throw new Error(`Wordlist: non-string element: ${i}`);
});
return base_1.utils.chain(base_1.utils.checksum(1, calcChecksum), base_1.utils.radix2(11, true), base_1.utils.alphabet(wordlist));
}
@@ -107,9 +75,10 @@ function getCoder(wordlist) {
function mnemonicToEntropy(mnemonic, wordlist) {
const { words } = normalize(mnemonic);
const entropy = getCoder(wordlist).decode(words);
aentropy(entropy);
assertEntropy(entropy);
return entropy;
}
exports.mnemonicToEntropy = mnemonicToEntropy;
/**
* Reversible: Converts raw entropy in form of byte array to mnemonic string.
* @param entropy byte array
@@ -124,10 +93,11 @@ function mnemonicToEntropy(mnemonic, wordlist) {
* // 'legal winner thank year wave sausage worth useful legal winner thank yellow'
*/
function entropyToMnemonic(entropy, wordlist) {
aentropy(entropy);
assertEntropy(entropy);
const words = getCoder(wordlist).encode(entropy);
return words.join(isJapanese(wordlist) ? '\u3000' : ' ');
}
exports.entropyToMnemonic = entropyToMnemonic;
/**
* Validates mnemonic for being 12-24 words contained in `wordlist`.
*/
@@ -140,7 +110,8 @@ function validateMnemonic(mnemonic, wordlist) {
}
return true;
}
const psalt = (passphrase) => nfkd('mnemonic' + passphrase);
exports.validateMnemonic = validateMnemonic;
const salt = (passphrase) => nfkd(`mnemonic${passphrase}`);
/**
* Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.
* @param mnemonic 12-24 words
@@ -152,8 +123,9 @@ const psalt = (passphrase) => nfkd('mnemonic' + passphrase);
* // new Uint8Array([...64 bytes])
*/
function mnemonicToSeed(mnemonic, passphrase = '') {
return (0, pbkdf2_1.pbkdf2Async)(sha2_1.sha512, normalize(mnemonic).nfkd, psalt(passphrase), { c: 2048, dkLen: 64 });
return (0, pbkdf2_1.pbkdf2Async)(sha512_1.sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
}
exports.mnemonicToSeed = mnemonicToSeed;
/**
* Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.
* @param mnemonic 12-24 words
@@ -165,5 +137,6 @@ function mnemonicToSeed(mnemonic, passphrase = '') {
* // new Uint8Array([...64 bytes])
*/
function mnemonicToSeedSync(mnemonic, passphrase = '') {
return (0, pbkdf2_1.pbkdf2)(sha2_1.sha512, normalize(mnemonic).nfkd, psalt(passphrase), { c: 2048, dkLen: 64 });
return (0, pbkdf2_1.pbkdf2)(sha512_1.sha512, normalize(mnemonic).nfkd, salt(passphrase), { c: 2048, dkLen: 64 });
}
exports.mnemonicToSeedSync = mnemonicToSeedSync;
86 node_modules/@scure/bip39/package.json generated vendored

@@ -1,94 +1,103 @@
{
"name": "@scure/bip39",
"version": "1.6.0",
"version": "1.2.1",
"description": "Secure, audited & minimal implementation of BIP39 mnemonic phrases",
"main": "index.js",
"files": [
"index.js",
"index.d.ts",
"wordlists/*.js",
"wordlists/*.d.ts",
"esm",
"src/index.ts"
"esm"
],
"types": "index.d.ts",
"dependencies": {
"@noble/hashes": "~1.8.0",
"@scure/base": "~1.2.5"
"@noble/hashes": "~1.3.0",
"@scure/base": "~1.1.0"
},
"devDependencies": {
"@paulmillr/jsbt": "0.3.3",
"micro-bmark": "0.4.1",
"micro-should": "0.5.2",
"prettier": "3.5.3",
"typescript": "5.8.3"
"micro-should": "0.4.0",
"prettier": "2.8.4",
"typescript": "5.0.2"
},
"author": "Paul Miller (https://paulmillr.com)",
"homepage": "https://paulmillr.com/",
"repository": {
"type": "git",
"url": "git+https://github.com/paulmillr/scure-bip39.git"
"url": "https://github.com/paulmillr/scure-bip39.git"
},
"contributors": [
{
"name": "Patricio Palladino",
"email": "patricio@nomiclabs.io"
},
{
"name": "Paul Miller",
"url": "https://paulmillr.com"
}
],
"license": "MIT",
"scripts": {
"build": "tsc && tsc -p tsconfig.cjs.json",
"build:release": "npx jsbt esbuild test/build",
"lint": "prettier --check 'src/**/*.ts' 'test/*.test.ts' 'scripts/*.js'",
"format": "prettier --write 'src/**/*.ts' 'test/*.test.ts' 'scripts/*.js'",
"test": "node test/index.js",
"test:bun": "bun test/index.js",
"test:deno": "deno --allow-env --allow-read test/index.js",
"fetch-wordlist": "./scripts/fetch-wordlist.js"
"build": "tsc && tsc -p tsconfig.esm.json",
"lint": "prettier --check 'src/**/*.ts' 'test/*.test.ts'",
"format": "prettier --write 'src/**/*.ts' 'test/*.test.ts'",
"test": "cd test && tsc && node bip39.test.js"
},
"sideEffects": false,
"main": "index.js",
"types": "./index.d.ts",
"exports": {
".": {
"types": "./index.d.ts",
"import": "./esm/index.js",
"require": "./index.js"
"default": "./index.js"
},
"./index": {
"types": "./index.d.ts",
"import": "./esm/index.js",
"require": "./index.js"
"default": "./index.js"
},
"./wordlists/czech": {
"types": "./wordlists/czech.d.ts",
"import": "./esm/wordlists/czech.js",
"require": "./wordlists/czech.js"
"default": "./wordlists/czech.js"
},
"./wordlists/english": {
"types": "./wordlists/english.d.ts",
"import": "./esm/wordlists/english.js",
"require": "./wordlists/english.js"
"default": "./wordlists/english.js"
},
"./wordlists/french": {
"types": "./wordlists/french.d.ts",
"import": "./esm/wordlists/french.js",
"require": "./wordlists/french.js"
"default": "./wordlists/french.js"
},
"./wordlists/italian": {
"types": "./wordlists/italian.d.ts",
"import": "./esm/wordlists/italian.js",
"require": "./wordlists/italian.js"
"default": "./wordlists/italian.js"
},
"./wordlists/japanese": {
"types": "./wordlists/japanese.d.ts",
"import": "./esm/wordlists/japanese.js",
"require": "./wordlists/japanese.js"
"default": "./wordlists/japanese.js"
},
"./wordlists/korean": {
"types": "./wordlists/korean.d.ts",
"import": "./esm/wordlists/korean.js",
"require": "./wordlists/korean.js"
},
"./wordlists/portuguese": {
"import": "./esm/wordlists/portuguese.js",
"require": "./wordlists/portuguese.js"
"default": "./wordlists/korean.js"
},
"./wordlists/simplified-chinese": {
"types": "./wordlists/simplified-chinese.d.ts",
"import": "./esm/wordlists/simplified-chinese.js",
"require": "./wordlists/simplified-chinese.js"
"default": "./wordlists/simplified-chinese.js"
},
"./wordlists/spanish": {
"types": "./wordlists/spanish.d.ts",
"import": "./esm/wordlists/spanish.js",
"require": "./wordlists/spanish.js"
"default": "./wordlists/spanish.js"
},
"./wordlists/traditional-chinese": {
"types": "./wordlists/traditional-chinese.d.ts",
"import": "./esm/wordlists/traditional-chinese.js",
"require": "./wordlists/traditional-chinese.js"
"default": "./wordlists/traditional-chinese.js"
}
},
"keywords": [
@@ -98,8 +107,9 @@
"code",
"bip0039",
"bip-39",
"wordlist",
"micro",
"scure",
"wordlist",
"noble"
],
"funding": "https://paulmillr.com/funding/"
174 node_modules/@scure/bip39/src/index.ts generated vendored

@@ -1,174 +0,0 @@
/**
* Audited & minimal JS implementation of
* [BIP39 mnemonic phrases](https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki).
* @module
* @example
```js
import * as bip39 from '@scure/bip39';
import { wordlist } from '@scure/bip39/wordlists/english';
const mn = bip39.generateMnemonic(wordlist);
console.log(mn);
const ent = bip39.mnemonicToEntropy(mn, wordlist)
bip39.entropyToMnemonic(ent, wordlist);
bip39.validateMnemonic(mn, wordlist);
await bip39.mnemonicToSeed(mn, 'password');
bip39.mnemonicToSeedSync(mn, 'password');

// Wordlists
import { wordlist as czech } from '@scure/bip39/wordlists/czech';
import { wordlist as english } from '@scure/bip39/wordlists/english';
import { wordlist as french } from '@scure/bip39/wordlists/french';
import { wordlist as italian } from '@scure/bip39/wordlists/italian';
import { wordlist as japanese } from '@scure/bip39/wordlists/japanese';
import { wordlist as korean } from '@scure/bip39/wordlists/korean';
import { wordlist as portuguese } from '@scure/bip39/wordlists/portuguese';
import { wordlist as simplifiedChinese } from '@scure/bip39/wordlists/simplified-chinese';
import { wordlist as spanish } from '@scure/bip39/wordlists/spanish';
import { wordlist as traditionalChinese } from '@scure/bip39/wordlists/traditional-chinese';
```
*/

/*! scure-bip39 - MIT License (c) 2022 Patricio Palladino, Paul Miller (paulmillr.com) */
import { pbkdf2, pbkdf2Async } from '@noble/hashes/pbkdf2';
import { sha256, sha512 } from '@noble/hashes/sha2';
import { abytes, anumber, randomBytes } from '@noble/hashes/utils';
import { utils as baseUtils } from '@scure/base';

// Japanese wordlist
const isJapanese = (wordlist: string[]) => wordlist[0] === '\u3042\u3044\u3053\u304f\u3057\u3093';

// Normalization replaces equivalent sequences of characters
// so that any two texts that are equivalent will be reduced
// to the same sequence of code points, called the normal form of the original text.
// https://tonsky.me/blog/unicode/#why-is-a----
function nfkd(str: string) {
if (typeof str !== 'string') throw new TypeError('invalid mnemonic type: ' + typeof str);
return str.normalize('NFKD');
}

function normalize(str: string) {
const norm = nfkd(str);
const words = norm.split(' ');
if (![12, 15, 18, 21, 24].includes(words.length)) throw new Error('Invalid mnemonic');
return { nfkd: norm, words };
}

function aentropy(ent: Uint8Array) {
abytes(ent, 16, 20, 24, 28, 32);
}

/**
* Generate x random words. Uses Cryptographically-Secure Random Number Generator.
* @param wordlist imported wordlist for specific language
* @param strength mnemonic strength 128-256 bits
* @example
* generateMnemonic(wordlist, 128)
* // 'legal winner thank year wave sausage worth useful legal winner thank yellow'
*/
export function generateMnemonic(wordlist: string[], strength: number = 128): string {
anumber(strength);
if (strength % 32 !== 0 || strength > 256) throw new TypeError('Invalid entropy');
return entropyToMnemonic(randomBytes(strength / 8), wordlist);
}

const calcChecksum = (entropy: Uint8Array) => {
// Checksum is ent.length/4 bits long
const bitsLeft = 8 - entropy.length / 4;
// Zero rightmost "bitsLeft" bits in byte
// For example: bitsLeft=4 val=10111101 -> 10110000
return new Uint8Array([(sha256(entropy)[0]! >> bitsLeft) << bitsLeft]);
};

function getCoder(wordlist: string[]) {
if (!Array.isArray(wordlist) || wordlist.length !== 2048 || typeof wordlist[0] !== 'string')
throw new Error('Wordlist: expected array of 2048 strings');
wordlist.forEach((i) => {
if (typeof i !== 'string') throw new Error('wordlist: non-string element: ' + i);
});
return baseUtils.chain(
baseUtils.checksum(1, calcChecksum),
baseUtils.radix2(11, true),
baseUtils.alphabet(wordlist)
);
}

/**
* Reversible: Converts mnemonic string to raw entropy in form of byte array.
* @param mnemonic 12-24 words
* @param wordlist imported wordlist for specific language
* @example
* const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';
* mnemonicToEntropy(mnem, wordlist)
* // Produces
* new Uint8Array([
* 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
* 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f
* ])
*/
export function mnemonicToEntropy(mnemonic: string, wordlist: string[]): Uint8Array {
const { words } = normalize(mnemonic);
const entropy = getCoder(wordlist).decode(words);
aentropy(entropy);
return entropy;
}

/**
* Reversible: Converts raw entropy in form of byte array to mnemonic string.
* @param entropy byte array
* @param wordlist imported wordlist for specific language
* @returns 12-24 words
* @example
* const ent = new Uint8Array([
* 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
* 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f
* ]);
* entropyToMnemonic(ent, wordlist);
* // 'legal winner thank year wave sausage worth useful legal winner thank yellow'
*/
export function entropyToMnemonic(entropy: Uint8Array, wordlist: string[]): string {
aentropy(entropy);
const words = getCoder(wordlist).encode(entropy);
return words.join(isJapanese(wordlist) ? '\u3000' : ' ');
}

/**
* Validates mnemonic for being 12-24 words contained in `wordlist`.
*/
export function validateMnemonic(mnemonic: string, wordlist: string[]): boolean {
try {
mnemonicToEntropy(mnemonic, wordlist);
} catch (e) {
return false;
}
return true;
}

const psalt = (passphrase: string) => nfkd('mnemonic' + passphrase);

/**
* Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.
* @param mnemonic 12-24 words
* @param passphrase string that will additionally protect the key
* @returns 64 bytes of key data
* @example
* const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';
* await mnemonicToSeed(mnem, 'password');
* // new Uint8Array([...64 bytes])
*/
export function mnemonicToSeed(mnemonic: string, passphrase = ''): Promise<Uint8Array> {
  return pbkdf2Async(sha512, normalize(mnemonic).nfkd, psalt(passphrase), { c: 2048, dkLen: 64 });
}

/**
* Irreversible: Uses KDF to derive 64 bytes of key data from mnemonic + optional password.
* @param mnemonic 12-24 words
* @param passphrase string that will additionally protect the key
* @returns 64 bytes of key data
* @example
* const mnem = 'legal winner thank year wave sausage worth useful legal winner thank yellow';
* mnemonicToSeedSync(mnem, 'password');
* // new Uint8Array([...64 bytes])
*/
export function mnemonicToSeedSync(mnemonic: string, passphrase = ''): Uint8Array {
  return pbkdf2(sha512, normalize(mnemonic).nfkd, psalt(passphrase), { c: 2048, dkLen: 64 });
}
1 node_modules/@scure/bip39/wordlists/portuguese.d.ts generated vendored

@@ -1 +0,0 @@
export declare const wordlist: string[];

2051 node_modules/@scure/bip39/wordlists/portuguese.js generated vendored

File diff suppressed because it is too large