Mirror of https://github.com/aljazceru/notedeck.git (synced 2025-12-18 09:04:21 +01:00)
Add Fluent-based localization manager and a script to export source strings for translations
Changelog-Added: Added Fluent-based localization manager and a script to export source strings for translations
Signed-off-by: Terry Yiu <git@tyiu.xyz>
committed by William Casarin
parent 80820a52d2
commit d07c3e9135
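The diffs below only show the plumbing, so here is a minimal usage sketch of the API this commit introduces. It assumes the i18n module is exported from the notedeck crate, and the message IDs ("app-title", "greeting") are hypothetical; the calls themselves (LocalizationManager::new, get_string, get_string_with_args, LocalizationContext::new) are the ones defined in manager.rs further down.

// Usage sketch only; not part of this commit. Import path and message IDs are assumptions.
use std::path::Path;
use std::sync::Arc;

use fluent::FluentArgs;
use notedeck::i18n::{LocalizationContext, LocalizationManager};

fn demo() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Point the manager at a directory laid out as <dir>/<locale>/main.ftl,
    // which is the layout load_resource_for_locale expects.
    let manager = Arc::new(LocalizationManager::new(Path::new("assets/translations"))?);

    // Simple message without arguments (the formatted result is cached per locale).
    let title = manager.get_string("app-title")?;

    // Message with Fluent arguments (results with arguments are never cached).
    let mut args = FluentArgs::new();
    args.set("name", "alice");
    let greeting = manager.get_string_with_args("greeting", Some(&args))?;

    println!("{title}: {greeting}");

    // The manager is then wrapped in a LocalizationContext and stored on AppContext,
    // as the source diffs below show.
    let _i18n = LocalizationContext::new(manager);
    Ok(())
}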
Cargo.lock (generated): 369 changed lines
Dependency changes recorded in the generated lockfile:

Version bumps:
autocfg 1.4.0 -> 1.5.0
avif-serialize 0.8.3 -> 0.8.4
bip39 2.1.0 -> 2.2.0
bumpalo 3.18.1 -> 3.19.0
errno 0.3.12 -> 0.3.13 (its windows-sys dependency moves from 0.59.0 to 0.60.2)
gif 0.13.1 -> 0.13.3
image-webp 0.2.2 -> 0.2.3
jpeg-decoder 0.3.1 -> 0.3.2
libc 0.2.173 -> 0.2.174
lz4_flex 0.11.4 -> 0.11.5
num_enum / num_enum_derive 0.7.3 -> 0.7.4 (num_enum gains a rustversion dependency)
prettyplease 0.2.34 -> 0.2.35
profiling / profiling-procmacros 1.0.16 -> 1.0.17
quinn-udp 0.5.12 -> 0.5.13
r-efi 5.2.0 -> 5.3.0
ravif 0.11.12 -> 0.11.20
rustls 0.23.27 -> 0.23.28
syn 2.0.103 -> 2.0.104 (references updated throughout the dependency lists that pin "syn 2.0.x")
tracing-attributes 0.1.29 -> 0.1.30
unicode-normalization 0.1.22 -> 0.1.24
webpki-roots 1.0.0 -> 1.0.1 (references updated wherever it appears as a dependency)
xcursor 0.3.8 -> 0.3.10
zerocopy / zerocopy-derive 0.8.25 -> 0.8.26
zune-jpeg 0.4.17 -> 0.4.18

New package entries (dependencies listed in parentheses):
chunky-vec 0.1.0
elsa 1.11.2 (stable_deref_trait)
fluent 0.17.0 (fluent-bundle, unic-langid)
fluent-bundle 0.16.0 (fluent-langneg, fluent-syntax, intl-memoizer, intl_pluralrules, rustc-hash 2.1.1, self_cell, smallvec, unic-langid)
fluent-fallback 0.7.2 (async-trait, chunky-vec, fluent-bundle, futures, once_cell, pin-cell, rustc-hash 2.1.1, unic-langid)
fluent-langneg 0.13.0 (unic-langid)
fluent-resmgr 0.0.8 (elsa, fluent-bundle, fluent-fallback, futures, rustc-hash 2.1.1, thiserror 2.0.12, unic-langid)
fluent-syntax 0.12.0 (memchr, thiserror 2.0.12)
intl-memoizer 0.5.3 (type-map, unic-langid)
intl_pluralrules 7.0.2 (unic-langid)
md5 0.7.0
pin-cell 0.2.0
self_cell 1.2.0
unic-langid 0.9.6 (unic-langid-impl)
unic-langid-impl 0.9.6 (tinystr)
windows-sys 0.60.2 (windows-targets 0.53.2)

The notedeck package entry gains fluent, fluent-langneg, fluent-resmgr, md5, once_cell, regex and unic-langid in its dependency list. Registry source and checksum lines are updated to match the new versions.
@@ -30,10 +30,14 @@ egui_virtual_list = { git = "https://github.com/jb55/hello_egui", rev = "a66b679
 ehttp = "0.5.0"
 enostr = { path = "crates/enostr" }
 ewebsock = { version = "0.2.0", features = ["tls"] }
+fluent = "0.17.0"
+fluent-resmgr = "0.0.8"
+fluent-langneg = "0.13"
 hex = "0.4.3"
 image = { version = "0.25", features = ["jpeg", "png", "webp"] }
 indexmap = "2.6.0"
 log = "0.4.17"
+md5 = "0.7.0"
 nostr = { version = "0.37.0", default-features = false, features = ["std", "nip49"] }
 nwc = "0.39.0"
 mio = { version = "1.0.3", features = ["os-poll", "net"] }
@@ -45,6 +49,7 @@ notedeck_columns = { path = "crates/notedeck_columns" }
 notedeck_dave = { path = "crates/notedeck_dave" }
 notedeck_ui = { path = "crates/notedeck_ui" }
 tokenator = { path = "crates/tokenator" }
+once_cell = "1.19.0"
 open = "5.3.0"
 poll-promise = { version = "0.3.0", features = ["tokio"] }
 puffin = { git = "https://github.com/jb55/puffin", package = "puffin", rev = "c6a6242adaf90b6292c0f462d2acd34d96d224d2" }
@@ -60,6 +65,7 @@ tracing = { version = "0.1.40", features = ["log"] }
 tracing-appender = "0.2.3"
 tracing-subscriber = { version = "0.3", features = ["env-filter"] }
 tempfile = "3.13.0"
+unic-langid = "0.9.6"
 url = "2.5.2"
 urlencoding = "2.1.3"
 uuid = { version = "1.10.0", features = ["v4"] }
@@ -39,6 +39,13 @@ bech32 = { workspace = true }
 lightning-invoice = { workspace = true }
 secp256k1 = { workspace = true }
 hashbrown = { workspace = true }
+fluent = { workspace = true }
+fluent-resmgr = { workspace = true }
+fluent-langneg = { workspace = true }
+unic-langid = { workspace = true }
+once_cell = { workspace = true }
+md5 = { workspace = true }
+regex = "1"
 
 [dev-dependencies]
 tempfile = { workspace = true }
@@ -1,4 +1,5 @@
 use crate::account::FALLBACK_PUBKEY;
+use crate::i18n::{LocalizationContext, LocalizationManager};
 use crate::persist::{AppSizeHandler, ZoomHandler};
 use crate::wallet::GlobalWallet;
 use crate::zaps::Zaps;
@@ -17,6 +18,7 @@ use std::cell::RefCell;
 use std::collections::BTreeSet;
 use std::path::Path;
 use std::rc::Rc;
+use std::sync::Arc;
 use tracing::{error, info};
 
 pub enum AppAction {
@@ -48,6 +50,7 @@ pub struct Notedeck {
     zaps: Zaps,
     frame_history: FrameHistory,
     job_pool: JobPool,
+    i18n: LocalizationContext,
 }
 
 /// Our chrome, which is basically nothing
@@ -227,6 +230,21 @@ impl Notedeck {
         let zaps = Zaps::default();
         let job_pool = JobPool::default();
 
+        // Initialize localization
+        let i18n_resource_dir = Path::new("assets/translations");
+        let localization_manager = Arc::new(
+            LocalizationManager::new(&i18n_resource_dir).unwrap_or_else(|e| {
+                error!("Failed to initialize localization manager: {}", e);
+                // Create a fallback manager with a temporary directory
+                LocalizationManager::new(&std::env::temp_dir().join("notedeck_i18n_fallback"))
+                    .expect("Failed to create fallback localization manager")
+            }),
+        );
+        let i18n = LocalizationContext::new(localization_manager);
+
+        // Initialize global i18n context
+        crate::i18n::init_global_i18n(i18n.clone());
+
         Self {
             ndb,
             img_cache,
@@ -246,6 +264,7 @@ impl Notedeck {
             clipboard: Clipboard::new(None),
             zaps,
             job_pool,
+            i18n,
         }
     }
 
@@ -270,6 +289,7 @@ impl Notedeck {
             zaps: &mut self.zaps,
             frame_history: &mut self.frame_history,
             job_pool: &mut self.job_pool,
+            i18n: &self.i18n,
         }
     }
 
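The initialization above points LocalizationManager at assets/translations/<locale>/main.ftl, but no FTL file appears in this excerpt. The following sketch shows what such a resource might contain and how it is resolved; the message IDs and bodies are hypothetical, while the Fluent calls mirror what the manager added later in this commit does internally.

// Sketch only; the FTL content is an assumption, the fluent-bundle API calls are real.
use fluent::{FluentArgs, FluentBundle, FluentResource};
use unic_langid::LanguageIdentifier;

fn ftl_example() {
    // What assets/translations/en-US/main.ftl might look like.
    let ftl = r#"
app-title = Notedeck
greeting = Hello, { $name }!
"#
    .to_string();

    // Parse the FTL once, then format messages through a bundle,
    // as load_resource_for_locale and get_cached_string do below.
    let resource = FluentResource::try_new(ftl).expect("valid FTL");
    let locale: LanguageIdentifier = "en-US".parse().expect("valid locale");
    let mut bundle = FluentBundle::new(vec![locale]);
    bundle.add_resource(&resource).expect("no duplicate messages");

    let mut args = FluentArgs::new();
    args.set("name", "alice");

    let msg = bundle.get_message("greeting").expect("message exists");
    let pattern = msg.value().expect("message has a value");
    let mut errors = vec![];
    let text = bundle.format_pattern(pattern, Some(&args), &mut errors);
    assert!(errors.is_empty());
    // Note: Fluent wraps interpolated arguments in Unicode directional isolates by default.
    println!("{text}");
}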
@@ -1,6 +1,7 @@
 use crate::{
-    account::accounts::Accounts, frame_history::FrameHistory, wallet::GlobalWallet, zaps::Zaps,
-    Args, DataPath, Images, JobPool, NoteCache, ThemeHandler, UnknownIds,
+    account::accounts::Accounts, frame_history::FrameHistory, i18n::LocalizationContext,
+    wallet::GlobalWallet, zaps::Zaps, Args, DataPath, Images, JobPool, NoteCache, ThemeHandler,
+    UnknownIds,
 };
 use egui_winit::clipboard::Clipboard;
 
@@ -24,4 +25,5 @@ pub struct AppContext<'a> {
     pub zaps: &'a mut Zaps,
     pub frame_history: &'a mut FrameHistory,
     pub job_pool: &'a mut JobPool,
+    pub i18n: &'a LocalizationContext,
 }
crates/notedeck/src/i18n/manager.rs (new file, 766 lines)
@@ -0,0 +1,766 @@
use fluent::FluentArgs;
use fluent::{FluentBundle, FluentResource};
use fluent_langneg::negotiate_languages;
use std::collections::HashMap;
use std::path::Path;
use std::sync::{Arc, RwLock};
use unic_langid::LanguageIdentifier;

/// Manages localization resources and provides localized strings
pub struct LocalizationManager {
    /// Current locale
    current_locale: RwLock<LanguageIdentifier>,
    /// Available locales
    available_locales: Vec<LanguageIdentifier>,
    /// Fallback locale
    fallback_locale: LanguageIdentifier,
    /// Resource directory path
    resource_dir: std::path::PathBuf,
    /// Cached parsed FluentResource per locale
    resource_cache: RwLock<HashMap<LanguageIdentifier, Arc<FluentResource>>>,
    /// Cached string results per locale (only for strings without arguments)
    string_cache: RwLock<HashMap<LanguageIdentifier, HashMap<String, String>>>,
}

impl LocalizationManager {
    /// Creates a new LocalizationManager with the specified resource directory
    pub fn new(resource_dir: &Path) -> Result<Self, Box<dyn std::error::Error + Send + Sync>> {
        // Default to English (US)
        let default_locale: LanguageIdentifier = "en-US"
            .parse()
            .map_err(|e| format!("Locale parse error: {e:?}"))?;
        let fallback_locale = default_locale.clone();

        // Check if pseudolocale is enabled via environment variable
        let enable_pseudolocale = std::env::var("NOTEDECK_PSEUDOLOCALE").is_ok();

        // Build available locales list
        let mut available_locales = vec![default_locale.clone()];

        // Add en-XA if pseudolocale is enabled
        if enable_pseudolocale {
            let pseudolocale: LanguageIdentifier = "en-XA"
                .parse()
                .map_err(|e| format!("Pseudolocale parse error: {e:?}"))?;
            available_locales.push(pseudolocale);
            tracing::info!(
                "Pseudolocale (en-XA) enabled via NOTEDECK_PSEUDOLOCALE environment variable"
            );
        }

        Ok(Self {
            current_locale: RwLock::new(default_locale),
            available_locales,
            fallback_locale,
            resource_dir: resource_dir.to_path_buf(),
            resource_cache: RwLock::new(HashMap::new()),
            string_cache: RwLock::new(HashMap::new()),
        })
    }

    /// Gets a localized string by its ID
    pub fn get_string(&self, id: &str) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
        tracing::debug!(
            "Getting string '{}' for locale '{}'",
            id,
            self.get_current_locale()?
        );
        let result = self.get_string_with_args(id, None);
        if let Err(ref e) = result {
            tracing::error!("Failed to get string '{}': {}", id, e);
        }
        result
    }

    /// Loads and caches a parsed FluentResource for the given locale
    fn load_resource_for_locale(
        &self,
        locale: &LanguageIdentifier,
    ) -> Result<Arc<FluentResource>, Box<dyn std::error::Error + Send + Sync>> {
        // Construct the path using the stored resource directory
        let expected_path = self.resource_dir.join(format!("{}/main.ftl", locale));

        // Try to open the file directly
        if let Err(e) = std::fs::File::open(&expected_path) {
            tracing::error!(
                "Direct file open failed: {} ({})",
                expected_path.display(),
                e
            );
            return Err(format!("Failed to open FTL file: {}", e).into());
        }

        // Load the FTL file directly instead of using ResourceManager
        let ftl_string = std::fs::read_to_string(&expected_path)
            .map_err(|e| format!("Failed to read FTL file: {}", e))?;

        // Parse the FTL content
        let resource = FluentResource::try_new(ftl_string)
            .map_err(|e| format!("Failed to parse FTL content: {:?}", e))?;

        tracing::debug!(
            "Loaded and cached parsed FluentResource for locale: {}",
            locale
        );
        Ok(Arc::new(resource))
    }

    /// Gets cached parsed FluentResource for the current locale, loading it if necessary
    fn get_cached_resource(
        &self,
    ) -> Result<Arc<FluentResource>, Box<dyn std::error::Error + Send + Sync>> {
        let locale = self
            .current_locale
            .read()
            .map_err(|e| format!("Lock error: {e}"))?;

        // Try to get from cache first
        {
            let cache = self
                .resource_cache
                .read()
                .map_err(|e| format!("Cache lock error: {e}"))?;
            if let Some(resource) = cache.get(&locale) {
                tracing::debug!("Using cached parsed FluentResource for locale: {}", locale);
                return Ok(resource.clone());
            }
        }

        // Not in cache, load and cache it
        let resource = self.load_resource_for_locale(&locale)?;

        // Store in cache
        {
            let mut cache = self
                .resource_cache
                .write()
                .map_err(|e| format!("Cache lock error: {e}"))?;
            cache.insert(locale.clone(), resource.clone());
            tracing::debug!("Cached parsed FluentResource for locale: {}", locale);
        }

        Ok(resource)
    }

    /// Gets cached string result, or formats it and caches the result
    fn get_cached_string(
        &self,
        id: &str,
        args: Option<&FluentArgs>,
    ) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
        let locale = self
            .current_locale
            .read()
            .map_err(|e| format!("Lock error: {e}"))?;

        // Only cache simple strings without arguments
        // For strings with arguments, we can't cache the final result since args may vary
        if args.is_none() {
            // Try to get from string cache first
            {
                let cache = self
                    .string_cache
                    .read()
                    .map_err(|e| format!("String cache lock error: {e}"))?;
                if let Some(locale_cache) = cache.get(&locale) {
                    if let Some(cached_string) = locale_cache.get(id) {
                        tracing::debug!(
                            "Using cached string result for '{}' in locale: {}",
                            id,
                            locale
                        );
                        return Ok(cached_string.clone());
                    }
                }
            }
        }

        // Not in cache or has arguments, format it using cached resource
        let resource = self.get_cached_resource()?;

        // Create a bundle for this request (not cached due to thread-safety issues)
        let mut bundle = FluentBundle::new(vec![locale.clone()]);
        bundle
            .add_resource(resource.as_ref())
            .map_err(|e| format!("Failed to add resource to bundle: {:?}", e))?;

        let message = bundle
            .get_message(id)
            .ok_or_else(|| format!("Message not found: {}", id))?;

        let pattern = message
            .value()
            .ok_or_else(|| format!("Message has no value: {}", id))?;

        // Format the message
        let mut errors = Vec::new();
        let result = bundle.format_pattern(pattern, args, &mut errors);

        if !errors.is_empty() {
            tracing::warn!("Localization errors for {}: {:?}", id, errors);
        }

        let result_string = result.into_owned();

        // Only cache simple strings without arguments
        // This prevents caching issues when the same message ID is used with different arguments
        if args.is_none() {
            let mut cache = self
                .string_cache
                .write()
                .map_err(|e| format!("String cache lock error: {e}"))?;
            let locale_cache = cache.entry(locale.clone()).or_insert_with(HashMap::new);
            locale_cache.insert(id.to_string(), result_string.clone());
            tracing::debug!("Cached string result for '{}' in locale: {}", id, locale);
        } else {
            tracing::debug!("Not caching string '{}' due to arguments", id);
        }

        Ok(result_string)
    }

    /// Gets a localized string by its ID with optional arguments
    pub fn get_string_with_args(
        &self,
        id: &str,
        args: Option<&FluentArgs>,
    ) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
        self.get_cached_string(id, args)
    }

    /// Sets the current locale
    pub fn set_locale(
        &self,
        locale: LanguageIdentifier,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        tracing::info!("Attempting to set locale to: {}", locale);
        tracing::info!("Available locales: {:?}", self.available_locales);

        // Validate that the locale is available
        if !self.available_locales.contains(&locale) {
            tracing::error!(
                "Locale {} is not available. Available locales: {:?}",
                locale,
                self.available_locales
            );
            return Err(format!("Locale {} is not available", locale).into());
        }

        let mut current = self
            .current_locale
            .write()
            .map_err(|e| format!("Lock error: {e}"))?;
        tracing::info!("Switching locale from {} to {}", *current, locale);
        *current = locale.clone();
        tracing::info!("Successfully set locale to: {}", locale);

        // Clear caches when locale changes since they are locale-specific
        let mut string_cache = self
            .string_cache
            .write()
            .map_err(|e| format!("String cache lock error: {e}"))?;
        string_cache.clear();
        tracing::debug!("String cache cleared due to locale change");

        Ok(())
    }

    /// Clears the parsed FluentResource cache (useful for development when FTL files change)
    pub fn clear_cache(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        let mut cache = self
            .resource_cache
            .write()
            .map_err(|e| format!("Cache lock error: {e}"))?;
        cache.clear();
        tracing::info!("Parsed FluentResource cache cleared");

        let mut string_cache = self
            .string_cache
            .write()
            .map_err(|e| format!("String cache lock error: {e}"))?;
        string_cache.clear();
        tracing::info!("String result cache cleared");

        Ok(())
    }

    /// Gets the current locale
    pub fn get_current_locale(
        &self,
    ) -> Result<LanguageIdentifier, Box<dyn std::error::Error + Send + Sync>> {
        let current = self
            .current_locale
            .read()
            .map_err(|e| format!("Lock error: {e}"))?;
        Ok(current.clone())
    }

    /// Gets all available locales
    pub fn get_available_locales(&self) -> &[LanguageIdentifier] {
        &self.available_locales
    }

    /// Gets the fallback locale
    pub fn get_fallback_locale(&self) -> &LanguageIdentifier {
        &self.fallback_locale
    }

    /// Gets cache statistics for monitoring performance
    pub fn get_cache_stats(&self) -> Result<CacheStats, Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let resource_cache = self
|
||||||
|
.resource_cache
|
||||||
|
.read()
|
||||||
|
.map_err(|e| format!("Cache lock error: {e}"))?;
|
||||||
|
let string_cache = self
|
||||||
|
.string_cache
|
||||||
|
.read()
|
||||||
|
.map_err(|e| format!("String cache lock error: {e}"))?;
|
||||||
|
|
||||||
|
let mut total_strings = 0;
|
||||||
|
for locale_cache in string_cache.values() {
|
||||||
|
total_strings += locale_cache.len();
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(CacheStats {
|
||||||
|
resource_cache_size: resource_cache.len(),
|
||||||
|
string_cache_size: total_strings,
|
||||||
|
cached_locales: resource_cache.keys().cloned().collect(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Limits the string cache size to prevent memory growth
|
||||||
|
pub fn limit_string_cache_size(
|
||||||
|
&self,
|
||||||
|
max_strings_per_locale: usize,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
let mut string_cache = self
|
||||||
|
.string_cache
|
||||||
|
.write()
|
||||||
|
.map_err(|e| format!("String cache lock error: {e}"))?;
|
||||||
|
|
||||||
|
for locale_cache in string_cache.values_mut() {
|
||||||
|
if locale_cache.len() > max_strings_per_locale {
|
||||||
|
// Remove oldest entries (simple approach: just clear and let it rebuild)
|
||||||
|
// In a more sophisticated implementation, you might use an LRU cache
|
||||||
|
locale_cache.clear();
|
||||||
|
tracing::debug!("Cleared string cache for locale due to size limit");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Negotiates the best locale from a list of preferred locales
|
||||||
|
pub fn negotiate_locale(&self, preferred: &[LanguageIdentifier]) -> LanguageIdentifier {
|
||||||
|
let available = self.available_locales.clone();
|
||||||
|
let negotiated = negotiate_languages(
|
||||||
|
preferred,
|
||||||
|
&available,
|
||||||
|
Some(&self.fallback_locale),
|
||||||
|
fluent_langneg::NegotiationStrategy::Filtering,
|
||||||
|
);
|
||||||
|
negotiated
|
||||||
|
.first()
|
||||||
|
.map_or(self.fallback_locale.clone(), |v| (*v).clone())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
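Taken together, the methods above give the manager a two-level cache: parsed FluentResources per locale, plus formatted strings for argument-free messages. A minimal usage sketch follows; the directory layout, message IDs, and crate-root imports are assumptions for illustration, not part of this diff.

use notedeck::{FluentArgs, LanguageIdentifier, LocalizationManager};

fn i18n_lookup_demo() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Assumed layout: assets/translations/<locale>/main.ftl (the tests below create the same shape).
    let dir = std::path::PathBuf::from("assets/translations");
    let manager = LocalizationManager::new(&dir)?;

    // Lookup without arguments: the formatted result is cached per locale after the first call.
    let label = manager.get_string_with_args("test_key", None)?;

    // Lookup with arguments: formatted on every call and never cached.
    let mut args = FluentArgs::new();
    args.set("name", "Alice");
    let greeting = manager.get_string_with_args("welcome_message", Some(&args))?;

    // Switching locale clears the formatted-string cache but keeps parsed resources.
    let locale: LanguageIdentifier = "en-XA".parse().expect("valid locale");
    manager.set_locale(locale)?;

    println!("{label} / {greeting}");
    Ok(())
}
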
/// Context for sharing localization across the application
#[derive(Clone)]
pub struct LocalizationContext {
    /// The localization manager
    manager: Arc<LocalizationManager>,
}

impl LocalizationContext {
    /// Creates a new LocalizationContext
    pub fn new(manager: Arc<LocalizationManager>) -> Self {
        let context = Self { manager };

        // Auto-switch to pseudolocale if environment variable is set
        if std::env::var("NOTEDECK_PSEUDOLOCALE").is_ok() {
            tracing::info!("NOTEDECK_PSEUDOLOCALE environment variable detected");
            if let Ok(pseudolocale) = "en-XA".parse::<LanguageIdentifier>() {
                tracing::info!("Attempting to switch to pseudolocale: {}", pseudolocale);
                if let Err(e) = context.set_locale(pseudolocale) {
                    tracing::warn!("Failed to switch to pseudolocale: {}", e);
                } else {
                    tracing::info!("Automatically switched to pseudolocale (en-XA)");
                }
            } else {
                tracing::error!("Failed to parse en-XA as LanguageIdentifier");
            }
        } else {
            tracing::info!("NOTEDECK_PSEUDOLOCALE environment variable not set");
        }

        context
    }

    /// Gets a localized string by its ID
    pub fn get_string(&self, id: &str) -> Option<String> {
        self.manager.get_string(id).ok()
    }

    /// Gets a localized string by its ID with optional arguments
    pub fn get_string_with_args(&self, id: &str, args: Option<&FluentArgs>) -> String {
        self.manager
            .get_string_with_args(id, args)
            .unwrap_or_else(|_| format!("[MISSING: {}]", id))
    }

    /// Sets the current locale
    pub fn set_locale(
        &self,
        locale: LanguageIdentifier,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        self.manager.set_locale(locale)
    }

    /// Gets the current locale
    pub fn get_current_locale(
        &self,
    ) -> Result<LanguageIdentifier, Box<dyn std::error::Error + Send + Sync>> {
        self.manager.get_current_locale()
    }

    /// Clears the resource cache (useful for development when FTL files change)
    pub fn clear_cache(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        self.manager.clear_cache()
    }

    /// Gets the underlying manager
    pub fn manager(&self) -> &Arc<LocalizationManager> {
        &self.manager
    }
}

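A short sketch of how the context wraps the manager and how missing keys surface; the path and key name are placeholders, not values defined by this diff.

use std::sync::Arc;
use notedeck::{LocalizationContext, LocalizationManager};

fn build_context() -> LocalizationContext {
    // Placeholder path; with NOTEDECK_PSEUDOLOCALE set, the context switches itself to en-XA here.
    let dir = std::path::PathBuf::from("assets/translations");
    let manager = Arc::new(LocalizationManager::new(&dir).expect("translations directory"));
    let context = LocalizationContext::new(manager);

    // Unknown IDs come back as None from get_string; the *_with_args variant
    // falls back to a visible "[MISSING: id]" marker instead.
    let _title = context.get_string("example_key").unwrap_or_else(|| "Example".to_string());
    context
}
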
/// Trait for objects that can be localized
pub trait Localizable {
    /// Gets a localized string by its ID
    fn get_localized_string(&self, id: &str) -> String;

    /// Gets a localized string by its ID with optional arguments
    fn get_localized_string_with_args(&self, id: &str, args: Option<&FluentArgs>) -> String;
}

impl Localizable for LocalizationContext {
    fn get_localized_string(&self, id: &str) -> String {
        self.get_string(id)
            .unwrap_or_else(|| format!("[MISSING: {}]", id))
    }

    fn get_localized_string_with_args(&self, id: &str, args: Option<&FluentArgs>) -> String {
        self.get_string_with_args(id, args)
    }
}

/// Statistics about cache usage
#[derive(Debug, Clone)]
pub struct CacheStats {
    pub resource_cache_size: usize,
    pub string_cache_size: usize,
    pub cached_locales: Vec<LanguageIdentifier>,
}

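CacheStats pairs with get_cache_stats and limit_string_cache_size for basic housekeeping. A sketch of how an application might use them; the 1,000-entry limit is an arbitrary example value, not a recommendation from this diff.

fn log_and_trim_i18n_caches(manager: &notedeck::LocalizationManager) {
    if let Ok(stats) = manager.get_cache_stats() {
        tracing::debug!(
            "i18n caches: {} parsed resources, {} formatted strings, locales: {:?}",
            stats.resource_cache_size,
            stats.string_cache_size,
            stats.cached_locales
        );
    }
    // Clear any per-locale string cache that grew beyond the chosen limit.
    let _ = manager.limit_string_cache_size(1000);
}
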
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_localization_manager_creation() {
        let temp_dir = std::env::temp_dir().join("notedeck_i18n_test");
        std::fs::create_dir_all(&temp_dir).unwrap();

        let manager = LocalizationManager::new(&temp_dir);
        assert!(manager.is_ok());

        // Cleanup
        std::fs::remove_dir_all(&temp_dir).unwrap();
    }

    #[test]
    fn test_locale_management() {
        let temp_dir = std::env::temp_dir().join("notedeck_i18n_test2");
        std::fs::create_dir_all(&temp_dir).unwrap();

        let manager = LocalizationManager::new(&temp_dir).unwrap();

        // Test default locale
        let current = manager.get_current_locale().unwrap();
        assert_eq!(current.to_string(), "en-US");

        // Test available locales
        let available = manager.get_available_locales();
        assert_eq!(available.len(), 1);
        assert_eq!(available[0].to_string(), "en-US");

        // Cleanup
        std::fs::remove_dir_all(&temp_dir).unwrap();
    }

    #[test]
    fn test_ftl_caching() {
        let temp_dir = std::env::temp_dir().join("notedeck_i18n_test3");
        std::fs::create_dir_all(&temp_dir).unwrap();

        // Create a test FTL file
        let en_us_dir = temp_dir.join("en-US");
        std::fs::create_dir_all(&en_us_dir).unwrap();
        let ftl_content = "test_key = Test Value\nanother_key = Another Value";
        std::fs::write(en_us_dir.join("main.ftl"), ftl_content).unwrap();

        let manager = LocalizationManager::new(&temp_dir).unwrap();

        // First call should load and cache the FTL content
        let result1 = manager.get_string("test_key");
        assert!(result1.is_ok());
        assert_eq!(result1.as_ref().unwrap(), "Test Value");

        // Second call should use cached FTL content
        let result2 = manager.get_string("test_key");
        assert!(result2.is_ok());
        assert_eq!(result2.unwrap(), "Test Value");

        // Test another key from the same FTL content
        let result3 = manager.get_string("another_key");
        assert!(result3.is_ok());
        assert_eq!(result3.unwrap(), "Another Value");

        // Cleanup
        std::fs::remove_dir_all(&temp_dir).unwrap();
    }

    #[test]
    fn test_cache_clearing() {
        let temp_dir = std::env::temp_dir().join("notedeck_i18n_test4");
        std::fs::create_dir_all(&temp_dir).unwrap();

        // Create a test FTL file
        let en_us_dir = temp_dir.join("en-US");
        std::fs::create_dir_all(&en_us_dir).unwrap();
        let ftl_content = "test_key = Test Value";
        std::fs::write(en_us_dir.join("main.ftl"), ftl_content).unwrap();

        let manager = LocalizationManager::new(&temp_dir).unwrap();

        // Load and cache the FTL content
        let result1 = manager.get_string("test_key");
        assert!(result1.is_ok());

        // Clear the cache
        let clear_result = manager.clear_cache();
        assert!(clear_result.is_ok());

        // Should still work after clearing cache (will reload)
        let result2 = manager.get_string("test_key");
        assert!(result2.is_ok());
        assert_eq!(result2.unwrap(), "Test Value");

        // Cleanup
        std::fs::remove_dir_all(&temp_dir).unwrap();
    }

    #[test]
    fn test_context_caching() {
        let temp_dir = std::env::temp_dir().join("notedeck_i18n_test5");
        std::fs::create_dir_all(&temp_dir).unwrap();

        // Create a test FTL file
        let en_us_dir = temp_dir.join("en-US");
        std::fs::create_dir_all(&en_us_dir).unwrap();
        let ftl_content = "test_key = Test Value";
        std::fs::write(en_us_dir.join("main.ftl"), ftl_content).unwrap();

        let manager = Arc::new(LocalizationManager::new(&temp_dir).unwrap());
        let context = LocalizationContext::new(manager);

        // Debug: check what the normalized key should be
        let normalized_key = crate::i18n::normalize_ftl_key("test_key", None);
        println!("Normalized key: '{}'", normalized_key);

        // First call should load and cache the FTL content
        let result1 = context.get_string("test_key");
        println!("First result: {:?}", result1);
        assert!(result1.is_some());
        assert_eq!(result1.unwrap(), "Test Value");

        // Second call should use cached FTL content
        let result2 = context.get_string("test_key");
        assert!(result2.is_some());
        assert_eq!(result2.unwrap(), "Test Value");

        // Test cache clearing through context
        let clear_result = context.clear_cache();
        assert!(clear_result.is_ok());

        // Should still work after clearing cache
        let result3 = context.get_string("test_key");
        assert!(result3.is_some());
        assert_eq!(result3.unwrap(), "Test Value");

        // Cleanup
        std::fs::remove_dir_all(&temp_dir).unwrap();
    }

    #[test]
    fn test_bundle_caching() {
        let temp_dir = std::env::temp_dir().join("notedeck_i18n_test6");
        std::fs::create_dir_all(&temp_dir).unwrap();

        // Create a test FTL file
        let en_us_dir = temp_dir.join("en-US");
        std::fs::create_dir_all(&en_us_dir).unwrap();
        let ftl_content = "test_key = Test Value\nanother_key = Another Value";
        std::fs::write(en_us_dir.join("main.ftl"), ftl_content).unwrap();

        let manager = LocalizationManager::new(&temp_dir).unwrap();

        // First call should create bundle and cache the resource
        let result1 = manager.get_string("test_key");
        assert!(result1.is_ok());
        assert_eq!(result1.unwrap(), "Test Value");

        // Second call should use cached resource but create new bundle
        let result2 = manager.get_string("another_key");
        assert!(result2.is_ok());
        assert_eq!(result2.unwrap(), "Another Value");

        // Check cache stats
        let stats = manager.get_cache_stats().unwrap();
        assert_eq!(stats.resource_cache_size, 1);
        assert_eq!(stats.string_cache_size, 2); // Both strings should be cached

        // Cleanup
        std::fs::remove_dir_all(&temp_dir).unwrap();
    }

    #[test]
    fn test_string_caching() {
        let temp_dir = std::env::temp_dir().join("notedeck_i18n_test7");
        std::fs::create_dir_all(&temp_dir).unwrap();

        // Create a test FTL file
        let en_us_dir = temp_dir.join("en-US");
        std::fs::create_dir_all(&en_us_dir).unwrap();
        let ftl_content = "test_key = Test Value";
        std::fs::write(en_us_dir.join("main.ftl"), ftl_content).unwrap();

        let manager = LocalizationManager::new(&temp_dir).unwrap();

        // First call should format and cache the string
        let result1 = manager.get_string("test_key");
        assert!(result1.is_ok());
        assert_eq!(result1.unwrap(), "Test Value");

        // Second call should use cached string
        let result2 = manager.get_string("test_key");
        assert!(result2.is_ok());
        assert_eq!(result2.unwrap(), "Test Value");

        // Check cache stats
        let stats = manager.get_cache_stats().unwrap();
        assert_eq!(stats.string_cache_size, 1);

        // Cleanup
        std::fs::remove_dir_all(&temp_dir).unwrap();
    }

    #[test]
    fn test_cache_clearing_on_locale_change() {
        let temp_dir = std::env::temp_dir().join("notedeck_i18n_test8");
        std::fs::create_dir_all(&temp_dir).unwrap();

        // Create test FTL files for two locales
        let en_us_dir = temp_dir.join("en-US");
        std::fs::create_dir_all(&en_us_dir).unwrap();
        std::fs::write(en_us_dir.join("main.ftl"), "test_key = Test Value").unwrap();

        let en_xa_dir = temp_dir.join("en-XA");
        std::fs::create_dir_all(&en_xa_dir).unwrap();
        std::fs::write(en_xa_dir.join("main.ftl"), "test_key = Test Value XA").unwrap();

        // Enable pseudolocale for this test
        std::env::set_var("NOTEDECK_PSEUDOLOCALE", "1");

        let manager = LocalizationManager::new(&temp_dir).unwrap();

        // Load some strings in en-US
        let result1 = manager.get_string("test_key");
        assert!(result1.is_ok());

        // Check that caches are populated
        let stats1 = manager.get_cache_stats().unwrap();
        assert!(stats1.resource_cache_size > 0);
        assert!(stats1.string_cache_size > 0);

        // Switch to en-XA
        let en_xa: LanguageIdentifier = "en-XA".parse().unwrap();
        manager.set_locale(en_xa).unwrap();

        // Check that string cache is cleared (resource cache remains for both locales)
        let stats2 = manager.get_cache_stats().unwrap();
        assert_eq!(stats2.string_cache_size, 0);

        // Cleanup
        std::env::remove_var("NOTEDECK_PSEUDOLOCALE");
        std::fs::remove_dir_all(&temp_dir).unwrap();
    }

    #[test]
    fn test_string_caching_with_arguments() {
        let temp_dir = std::env::temp_dir().join("notedeck_i18n_test9");
        std::fs::create_dir_all(&temp_dir).unwrap();

        // Create a test FTL file with a message that takes arguments
        let en_us_dir = temp_dir.join("en-US");
        std::fs::create_dir_all(&en_us_dir).unwrap();
        let ftl_content = "welcome_message = Welcome {$name}!";
        std::fs::write(en_us_dir.join("main.ftl"), ftl_content).unwrap();

        let manager = LocalizationManager::new(&temp_dir).unwrap();

        // First call with arguments should not be cached
        let mut args = fluent::FluentArgs::new();
        args.set("name", "Alice");
        let result1 = manager.get_string_with_args("welcome_message", Some(&args));
        assert!(result1.is_ok());
        // Note: Fluent may add bidirectional text control characters, so we check contains
        let result1_str = result1.unwrap();
        assert!(result1_str.contains("Alice"));

        // Check that it's not in the string cache
        let stats1 = manager.get_cache_stats().unwrap();
        assert_eq!(stats1.string_cache_size, 0);

        // Second call with different arguments should work correctly
        let mut args2 = fluent::FluentArgs::new();
        args2.set("name", "Bob");
        let result2 = manager.get_string_with_args("welcome_message", Some(&args2));
        assert!(result2.is_ok());
        let result2_str = result2.unwrap();
        assert!(result2_str.contains("Bob"));

        // Check that it's still not in the string cache
        let stats2 = manager.get_cache_stats().unwrap();
        assert_eq!(stats2.string_cache_size, 0);

        // Test a simple string without arguments - should be cached
        let ftl_content_simple = "simple_message = Hello World";
        std::fs::write(en_us_dir.join("main.ftl"), ftl_content_simple).unwrap();

        // Clear cache to start fresh
        manager.clear_cache().unwrap();

        let result3 = manager.get_string("simple_message");
        assert!(result3.is_ok());
        assert_eq!(result3.unwrap(), "Hello World");

        // Check that simple string is cached
        let stats3 = manager.get_cache_stats().unwrap();
        assert_eq!(stats3.string_cache_size, 1);

        // Cleanup
        std::fs::remove_dir_all(&temp_dir).unwrap();
    }
}
222 crates/notedeck/src/i18n/mod.rs Normal file
@@ -0,0 +1,222 @@
//! Internationalization (i18n) module for Notedeck
//!
//! This module provides localization support using fluent and fluent-resmgr.
//! It handles loading translation files, managing locales, and providing
//! localized strings throughout the application.

pub mod manager;

pub use manager::CacheStats;
pub use manager::LocalizationContext;
pub use manager::LocalizationManager;

/// Re-export commonly used types for convenience
pub use fluent::FluentArgs;
pub use fluent::FluentValue;
pub use unic_langid::LanguageIdentifier;

use md5;
use once_cell::sync::OnceCell;
use regex::Regex;
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::Mutex;
use tracing::info;

/// Global localization manager for easy access from anywhere
static GLOBAL_I18N: OnceCell<Arc<LocalizationManager>> = OnceCell::new();

/// Cache for normalized FTL keys to avoid repeated normalization
static NORMALIZED_KEY_CACHE: OnceCell<Mutex<HashMap<String, String>>> = OnceCell::new();

/// Initialize the global localization context
pub fn init_global_i18n(context: LocalizationContext) {
    info!("Initializing global i18n context");
    let _ = GLOBAL_I18N.set(context.manager().clone());

    // Initialize the normalized key cache
    let _ = NORMALIZED_KEY_CACHE.set(Mutex::new(HashMap::new()));

    info!("Global i18n context initialized successfully");
}

/// Get the global localization manager
pub fn get_global_i18n() -> Option<Arc<LocalizationManager>> {
    GLOBAL_I18N.get().cloned()
}

fn simple_hash(s: &str) -> String {
    let digest = md5::compute(s.as_bytes());
    // Take the first 2 bytes and convert to 4 hex characters
    format!("{:02x}{:02x}", digest[0], digest[1])
}

pub fn normalize_ftl_key(key: &str, comment: Option<&str>) -> String {
    // Try to get from cache first
    let cache_key = if let Some(comment) = comment {
        format!("{}:{}", key, comment)
    } else {
        key.to_string()
    };

    if let Some(cache) = NORMALIZED_KEY_CACHE.get() {
        if let Ok(cache) = cache.lock() {
            if let Some(cached) = cache.get(&cache_key) {
                return cached.clone();
            }
        }
    }

    // Replace each invalid character with exactly one underscore
    // This matches the behavior of the Python extraction script
    let re = Regex::new(r"[^a-zA-Z0-9_-]").unwrap();
    let mut result = re.replace_all(key, "_").to_string();

    // Remove leading/trailing underscores
    result = result.trim_matches('_').to_string();

    // Ensure the key starts with a letter (Fluent requirement)
    if !(result.len() > 0 && result.chars().next().unwrap().is_ascii_alphabetic()) {
        result = format!("k_{}", result);
    }

    // If we have a comment, append a hash of it to reduce collisions
    if let Some(comment) = comment {
        let hash_str = format!("_{}", simple_hash(comment));
        result.push_str(&hash_str);
    }

    // Cache the result
    if let Some(cache) = NORMALIZED_KEY_CACHE.get() {
        if let Ok(mut cache) = cache.lock() {
            cache.insert(cache_key, result.clone());
        }
    }

    tracing::debug!(
        "normalize_ftl_key: original='{}', comment='{:?}', final='{}'",
        key,
        comment,
        result
    );
    result
}

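A quick illustration of the key shape normalize_ftl_key produces; the input string, comment, and hex suffix below are made-up examples (the real suffix is the first two MD5 bytes of the comment):

fn normalized_key_demo() {
    // Hypothetical source string and translator comment.
    let key = notedeck::i18n::normalize_ftl_key("Don't have an account?", Some("Login screen prompt"));
    // Invalid characters collapse to single underscores, leading/trailing underscores are trimmed,
    // and a 4-hex-character comment hash is appended: something of the form "Don_t_have_an_account_ab12".
    println!("{key}");
}
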
/// Macro for getting localized strings with format-like syntax
///
/// Syntax: tr!("message", comment)
///         tr!("message with {param}", comment, param="value")
///         tr!("message with {first} and {second}", comment, first="value1", second="value2")
///
/// The first argument is the source message (like format!).
/// The second argument is always the comment to provide context for translators.
/// If `{name}` placeholders are found, there must be corresponding named arguments after the comment.
/// All placeholders must be named and start with a letter (a-zA-Z).
#[macro_export]
macro_rules! tr {
    // Simple case: just message and comment
    ($message:expr, $comment:expr) => {
        {
            let norm_key = $crate::i18n::normalize_ftl_key($message, Some($comment));
            if let Some(i18n) = $crate::i18n::get_global_i18n() {
                let result = i18n.get_string(&norm_key);
                match result {
                    Ok(ref s) if s != $message => s.clone(),
                    _ => {
                        tracing::warn!("FALLBACK: Using key '{}' as string (not found in FTL)", $message);
                        $message.to_string()
                    }
                }
            } else {
                tracing::warn!("FALLBACK: Global i18n not initialized, using key '{}' as string", $message);
                $message.to_string()
            }
        }
    };

    // Case with named parameters: message, comment, param=value, ...
    ($message:expr, $comment:expr, $($param:ident = $value:expr),*) => {
        {
            let norm_key = $crate::i18n::normalize_ftl_key($message, Some($comment));
            if let Some(i18n) = $crate::i18n::get_global_i18n() {
                let mut args = $crate::i18n::FluentArgs::new();
                $(
                    args.set(stringify!($param), $value);
                )*
                match i18n.get_string_with_args(&norm_key, Some(&args)) {
                    Ok(s) => s,
                    Err(_) => {
                        // Fallback: replace placeholders with values
                        let mut result = $message.to_string();
                        $(
                            result = result.replace(&format!("{{{}}}", stringify!($param)), &$value.to_string());
                        )*
                        result
                    }
                }
            } else {
                // Fallback: replace placeholders with values
                let mut result = $message.to_string();
                $(
                    result = result.replace(&format!("{{{}}}", stringify!($param)), &$value.to_string());
                )*
                result
            }
        }
    };
}

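For reference, two hypothetical call sites for the macro above (the strings, comments, and `use notedeck::tr;` import are placeholders; when the key is missing from the FTL bundle or the global manager is not initialized, the English source string is returned as-is):

use notedeck::tr;

fn example_labels() -> (String, String) {
    // Simple form: source message plus translator comment.
    let send_label = tr!("Send", "Label for the button that publishes a note");
    // Parameterized form: named arguments follow the comment and fill {name}.
    let greeting = tr!(
        "Welcome {name}!",
        "Greeting shown on the home timeline",
        name = "Alice"
    );
    (send_label, greeting)
}
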
/// Macro for getting localized pluralized strings with count and named arguments
///
/// Syntax: tr_plural!(one, other, comment, count, param1=..., param2=...)
/// - one: Message for the singular ("one") plural rule
/// - other: Message for the "other" plural rule
/// - comment: Context for translators
/// - count: The count value
/// - named arguments: Any additional named parameters for interpolation
#[macro_export]
macro_rules! tr_plural {
    // With named parameters
    ($one:expr, $other:expr, $comment:expr, $count:expr, $($param:ident = $value:expr),*) => {{
        let norm_key = $crate::i18n::normalize_ftl_key($other, Some($comment));
        if let Some(i18n) = $crate::i18n::get_global_i18n() {
            let mut args = $crate::i18n::FluentArgs::new();
            args.set("count", $count);
            $(args.set(stringify!($param), $value);)*
            match i18n.get_string_with_args(&norm_key, Some(&args)) {
                Ok(s) => s,
                Err(_) => {
                    // Fallback: use simple pluralization
                    if $count == 1 {
                        let mut result = $one.to_string();
                        $(result = result.replace(&format!("{{{}}}", stringify!($param)), &$value.to_string());)*
                        result = result.replace("{count}", &$count.to_string());
                        result
                    } else {
                        let mut result = $other.to_string();
                        $(result = result.replace(&format!("{{{}}}", stringify!($param)), &$value.to_string());)*
                        result = result.replace("{count}", &$count.to_string());
                        result
                    }
                }
            }
        } else {
            // Fallback: use simple pluralization
            if $count == 1 {
                let mut result = $one.to_string();
                $(result = result.replace(&format!("{{{}}}", stringify!($param)), &$value.to_string());)*
                result = result.replace("{count}", &$count.to_string());
                result
            } else {
                let mut result = $other.to_string();
                $(result = result.replace(&format!("{{{}}}", stringify!($param)), &$value.to_string());)*
                result = result.replace("{count}", &$count.to_string());
                result
            }
        }
    }};
    // Without named parameters
    ($one:expr, $other:expr, $comment:expr, $count:expr) => {{
        $crate::tr_plural!($one, $other, $comment, $count, )
    }};
}
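A hypothetical call site for tr_plural!; the message strings and comment are placeholders. The count is always forwarded to Fluent as $count, and the "other" form doubles as the extraction key.

use notedeck::tr_plural;

fn reply_count_label(count: u32) -> String {
    tr_plural!(
        "{count} reply",                              // singular ("one") form
        "{count} replies",                            // "other" form, also used as the FTL key
        "Label showing how many replies a note has",  // translator comment
        count
    )
}
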
@@ -9,6 +9,7 @@ mod error;
 pub mod filter;
 pub mod fonts;
 mod frame_history;
+pub mod i18n;
 mod imgcache;
 mod job_pool;
 mod muted;
@@ -44,6 +45,11 @@ pub use context::AppContext;
 pub use error::{show_one_error_message, Error, FilterError, ZapError};
 pub use filter::{FilterState, FilterStates, UnifiedSubscription};
 pub use fonts::NamedFontFamily;
+pub use i18n::manager::Localizable;
+pub use i18n::{
+    CacheStats, FluentArgs, FluentValue, LanguageIdentifier, LocalizationContext,
+    LocalizationManager,
+};
 pub use imgcache::{
     Animation, GifState, GifStateMap, ImageFrame, Images, LoadableTextureState, MediaCache,
     MediaCacheType, TextureFrame, TextureState, TexturedImage, TexturesCache,
@@ -83,3 +89,5 @@ pub use enostr;
 pub use nostrdb;

 pub use zaps::Zaps;
+
+pub use crate::i18n::{get_global_i18n, init_global_i18n};
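With these re-exports in place, application startup can build the manager once and register it globally so tr! and tr_plural! can resolve strings. A hedged sketch; the translations path is an assumption based on the layout the tests use, not something this diff prescribes.

use std::sync::Arc;
use notedeck::{init_global_i18n, LocalizationContext, LocalizationManager};

fn setup_i18n() {
    // Assumed layout: assets/translations/<locale>/main.ftl.
    let dir = std::path::PathBuf::from("assets/translations");
    let manager = Arc::new(LocalizationManager::new(&dir).expect("load translations"));
    // Registering the context makes the tr!/tr_plural! macros resolve through the global manager.
    init_global_i18n(LocalizationContext::new(manager));
}
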
595 scripts/export_source_strings.py Executable file
@@ -0,0 +1,595 @@
#!/usr/bin/env python3
"""
Export US English (en-US) strings defined in tr! and tr_plural! macros in Rust code
by generating a main.ftl file that can be used for translating into other languages.

This script also creates a pseudolocalized English (en-XA) main.ftl file with accented characters,
so that developers can easily detect which strings have been internationalized without needing
actual translations into a non-English language.
"""

import os
import re
import argparse
from pathlib import Path
from typing import Set, Dict, List, Tuple
import json
import collections
import hashlib


def find_rust_files(project_root: Path) -> List[Path]:
    """Find all Rust files in the project."""
    rust_files = []
    for root, dirs, files in os.walk(project_root):
        # Skip irrelevant directories
        dirs[:] = [d for d in dirs if d not in ['target', '.git', '.cargo']]

        for file in files:
            # Find only Rust source files
            if file.endswith('.rs'):
                rust_files.append(Path(root) / file)

    return rust_files


def strip_rust_comments(code: str) -> str:
    """Remove // line comments, /* ... */ block comments, and doc comments (///, //!) from Rust code."""
    # Remove block comments first
    code = re.sub(r'/\*.*?\*/', '', code, flags=re.DOTALL)
    # Remove line comments
    code = re.sub(r'//.*', '', code)
    # Remove doc comments (/// and //! at start of line)
    code = re.sub(r'^\s*///.*$', '', code, flags=re.MULTILINE)
    code = re.sub(r'^\s*//!.*$', '', code, flags=re.MULTILINE)
    return code


def extract_tr_macros_with_lines(content: str, file_path: str) -> list:
    """Extract tr! macro calls from Rust code with comments and line numbers. Handles multi-line macros."""
    matches = []
    # Strip comments before processing
    content = strip_rust_comments(content)
    # Search the entire content for tr! macro calls (multi-line aware)
    for macro_content in extract_macro_calls(content, 'tr!'):
        args = parse_macro_arguments(macro_content)
        if len(args) >= 2:  # Must have at least message and comment
            message = args[0].strip()
            comment = args[1].strip()  # Second argument is always the comment
            # Validate placeholders
            if not validate_placeholders(message, file_path):
                continue
            if not any(skip in message.lower() for skip in [
                    '/', '\\', '.ftl', '.rs', 'http', 'https', 'www', '@',
                    'crates/', 'src/', 'target/', 'build.rs']):
                # Find the line number where this macro starts
                macro_start = f'tr!({macro_content}'
                idx = content.find(macro_start)
                line_num = content[:idx].count('\n') + 1 if idx != -1 else 1
                matches.append((message, comment, line_num, file_path))
    return matches


def extract_tr_plural_macros_with_lines(content: str, file_path: str) -> list:
    """Extract tr_plural! macro calls from Rust code with new signature and correct keying, skipping macro definitions and doc comments."""
    matches = []
    # Skip macro definitions
    if 'macro_rules! tr_plural' in content or file_path.endswith('i18n/mod.rs'):
        return matches
    for idx, macro_content in enumerate(extract_macro_calls(content, 'tr_plural!')):
        args = parse_macro_arguments(macro_content)
        if len(args) >= 4:
            one = args[0].strip()
            other = args[1].strip()
            comment = args[2].strip()
            key = other
            if key and not key.startswith('//') and not key.startswith('$'):
                matches.append((key, comment, idx + 1, file_path))
    return matches


def parse_macro_arguments(content: str) -> List[str]:
    """Parse macro arguments, handling quoted strings, param = value pairs, commas, and inline comments."""
    # Remove all // comments
    content = re.sub(r'//.*', '', content)
    # Collapse all whitespace/newlines to a single space
    content = re.sub(r'\s+', ' ', content.strip())
    args = []
    i = 0
    n = len(content)
    while i < n:
        # Skip whitespace
        while i < n and content[i].isspace():
            i += 1
        if i >= n:
            break
        # Handle quoted strings
        if content[i] in ['"', "'"]:
            quote_char = content[i]
            i += 1
            arg_start = i
            while i < n:
                if content[i] == '\\' and i + 1 < n:
                    i += 2
                elif content[i] == quote_char:
                    break
                else:
                    i += 1
            arg = content[arg_start:i]
            args.append(arg)
            i += 1  # Skip closing quote
        else:
            arg_start = i
            paren_count = 0
            brace_count = 0
            while i < n:
                char = content[i]
                if char == '(':
                    paren_count += 1
                elif char == ')':
                    paren_count -= 1
                elif char == '{':
                    brace_count += 1
                elif char == '}':
                    brace_count -= 1
                elif char == ',' and paren_count == 0 and brace_count == 0:
                    break
                i += 1
            arg = content[arg_start:i].strip()
            if arg:
                args.append(arg)
        # Skip the comma if we found one
        if i < n and content[i] == ',':
            i += 1
    return args


def extract_macro_calls(content: str, macro_name: str):
    """Extract all macro calls of the given macro_name from the entire content, handling parentheses inside quoted strings and multi-line macros."""
    calls = []
    idx = 0
    macro_start = f'{macro_name}('
    content_len = len(content)
    while idx < content_len:
        start = content.find(macro_start, idx)
        if start == -1:
            break
        i = start + len(macro_start)
        paren_count = 1  # Start after the initial '('
        in_quote = False
        quote_char = ''
        macro_content = ''
        while i < content_len:
            c = content[i]
            if in_quote:
                macro_content += c
                if c == quote_char and (i == 0 or content[i-1] != '\\'):
                    in_quote = False
            else:
                if c in ('"', "'"):
                    in_quote = True
                    quote_char = c
                    macro_content += c
                elif c == '(':
                    paren_count += 1
                    macro_content += c
                elif c == ')':
                    paren_count -= 1
                    if paren_count == 0:
                        break
                    else:
                        macro_content += c
                else:
                    macro_content += c
            i += 1
        # Only add if we found a closing parenthesis
        if i < content_len and content[i] == ')':
            calls.append(macro_content)
            idx = i + 1
        else:
            # Malformed macro, skip past this occurrence
            idx = start + len(macro_start)
    return calls


def validate_placeholders(message: str, file_path: str = "") -> bool:
    """Validate that all placeholders in a message are named and start with a letter."""
    import re

    # Find all placeholders in the message
    placeholder_pattern = r'\{([^}]*)\}'
    placeholders = re.findall(placeholder_pattern, message)

    valid = True
    for placeholder in placeholders:
        if not placeholder.strip():
            print(f"[VALIDATE] Warning: Empty placeholder {{}} found in message: '{message[:100]}...' {file_path}")
            valid = False
        elif not placeholder[0].isalpha():
            print(f"[VALIDATE] Warning: Placeholder '{{{placeholder}}}' does not start with a letter in message: '{message[:100]}...' {file_path}")
            valid = False
    if not valid:
        print(f"[VALIDATE] Message rejected: '{message}'")
    return valid


def extract_tr_macros(content: str) -> List[Tuple[str, str]]:
    """Extract tr! macro calls from Rust code with comments."""
    filtered_matches = []
    # Strip comments before processing
    content = strip_rust_comments(content)
    # Process the entire content instead of line by line to handle multi-line macros
    for macro_content in extract_macro_calls(content, 'tr!'):
        args = parse_macro_arguments(macro_content)
        if len(args) >= 2:  # Must have at least message and comment
            message = args[0].strip()
            comment = args[1].strip()  # Second argument is always the comment
            # Debug output for identification strings
            if "identification" in comment.lower():
                print(f"[DEBUG] Found identification tr! macro: message='{message}', comment='{comment}', args={args}")
                norm_key = normalize_key(message, comment)
                print(f"[DEBUG] Normalized key: '{norm_key}'")
            # Validate placeholders
            if not validate_placeholders(message):
                continue
            # More specific filtering logic
            should_skip = False
            for skip in ['/', '.ftl', '.rs', 'http', 'https', 'www', 'crates/', 'src/', 'target/', 'build.rs']:
                if skip in message.lower():
                    should_skip = True
                    break
            # Special handling for @ - only skip if it looks like an actual email address
            if '@' in message and (
                # Skip if it's a short string that looks like an email
                len(message) < 50 or
                # Skip if it contains common email patterns
                any(pattern in message.lower() for pattern in ['@gmail.com', '@yahoo.com', '@hotmail.com', '@outlook.com'])
            ):
                should_skip = True
            if not should_skip:
                # Store as (message, comment) tuple to preserve all combinations
                filtered_matches.append((message, comment))
    return filtered_matches


def extract_tr_plural_macros(content: str, file_path: str = "") -> Dict[str, dict]:
    """Extract tr_plural! macro calls from Rust code with new signature, skipping macro definitions and doc comments."""
    filtered_matches = {}
    # Skip macro definitions
    if 'macro_rules! tr_plural' in content or file_path.endswith('i18n/mod.rs'):
        print(f"[DEBUG] Skipping macro definitions in {file_path}")
        return filtered_matches
    for macro_content in extract_macro_calls(content, 'tr_plural!'):
        print(f"[DEBUG] Found tr_plural! macro in {file_path}: {macro_content}")
        args = parse_macro_arguments(macro_content)
        print(f"[DEBUG] Parsed args: {args}")
        if len(args) >= 4:
            one = args[0].strip()
            other = args[1].strip()
            comment = args[2].strip()
            key = other
            if key and not key.startswith('//') and not key.startswith('$'):
                print(f"[DEBUG] Adding plural key '{key}' from {file_path}")
                filtered_matches[key] = {
                    'one': one,
                    'other': other,
                    'comment': comment
                }
    return filtered_matches


def escape_rust_placeholders(text: str) -> str:
    """Convert Rust-style placeholders to Fluent-style placeholders"""
    # Unescape double quotes first
    text = text.replace('\\"', '"')
    # Convert Rust placeholders to Fluent placeholders
    return re.sub(r'\{([a-zA-Z][a-zA-Z0-9_]*)\}', r'{$\1}', text)


def simple_hash(s: str) -> str:
    """Simple hash function using MD5 - matches Rust implementation, 4 hex chars"""
    return hashlib.md5(s.encode('utf-8')).hexdigest()[:4]


def normalize_key(message, comment=None):
    """Normalize a message to create a consistent key - matches Rust normalize_ftl_key function"""
    # Remove quotes and normalize
    key = message.strip('"\'')
    # Unescape double quotes
    key = key.replace('\\"', '"')
    # Replace each invalid character with exactly one underscore (allow hyphens and underscores)
    key = re.sub(r'[^a-zA-Z0-9_-]', '_', key)
    # Remove leading/trailing underscores
    key = key.strip('_')
    # Add 'k_' prefix if the result doesn't start with a letter (Fluent requirement)
    if not (key and key[0].isalpha()):
        key = "k_" + key

    # If we have a comment, append a hash of it to reduce collisions
    if comment:
        # Create a hash of the comment and append it to the key
        hash_str = f"_{simple_hash(comment)}"
        key += hash_str

    return key


def pseudolocalize(text: str) -> str:
    """Convert English text to pseudolocalized text for testing."""
    # Common pseudolocalization patterns
    replacements = {
        'a': 'à', 'e': 'é', 'i': 'í', 'o': 'ó', 'u': 'ú',
        'A': 'À', 'E': 'É', 'I': 'Í', 'O': 'Ó', 'U': 'Ú',
        'n': 'ñ', 'N': 'Ñ', 'c': 'ç', 'C': 'Ç'
    }

    # First, protect Fluent placeables from pseudolocalization
    placeable_pattern = r'\{ *\$[a-zA-Z][a-zA-Z0-9_]* *\}'
    placeables = re.findall(placeable_pattern, text)

    # Replace placeables with unique placeholders that won't be modified
    protected_text = text
    for i, placeable in enumerate(placeables):
        placeholder = f"<<PLACEABLE_{i}>>"
        protected_text = protected_text.replace(placeable, placeholder, 1)

    # Apply character replacements, skipping <<PLACEABLE_n>>
    result = ''
    i = 0
    while i < len(protected_text):
        if protected_text.startswith('<<PLACEABLE_', i):
            end = protected_text.find('>>', i)
            if end != -1:
                result += protected_text[i:end+2]
                i = end + 2
                continue
        char = protected_text[i]
        result += replacements.get(char, char)
        i += 1

    # Restore placeables
    for i, placeable in enumerate(placeables):
        placeholder = f"<<PLACEABLE_{i}>>"
        result = result.replace(placeholder, placeable)

    # Wrap pseudolocalized string with square brackets so that it can be distinguished from other strings
    return f'{{"["}}{result}{{"]"}}'


def generate_ftl_content(tr_strings: Dict[str, str],
                         plural_strings: Dict[str, dict],
                         tr_occurrences: Dict[Tuple[str, str], list],
                         plural_occurrences: Dict[Tuple[str, str], list],
                         pseudolocalize_content: bool = False) -> str:
    """Generate FTL file content from extracted strings with comments."""

    lines = [
        "# Main translation file for Notedeck",
        "# This file contains common UI strings used throughout the application",
        "# Auto-generated by extract_i18n.py - DO NOT EDIT MANUALLY",
        "",
    ]

    # Sort strings for consistent output
    sorted_tr = sorted(tr_strings.items(), key=lambda item: item[0].lower())
    sorted_plural = sorted(plural_strings.items(), key=lambda item: item[0].lower())

    # Add regular tr! strings
    if sorted_tr:
        lines.append("# Regular strings")
        for norm_key, (original_message, comment) in sorted_tr:
            lines.append("")
            # Write the comment
            if comment:
                lines.append(f"# {comment}")
            # Apply pseudolocalization if requested
            value = escape_rust_placeholders(original_message)
            value = pseudolocalize(value) if pseudolocalize_content else value
            lines.append(f"{norm_key} = {value}")
        lines.append("")

    # Add pluralized strings
    if sorted_plural:
        lines.append("# Pluralized strings")
        for key, data in sorted_plural:
            lines.append("")

            one = data['one']
            other = data['other']
            comment = data['comment']
            # Write comment
            if comment:
                lines.append(f"# {comment}")
            norm_key = normalize_key(key, comment)
            one_val = escape_rust_placeholders(one)
            other_val = escape_rust_placeholders(other)
            if pseudolocalize_content:
                one_val = pseudolocalize(one_val)
                other_val = pseudolocalize(other_val)
            lines.append(f'{norm_key} =')
            lines.append(f' {{ $count ->')
            lines.append(f' [one] {one_val}')
            lines.append(f' *[other] {other_val}')
            lines.append(f' }}')
            lines.append("")

    return "\n".join(lines)


def read_existing_ftl(ftl_path: Path) -> Dict[str, str]:
    """Read existing FTL file to preserve comments and custom translations."""
    if not ftl_path.exists():
        return {}

    existing_translations = {}
    with open(ftl_path, 'r', encoding='utf-8') as f:
        content = f.read()

    # Extract key-value pairs
    pattern = r'^([^#\s][^=]*?)\s*=\s*(.+)$'
    for line in content.split('\n'):
        match = re.match(pattern, line.strip())
        if match:
            key = match.group(1).strip()
            value = match.group(2).strip()
            # For existing FTL files, we need to handle keys that may have hash suffixes
            # Strip the hash suffix if present (8 hex characters after underscore)
            original_key = re.sub(r'_[0-9a-f]{8}$', '', key)
            norm_key = normalize_key(original_key)
            existing_translations[norm_key] = value

    return existing_translations


def main():
    parser = argparse.ArgumentParser(description='Extract i18n macros and generate FTL file')
    parser.add_argument('--project-root', type=str, default='.',
                        help='Project root directory (default: current directory)')
    parser.add_argument('--dry-run', action='store_true',
                        help='Show what would be generated without writing to file')
    parser.add_argument('--fail-on-collisions', action='store_true',
                        help='Exit with error if key collisions are detected')

    args = parser.parse_args()

    project_root = Path(args.project_root)

    print(f"Scanning Rust files in {project_root}...")

    # Find all Rust files
    rust_files = find_rust_files(project_root)
    print(f"Found {len(rust_files)} Rust files")

    # Extract strings from all files
    all_tr_strings = {}
    all_plural_strings = {}

    # Track normalized keys to detect actual key collisions
    all_tr_normalized_keys = {}
    all_plural_normalized_keys = {}

    # Track collisions
    tr_collisions = {}
    plural_collisions = {}

    # Track all occurrences for intra-file collision detection
    tr_occurrences = collections.defaultdict(list)
    plural_occurrences = collections.defaultdict(list)

    for rust_file in rust_files:
        try:
            with open(rust_file, 'r', encoding='utf-8') as f:
                content = f.read()

            # For intra-file collision detection
            tr_lines = extract_tr_macros_with_lines(content, str(rust_file))
            for key, comment, line, file_path in tr_lines:
                tr_occurrences[(file_path, key)].append((comment, line))
            plural_lines = extract_tr_plural_macros_with_lines(content, str(rust_file))
            for key, comment, line, file_path in plural_lines:
                plural_occurrences[(file_path, key)].append((comment, line))

            tr_strings = extract_tr_macros(content)
            plural_strings = extract_tr_plural_macros(content, str(rust_file))

            if tr_strings or plural_strings:
                print(f" {rust_file}: {len(tr_strings)} tr!, {len(plural_strings)} tr_plural!")

            # Check for collisions in tr! strings using normalized keys
            for message, comment in tr_strings:
                norm_key = normalize_key(message, comment)
                if norm_key in all_tr_normalized_keys:
                    # This is a real key collision (same normalized key)
                    if norm_key not in tr_collisions:
                        tr_collisions[norm_key] = []
                    tr_collisions[norm_key].append((rust_file, all_tr_normalized_keys[norm_key]))
                    tr_collisions[norm_key].append((rust_file, comment))
                # Store by normalized key to preserve all unique combinations
                all_tr_strings[norm_key] = (message, comment)
                all_tr_normalized_keys[norm_key] = comment

            # Check for collisions in plural strings using normalized keys
            for key, data in plural_strings.items():
                comment = data['comment']
                norm_key = normalize_key(key, comment)
                if norm_key in all_plural_normalized_keys:
                    # This is a real key collision (same normalized key)
                    if norm_key not in plural_collisions:
                        plural_collisions[norm_key] = []
                    plural_collisions[norm_key].append((rust_file, all_plural_normalized_keys[norm_key]))
                    plural_collisions[norm_key].append((rust_file, data))
                all_plural_strings[key] = data
                all_plural_normalized_keys[norm_key] = data

        except Exception as e:
            print(f"Error reading {rust_file}: {e}")

    # Intra-file collision detection
    has_intra_file_collisions = False
    for (file_path, key), occurrences in tr_occurrences.items():
        comments = set(c for c, _ in occurrences)
        if len(occurrences) > 1 and len(comments) > 1:
            has_intra_file_collisions = True
            print(f"\n⚠️ Intra-file key collision in {file_path} for '{key}':")
            for comment, line in occurrences:
                comment_text = f" (comment: '{comment}')" if comment else " (no comment)"
                print(f" Line {line}{comment_text}")
    for (file_path, key), occurrences in plural_occurrences.items():
        comments = set(c for c, _ in occurrences)
        if len(occurrences) > 1 and len(comments) > 1:
            has_intra_file_collisions = True
            print(f"\n⚠️ Intra-file key collision in {file_path} for '{key}':")
            for comment, line in occurrences:
                comment_text = f" (comment: '{comment}')" if comment else " (no comment)"
                print(f" Line {line}{comment_text}")
    if has_intra_file_collisions and args.fail_on_collisions:
        print(f"❌ Exiting due to intra-file key collisions (--fail-on-collisions flag)")
        exit(1)

    # Report collisions
    has_collisions = False

    if tr_collisions:
        has_collisions = True
        print(f"\n⚠️ Key collisions detected in tr! strings:")
        for key, collisions in tr_collisions.items():
            print(f" '{key}':")
            for file_path, comment in collisions:
                comment_text = f" (comment: '{comment}')" if comment else " (no comment)"
                print(f" {file_path}{comment_text}")

    if plural_collisions:
        has_collisions = True
        print(f"\n⚠️ Key collisions detected in tr_plural! strings:")
        for key, collisions in plural_collisions.items():
            print(f" '{key}':")
            for file_path, comment in collisions:
                comment_text = f" (comment: '{comment}')" if comment else " (no comment)"
                print(f" {file_path}{comment_text}")

    if has_collisions:
        print(f"\n💡 Collision resolution: The last occurrence of each key will be used.")
        if args.fail_on_collisions:
            print(f"❌ Exiting due to key collisions (--fail-on-collisions flag)")
            exit(1)

    print(f"\nExtracted strings:")
    print(f" Regular strings: {len(all_tr_strings)}")
    print(f" Plural strings: {len(all_plural_strings)}")

    # Debug: print all keys in all_tr_strings
    print("[DEBUG] All tr! keys:")
    for k in all_tr_strings.keys():
        print(f" {k}")

    # Generate FTL content for both locales
    locales = ['en-US', 'en-XA']

    for locale in locales:
        pseudolocalize_content = (locale == 'en-XA')
        ftl_content = generate_ftl_content(all_tr_strings, all_plural_strings, tr_occurrences, plural_occurrences, pseudolocalize_content)
        output_path = Path(f'assets/translations/{locale}/main.ftl')

        if args.dry_run:
            print(f"\n--- Generated FTL content for {locale} ---")
            print(ftl_content)
            print(f"--- End of content for {locale} ---")
        else:
            # Ensure output directory exists
            output_path.parent.mkdir(parents=True, exist_ok=True)

            # Write to file
            with open(output_path, 'w', encoding='utf-8') as f:
                f.write(ftl_content)

            print(f"\nGenerated FTL file: {output_path}")

    if not args.dry_run:
        print(f"\nTotal strings: {len(all_tr_strings) + len(all_plural_strings)}")


if __name__ == '__main__':
    main()