Bug 1840044 - Update to Glean 53.1.0, UniFFI 0.24.1 and latest application-services. r=TravisLong,nika,markh,supply-chain-reviewers

Update:
  - Glean to v53.1.0
  - UniFFI to v0.24.1
  - application-services to a recent nightly that uses the above
    versions

- Updated `rusqlite` in toolkit/library/rust/shared/Cargo.toml
- Updated `uniffi-bindgen-gecko-js` to work with the new UniFFI. Also
  updated its askama version.
- Vetted new cargo dependencies

Ran `mach uniffi generate` to regenerate the code.
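
The rough sequence, for reference. Only `mach uniffi generate` is named
in this commit; the other commands below are the usual ones for this
kind of vendored-crate bump and are assumed here, not recorded in the
commit:

    # Bump the uniffi version and application-services rev in Cargo.toml, then:
    ./mach vendor rust       # re-vendor third_party/rust and update Cargo.lock
    ./mach cargo vet         # check that the new/updated crates are audited
    ./mach uniffi generate   # regenerate the UniFFI JS bindings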

Differential Revision: https://phabricator.services.mozilla.com/D181872
Jan-Erik Rediger 2023-07-26 15:34:27 +00:00
parent ba24b03ab2
commit 1c2fbc9d73
651 changed files with 39420 additions and 30059 deletions


@ -70,9 +70,9 @@ git = "https://github.com/mozilla-spidermonkey/jsparagus"
rev = "64ba08e24749616de2344112f226d1ef4ba893ae"
replace-with = "vendored-sources"
[source."git+https://github.com/mozilla/application-services?rev=86c84c217036c12283d19368867323a66bf35883"]
[source."git+https://github.com/mozilla/application-services?rev=14fae5c7f01bb2645a3f09e91c64033ed796a862"]
git = "https://github.com/mozilla/application-services"
rev = "86c84c217036c12283d19368867323a66bf35883"
rev = "14fae5c7f01bb2645a3f09e91c64033ed796a862"
replace-with = "vendored-sources"
[source."git+https://github.com/mozilla/audioipc?rev=0b51291d2483a17dce3e300c7784b369e02bee73"]
@ -105,9 +105,9 @@ git = "https://github.com/mozilla/neqo"
tag = "v0.6.4"
replace-with = "vendored-sources"
[source."git+https://github.com/mozilla/uniffi-rs.git?rev=bc7ff8977bf38d0fdd1a458810b14f434d4dc4de"]
[source."git+https://github.com/mozilla/uniffi-rs.git?rev=c0e64b839018728d8153ce1758d391b7782e2e21"]
git = "https://github.com/mozilla/uniffi-rs.git"
rev = "bc7ff8977bf38d0fdd1a458810b14f434d4dc4de"
rev = "c0e64b839018728d8153ce1758d391b7782e2e21"
replace-with = "vendored-sources"
[source."git+https://github.com/rust-minidump/minidump-writer.git?rev=a15bd5cab6a3de251c0c23264be14b977c0af09c"]

Cargo.lock generated

@ -159,24 +159,28 @@ dependencies = [
[[package]]
name = "askama"
version = "0.11.1"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb98f10f371286b177db5eeb9a6e5396609555686a35e1d4f7b9a9c6d8af0139"
checksum = "47cbc3cf73fa8d9833727bbee4835ba5c421a0d65b72daf9a7b5d0e0f9cfb57e"
dependencies = [
"askama_derive",
"askama_escape",
"askama_shared",
]
[[package]]
name = "askama_derive"
version = "0.11.2"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87bf87e6e8b47264efa9bde63d6225c6276a52e05e91bf37eaa8afd0032d6b71"
checksum = "c22fbe0413545c098358e56966ff22cdd039e10215ae213cfbd65032b119fc94"
dependencies = [
"askama_shared",
"basic-toml",
"mime",
"mime_guess",
"nom",
"proc-macro2",
"syn 1.0.107",
"quote",
"serde",
"syn 2.0.18",
]
[[package]]
@ -185,23 +189,6 @@ version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
[[package]]
name = "askama_shared"
version = "0.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf722b94118a07fcbc6640190f247334027685d4e218b794dbfe17c32bf38ed0"
dependencies = [
"askama_escape",
"mime",
"mime_guess",
"nom",
"proc-macro2",
"quote",
"serde",
"syn 1.0.107",
"toml",
]
[[package]]
name = "async-task"
version = "4.3.0"
@ -367,6 +354,15 @@ version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a"
[[package]]
name = "basic-toml"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c0de75129aa8d0cceaf750b89013f0e08804d6ec61416da787b35ad0d7cddf1"
dependencies = [
"serde",
]
[[package]]
name = "bench-collections-gtest"
version = "0.1.0"
@ -1499,7 +1495,7 @@ dependencies = [
[[package]]
name = "error-support"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=86c84c217036c12283d19368867323a66bf35883#86c84c217036c12283d19368867323a66bf35883"
source = "git+https://github.com/mozilla/application-services?rev=14fae5c7f01bb2645a3f09e91c64033ed796a862#14fae5c7f01bb2645a3f09e91c64033ed796a862"
dependencies = [
"error-support-macros",
"lazy_static",
@ -1511,11 +1507,11 @@ dependencies = [
[[package]]
name = "error-support-macros"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=86c84c217036c12283d19368867323a66bf35883#86c84c217036c12283d19368867323a66bf35883"
source = "git+https://github.com/mozilla/application-services?rev=14fae5c7f01bb2645a3f09e91c64033ed796a862#14fae5c7f01bb2645a3f09e91c64033ed796a862"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.107",
"syn 2.0.18",
]
[[package]]
@ -2200,9 +2196,9 @@ dependencies = [
[[package]]
name = "glean"
version = "53.0.0"
version = "53.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "682e1f9e0b4bf0ae80e479d228e7cd2102f4d4febd58e10cab57771216b9de13"
checksum = "0efbf048a79e634cd5ccd224f972018e3f217c72d4071bbe6ebee382037bffcb"
dependencies = [
"chrono",
"crossbeam-channel",
@ -2220,9 +2216,9 @@ dependencies = [
[[package]]
name = "glean-core"
version = "53.0.0"
version = "53.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "767c210fd3af23c70cb19bb889f5642142ea16b6d86160649d96ff348c6a7549"
checksum = "826ac72df83806896eda459414972a0ca288d4d206811fd10a8a00a581b85498"
dependencies = [
"android_logger",
"bincode",
@ -2620,7 +2616,7 @@ dependencies = [
[[package]]
name = "interrupt-support"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=86c84c217036c12283d19368867323a66bf35883#86c84c217036c12283d19368867323a66bf35883"
source = "git+https://github.com/mozilla/application-services?rev=14fae5c7f01bb2645a3f09e91c64033ed796a862#14fae5c7f01bb2645a3f09e91c64033ed796a862"
dependencies = [
"lazy_static",
"parking_lot",
@ -2951,9 +2947,9 @@ dependencies = [
[[package]]
name = "libsqlite3-sys"
version = "0.25.2"
version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29f835d03d717946d28b1d1ed632eb6f0e24a299388ee623d0c23118d3e8a7fa"
checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326"
dependencies = [
"cc",
"pkg-config",
@ -3762,7 +3758,7 @@ dependencies = [
[[package]]
name = "nss_build_common"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=86c84c217036c12283d19368867323a66bf35883#86c84c217036c12283d19368867323a66bf35883"
source = "git+https://github.com/mozilla/application-services?rev=14fae5c7f01bb2645a3f09e91c64033ed796a862#14fae5c7f01bb2645a3f09e91c64033ed796a862"
[[package]]
name = "nsstring"
@ -3894,9 +3890,9 @@ dependencies = [
[[package]]
name = "once_cell"
version = "1.17.1"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "ordered-float"
@ -4243,9 +4239,9 @@ checksum = "74605f360ce573babfe43964cbe520294dcb081afbf8c108fc6e23036b4da2df"
[[package]]
name = "prost"
version = "0.8.0"
version = "0.11.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de5e2533f59d08fcf364fd374ebda0692a70bd6d7e66ef97f306f45c6c5d8020"
checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd"
dependencies = [
"bytes",
"prost-derive",
@ -4253,9 +4249,9 @@ dependencies = [
[[package]]
name = "prost-derive"
version = "0.8.0"
version = "0.11.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "600d2f334aa05acb02a755e217ef1ab6dea4d51b58b7846588b747edec04efba"
checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4"
dependencies = [
"anyhow",
"itertools",
@ -4515,11 +4511,11 @@ dependencies = [
[[package]]
name = "rusqlite"
version = "0.28.0"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01e213bc3ecb39ac32e81e51ebe31fd888a940515173e3a18a35f8c6e896422a"
checksum = "549b9d036d571d42e6e85d1c1425e2ac83491075078ca9a15be021c56b1641f2"
dependencies = [
"bitflags 1.3.2",
"bitflags 2.999.999",
"fallible-iterator",
"fallible-streaming-iterator",
"hashlink",
@ -4726,6 +4722,15 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_path_to_error"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7f05c1d5476066defcdfacce1f52fc3cae3af1d3089727100c02ae92e5abbe0"
dependencies = [
"serde",
]
[[package]]
name = "serde_repr"
version = "0.1.12"
@ -4920,7 +4925,7 @@ dependencies = [
[[package]]
name = "sql-support"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=86c84c217036c12283d19368867323a66bf35883#86c84c217036c12283d19368867323a66bf35883"
source = "git+https://github.com/mozilla/application-services?rev=14fae5c7f01bb2645a3f09e91c64033ed796a862#14fae5c7f01bb2645a3f09e91c64033ed796a862"
dependencies = [
"ffi-support",
"interrupt-support",
@ -5121,7 +5126,7 @@ dependencies = [
[[package]]
name = "sync-guid"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=86c84c217036c12283d19368867323a66bf35883#86c84c217036c12283d19368867323a66bf35883"
source = "git+https://github.com/mozilla/application-services?rev=14fae5c7f01bb2645a3f09e91c64033ed796a862#14fae5c7f01bb2645a3f09e91c64033ed796a862"
dependencies = [
"base64 0.13.999",
"rand",
@ -5132,7 +5137,7 @@ dependencies = [
[[package]]
name = "sync15"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=86c84c217036c12283d19368867323a66bf35883#86c84c217036c12283d19368867323a66bf35883"
source = "git+https://github.com/mozilla/application-services?rev=14fae5c7f01bb2645a3f09e91c64033ed796a862#14fae5c7f01bb2645a3f09e91c64033ed796a862"
dependencies = [
"anyhow",
"error-support",
@ -5143,8 +5148,10 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
"serde_path_to_error",
"sync-guid",
"thiserror",
"uniffi",
]
[[package]]
@ -5162,7 +5169,7 @@ dependencies = [
[[package]]
name = "tabs"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=86c84c217036c12283d19368867323a66bf35883#86c84c217036c12283d19368867323a66bf35883"
source = "git+https://github.com/mozilla/application-services?rev=14fae5c7f01bb2645a3f09e91c64033ed796a862#14fae5c7f01bb2645a3f09e91c64033ed796a862"
dependencies = [
"anyhow",
"error-support",
@ -5540,9 +5547,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "uniffi"
version = "0.23.0"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f71cc01459bc34cfe43fabf32b39f1228709bc6db1b3a664a92940af3d062376"
checksum = "1e669f1deb394464d015e445390ea31736421cd32ee3e4373f573860534a587c"
dependencies = [
"anyhow",
"uniffi_build",
@ -5569,7 +5576,7 @@ dependencies = [
[[package]]
name = "uniffi-example-arithmetic"
version = "0.22.0"
source = "git+https://github.com/mozilla/uniffi-rs.git?rev=bc7ff8977bf38d0fdd1a458810b14f434d4dc4de#bc7ff8977bf38d0fdd1a458810b14f434d4dc4de"
source = "git+https://github.com/mozilla/uniffi-rs.git?rev=c0e64b839018728d8153ce1758d391b7782e2e21#c0e64b839018728d8153ce1758d391b7782e2e21"
dependencies = [
"thiserror",
"uniffi",
@ -5589,7 +5596,7 @@ dependencies = [
[[package]]
name = "uniffi-example-geometry"
version = "0.22.0"
source = "git+https://github.com/mozilla/uniffi-rs.git?rev=bc7ff8977bf38d0fdd1a458810b14f434d4dc4de#bc7ff8977bf38d0fdd1a458810b14f434d4dc4de"
source = "git+https://github.com/mozilla/uniffi-rs.git?rev=c0e64b839018728d8153ce1758d391b7782e2e21#c0e64b839018728d8153ce1758d391b7782e2e21"
dependencies = [
"uniffi",
]
@ -5597,7 +5604,7 @@ dependencies = [
[[package]]
name = "uniffi-example-rondpoint"
version = "0.22.0"
source = "git+https://github.com/mozilla/uniffi-rs.git?rev=bc7ff8977bf38d0fdd1a458810b14f434d4dc4de#bc7ff8977bf38d0fdd1a458810b14f434d4dc4de"
source = "git+https://github.com/mozilla/uniffi-rs.git?rev=c0e64b839018728d8153ce1758d391b7782e2e21#c0e64b839018728d8153ce1758d391b7782e2e21"
dependencies = [
"uniffi",
]
@ -5605,7 +5612,7 @@ dependencies = [
[[package]]
name = "uniffi-example-sprites"
version = "0.22.0"
source = "git+https://github.com/mozilla/uniffi-rs.git?rev=bc7ff8977bf38d0fdd1a458810b14f434d4dc4de#bc7ff8977bf38d0fdd1a458810b14f434d4dc4de"
source = "git+https://github.com/mozilla/uniffi-rs.git?rev=c0e64b839018728d8153ce1758d391b7782e2e21#c0e64b839018728d8153ce1758d391b7782e2e21"
dependencies = [
"uniffi",
]
@ -5613,7 +5620,7 @@ dependencies = [
[[package]]
name = "uniffi-example-todolist"
version = "0.22.0"
source = "git+https://github.com/mozilla/uniffi-rs.git?rev=bc7ff8977bf38d0fdd1a458810b14f434d4dc4de#bc7ff8977bf38d0fdd1a458810b14f434d4dc4de"
source = "git+https://github.com/mozilla/uniffi-rs.git?rev=c0e64b839018728d8153ce1758d391b7782e2e21#c0e64b839018728d8153ce1758d391b7782e2e21"
dependencies = [
"once_cell",
"thiserror",
@ -5639,14 +5646,14 @@ dependencies = [
[[package]]
name = "uniffi_bindgen"
version = "0.23.0"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbbba5103051c18f10b22f80a74439ddf7100273f217a547005d2735b2498994"
checksum = "d824f847a2390304a0870a86440fc0d858011fb0f22c2e14c78252a518fd051e"
dependencies = [
"anyhow",
"askama",
"bincode",
"camino",
"cargo_metadata",
"fs-err",
"glob",
"goblin 0.6.999",
@ -5663,9 +5670,9 @@ dependencies = [
[[package]]
name = "uniffi_build"
version = "0.23.0"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ee1a28368ff3d83717e3d3e2e15a66269c43488c3f036914131bb68892f29fb"
checksum = "fce1d3af6c67de32c8a1bfa352a58f9b4b13c05d044765d6c85e47d7530f9e40"
dependencies = [
"anyhow",
"camino",
@ -5674,19 +5681,19 @@ dependencies = [
[[package]]
name = "uniffi_checksum_derive"
version = "0.23.0"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03de61393a42b4ad4984a3763c0600594ac3e57e5aaa1d05cede933958987c03"
checksum = "9aa14882751499e451251785112c452f13b2911941eb9c41af6e454da78d00a6"
dependencies = [
"quote",
"syn 1.0.107",
"syn 2.0.18",
]
[[package]]
name = "uniffi_core"
version = "0.23.0"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a2b4852d638d74ca2d70e450475efb6d91fe6d54a7cd8d6bd80ad2ee6cd7daa"
checksum = "e3eb1039faa52d30649695c3c49b2a50f5416116457000ba8845e4e86415e303"
dependencies = [
"anyhow",
"bytes",
@ -5700,9 +5707,9 @@ dependencies = [
[[package]]
name = "uniffi_macros"
version = "0.23.0"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa03394de21e759e0022f1ea8d992d2e39290d735b9ed52b1f74b20a684f794e"
checksum = "019fb49a47ad1ee9b474c4bae0cfcf482ed3bf1ed01b4986c52feab2de3db91d"
dependencies = [
"bincode",
"camino",
@ -5711,7 +5718,7 @@ dependencies = [
"proc-macro2",
"quote",
"serde",
"syn 1.0.107",
"syn 2.0.18",
"toml",
"uniffi_build",
"uniffi_meta",
@ -5719,10 +5726,12 @@ dependencies = [
[[package]]
name = "uniffi_meta"
version = "0.23.0"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66fdab2c436aed7a6391bec64204ec33948bfed9b11b303235740771f85c4ea6"
checksum = "fbb88511d3599f45a70b200e3cd181a06e206ecc290057be966693827591a89c"
dependencies = [
"anyhow",
"bytes",
"serde",
"siphasher",
"uniffi_checksum_derive",
@ -5730,9 +5739,9 @@ dependencies = [
[[package]]
name = "uniffi_testing"
version = "0.23.0"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92b0570953ec41d97ce23e3b92161ac18231670a1f97523258a6d2ab76d7f76c"
checksum = "282becdb2dbab3bb6bceba1d1a3ea7a7d0d5ef2d06cf5f2389ed9e3fa6a88cb3"
dependencies = [
"anyhow",
"camino",
@ -5793,14 +5802,13 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "viaduct"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=86c84c217036c12283d19368867323a66bf35883#86c84c217036c12283d19368867323a66bf35883"
source = "git+https://github.com/mozilla/application-services?rev=14fae5c7f01bb2645a3f09e91c64033ed796a862#14fae5c7f01bb2645a3f09e91c64033ed796a862"
dependencies = [
"ffi-support",
"log",
"once_cell",
"parking_lot",
"prost",
"prost-derive",
"serde",
"serde_json",
"thiserror",
@ -5950,7 +5958,7 @@ dependencies = [
[[package]]
name = "webext-storage"
version = "0.1.0"
source = "git+https://github.com/mozilla/application-services?rev=86c84c217036c12283d19368867323a66bf35883#86c84c217036c12283d19368867323a66bf35883"
source = "git+https://github.com/mozilla/application-services?rev=14fae5c7f01bb2645a3f09e91c64033ed796a862#14fae5c7f01bb2645a3f09e91c64033ed796a862"
dependencies = [
"anyhow",
"error-support",


@ -52,7 +52,7 @@ resolver = "2"
[workspace.dependencies]
# Shared across multiple UniFFI consumers.
uniffi = "0.23"
uniffi = "0.24.2"
# Explicitly specify what our profiles use. The opt-level setting here is
# a total fiction; see the setup of MOZ_RUST_DEFAULT_FLAGS for what the
@ -193,12 +193,12 @@ minidump-common = { git = "https://github.com/rust-minidump/rust-minidump", rev
warp = { git = "https://github.com/glandium/warp", rev = "4af45fae95bc98b0eba1ef0db17e1dac471bb23d" }
# application-services overrides to make updating them all simpler.
interrupt-support = { git = "https://github.com/mozilla/application-services", rev = "86c84c217036c12283d19368867323a66bf35883" }
sql-support = { git = "https://github.com/mozilla/application-services", rev = "86c84c217036c12283d19368867323a66bf35883" }
sync15 = { git = "https://github.com/mozilla/application-services", rev = "86c84c217036c12283d19368867323a66bf35883" }
tabs = { git = "https://github.com/mozilla/application-services", rev = "86c84c217036c12283d19368867323a66bf35883" }
viaduct = { git = "https://github.com/mozilla/application-services", rev = "86c84c217036c12283d19368867323a66bf35883" }
webext-storage = { git = "https://github.com/mozilla/application-services", rev = "86c84c217036c12283d19368867323a66bf35883" }
interrupt-support = { git = "https://github.com/mozilla/application-services", rev = "14fae5c7f01bb2645a3f09e91c64033ed796a862" }
sql-support = { git = "https://github.com/mozilla/application-services", rev = "14fae5c7f01bb2645a3f09e91c64033ed796a862" }
sync15 = { git = "https://github.com/mozilla/application-services", rev = "14fae5c7f01bb2645a3f09e91c64033ed796a862" }
tabs = { git = "https://github.com/mozilla/application-services", rev = "14fae5c7f01bb2645a3f09e91c64033ed796a862" }
viaduct = { git = "https://github.com/mozilla/application-services", rev = "14fae5c7f01bb2645a3f09e91c64033ed796a862" }
webext-storage = { git = "https://github.com/mozilla/application-services", rev = "14fae5c7f01bb2645a3f09e91c64033ed796a862" }
# Patch mio 0.6 to use winapi 0.3 and miow 0.3, getting rid of winapi 0.2.
# There is not going to be new version of mio 0.6, mio now being >= 0.7.11.
@ -206,8 +206,8 @@ webext-storage = { git = "https://github.com/mozilla/application-services", rev
path = "third_party/rust/mio-0.6.23"
[patch."https://github.com/mozilla/uniffi-rs.git"]
uniffi = "=0.23.0"
uniffi_bindgen = "=0.23.0"
uniffi_build = "=0.23.0"
uniffi_macros = "=0.23.0"
uniffi = "=0.24.2"
uniffi_bindgen = "=0.24.2"
uniffi_build = "=0.24.2"
uniffi_macros = "=0.24.2"
weedle2 = "=4.0.0"


@ -36,7 +36,7 @@ allprojects {
topsrcdir = gradle.mozconfig.topsrcdir
topobjdir = gradle.mozconfig.topobjdir
gleanVersion = "53.0.0"
gleanVersion = "53.1.0"
if (gleanVersion != getRustVersionFor("glean")) {
throw new StopExecutionException("Mismatched Glean version, expected: ${gleanVersion}," +
" found ${getRustVersionFor("glean")}")

gfx/wr/Cargo.lock generated

@ -80,24 +80,28 @@ dependencies = [
[[package]]
name = "askama"
version = "0.11.1"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb98f10f371286b177db5eeb9a6e5396609555686a35e1d4f7b9a9c6d8af0139"
checksum = "47cbc3cf73fa8d9833727bbee4835ba5c421a0d65b72daf9a7b5d0e0f9cfb57e"
dependencies = [
"askama_derive",
"askama_escape",
"askama_shared",
]
[[package]]
name = "askama_derive"
version = "0.11.2"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87bf87e6e8b47264efa9bde63d6225c6276a52e05e91bf37eaa8afd0032d6b71"
checksum = "c22fbe0413545c098358e56966ff22cdd039e10215ae213cfbd65032b119fc94"
dependencies = [
"askama_shared",
"basic-toml",
"mime",
"mime_guess",
"nom 7.1.1",
"proc-macro2",
"syn 1.0.91",
"quote",
"serde",
"syn 2.0.25",
]
[[package]]
@ -106,23 +110,6 @@ version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341"
[[package]]
name = "askama_shared"
version = "0.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf722b94118a07fcbc6640190f247334027685d4e218b794dbfe17c32bf38ed0"
dependencies = [
"askama_escape",
"mime",
"mime_guess",
"nom 7.1.1",
"proc-macro2",
"quote",
"serde",
"syn 1.0.91",
"toml",
]
[[package]]
name = "atty"
version = "0.2.14"
@ -146,6 +133,15 @@ version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"
[[package]]
name = "basic-toml"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f838d03a705d72b12389b8930bd14cacf493be1380bfb15720d4d12db5ab03ac"
dependencies = [
"serde",
]
[[package]]
name = "bimap"
version = "0.6.3"
@ -231,9 +227,9 @@ checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
[[package]]
name = "bytes"
version = "1.1.0"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8"
checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
[[package]]
name = "calloop"
@ -874,7 +870,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.18",
"syn 2.0.25",
]
[[package]]
@ -991,9 +987,9 @@ dependencies = [
[[package]]
name = "glean"
version = "53.0.0"
version = "53.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "682e1f9e0b4bf0ae80e479d228e7cd2102f4d4febd58e10cab57771216b9de13"
checksum = "0efbf048a79e634cd5ccd224f972018e3f217c72d4071bbe6ebee382037bffcb"
dependencies = [
"chrono",
"crossbeam-channel",
@ -1011,9 +1007,9 @@ dependencies = [
[[package]]
name = "glean-core"
version = "53.0.0"
version = "53.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "767c210fd3af23c70cb19bb889f5642142ea16b6d86160649d96ff348c6a7549"
checksum = "826ac72df83806896eda459414972a0ca288d4d206811fd10a8a00a581b85498"
dependencies = [
"android_logger",
"bincode",
@ -1572,7 +1568,7 @@ name = "malloc_size_of_derive"
version = "0.1.3"
dependencies = [
"proc-macro2",
"syn 2.0.18",
"syn 2.0.25",
"synstructure",
]
@ -1870,9 +1866,9 @@ dependencies = [
[[package]]
name = "once_cell"
version = "1.13.1"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "074864da206b4973b84eb91683020dbefd6a8c3f0f38e054d93954e891935e4e"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "ordered-float"
@ -1982,7 +1978,7 @@ version = "0.3.0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.18",
"syn 2.0.25",
"synstructure",
"unicode-xid",
]
@ -2087,9 +2083,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.59"
version = "1.0.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b"
checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da"
dependencies = [
"unicode-ident",
]
@ -2364,9 +2360,9 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.147"
version = "1.0.171"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d193d69bae983fc11a79df82342761dfbf28a99fc8d203dca4c3c1b590948965"
checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9"
dependencies = [
"serde_derive",
]
@ -2382,13 +2378,13 @@ dependencies = [
[[package]]
name = "serde_derive"
version = "1.0.147"
version = "1.0.171"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f1d362ca8fc9c3e3a7484440752472d68a6caa98f1ab81d99b5dfe517cec852"
checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.91",
"syn 2.0.25",
]
[[package]]
@ -2543,9 +2539,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.18"
version = "2.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
checksum = "15e3fc8c0c74267e2df136e5e5fb656a464158aa57624053375eb9c8c6e25ae2"
dependencies = [
"proc-macro2",
"quote",
@ -2560,7 +2556,7 @@ checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.18",
"syn 2.0.25",
"unicode-xid",
]
@ -2719,9 +2715,9 @@ checksum = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
[[package]]
name = "uniffi"
version = "0.23.0"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f71cc01459bc34cfe43fabf32b39f1228709bc6db1b3a664a92940af3d062376"
checksum = "6da26ba712a8547207ededc70f3e0952c09754be9516c320f71731d2f18daf3e"
dependencies = [
"anyhow",
"uniffi_build",
@ -2731,14 +2727,14 @@ dependencies = [
[[package]]
name = "uniffi_bindgen"
version = "0.23.0"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbbba5103051c18f10b22f80a74439ddf7100273f217a547005d2735b2498994"
checksum = "29bff3ba24868022fc82e2f1558f3a0fdcc2655e1335459a35f25d1ec4ff1d0c"
dependencies = [
"anyhow",
"askama",
"bincode",
"camino",
"cargo_metadata",
"fs-err",
"glob",
"goblin",
@ -2755,9 +2751,9 @@ dependencies = [
[[package]]
name = "uniffi_build"
version = "0.23.0"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ee1a28368ff3d83717e3d3e2e15a66269c43488c3f036914131bb68892f29fb"
checksum = "52b7cd03e17b997469e5438d1a491c3b9e2d41c2a87c86fd91ba96e87aecba6a"
dependencies = [
"anyhow",
"camino",
@ -2766,19 +2762,19 @@ dependencies = [
[[package]]
name = "uniffi_checksum_derive"
version = "0.23.0"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03de61393a42b4ad4984a3763c0600594ac3e57e5aaa1d05cede933958987c03"
checksum = "af98d58e238b6aef9ff62a93b5c60caa710bdb49351434a639b9bd7b4c84c808"
dependencies = [
"quote",
"syn 1.0.91",
"syn 2.0.25",
]
[[package]]
name = "uniffi_core"
version = "0.23.0"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a2b4852d638d74ca2d70e450475efb6d91fe6d54a7cd8d6bd80ad2ee6cd7daa"
checksum = "68640fa1b5dfbb4ccc149057c81b40adc51a01d295ce798c15c6c76f7e899907"
dependencies = [
"anyhow",
"bytes",
@ -2792,9 +2788,9 @@ dependencies = [
[[package]]
name = "uniffi_macros"
version = "0.23.0"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa03394de21e759e0022f1ea8d992d2e39290d735b9ed52b1f74b20a684f794e"
checksum = "76f72684ff48a8ff0ee95fde6dbcfa687236ad1789dc18205cb3305432a7b35c"
dependencies = [
"bincode",
"camino",
@ -2803,7 +2799,7 @@ dependencies = [
"proc-macro2",
"quote",
"serde",
"syn 1.0.91",
"syn 2.0.25",
"toml",
"uniffi_build",
"uniffi_meta",
@ -2811,20 +2807,23 @@ dependencies = [
[[package]]
name = "uniffi_meta"
version = "0.23.0"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66fdab2c436aed7a6391bec64204ec33948bfed9b11b303235740771f85c4ea6"
checksum = "fe3388a58b13dad8f0cdcbdee1c59af6408608ce8d85a3ef5d1429369ca7b217"
dependencies = [
"anyhow",
"bytes",
"serde",
"siphasher",
"uniffi_checksum_derive",
"uniffi_core",
]
[[package]]
name = "uniffi_testing"
version = "0.23.0"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92b0570953ec41d97ce23e3b92161ac18231670a1f97523258a6d2ab76d7f76c"
checksum = "4fb437a2c8565249274e381fd88bc75b539897f321b79022c9fe7e275d2c2bbb"
dependencies = [
"anyhow",
"camino",


@ -52,7 +52,7 @@ svg_fmt = "0.4"
tracy-rs = "0.1.2"
derive_more = { version = "0.99", default-features = false, features = ["add_assign"] }
etagere = "0.2.6"
glean = "53.0.0"
glean = "53.1.0"
firefox-on-glean = { version = "0.1.0", optional = true }
swgl = { path = "../swgl", optional = true }
topological-sort = "0.1"


@ -25,7 +25,7 @@ tracy-rs = "0.1.2"
log = "0.4"
lazy_static = "1"
fxhash = "0.2.1"
glean = { version = "53.0.0", optional = true }
glean = { version = "53.1.0", optional = true }
firefox-on-glean = { version = "0.1.0", optional = true }
serde = { optional = true, version = "1.0", features = ["serde_derive"] }


@ -84,7 +84,7 @@ vendored:third_party/python/wcwidth
vendored:third_party/python/wheel
# glean-sdk may not be installable if a wheel isn't available
# and it has to be built from source.
pypi-optional:glean-sdk==53.0.0:telemetry will not be collected
pypi-optional:glean-sdk==53.1.0:telemetry will not be collected
# Mach gracefully handles the case where `psutil` is unavailable.
# We aren't (yet) able to pin packages in automation, so we have to
# support down to the oldest locally-installed version (5.4.2).


@ -333,6 +333,70 @@ start = "2019-07-25"
end = "2024-05-03"
notes = "All code written or reviewed by Manish"
[[wildcard-audits.uniffi]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 48 # Jan-Erik Rediger (badboy)
start = "2022-05-05"
end = "2024-06-21"
notes = "Maintained by the Glean and Application Services teams"
[[wildcard-audits.uniffi_bindgen]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 48 # Jan-Erik Rediger (badboy)
start = "2022-05-05"
end = "2024-06-21"
notes = "Maintained by the Glean and Application Services teams"
[[wildcard-audits.uniffi_build]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 48 # Jan-Erik Rediger (badboy)
start = "2022-05-05"
end = "2024-06-21"
notes = "Maintained by the Glean and Application Services teams"
[[wildcard-audits.uniffi_checksum_derive]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 48 # Jan-Erik Rediger (badboy)
start = "2022-12-16"
end = "2024-06-21"
notes = "Maintained by the Glean and Application Services teams"
[[wildcard-audits.uniffi_core]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 48 # Jan-Erik Rediger (badboy)
start = "2023-06-21"
end = "2024-06-21"
notes = "Maintained by the Glean and Application Services teams"
[[wildcard-audits.uniffi_macros]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 48 # Jan-Erik Rediger (badboy)
start = "2022-05-05"
end = "2024-06-21"
notes = "Maintained by the Glean and Application Services teams"
[[wildcard-audits.uniffi_meta]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 48 # Jan-Erik Rediger (badboy)
start = "2022-08-31"
end = "2024-06-21"
notes = "Maintained by the Glean and Application Services teams"
[[wildcard-audits.uniffi_testing]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
user-id = 48 # Jan-Erik Rediger (badboy)
start = "2022-12-16"
end = "2024-06-21"
notes = "Maintained by the Glean and Application Services teams"
[[wildcard-audits.webdriver]]
who = "Henrik Skupin <mail@hskupin.info>"
criteria = "safe-to-deploy"
@ -1821,6 +1885,11 @@ who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy"
delta = "0.7.3 -> 0.7.4"
[[audits.libsqlite3-sys]]
who = "Ben Dean-Kawamura <bdk@mozilla.com>"
criteria = "safe-to-deploy"
delta = "0.25.2 -> 0.26.0"
[[audits.linked-hash-map]]
who = "Aria Beingessner <a.beingessner@gmail.com>"
criteria = "safe-to-deploy"
@ -2415,6 +2484,18 @@ who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy"
delta = "1.0.6 -> 1.0.7"
[[audits.prost]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
delta = "0.8.0 -> 0.11.9"
notes = "Mostly internal refactorings. Minimal new unsafe code, but with the invariants explicitly checked in code"
[[audits.prost-derive]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
delta = "0.8.0 -> 0.11.9"
notes = "Documentation and internal refactoring changes only"
[[audits.qcms]]
who = "Jeff Muizelaar <jmuizelaar@mozilla.com>"
criteria = "safe-to-deploy"
@ -2595,6 +2676,11 @@ who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy"
delta = "0.27.0 -> 0.28.0"
[[audits.rusqlite]]
who = "Ben Dean-Kawamura <bdk@mozilla.com>"
criteria = "safe-to-deploy"
delta = "0.28.0 -> 0.29.0"
[[audits.rust_cascade]]
who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy"
@ -2803,6 +2889,11 @@ who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy"
delta = "1.0.91 -> 1.0.93"
[[audits.serde_path_to_error]]
who = "Ben Dean-Kawamura <bdk@mozilla.com>"
criteria = "safe-to-deploy"
version = "0.1.11"
[[audits.serde_repr]]
who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-run"


@ -241,10 +241,6 @@ criteria = "safe-to-deploy"
version = "0.10.3"
criteria = "safe-to-deploy"
[[exemptions.askama_shared]]
version = "0.12.2"
criteria = "safe-to-deploy"
[[exemptions.async-task]]
version = "4.0.3"
criteria = "safe-to-deploy"
@ -549,10 +545,6 @@ criteria = "safe-to-deploy"
version = "0.6.5"
criteria = "safe-to-deploy"
[[exemptions.metal]]
version = "0.23.1"
criteria = "safe-to-deploy"
[[exemptions.midir]]
version = "0.7.0"
criteria = "safe-to-deploy"
@ -689,10 +681,6 @@ criteria = "safe-to-deploy"
version = "0.6.3"
criteria = "safe-to-deploy"
[[exemptions.redox_syscall]]
version = "0.2.13"
criteria = "safe-to-deploy"
[[exemptions.remove_dir_all]]
version = "0.5.3"
criteria = "safe-to-deploy"


@ -191,15 +191,15 @@ user-login = "jrmuizel"
user-name = "Jeff Muizelaar"
[[publisher.glean]]
version = "53.0.0"
when = "2023-06-07"
version = "53.1.0"
when = "2023-06-28"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
[[publisher.glean-core]]
version = "53.0.0"
when = "2023-06-07"
version = "53.1.0"
when = "2023-06-28"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
@ -518,6 +518,62 @@ user-id = 1139
user-login = "Manishearth"
user-name = "Manish Goregaokar"
[[publisher.uniffi]]
version = "0.24.2"
when = "2023-07-25"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
[[publisher.uniffi_bindgen]]
version = "0.24.2"
when = "2023-07-25"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
[[publisher.uniffi_build]]
version = "0.24.2"
when = "2023-07-25"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
[[publisher.uniffi_checksum_derive]]
version = "0.24.2"
when = "2023-07-25"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
[[publisher.uniffi_core]]
version = "0.24.2"
when = "2023-07-25"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
[[publisher.uniffi_macros]]
version = "0.24.2"
when = "2023-07-25"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
[[publisher.uniffi_meta]]
version = "0.24.2"
when = "2023-07-25"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
[[publisher.uniffi_testing]]
version = "0.24.2"
when = "2023-07-25"
user-id = 48
user-login = "badboy"
user-name = "Jan-Erik Rediger"
[[publisher.walkdir]]
version = "2.3.2"
when = "2021-03-22"
@ -1094,6 +1150,16 @@ who = "David Cook <dcook@divviup.org>"
criteria = "safe-to-deploy"
version = "0.1.2"
[[audits.isrg.audits.once_cell]]
who = "Brandon Pitman <bran@bran.land>"
criteria = "safe-to-deploy"
delta = "1.17.1 -> 1.17.2"
[[audits.isrg.audits.once_cell]]
who = "David Cook <dcook@divviup.org>"
criteria = "safe-to-deploy"
delta = "1.17.2 -> 1.18.0"
[[audits.isrg.audits.rayon-core]]
who = "Brandon Pitman <bran@bran.land>"
criteria = "safe-to-deploy"
@ -1113,6 +1179,27 @@ end = "2024-05-10"
notes = "Maintained by me"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.audits.askama]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
delta = "0.11.1 -> 0.12.0"
notes = "No new unsafe usage, mostly dependency updates and smaller API changes"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.audits.askama_derive]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
delta = "0.11.2 -> 0.12.1"
notes = "Dependency updates, a new toml dependency and some API changes. No unsafe use."
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.audits.basic-toml]]
who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy"
version = "0.1.2"
notes = "TOML parser, forked from toml 0.5"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.audits.either]]
who = "Nika Layzell <nika@thelayzells.com>"
criteria = "safe-to-deploy"


@ -1 +1 @@
{"files":{"Cargo.toml":"f15e26a3a1f5c3efc5cb364ee53ae42d462a4221c9e416d8a765260e2a75eada","LICENSE-APACHE":"87cb0d734c723c083e51c825930ff42bce28596b52dee15567f6b28f19c195e3","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","src/lib.rs":"fd52fa3b97acd0957a183d9dac465ca67d53c53f49dad461ca174c312d81e32e"},"package":"fb98f10f371286b177db5eeb9a6e5396609555686a35e1d4f7b9a9c6d8af0139"}
{"files":{"Cargo.toml":"fbab611fc3ba2204942300a534b4f030460f33b0606fa50b9ad08ea567ba81e8","LICENSE-APACHE":"87cb0d734c723c083e51c825930ff42bce28596b52dee15567f6b28f19c195e3","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","README.md":"6a4430cf614ff9d36ba01463a8f94085ed4b0889fd719793fa914568247acce2","src/error.rs":"1e3f8020092469090f314f60685c077347e730a88222dfdaa38aaf2396507532","src/filters/json.rs":"dccd0a3f1017da9f6cd9650bd39eb1670f4a9833d2f0968614cd8cd65d18a9dd","src/filters/mod.rs":"903d09599e62f56657b00b2aa577c9d2f963348dd12a1029e90e68549f78b1db","src/filters/yaml.rs":"4e641bedbe3666b334836fb6603fe7f718f7e90d8e33419acca624f50a580c3f","src/helpers.rs":"76e0422acd4ccba7b1735d6ab7622a93f6ec5a2fa89531111d877266784d5334","src/lib.rs":"3a6e4d0b3aadc7c391cbe59416504a719406303726122779281a3af1a7ad76a4"},"package":"47cbc3cf73fa8d9833727bbee4835ba5c421a0d65b72daf9a7b5d0e0f9cfb57e"}


@ -10,46 +10,117 @@
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
edition = "2021"
rust-version = "1.58"
name = "askama"
version = "0.11.1"
version = "0.12.0"
description = "Type-safe, compiled Jinja-like templates for Rust"
homepage = "https://github.com/djc/askama"
documentation = "https://docs.rs/askama"
readme = "../README.md"
keywords = ["markup", "template", "jinja2", "html"]
readme = "README.md"
keywords = [
"markup",
"template",
"jinja2",
"html",
]
categories = ["template-engine"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/djc/askama"
resolver = "1"
[package.metadata.docs.rs]
features = ["config", "humansize", "num-traits", "serde-json", "serde-yaml"]
features = [
"config",
"humansize",
"num-traits",
"serde-json",
"serde-yaml",
]
[dependencies.askama_derive]
version = "0.11.2"
version = "0.12.0"
[dependencies.askama_escape]
version = "0.10"
version = "0.10.3"
[dependencies.askama_shared]
version = "0.12.1"
[dependencies.comrak]
version = "0.16"
optional = true
default-features = false
[dependencies.dep_humansize]
version = "2"
optional = true
package = "humansize"
[dependencies.dep_num_traits]
version = "0.2.6"
optional = true
package = "num-traits"
[dependencies.percent-encoding]
version = "2.1.0"
optional = true
[dependencies.serde]
version = "1.0"
features = ["derive"]
optional = true
[dependencies.serde_json]
version = "1.0"
optional = true
[dependencies.serde_yaml]
version = "0.9"
optional = true
[features]
config = ["askama_derive/config", "askama_shared/config"]
default = ["config", "humansize", "num-traits", "urlencode"]
humansize = ["askama_shared/humansize"]
markdown = ["askama_shared/markdown"]
config = ["askama_derive/config"]
default = [
"config",
"humansize",
"num-traits",
"urlencode",
]
humansize = [
"askama_derive/humansize",
"dep_humansize",
]
markdown = [
"askama_derive/markdown",
"comrak",
]
mime = []
mime_guess = []
num-traits = ["askama_shared/num-traits"]
serde-json = ["askama_derive/json", "askama_shared/json"]
serde-yaml = ["askama_derive/yaml", "askama_shared/yaml"]
urlencode = ["askama_shared/percent-encoding"]
with-actix-web = ["askama_derive/actix-web"]
with-axum = ["askama_derive/axum"]
with-gotham = ["askama_derive/gotham"]
with-mendes = ["askama_derive/mendes"]
with-rocket = ["askama_derive/rocket"]
with-tide = ["askama_derive/tide"]
with-warp = ["askama_derive/warp"]
num-traits = [
"askama_derive/num-traits",
"dep_num_traits",
]
serde-json = [
"askama_derive/serde-json",
"askama_escape/json",
"serde",
"serde_json",
]
serde-yaml = [
"askama_derive/serde-yaml",
"serde",
"serde_yaml",
]
urlencode = [
"askama_derive/urlencode",
"percent-encoding",
]
with-actix-web = ["askama_derive/with-actix-web"]
with-axum = ["askama_derive/with-axum"]
with-gotham = ["askama_derive/with-gotham"]
with-hyper = ["askama_derive/with-hyper"]
with-mendes = ["askama_derive/with-mendes"]
with-rocket = ["askama_derive/with-rocket"]
with-tide = ["askama_derive/with-tide"]
with-warp = ["askama_derive/with-warp"]
[badges.maintenance]
status = "actively-developed"

third_party/rust/askama/README.md (vendored, new file)

@ -0,0 +1,96 @@
# Askama
[![Documentation](https://docs.rs/askama/badge.svg)](https://docs.rs/askama/)
[![Latest version](https://img.shields.io/crates/v/askama.svg)](https://crates.io/crates/askama)
[![Build Status](https://github.com/djc/askama/workflows/CI/badge.svg)](https://github.com/djc/askama/actions?query=workflow%3ACI)
[![Chat](https://badges.gitter.im/gitterHQ/gitter.svg)](https://gitter.im/djc/askama)
Askama implements a template rendering engine based on [Jinja](https://jinja.palletsprojects.com/).
It generates Rust code from your templates at compile time
based on a user-defined `struct` to hold the template's context.
See below for an example, or read [the book][docs].
**"Pretty exciting. I would love to use this already."** --
[Armin Ronacher][mitsuhiko], creator of Jinja
All feedback welcome. Feel free to file bugs, requests for documentation and
any other feedback to the [issue tracker][issues] or [tweet me][twitter].
Askama was created by and is maintained by Dirkjan Ochtman. If you are in a
position to support ongoing maintenance and further development or use it
in a for-profit context, please consider supporting my open source work on
[Patreon][patreon].
### Feature highlights
* Construct templates using a familiar, easy-to-use syntax
* Benefit from the safety provided by Rust's type system
* Template code is compiled into your crate for [optimal performance][benchmarks]
* Optional built-in support for Actix, Axum, Gotham, Mendes, Rocket, tide, and warp web frameworks
* Debugging features to assist you in template development
* Templates must be valid UTF-8 and produce UTF-8 when rendered
* IDE support available in [JetBrains products](https://plugins.jetbrains.com/plugin/16591-askama-template-support)
* Works on stable Rust
### Supported in templates
* Template inheritance
* Loops, if/else statements and include support
* Macro support
* Variables (no mutability allowed)
* Some built-in filters, and the ability to use your own
* Whitespace suppressing with '-' markers
* Opt-out HTML escaping
* Syntax customization
[docs]: https://djc.github.io/askama/
[fafhrd91]: https://github.com/fafhrd91
[mitsuhiko]: http://lucumr.pocoo.org/
[issues]: https://github.com/djc/askama/issues
[twitter]: https://twitter.com/djco/
[patreon]: https://www.patreon.com/dochtman
[benchmarks]: https://github.com/djc/template-benchmarks-rs
How to get started
------------------
First, add the following to your crate's `Cargo.toml`:
```toml
# in section [dependencies]
askama = "0.11.2"
```
Now create a directory called `templates` in your crate root.
In it, create a file called `hello.html`, containing the following:
```
Hello, {{ name }}!
```
In any Rust file inside your crate, add the following:
```rust
use askama::Template; // bring trait in scope
#[derive(Template)] // this will generate the code...
#[template(path = "hello.html")] // using the template in this path, relative
// to the `templates` dir in the crate root
struct HelloTemplate<'a> { // the name of the struct can be anything
name: &'a str, // the field name should match the variable name
// in your template
}
fn main() {
let hello = HelloTemplate { name: "world" }; // instantiate your struct
println!("{}", hello.render().unwrap()); // then render it.
}
```
You should now be able to compile and run this code.
Review the [test cases] for more examples.
[test cases]: https://github.com/djc/askama/tree/main/testing


@ -41,14 +41,14 @@ pub enum Error {
}
impl std::error::Error for Error {
fn cause(&self) -> Option<&dyn std::error::Error> {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match *self {
Error::Fmt(ref err) => err.source(),
Error::Fmt(ref err) => Some(err),
Error::Custom(ref err) => Some(err.as_ref()),
#[cfg(feature = "serde_json")]
Error::Json(ref err) => err.source(),
Error::Json(ref err) => Some(err),
#[cfg(feature = "serde_yaml")]
Error::Yaml(ref err) => err.source(),
Error::Yaml(ref err) => Some(err),
}
}
}
@ -56,10 +56,10 @@ impl std::error::Error for Error {
impl Display for Error {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Error::Fmt(err) => write!(formatter, "formatting error: {}", err),
Error::Custom(err) => write!(formatter, "{}", err),
Error::Fmt(err) => write!(formatter, "formatting error: {err}"),
Error::Custom(err) => write!(formatter, "{err}"),
#[cfg(feature = "serde_json")]
Error::Json(err) => write!(formatter, "json conversion error: {}", err),
Error::Json(err) => write!(formatter, "json conversion error: {err}"),
#[cfg(feature = "serde_yaml")]
Error::Yaml(err) => write!(formatter, "yaml conversion error: {}", err),
}


@ -31,10 +31,10 @@ mod tests {
fn test_json() {
assert_eq!(json(true).unwrap(), "true");
assert_eq!(json("foo").unwrap(), r#""foo""#);
assert_eq!(json(&true).unwrap(), "true");
assert_eq!(json(&"foo").unwrap(), r#""foo""#);
assert_eq!(json(true).unwrap(), "true");
assert_eq!(json("foo").unwrap(), r#""foo""#);
assert_eq!(
json(&vec!["foo", "bar"]).unwrap(),
json(vec!["foo", "bar"]).unwrap(),
r#"[
"foo",
"bar"


@ -5,25 +5,25 @@
//! For more information, read the [book](https://djc.github.io/askama/filters.html).
#![allow(clippy::trivially_copy_pass_by_ref)]
use std::fmt;
use std::fmt::{self, Write};
#[cfg(feature = "serde_json")]
#[cfg(feature = "serde-json")]
mod json;
#[cfg(feature = "serde_json")]
#[cfg(feature = "serde-json")]
pub use self::json::json;
#[cfg(feature = "serde_yaml")]
#[cfg(feature = "serde-yaml")]
mod yaml;
#[cfg(feature = "serde_yaml")]
#[cfg(feature = "serde-yaml")]
pub use self::yaml::yaml;
#[allow(unused_imports)]
use crate::error::Error::Fmt;
use askama_escape::{Escaper, MarkupDisplay};
#[cfg(feature = "humansize")]
use humansize::{file_size_opts, FileSize};
use dep_humansize::{format_size_i, ToF64, DECIMAL};
#[cfg(feature = "num-traits")]
use num_traits::{cast::NumCast, Signed};
use dep_num_traits::{cast::NumCast, Signed};
#[cfg(feature = "percent-encoding")]
use percent_encoding::{utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC};
@ -43,42 +43,6 @@ const URLENCODE_STRICT_SET: &AsciiSet = &NON_ALPHANUMERIC
// Same as URLENCODE_STRICT_SET, but preserves forward slashes for encoding paths
const URLENCODE_SET: &AsciiSet = &URLENCODE_STRICT_SET.remove(b'/');
// This is used by the code generator to decide whether a named filter is part of
// Askama or should refer to a local `filters` module. It should contain all the
// filters shipped with Askama, even the optional ones (since optional inclusion
// in the const vector based on features seems impossible right now).
pub const BUILT_IN_FILTERS: &[&str] = &[
"abs",
"capitalize",
"center",
"e",
"escape",
"filesizeformat",
"fmt",
"format",
"indent",
"into_f64",
"into_isize",
"join",
"linebreaks",
"linebreaksbr",
"paragraphbreaks",
"lower",
"lowercase",
"safe",
"trim",
"truncate",
"upper",
"uppercase",
"urlencode",
"urlencode_strict",
"wordcount",
// optional features, reserve the names anyway:
"json",
"markdown",
"yaml",
];
/// Marks a string (or other `Display` type) as safe
///
/// Use this if you want to allow markup in an expression, or if you know
@ -95,11 +59,14 @@ where
Ok(MarkupDisplay::new_safe(v, e))
}
/// Escapes `&`, `<` and `>` in strings
/// Escapes strings according to the escape mode.
///
/// Askama will automatically insert the first (`Escaper`) argument,
/// so this filter only takes a single argument of any type that implements
/// `Display`.
///
/// It is possible to optionally specify an escaper other than the default for
/// the template's extension, like `{{ val|escape("txt") }}`.
pub fn escape<E, T>(e: E, v: T) -> Result<MarkupDisplay<E, T>>
where
E: Escaper,
@ -110,9 +77,8 @@ where
#[cfg(feature = "humansize")]
/// Returns adequate string representation (in KB, ..) of number of bytes
pub fn filesizeformat<B: FileSize>(b: &B) -> Result<String> {
b.file_size(file_size_opts::DECIMAL)
.map_err(|_| Fmt(fmt::Error))
pub fn filesizeformat(b: &(impl ToF64 + Copy)) -> Result<String> {
Ok(format_size_i(*b, DECIMAL))
}
#[cfg(feature = "percent-encoding")]
@ -197,7 +163,7 @@ pub fn linebreaks<T: fmt::Display>(s: T) -> Result<String> {
let s = s.to_string();
let linebroken = s.replace("\n\n", "</p><p>").replace('\n', "<br/>");
Ok(format!("<p>{}</p>", linebroken))
Ok(format!("<p>{linebroken}</p>"))
}
/// Converts all newlines in a piece of plain text to HTML line breaks
@ -215,7 +181,7 @@ pub fn paragraphbreaks<T: fmt::Display>(s: T) -> Result<String> {
let s = s.to_string();
let linebroken = s.replace("\n\n", "</p><p>").replace("<p></p>", "");
Ok(format!("<p>{}</p>", linebroken))
Ok(format!("<p>{linebroken}</p>"))
}
/// Converts to lowercase
@ -313,7 +279,7 @@ where
rv.push_str(separator);
}
rv.push_str(&format!("{}", item));
write!(rv, "{item}")?;
}
Ok(rv)
@ -330,20 +296,14 @@ where
/// Capitalize a value. The first character will be uppercase, all others lowercase.
pub fn capitalize<T: fmt::Display>(s: T) -> Result<String> {
let mut s = s.to_string();
match s.get_mut(0..1).map(|s| {
s.make_ascii_uppercase();
&*s
}) {
None => Ok(s),
_ => {
s.get_mut(1..).map(|s| {
s.make_ascii_lowercase();
&*s
});
Ok(s)
let s = s.to_string();
match s.chars().next() {
Some(c) => {
let mut replacement: String = c.to_uppercase().collect();
replacement.push_str(&s[c.len_utf8()..].to_lowercase());
Ok(replacement)
}
_ => Ok(s),
}
}
@ -393,7 +353,7 @@ where
{
use comrak::{
markdown_to_html, ComrakExtensionOptions, ComrakOptions, ComrakParseOptions,
ComrakRenderOptions,
ComrakRenderOptions, ListStyleType,
};
const DEFAULT_OPTIONS: ComrakOptions = ComrakOptions {
@ -414,6 +374,7 @@ where
// default:
smart: false,
default_info_string: None,
relaxed_tasklist_matching: false,
},
render: ComrakRenderOptions {
unsafe_: false,
@ -422,6 +383,7 @@ where
hardbreaks: false,
github_pre_lang: false,
width: 0,
list_style: ListStyleType::Dash,
},
};
@ -440,9 +402,9 @@ mod tests {
fn test_filesizeformat() {
assert_eq!(filesizeformat(&0).unwrap(), "0 B");
assert_eq!(filesizeformat(&999u64).unwrap(), "999 B");
assert_eq!(filesizeformat(&1000i32).unwrap(), "1 KB");
assert_eq!(filesizeformat(&1023).unwrap(), "1.02 KB");
assert_eq!(filesizeformat(&1024usize).unwrap(), "1.02 KB");
assert_eq!(filesizeformat(&1000i32).unwrap(), "1 kB");
assert_eq!(filesizeformat(&1023).unwrap(), "1.02 kB");
assert_eq!(filesizeformat(&1024usize).unwrap(), "1.02 kB");
}
#[cfg(feature = "percent-encoding")]
@ -450,61 +412,61 @@ mod tests {
fn test_urlencoding() {
// Unreserved (https://tools.ietf.org/html/rfc3986.html#section-2.3)
// alpha / digit
assert_eq!(urlencode(&"AZaz09").unwrap(), "AZaz09");
assert_eq!(urlencode_strict(&"AZaz09").unwrap(), "AZaz09");
assert_eq!(urlencode("AZaz09").unwrap(), "AZaz09");
assert_eq!(urlencode_strict("AZaz09").unwrap(), "AZaz09");
// other
assert_eq!(urlencode(&"_.-~").unwrap(), "_.-~");
assert_eq!(urlencode_strict(&"_.-~").unwrap(), "_.-~");
assert_eq!(urlencode("_.-~").unwrap(), "_.-~");
assert_eq!(urlencode_strict("_.-~").unwrap(), "_.-~");
// Reserved (https://tools.ietf.org/html/rfc3986.html#section-2.2)
// gen-delims
assert_eq!(urlencode(&":/?#[]@").unwrap(), "%3A/%3F%23%5B%5D%40");
assert_eq!(urlencode(":/?#[]@").unwrap(), "%3A/%3F%23%5B%5D%40");
assert_eq!(
urlencode_strict(&":/?#[]@").unwrap(),
urlencode_strict(":/?#[]@").unwrap(),
"%3A%2F%3F%23%5B%5D%40"
);
// sub-delims
assert_eq!(
urlencode(&"!$&'()*+,;=").unwrap(),
urlencode("!$&'()*+,;=").unwrap(),
"%21%24%26%27%28%29%2A%2B%2C%3B%3D"
);
assert_eq!(
urlencode_strict(&"!$&'()*+,;=").unwrap(),
urlencode_strict("!$&'()*+,;=").unwrap(),
"%21%24%26%27%28%29%2A%2B%2C%3B%3D"
);
// Other
assert_eq!(
urlencode(&"žŠďŤňĚáÉóŮ").unwrap(),
urlencode("žŠďŤňĚáÉóŮ").unwrap(),
"%C5%BE%C5%A0%C4%8F%C5%A4%C5%88%C4%9A%C3%A1%C3%89%C3%B3%C5%AE"
);
assert_eq!(
urlencode_strict(&"žŠďŤňĚáÉóŮ").unwrap(),
urlencode_strict("žŠďŤňĚáÉóŮ").unwrap(),
"%C5%BE%C5%A0%C4%8F%C5%A4%C5%88%C4%9A%C3%A1%C3%89%C3%B3%C5%AE"
);
// Ferris
assert_eq!(urlencode(&"🦀").unwrap(), "%F0%9F%A6%80");
assert_eq!(urlencode_strict(&"🦀").unwrap(), "%F0%9F%A6%80");
assert_eq!(urlencode("🦀").unwrap(), "%F0%9F%A6%80");
assert_eq!(urlencode_strict("🦀").unwrap(), "%F0%9F%A6%80");
}
#[test]
fn test_linebreaks() {
assert_eq!(
linebreaks(&"Foo\nBar Baz").unwrap(),
linebreaks("Foo\nBar Baz").unwrap(),
"<p>Foo<br/>Bar Baz</p>"
);
assert_eq!(
linebreaks(&"Foo\nBar\n\nBaz").unwrap(),
linebreaks("Foo\nBar\n\nBaz").unwrap(),
"<p>Foo<br/>Bar</p><p>Baz</p>"
);
}
#[test]
fn test_linebreaksbr() {
assert_eq!(linebreaksbr(&"Foo\nBar").unwrap(), "Foo<br/>Bar");
assert_eq!(linebreaksbr("Foo\nBar").unwrap(), "Foo<br/>Bar");
assert_eq!(
linebreaksbr(&"Foo\nBar\n\nBaz").unwrap(),
linebreaksbr("Foo\nBar\n\nBaz").unwrap(),
"Foo<br/>Bar<br/><br/>Baz"
);
}
@ -512,72 +474,72 @@ mod tests {
#[test]
fn test_paragraphbreaks() {
assert_eq!(
paragraphbreaks(&"Foo\nBar Baz").unwrap(),
paragraphbreaks("Foo\nBar Baz").unwrap(),
"<p>Foo\nBar Baz</p>"
);
assert_eq!(
paragraphbreaks(&"Foo\nBar\n\nBaz").unwrap(),
paragraphbreaks("Foo\nBar\n\nBaz").unwrap(),
"<p>Foo\nBar</p><p>Baz</p>"
);
assert_eq!(
paragraphbreaks(&"Foo\n\n\n\n\nBar\n\nBaz").unwrap(),
paragraphbreaks("Foo\n\n\n\n\nBar\n\nBaz").unwrap(),
"<p>Foo</p><p>\nBar</p><p>Baz</p>"
);
}
#[test]
fn test_lower() {
assert_eq!(lower(&"Foo").unwrap(), "foo");
assert_eq!(lower(&"FOO").unwrap(), "foo");
assert_eq!(lower(&"FooBar").unwrap(), "foobar");
assert_eq!(lower(&"foo").unwrap(), "foo");
assert_eq!(lower("Foo").unwrap(), "foo");
assert_eq!(lower("FOO").unwrap(), "foo");
assert_eq!(lower("FooBar").unwrap(), "foobar");
assert_eq!(lower("foo").unwrap(), "foo");
}
#[test]
fn test_upper() {
assert_eq!(upper(&"Foo").unwrap(), "FOO");
assert_eq!(upper(&"FOO").unwrap(), "FOO");
assert_eq!(upper(&"FooBar").unwrap(), "FOOBAR");
assert_eq!(upper(&"foo").unwrap(), "FOO");
assert_eq!(upper("Foo").unwrap(), "FOO");
assert_eq!(upper("FOO").unwrap(), "FOO");
assert_eq!(upper("FooBar").unwrap(), "FOOBAR");
assert_eq!(upper("foo").unwrap(), "FOO");
}
#[test]
fn test_trim() {
assert_eq!(trim(&" Hello\tworld\t").unwrap(), "Hello\tworld");
assert_eq!(trim(" Hello\tworld\t").unwrap(), "Hello\tworld");
}
#[test]
fn test_truncate() {
assert_eq!(truncate(&"hello", 2).unwrap(), "he...");
assert_eq!(truncate("hello", 2).unwrap(), "he...");
let a = String::from("您好");
assert_eq!(a.len(), 6);
assert_eq!(String::from("您").len(), 3);
assert_eq!(truncate(&"您好", 1).unwrap(), "您...");
assert_eq!(truncate(&"您好", 2).unwrap(), "您...");
assert_eq!(truncate(&"您好", 3).unwrap(), "您...");
assert_eq!(truncate(&"您好", 4).unwrap(), "您好...");
assert_eq!(truncate(&"您好", 6).unwrap(), "您好");
assert_eq!(truncate(&"您好", 7).unwrap(), "您好");
assert_eq!(truncate("您好", 1).unwrap(), "您...");
assert_eq!(truncate("您好", 2).unwrap(), "您...");
assert_eq!(truncate("您好", 3).unwrap(), "您...");
assert_eq!(truncate("您好", 4).unwrap(), "您好...");
assert_eq!(truncate("您好", 6).unwrap(), "您好");
assert_eq!(truncate("您好", 7).unwrap(), "您好");
let s = String::from("🤚a🤚");
assert_eq!(s.len(), 9);
assert_eq!(String::from("🤚").len(), 4);
assert_eq!(truncate(&"🤚a🤚", 1).unwrap(), "🤚...");
assert_eq!(truncate(&"🤚a🤚", 2).unwrap(), "🤚...");
assert_eq!(truncate(&"🤚a🤚", 3).unwrap(), "🤚...");
assert_eq!(truncate(&"🤚a🤚", 4).unwrap(), "🤚...");
assert_eq!(truncate(&"🤚a🤚", 5).unwrap(), "🤚a...");
assert_eq!(truncate(&"🤚a🤚", 6).unwrap(), "🤚a🤚...");
assert_eq!(truncate(&"🤚a🤚", 9).unwrap(), "🤚a🤚");
assert_eq!(truncate(&"🤚a🤚", 10).unwrap(), "🤚a🤚");
assert_eq!(truncate("🤚a🤚", 1).unwrap(), "🤚...");
assert_eq!(truncate("🤚a🤚", 2).unwrap(), "🤚...");
assert_eq!(truncate("🤚a🤚", 3).unwrap(), "🤚...");
assert_eq!(truncate("🤚a🤚", 4).unwrap(), "🤚...");
assert_eq!(truncate("🤚a🤚", 5).unwrap(), "🤚a...");
assert_eq!(truncate("🤚a🤚", 6).unwrap(), "🤚a🤚...");
assert_eq!(truncate("🤚a🤚", 9).unwrap(), "🤚a🤚");
assert_eq!(truncate("🤚a🤚", 10).unwrap(), "🤚a🤚");
}
#[test]
fn test_indent() {
assert_eq!(indent(&"hello", 2).unwrap(), "hello");
assert_eq!(indent(&"hello\n", 2).unwrap(), "hello\n");
assert_eq!(indent(&"hello\nfoo", 2).unwrap(), "hello\n foo");
assert_eq!(indent("hello", 2).unwrap(), "hello");
assert_eq!(indent("hello\n", 2).unwrap(), "hello\n");
assert_eq!(indent("hello\nfoo", 2).unwrap(), "hello\n foo");
assert_eq!(
indent(&"hello\nfoo\n bar", 4).unwrap(),
indent("hello\nfoo\n bar", 4).unwrap(),
"hello\n foo\n bar"
);
}
@ -649,12 +611,15 @@ mod tests {
#[test]
fn test_capitalize() {
assert_eq!(capitalize(&"foo").unwrap(), "Foo".to_string());
assert_eq!(capitalize(&"f").unwrap(), "F".to_string());
assert_eq!(capitalize(&"fO").unwrap(), "Fo".to_string());
assert_eq!(capitalize(&"").unwrap(), "".to_string());
assert_eq!(capitalize(&"FoO").unwrap(), "Foo".to_string());
assert_eq!(capitalize(&"foO BAR").unwrap(), "Foo bar".to_string());
assert_eq!(capitalize("foo").unwrap(), "Foo".to_string());
assert_eq!(capitalize("f").unwrap(), "F".to_string());
assert_eq!(capitalize("fO").unwrap(), "Fo".to_string());
assert_eq!(capitalize("").unwrap(), "".to_string());
assert_eq!(capitalize("FoO").unwrap(), "Foo".to_string());
assert_eq!(capitalize("foO BAR").unwrap(), "Foo bar".to_string());
assert_eq!(capitalize("äØÄÅÖ").unwrap(), "Äøäåö".to_string());
assert_eq!(capitalize("ß").unwrap(), "SS".to_string());
assert_eq!(capitalize("ßß").unwrap(), "SSß".to_string());
}
#[test]
@ -667,9 +632,9 @@ mod tests {
#[test]
fn test_wordcount() {
assert_eq!(wordcount(&"").unwrap(), 0);
assert_eq!(wordcount(&" \n\t").unwrap(), 0);
assert_eq!(wordcount(&"foo").unwrap(), 1);
assert_eq!(wordcount(&"foo bar").unwrap(), 2);
assert_eq!(wordcount("").unwrap(), 0);
assert_eq!(wordcount(" \n\t").unwrap(), 0);
assert_eq!(wordcount("foo").unwrap(), 1);
assert_eq!(wordcount("foo bar").unwrap(), 2);
}
}


@ -22,13 +22,13 @@ mod tests {
#[test]
fn test_yaml() {
assert_eq!(yaml(Html, true).unwrap().to_string(), "---\ntrue");
assert_eq!(yaml(Html, "foo").unwrap().to_string(), "---\nfoo");
assert_eq!(yaml(Html, &true).unwrap().to_string(), "---\ntrue");
assert_eq!(yaml(Html, &"foo").unwrap().to_string(), "---\nfoo");
assert_eq!(yaml(Html, true).unwrap().to_string(), "true\n");
assert_eq!(yaml(Html, "foo").unwrap().to_string(), "foo\n");
assert_eq!(yaml(Html, true).unwrap().to_string(), "true\n");
assert_eq!(yaml(Html, "foo").unwrap().to_string(), "foo\n");
assert_eq!(
yaml(Html, &vec!["foo", "bar"]).unwrap().to_string(),
"---\n- foo\n- bar"
"- foo\n- bar\n"
);
}
}


@ -63,14 +63,23 @@
#![deny(elided_lifetimes_in_paths)]
#![deny(unreachable_pub)]
pub use askama_shared as shared;
mod error;
pub mod filters;
pub mod helpers;
pub use askama_escape::{Html, Text};
use std::fmt;
pub use askama_derive::Template;
pub use askama_escape::{Html, MarkupDisplay, Text};
#[doc(hidden)]
pub use crate as shared;
pub use crate::error::{Error, Result};
/// Main `Template` trait; implementations are generally derived
///
/// If you need an object-safe template, use [`DynTemplate`].
pub trait Template {
pub trait Template: fmt::Display {
/// Helper method which allocates a new `String` and renders into it
fn render(&self) -> Result<String> {
let mut buf = String::with_capacity(Self::SIZE_HINT);
@ -78,9 +87,15 @@ pub trait Template {
Ok(buf)
}
/// Renders the template to the given `writer` buffer
/// Renders the template to the given `writer` fmt buffer
fn render_into(&self, writer: &mut (impl std::fmt::Write + ?Sized)) -> Result<()>;
/// Renders the template to the given `writer` io buffer
#[inline]
fn write_into(&self, writer: &mut (impl std::io::Write + ?Sized)) -> std::io::Result<()> {
writer.write_fmt(format_args!("{self}"))
}
/// The template's extension, if provided
const EXTENSION: Option<&'static str>;
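// Illustrative sketch, not part of the vendored sources: with `Template` now
// requiring `fmt::Display`, a derived template can render straight into an
// `std::io::Write` sink via the new `write_into`. The `Hello` struct and its
// inline source below are hypothetical.
use askama::Template;
#[derive(Template)]
#[template(source = "Hello, {{ name }}!", ext = "txt")]
struct Hello<'a> {
    name: &'a str,
}
fn demo() -> std::io::Result<()> {
    let tpl = Hello { name: "world" };
    let mut out = Vec::new();
    tpl.write_into(&mut out)?; // delegates to `Display` via `write_fmt`
    assert_eq!(out, b"Hello, world!");
    Ok(())
}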
@ -98,9 +113,12 @@ pub trait DynTemplate {
/// Helper method which allocates a new `String` and renders into it
fn dyn_render(&self) -> Result<String>;
/// Renders the template to the given `writer` buffer
/// Renders the template to the given `writer` fmt buffer
fn dyn_render_into(&self, writer: &mut dyn std::fmt::Write) -> Result<()>;
/// Renders the template to the given `writer` io buffer
fn dyn_write_into(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()>;
/// Helper function to inspect the template's extension
fn extension(&self) -> Option<&'static str>;
@ -120,6 +138,11 @@ impl<T: Template> DynTemplate for T {
<Self as Template>::render_into(self, writer)
}
#[inline]
fn dyn_write_into(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> {
writer.write_fmt(format_args!("{self}"))
}
fn extension(&self) -> Option<&'static str> {
Self::EXTENSION
}
@ -133,39 +156,24 @@ impl<T: Template> DynTemplate for T {
}
}
pub use crate::shared::filters;
pub use crate::shared::helpers;
pub use crate::shared::{read_config_file, Error, MarkupDisplay, Result};
pub use askama_derive::*;
#[deprecated(since = "0.11.1", note = "The only function in this mod is deprecated")]
pub mod mime {
#[cfg(all(feature = "mime_guess", feature = "mime"))]
#[deprecated(since = "0.11.1", note = "Use Template::MIME_TYPE instead")]
pub use crate::shared::extension_to_mime_type;
impl fmt::Display for dyn DynTemplate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.dyn_render_into(f).map_err(|_| ::std::fmt::Error {})
}
}
/// Old build script helper to rebuild crates if contained templates have changed
///
/// This function is now deprecated and does nothing.
#[deprecated(
since = "0.8.1",
note = "file-level dependency tracking is handled automatically without build script"
)]
pub fn rerun_if_templates_changed() {}
#[cfg(test)]
mod tests {
use super::{DynTemplate, Template};
use std::fmt;
use super::*;
use crate::{DynTemplate, Template};
#[test]
fn dyn_template() {
struct Test;
impl Template for Test {
fn render_into(
&self,
writer: &mut (impl std::fmt::Write + ?Sized),
) -> askama_shared::Result<()> {
fn render_into(&self, writer: &mut (impl std::fmt::Write + ?Sized)) -> Result<()> {
Ok(writer.write_str("test")?)
}
@ -176,10 +184,36 @@ mod tests {
const MIME_TYPE: &'static str = "text/plain; charset=utf-8";
}
impl fmt::Display for Test {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.render_into(f).map_err(|_| fmt::Error {})
}
}
fn render(t: &dyn DynTemplate) -> String {
t.dyn_render().unwrap()
}
assert_eq!(render(&Test), "test");
let test = &Test as &dyn DynTemplate;
assert_eq!(render(test), "test");
assert_eq!(test.to_string(), "test");
assert_eq!(format!("{test}"), "test");
let mut vec = Vec::new();
test.dyn_write_into(&mut vec).unwrap();
assert_eq!(vec, vec![b't', b'e', b's', b't']);
}
}
/// Old build script helper to rebuild crates if contained templates have changed
///
/// This function is now deprecated and does nothing.
#[deprecated(
since = "0.8.1",
note = "file-level dependency tracking is handled automatically without build script"
)]
pub fn rerun_if_templates_changed() {}


@ -1 +1 @@
{"files":{"Cargo.toml":"e07a64f9eacd69eb4670070ab470ac837e8c6c05e0bed632521a9a4c04863dd2","LICENSE-APACHE":"87cb0d734c723c083e51c825930ff42bce28596b52dee15567f6b28f19c195e3","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","README.md":"dd3e4e203eeca91219fd57c0ca1f92b413176f406df19568d0fe33d7905123e4","src/lib.rs":"40c39439ea110b500f146b401f69ad385d21e6277fdbeabdab662a3b1238b90f"},"package":"87bf87e6e8b47264efa9bde63d6225c6276a52e05e91bf37eaa8afd0032d6b71"}
{"files":{"Cargo.toml":"f293fbc41371fb46f5b68775b158d8da37c09453dc9356ee8e97fce3d1021b2d","LICENSE-APACHE":"87cb0d734c723c083e51c825930ff42bce28596b52dee15567f6b28f19c195e3","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","README.md":"dd3e4e203eeca91219fd57c0ca1f92b413176f406df19568d0fe33d7905123e4","src/config.rs":"de4202804d32cc4da044ed41140ef987056f44116b1bbfac53001e07133e52b9","src/generator.rs":"4fec224dd261bc96a63b831f0692a62d9f8d19566377b39dd69bc0f3de4ab033","src/heritage.rs":"fceb0ac86034b8eb902212f9a78a6fb7d19688c3ccdb117099f15933073bf7bb","src/input.rs":"53afae3f73e2b52d83d73c1b38893677992a5ee04927e8b905198b742b1546ae","src/lib.rs":"003e91569575b72a9587796c82c9f9c0e5e9f3dc8db6b659735cf58f68504b76","src/parser/expr.rs":"3b8178398a293910df161ddd769d2efc7ae8dff03e7313f033149a38a6d81983","src/parser/mod.rs":"3afc065cdc69dc1498ddf9a04a77f56d807ed14653828918d36529a441fb6c48","src/parser/node.rs":"c5437e2525e245b6fcd358696f3607c50ef82cf649a66b6bef7816232c3220fa","src/parser/tests.rs":"81fb02f8cab87c93575fdb6b7d6e9cae6fa3b69173f5f5a76d214f5316ca66ca","templates/a.html":"b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c","templates/b.html":"7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730","templates/sub/b.html":"7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730","templates/sub/c.html":"bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c","templates/sub/sub1/d.html":"86b0c5a1e2b73b08fd54c727f4458649ed9fe3ad1b6e8ac9460c070113509a1e"},"package":"c22fbe0413545c098358e56966ff22cdd039e10215ae213cfbd65032b119fc94"}


@ -10,35 +10,63 @@
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
edition = "2021"
rust-version = "1.58"
name = "askama_derive"
version = "0.11.2"
version = "0.12.1"
description = "Procedural macro package for Askama"
homepage = "https://github.com/djc/askama"
readme = "README.md"
license = "MIT/Apache-2.0"
repository = "https://github.com/djc/askama"
resolver = "1"
[lib]
proc-macro = true
[dependencies.askama_shared]
version = "0.12.1"
default-features = false
[dependencies.basic-toml]
version = "0.1.1"
optional = true
[dependencies.mime]
version = "0.3"
[dependencies.mime_guess]
version = "2"
[dependencies.nom]
version = "7"
[dependencies.proc-macro2]
version = "1"
[dependencies.syn]
[dependencies.quote]
version = "1"
[dependencies.serde]
version = "1.0"
features = ["derive"]
optional = true
[dependencies.syn]
version = "2"
[features]
actix-web = []
axum = []
config = ["askama_shared/config"]
gotham = []
json = ["askama_shared/json"]
mendes = []
rocket = []
tide = []
warp = []
yaml = ["askama_shared/yaml"]
config = [
"serde",
"basic-toml",
]
humansize = []
markdown = []
num-traits = []
serde-json = []
serde-yaml = []
urlencode = []
with-actix-web = []
with-axum = []
with-gotham = []
with-hyper = []
with-mendes = []
with-rocket = []
with-tide = []
with-warp = []


@ -1,44 +1,27 @@
#![cfg_attr(feature = "cargo-clippy", allow(unused_parens))]
#![forbid(unsafe_code)]
#![deny(elided_lifetimes_in_paths)]
#![deny(unreachable_pub)]
use std::borrow::Cow;
use std::collections::{BTreeMap, HashSet};
use std::convert::TryFrom;
use std::path::{Path, PathBuf};
use std::{env, fmt, fs};
use std::{env, fs};
use proc_macro2::{Span, TokenStream};
#[cfg(feature = "serde")]
use serde::Deserialize;
pub use crate::input::extension_to_mime_type;
pub use askama_escape::MarkupDisplay;
mod error;
pub use crate::error::{Error, Result};
pub mod filters;
#[doc(hidden)]
pub mod generator;
pub mod helpers;
#[doc(hidden)]
pub mod heritage;
#[doc(hidden)]
pub mod input;
#[doc(hidden)]
pub mod parser;
use crate::CompileError;
#[derive(Debug)]
pub struct Config<'a> {
pub dirs: Vec<PathBuf>,
pub syntaxes: BTreeMap<String, Syntax<'a>>,
pub default_syntax: &'a str,
pub escapers: Vec<(HashSet<String>, String)>,
pub(crate) struct Config<'a> {
pub(crate) dirs: Vec<PathBuf>,
pub(crate) syntaxes: BTreeMap<String, Syntax<'a>>,
pub(crate) default_syntax: &'a str,
pub(crate) escapers: Vec<(HashSet<String>, String)>,
pub(crate) whitespace: WhitespaceHandling,
}
impl Config<'_> {
pub fn new(s: &str) -> std::result::Result<Config<'_>, CompileError> {
impl<'a> Config<'a> {
pub(crate) fn new(
s: &'a str,
template_whitespace: Option<&String>,
) -> std::result::Result<Config<'a>, CompileError> {
let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
let default_dirs = vec![root.join("templates")];
@ -51,18 +34,32 @@ impl Config<'_> {
RawConfig::from_toml_str(s)?
};
let (dirs, default_syntax) = match raw.general {
let (dirs, default_syntax, mut whitespace) = match raw.general {
Some(General {
dirs,
default_syntax,
whitespace,
}) => (
dirs.map_or(default_dirs, |v| {
v.into_iter().map(|dir| root.join(dir)).collect()
}),
default_syntax.unwrap_or(DEFAULT_SYNTAX_NAME),
whitespace,
),
None => (
default_dirs,
DEFAULT_SYNTAX_NAME,
WhitespaceHandling::default(),
),
None => (default_dirs, DEFAULT_SYNTAX_NAME),
};
if let Some(template_whitespace) = template_whitespace {
whitespace = match template_whitespace.as_str() {
"suppress" => WhitespaceHandling::Suppress,
"minimize" => WhitespaceHandling::Minimize,
"preserve" => WhitespaceHandling::Preserve,
s => return Err(format!("invalid value for `whitespace`: \"{s}\"").into()),
};
}
if let Some(raw_syntaxes) = raw.syntax {
for raw_s in raw_syntaxes {
@ -72,13 +69,13 @@ impl Config<'_> {
.insert(name.to_string(), Syntax::try_from(raw_s)?)
.is_some()
{
return Err(format!("syntax \"{}\" is already defined", name).into());
return Err(format!("syntax \"{name}\" is already defined").into());
}
}
}
if !syntaxes.contains_key(default_syntax) {
return Err(format!("default syntax \"{}\" not found", default_syntax).into());
return Err(format!("default syntax \"{default_syntax}\" not found").into());
}
let mut escapers = Vec::new();
@ -103,10 +100,11 @@ impl Config<'_> {
syntaxes,
default_syntax,
escapers,
whitespace,
})
}
pub fn find_template(
pub(crate) fn find_template(
&self,
path: &str,
start_at: Option<&Path>,
@ -134,16 +132,16 @@ impl Config<'_> {
}
#[derive(Debug)]
pub struct Syntax<'a> {
pub block_start: &'a str,
pub block_end: &'a str,
pub expr_start: &'a str,
pub expr_end: &'a str,
pub comment_start: &'a str,
pub comment_end: &'a str,
pub(crate) struct Syntax<'a> {
pub(crate) block_start: &'a str,
pub(crate) block_end: &'a str,
pub(crate) expr_start: &'a str,
pub(crate) expr_end: &'a str,
pub(crate) comment_start: &'a str,
pub(crate) comment_end: &'a str,
}
impl Default for Syntax<'_> {
impl Default for Syntax<'static> {
fn default() -> Self {
Self {
block_start: "{%",
@ -160,7 +158,7 @@ impl<'a> TryFrom<RawSyntax<'a>> for Syntax<'a> {
type Error = CompileError;
fn try_from(raw: RawSyntax<'a>) -> std::result::Result<Self, Self::Error> {
let default = Self::default();
let default = Syntax::default();
let syntax = Self {
block_start: raw.block_start.unwrap_or(default.block_start),
block_end: raw.block_end.unwrap_or(default.block_end),
@ -184,8 +182,8 @@ impl<'a> TryFrom<RawSyntax<'a>> for Syntax<'a> {
let be = syntax.block_start.as_bytes()[1];
let cs = syntax.comment_start.as_bytes()[0];
let ce = syntax.comment_start.as_bytes()[1];
let es = syntax.block_start.as_bytes()[0];
let ee = syntax.block_start.as_bytes()[1];
let es = syntax.expr_start.as_bytes()[0];
let ee = syntax.expr_start.as_bytes()[1];
if !((bs == cs && bs == es) || (be == ce && be == ee)) {
return Err(format!("bad delimiters block_start: {}, comment_start: {}, expr_start: {}, needs one of the two characters in common", syntax.block_start, syntax.comment_start, syntax.expr_start).into());
}
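// Illustrative sketch: a custom syntax accepted by the common-character rule
// above; all three start delimiters share '<' as their first byte. The
// askama.toml entry below is hypothetical.
//
// [[syntax]]
// name = "angled"
// block_start = "<%"
// block_end = "%>"
// expr_start = "<<"
// expr_end = ">>"
// comment_start = "<#"
// comment_end = "#>"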
@ -196,17 +194,18 @@ impl<'a> TryFrom<RawSyntax<'a>> for Syntax<'a> {
#[cfg_attr(feature = "serde", derive(Deserialize))]
#[derive(Default)]
struct RawConfig<'d> {
struct RawConfig<'a> {
#[cfg_attr(feature = "serde", serde(borrow))]
general: Option<General<'d>>,
syntax: Option<Vec<RawSyntax<'d>>>,
escaper: Option<Vec<RawEscaper<'d>>>,
general: Option<General<'a>>,
syntax: Option<Vec<RawSyntax<'a>>>,
escaper: Option<Vec<RawEscaper<'a>>>,
}
impl RawConfig<'_> {
#[cfg(feature = "config")]
fn from_toml_str(s: &str) -> std::result::Result<RawConfig<'_>, CompileError> {
toml::from_str(s).map_err(|e| format!("invalid TOML in {}: {}", CONFIG_FILE_NAME, e).into())
basic_toml::from_str(s)
.map_err(|e| format!("invalid TOML in {CONFIG_FILE_NAME}: {e}").into())
}
#[cfg(not(feature = "config"))]
@ -215,11 +214,33 @@ impl RawConfig<'_> {
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "serde", derive(Deserialize))]
#[cfg_attr(feature = "serde", serde(field_identifier, rename_all = "lowercase"))]
pub(crate) enum WhitespaceHandling {
/// The default behaviour. It will leave the whitespace characters "as is".
Preserve,
/// It'll remove all the whitespace characters before and after the jinja block.
Suppress,
/// It'll remove all the whitespace characters except one before and after the jinja blocks.
/// If the trimmed characters contain a newline character, the newline is the
/// one preserved.
Minimize,
}
impl Default for WhitespaceHandling {
fn default() -> Self {
WhitespaceHandling::Preserve
}
}
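// Illustrative sketch: the project-wide default comes from a (hypothetical)
// askama.toml parsed by `Config::new` above; a `whitespace` template attribute,
// when present, takes precedence over it.
//
// [general]
// whitespace = "suppress"   # or "preserve" / "minimize"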
#[cfg_attr(feature = "serde", derive(Deserialize))]
struct General<'a> {
#[cfg_attr(feature = "serde", serde(borrow))]
dirs: Option<Vec<&'a str>>,
default_syntax: Option<&'a str>,
#[cfg_attr(feature = "serde", serde(default))]
whitespace: WhitespaceHandling,
}
#[cfg_attr(feature = "serde", derive(Deserialize))]
@ -239,12 +260,20 @@ struct RawEscaper<'a> {
extensions: Vec<&'a str>,
}
pub fn read_config_file() -> std::result::Result<String, CompileError> {
pub(crate) fn read_config_file(
config_path: Option<&str>,
) -> std::result::Result<String, CompileError> {
let root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
let filename = root.join(CONFIG_FILE_NAME);
let filename = match config_path {
Some(config_path) => root.join(config_path),
None => root.join(CONFIG_FILE_NAME),
};
if filename.exists() {
fs::read_to_string(&filename)
.map_err(|_| format!("unable to read {:?}", filename.to_str().unwrap()).into())
} else if config_path.is_some() {
Err(format!("`{}` does not exist", root.display()).into())
} else {
Ok("".to_string())
}
@ -258,7 +287,7 @@ where
}
#[allow(clippy::match_wild_err_arm)]
pub fn get_template_source(tpl_path: &Path) -> std::result::Result<String, CompileError> {
pub(crate) fn get_template_source(tpl_path: &Path) -> std::result::Result<String, CompileError> {
match fs::read_to_string(tpl_path) {
Err(_) => Err(format!(
"unable to open template file '{}'",
@ -274,17 +303,6 @@ pub fn get_template_source(tpl_path: &Path) -> std::result::Result<String, Compi
}
}
#[derive(Clone, Copy, Debug)]
pub struct Integrations {
pub actix: bool,
pub axum: bool,
pub gotham: bool,
pub mendes: bool,
pub rocket: bool,
pub tide: bool,
pub warp: bool,
}
static CONFIG_FILE_NAME: &str = "askama.toml";
static DEFAULT_SYNTAX_NAME: &str = "default";
static DEFAULT_ESCAPERS: &[(&[&str], &str)] = &[
@ -293,58 +311,16 @@ static DEFAULT_ESCAPERS: &[(&[&str], &str)] = &[
(&["j2", "jinja", "jinja2"], "::askama::Html"),
];
#[derive(Debug, Clone)]
pub struct CompileError {
msg: Cow<'static, str>,
span: Span,
}
impl CompileError {
pub fn new<S: Into<Cow<'static, str>>>(s: S, span: Span) -> Self {
Self {
msg: s.into(),
span,
}
}
pub fn to_compile_error(self) -> TokenStream {
syn::Error::new(self.span, self.msg).to_compile_error()
}
}
impl std::error::Error for CompileError {}
impl fmt::Display for CompileError {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt.write_str(&self.msg)
}
}
impl From<&'static str> for CompileError {
#[inline]
fn from(s: &'static str) -> Self {
Self::new(s, Span::call_site())
}
}
impl From<String> for CompileError {
#[inline]
fn from(s: String) -> Self {
Self::new(s, Span::call_site())
}
}
#[cfg(test)]
#[allow(clippy::blacklisted_name)]
mod tests {
use super::*;
use std::env;
use std::path::{Path, PathBuf};
use super::*;
#[test]
fn get_source() {
let path = Config::new("")
let path = Config::new("", None)
.and_then(|config| config.find_template("b.html", None))
.unwrap();
assert_eq!(get_template_source(&path).unwrap(), "bar");
@ -354,7 +330,7 @@ mod tests {
fn test_default_config() {
let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
root.push("templates");
let config = Config::new("").unwrap();
let config = Config::new("", None).unwrap();
assert_eq!(config.dirs, vec![root]);
}
@ -363,7 +339,7 @@ mod tests {
fn test_config_dirs() {
let mut root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
root.push("tpl");
let config = Config::new("[general]\ndirs = [\"tpl\"]").unwrap();
let config = Config::new("[general]\ndirs = [\"tpl\"]", None).unwrap();
assert_eq!(config.dirs, vec![root]);
}
@ -377,7 +353,7 @@ mod tests {
#[test]
fn find_absolute() {
let config = Config::new("").unwrap();
let config = Config::new("", None).unwrap();
let root = config.find_template("a.html", None).unwrap();
let path = config.find_template("sub/b.html", Some(&root)).unwrap();
assert_eq_rooted(&path, "sub/b.html");
@ -386,14 +362,14 @@ mod tests {
#[test]
#[should_panic]
fn find_relative_nonexistent() {
let config = Config::new("").unwrap();
let config = Config::new("", None).unwrap();
let root = config.find_template("a.html", None).unwrap();
config.find_template("c.html", Some(&root)).unwrap();
}
#[test]
fn find_relative() {
let config = Config::new("").unwrap();
let config = Config::new("", None).unwrap();
let root = config.find_template("sub/b.html", None).unwrap();
let path = config.find_template("c.html", Some(&root)).unwrap();
assert_eq_rooted(&path, "sub/c.html");
@ -401,7 +377,7 @@ mod tests {
#[test]
fn find_relative_sub() {
let config = Config::new("").unwrap();
let config = Config::new("", None).unwrap();
let root = config.find_template("sub/b.html", None).unwrap();
let path = config.find_template("sub1/d.html", Some(&root)).unwrap();
assert_eq_rooted(&path, "sub/sub1/d.html");
@ -424,7 +400,7 @@ mod tests {
"#;
let default_syntax = Syntax::default();
let config = Config::new(raw_config).unwrap();
let config = Config::new(raw_config, None).unwrap();
assert_eq!(config.default_syntax, "foo");
let foo = config.syntaxes.get("foo").unwrap();
@ -456,7 +432,7 @@ mod tests {
"#;
let default_syntax = Syntax::default();
let config = Config::new(raw_config).unwrap();
let config = Config::new(raw_config, None).unwrap();
assert_eq!(config.default_syntax, "foo");
let foo = config.syntaxes.get("foo").unwrap();
@ -484,7 +460,7 @@ mod tests {
syntax = [{ name = "default" }]
"#;
let _config = Config::new(raw_config).unwrap();
let _config = Config::new(raw_config, None).unwrap();
}
#[cfg(feature = "toml")]
@ -496,7 +472,7 @@ mod tests {
{ name = "foo", block_start = "%%" } ]
"#;
let _config = Config::new(raw_config).unwrap();
let _config = Config::new(raw_config, None).unwrap();
}
#[cfg(feature = "toml")]
@ -508,7 +484,7 @@ mod tests {
default_syntax = "foo"
"#;
let _config = Config::new(raw_config).unwrap();
let _config = Config::new(raw_config, None).unwrap();
}
#[cfg(feature = "config")]
@ -520,6 +496,7 @@ mod tests {
path = "::askama::Js"
extensions = ["js"]
"#,
None,
)
.unwrap();
assert_eq!(
@ -535,4 +512,71 @@ mod tests {
]
);
}
#[cfg(feature = "config")]
#[test]
fn test_whitespace_parsing() {
let config = Config::new(
r#"
[general]
whitespace = "suppress"
"#,
None,
)
.unwrap();
assert_eq!(config.whitespace, WhitespaceHandling::Suppress);
let config = Config::new(r#""#, None).unwrap();
assert_eq!(config.whitespace, WhitespaceHandling::Preserve);
let config = Config::new(
r#"
[general]
whitespace = "preserve"
"#,
None,
)
.unwrap();
assert_eq!(config.whitespace, WhitespaceHandling::Preserve);
let config = Config::new(
r#"
[general]
whitespace = "minimize"
"#,
None,
)
.unwrap();
assert_eq!(config.whitespace, WhitespaceHandling::Minimize);
}
#[cfg(feature = "toml")]
#[test]
fn test_whitespace_in_template() {
// Checking that template arguments have precedence over general configuration.
// Here the template arguments set `whitespace = "minimize"`, so the resulting
// `WhitespaceHandling` should be `Minimize` as well.
let config = Config::new(
r#"
[general]
whitespace = "suppress"
"#,
Some(&"minimize".to_owned()),
)
.unwrap();
assert_eq!(config.whitespace, WhitespaceHandling::Minimize);
let config = Config::new(r#""#, Some(&"minimize".to_owned())).unwrap();
assert_eq!(config.whitespace, WhitespaceHandling::Minimize);
}
#[test]
fn test_config_whitespace_error() {
let config = Config::new(r#""#, Some(&"trim".to_owned()));
if let Err(err) = config {
assert_eq!(err.msg, "invalid value for `whitespace`: \"trim\"");
} else {
panic!("Config::new should have return an error");
}
}
}


@ -1,36 +1,254 @@
use super::{get_template_source, CompileError, Integrations};
use crate::filters;
use crate::config::{get_template_source, read_config_file, Config, WhitespaceHandling};
use crate::heritage::{Context, Heritage};
use crate::input::{Source, TemplateInput};
use crate::parser::{parse, Cond, CondTest, Expr, Loop, Node, Target, When, Ws};
use proc_macro2::Span;
use crate::input::{Print, Source, TemplateInput};
use crate::parser::{parse, Cond, CondTest, Expr, Loop, Node, Target, When, Whitespace, Ws};
use crate::CompileError;
use proc_macro::TokenStream;
use quote::{quote, ToTokens};
use syn::punctuated::Punctuated;
use std::collections::HashMap;
use std::path::Path;
use std::collections::hash_map::{Entry, HashMap};
use std::path::{Path, PathBuf};
use std::{cmp, hash, mem, str};
pub fn generate<S: std::hash::BuildHasher>(
input: &TemplateInput<'_>,
contexts: &HashMap<&Path, Context<'_>, S>,
heritage: Option<&Heritage<'_>>,
integrations: Integrations,
) -> Result<String, CompileError> {
Generator::new(input, contexts, heritage, integrations, MapChain::new())
.build(&contexts[input.path.as_path()])
/// The actual implementation for askama_derive::Template
pub(crate) fn derive_template(input: TokenStream) -> TokenStream {
let ast: syn::DeriveInput = syn::parse(input).unwrap();
match build_template(&ast) {
Ok(source) => source.parse().unwrap(),
Err(e) => e.into_compile_error(),
}
}
struct Generator<'a, S: std::hash::BuildHasher> {
/// Takes a `syn::DeriveInput` and generates source code for it
///
/// Reads the metadata from the `template()` attribute to get the template
/// metadata, then fetches the source from the filesystem. The source is
/// parsed, and the parse tree is fed to the code generator. Will print
/// the parse tree and/or generated source according to the `print` key's
/// value as passed to the `template()` attribute.
fn build_template(ast: &syn::DeriveInput) -> Result<String, CompileError> {
let template_args = TemplateArgs::new(ast)?;
let config_toml = read_config_file(template_args.config_path.as_deref())?;
let config = Config::new(&config_toml, template_args.whitespace.as_ref())?;
let input = TemplateInput::new(ast, &config, template_args)?;
let source: String = match input.source {
Source::Source(ref s) => s.clone(),
Source::Path(_) => get_template_source(&input.path)?,
};
let mut sources = HashMap::new();
find_used_templates(&input, &mut sources, source)?;
let mut parsed = HashMap::new();
for (path, src) in &sources {
parsed.insert(path.as_path(), parse(src, input.syntax)?);
}
let mut contexts = HashMap::new();
for (path, nodes) in &parsed {
contexts.insert(*path, Context::new(input.config, path, nodes)?);
}
let ctx = &contexts[input.path.as_path()];
let heritage = if !ctx.blocks.is_empty() || ctx.extends.is_some() {
Some(Heritage::new(ctx, &contexts))
} else {
None
};
if input.print == Print::Ast || input.print == Print::All {
eprintln!("{:?}", parsed[input.path.as_path()]);
}
let code = Generator::new(
&input,
&contexts,
heritage.as_ref(),
MapChain::new(),
config.whitespace,
)
.build(&contexts[input.path.as_path()])?;
if input.print == Print::Code || input.print == Print::All {
eprintln!("{code}");
}
Ok(code)
}
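// Illustrative sketch: the `print` key consumed above drives the eprintln!
// debugging output. The template path below is hypothetical.
//
// #[derive(Template)]
// #[template(path = "hello.html", print = "all")] // dump parse tree and generated code
// struct Hello;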
#[derive(Default)]
pub(crate) struct TemplateArgs {
pub(crate) source: Option<Source>,
pub(crate) print: Print,
pub(crate) escaping: Option<String>,
pub(crate) ext: Option<String>,
pub(crate) syntax: Option<String>,
pub(crate) config_path: Option<String>,
pub(crate) whitespace: Option<String>,
}
impl TemplateArgs {
fn new(ast: &'_ syn::DeriveInput) -> Result<Self, CompileError> {
// Check that an attribute called `template()` exists once and that it is
// the proper type (list).
let mut template_args = None;
for attr in &ast.attrs {
if !attr.path().is_ident("template") {
continue;
}
match attr.parse_args_with(Punctuated::<syn::Meta, syn::Token![,]>::parse_terminated) {
Ok(args) if template_args.is_none() => template_args = Some(args),
Ok(_) => return Err("duplicated 'template' attribute".into()),
Err(e) => return Err(format!("unable to parse template arguments: {e}").into()),
};
}
let template_args =
template_args.ok_or_else(|| CompileError::from("no attribute 'template' found"))?;
let mut args = Self::default();
// Loop over the meta attributes and find everything that we
// understand. Return a CompileError if something is not right.
// `source` contains an enum that can represent `path` or `source`.
for item in template_args {
let pair = match item {
syn::Meta::NameValue(pair) => pair,
_ => {
return Err(format!(
"unsupported attribute argument {:?}",
item.to_token_stream()
)
.into())
}
};
let ident = match pair.path.get_ident() {
Some(ident) => ident,
None => unreachable!("not possible in syn::Meta::NameValue(…)"),
};
let value = match pair.value {
syn::Expr::Lit(lit) => lit,
syn::Expr::Group(group) => match *group.expr {
syn::Expr::Lit(lit) => lit,
_ => {
return Err(format!("unsupported argument value type for {ident:?}").into())
}
},
_ => return Err(format!("unsupported argument value type for {ident:?}").into()),
};
if ident == "path" {
if let syn::Lit::Str(s) = value.lit {
if args.source.is_some() {
return Err("must specify 'source' or 'path', not both".into());
}
args.source = Some(Source::Path(s.value()));
} else {
return Err("template path must be string literal".into());
}
} else if ident == "source" {
if let syn::Lit::Str(s) = value.lit {
if args.source.is_some() {
return Err("must specify 'source' or 'path', not both".into());
}
args.source = Some(Source::Source(s.value()));
} else {
return Err("template source must be string literal".into());
}
} else if ident == "print" {
if let syn::Lit::Str(s) = value.lit {
args.print = s.value().parse()?;
} else {
return Err("print value must be string literal".into());
}
} else if ident == "escape" {
if let syn::Lit::Str(s) = value.lit {
args.escaping = Some(s.value());
} else {
return Err("escape value must be string literal".into());
}
} else if ident == "ext" {
if let syn::Lit::Str(s) = value.lit {
args.ext = Some(s.value());
} else {
return Err("ext value must be string literal".into());
}
} else if ident == "syntax" {
if let syn::Lit::Str(s) = value.lit {
args.syntax = Some(s.value())
} else {
return Err("syntax value must be string literal".into());
}
} else if ident == "config" {
if let syn::Lit::Str(s) = value.lit {
args.config_path = Some(s.value())
} else {
return Err("config value must be string literal".into());
}
} else if ident == "whitespace" {
if let syn::Lit::Str(s) = value.lit {
args.whitespace = Some(s.value())
} else {
return Err("whitespace value must be string literal".into());
}
} else {
return Err(format!("unsupported attribute key {ident:?} found").into());
}
}
Ok(args)
}
}
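// Illustrative sketch: every key understood by `TemplateArgs::new`, in one
// hypothetical attribute; `path` and `source` are mutually exclusive.
//
// #[template(
//     source = "{{ name }}",
//     ext = "txt",
//     escape = "none",
//     syntax = "default",
//     config = "askama.toml",
//     whitespace = "minimize",
//     print = "none",
// )]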
fn find_used_templates(
input: &TemplateInput<'_>,
map: &mut HashMap<PathBuf, String>,
source: String,
) -> Result<(), CompileError> {
let mut dependency_graph = Vec::new();
let mut check = vec![(input.path.clone(), source)];
while let Some((path, source)) = check.pop() {
for n in parse(&source, input.syntax)? {
match n {
Node::Extends(extends) => {
let extends = input.config.find_template(extends, Some(&path))?;
let dependency_path = (path.clone(), extends.clone());
if dependency_graph.contains(&dependency_path) {
return Err(format!(
"cyclic dependency in graph {:#?}",
dependency_graph
.iter()
.map(|e| format!("{:#?} --> {:#?}", e.0, e.1))
.collect::<Vec<String>>()
)
.into());
}
dependency_graph.push(dependency_path);
let source = get_template_source(&extends)?;
check.push((extends, source));
}
Node::Import(_, import, _) => {
let import = input.config.find_template(import, Some(&path))?;
let source = get_template_source(&import)?;
check.push((import, source));
}
_ => {}
}
}
map.insert(path, source);
}
Ok(())
}
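// Illustrative sketch: the dependency graph built above rejects extension
// cycles at compile time, e.g. with two hypothetical templates:
//
//   a.html: {% extends "b.html" %}
//   b.html: {% extends "a.html" %}
//
// expansion stops with a "cyclic dependency in graph ..." error.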
struct Generator<'a> {
// The template input state: original struct AST and attributes
input: &'a TemplateInput<'a>,
// All contexts, keyed by the package-relative template path
contexts: &'a HashMap<&'a Path, Context<'a>, S>,
contexts: &'a HashMap<&'a Path, Context<'a>>,
// The heritage contains references to blocks and their ancestry
heritage: Option<&'a Heritage<'a>>,
// What integrations need to be generated
integrations: Integrations,
// Variables accessible directly from the current scope (not redirected to context)
locals: MapChain<'a, &'a str, LocalMeta>,
// Suffix whitespace from the previous literal. Will be flushed to the
@ -39,81 +257,75 @@ struct Generator<'a, S: std::hash::BuildHasher> {
next_ws: Option<&'a str>,
// Whitespace suppression from the previous non-literal. Will be used to
// determine whether to flush prefix whitespace from the next literal.
skip_ws: bool,
skip_ws: WhitespaceHandling,
// If currently in a block, this will contain the name of a potential parent block
super_block: Option<(&'a str, usize)>,
// buffer for writable
buf_writable: Vec<Writable<'a>>,
// Counter for write! hash named arguments
named: usize,
// If set to `suppress`, the whitespace characters will be removed by default unless `+` is
// used.
whitespace: WhitespaceHandling,
}
impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
impl<'a> Generator<'a> {
fn new<'n>(
input: &'n TemplateInput<'_>,
contexts: &'n HashMap<&'n Path, Context<'n>, S>,
contexts: &'n HashMap<&'n Path, Context<'n>>,
heritage: Option<&'n Heritage<'_>>,
integrations: Integrations,
locals: MapChain<'n, &'n str, LocalMeta>,
) -> Generator<'n, S> {
whitespace: WhitespaceHandling,
) -> Generator<'n> {
Generator {
input,
contexts,
heritage,
integrations,
locals,
next_ws: None,
skip_ws: false,
skip_ws: WhitespaceHandling::Preserve,
super_block: None,
buf_writable: vec![],
named: 0,
whitespace,
}
}
fn child(&mut self) -> Generator<'_, S> {
fn child(&mut self) -> Generator<'_> {
let locals = MapChain::with_parent(&self.locals);
Self::new(
self.input,
self.contexts,
self.heritage,
self.integrations,
locals,
self.whitespace,
)
}
// Takes a Context and generates the relevant implementations.
fn build(mut self, ctx: &'a Context<'_>) -> Result<String, CompileError> {
let mut buf = Buffer::new(0);
if !ctx.blocks.is_empty() {
if let Some(parent) = self.input.parent {
self.deref_to_parent(&mut buf, parent)?;
}
};
self.impl_template(ctx, &mut buf)?;
self.impl_display(&mut buf)?;
if self.integrations.actix {
self.impl_actix_web_responder(&mut buf)?;
}
if self.integrations.axum {
self.impl_axum_into_response(&mut buf)?;
}
if self.integrations.gotham {
self.impl_gotham_into_response(&mut buf)?;
}
if self.integrations.mendes {
self.impl_mendes_responder(&mut buf)?;
}
if self.integrations.rocket {
self.impl_rocket_responder(&mut buf)?;
}
if self.integrations.tide {
self.impl_tide_integrations(&mut buf)?;
}
if self.integrations.warp {
self.impl_warp_reply(&mut buf)?;
}
#[cfg(feature = "with-actix-web")]
self.impl_actix_web_responder(&mut buf)?;
#[cfg(feature = "with-axum")]
self.impl_axum_into_response(&mut buf)?;
#[cfg(feature = "with-gotham")]
self.impl_gotham_into_response(&mut buf)?;
#[cfg(feature = "with-hyper")]
self.impl_hyper_into_response(&mut buf)?;
#[cfg(feature = "with-mendes")]
self.impl_mendes_responder(&mut buf)?;
#[cfg(feature = "with-rocket")]
self.impl_rocket_responder(&mut buf)?;
#[cfg(feature = "with-tide")]
self.impl_tide_integrations(&mut buf)?;
#[cfg(feature = "with-warp")]
self.impl_warp_reply(&mut buf)?;
Ok(buf.buf)
}
@ -153,7 +365,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
self.handle(ctx, ctx.nodes, buf, AstLevel::Top)
}?;
self.flush_ws(Ws(false, false));
self.flush_ws(Ws(None, None));
buf.writeln("::askama::Result::Ok(())")?;
buf.writeln("}")?;
@ -162,7 +374,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
buf.writeln(";")?;
buf.writeln("const SIZE_HINT: ::std::primitive::usize = ")?;
buf.writeln(&format!("{}", size_hint))?;
buf.writeln(&format!("{size_hint}"))?;
buf.writeln(";")?;
buf.writeln("const MIME_TYPE: &'static ::std::primitive::str = ")?;
@ -173,24 +385,6 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
Ok(())
}
// Implement `Deref<Parent>` for an inheriting context struct.
fn deref_to_parent(
&mut self,
buf: &mut Buffer,
parent_type: &syn::Type,
) -> Result<(), CompileError> {
self.write_header(buf, "::std::ops::Deref", None)?;
buf.writeln(&format!(
"type Target = {};",
parent_type.into_token_stream()
))?;
buf.writeln("#[inline]")?;
buf.writeln("fn deref(&self) -> &Self::Target {")?;
buf.writeln("&self._parent")?;
buf.writeln("}")?;
buf.writeln("}")
}
// Implement `Display` for the given context struct.
fn impl_display(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
self.write_header(buf, "::std::fmt::Display", None)?;
@ -202,6 +396,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
}
// Implement Actix-web's `Responder`.
#[cfg(feature = "with-actix-web")]
fn impl_actix_web_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
self.write_header(buf, "::askama_actix::actix_web::Responder", None)?;
buf.writeln("type Body = ::askama_actix::actix_web::body::BoxBody;")?;
@ -216,20 +411,21 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
}
// Implement Axum's `IntoResponse`.
#[cfg(feature = "with-axum")]
fn impl_axum_into_response(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
self.write_header(buf, "::askama_axum::IntoResponse", None)?;
buf.writeln("#[inline]")?;
buf.writeln(
"fn into_response(self)\
-> ::askama_axum::Response<::askama_axum::BoxBody> {",
-> ::askama_axum::Response {",
)?;
let ext = self.input.extension().unwrap_or("txt");
buf.writeln(&format!("::askama_axum::into_response(&self, {:?})", ext))?;
buf.writeln("::askama_axum::into_response(&self)")?;
buf.writeln("}")?;
buf.writeln("}")
}
// Implement gotham's `IntoResponse`.
#[cfg(feature = "with-gotham")]
fn impl_gotham_into_response(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
self.write_header(buf, "::askama_gotham::IntoResponse", None)?;
buf.writeln("#[inline]")?;
@ -237,13 +433,57 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
"fn into_response(self, _state: &::askama_gotham::State)\
-> ::askama_gotham::Response<::askama_gotham::Body> {",
)?;
let ext = self.input.extension().unwrap_or("txt");
buf.writeln(&format!("::askama_gotham::respond(&self, {:?})", ext))?;
buf.writeln("::askama_gotham::respond(&self)")?;
buf.writeln("}")?;
buf.writeln("}")
}
// Implement `From<Template> for hyper::Response<Body>` and `From<Template> for hyper::Body.
#[cfg(feature = "with-hyper")]
fn impl_hyper_into_response(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
let (impl_generics, orig_ty_generics, where_clause) =
self.input.ast.generics.split_for_impl();
let ident = &self.input.ast.ident;
// From<Template> for hyper::Response<Body>
buf.writeln(&format!(
"{} {{",
quote!(
impl #impl_generics ::core::convert::From<&#ident #orig_ty_generics>
for ::askama_hyper::hyper::Response<::askama_hyper::hyper::Body>
#where_clause
)
))?;
buf.writeln("#[inline]")?;
buf.writeln(&format!(
"{} {{",
quote!(fn from(value: &#ident #orig_ty_generics) -> Self)
))?;
buf.writeln("::askama_hyper::respond(value)")?;
buf.writeln("}")?;
buf.writeln("}")?;
// TryFrom<Template> for hyper::Body
buf.writeln(&format!(
"{} {{",
quote!(
impl #impl_generics ::core::convert::TryFrom<&#ident #orig_ty_generics>
for ::askama_hyper::hyper::Body
#where_clause
)
))?;
buf.writeln("type Error = ::askama::Error;")?;
buf.writeln("#[inline]")?;
buf.writeln(&format!(
"{} {{",
quote!(fn try_from(value: &#ident #orig_ty_generics) -> Result<Self, Self::Error>)
))?;
buf.writeln("::askama::Template::render(value).map(Into::into)")?;
buf.writeln("}")?;
buf.writeln("}")
}
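// Illustrative sketch of the generated impls in use, assuming a hypothetical
// `hello` template value and the `with-hyper` feature:
//
// let resp: hyper::Response<hyper::Body> = (&hello).into();
// let body: hyper::Body = (&hello).try_into()?; // Error = ::askama::Error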
// Implement mendes' `Responder`.
#[cfg(feature = "with-mendes")]
fn impl_mendes_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
let param = syn::parse_str("A: ::mendes::Application").unwrap();
@ -255,7 +495,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
let mut where_clause = match where_clause {
Some(clause) => clause.clone(),
None => syn::WhereClause {
where_token: syn::Token![where](Span::call_site()),
where_token: syn::Token![where](proc_macro2::Span::call_site()),
predicates: syn::punctuated::Punctuated::new(),
},
};
@ -270,7 +510,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
buf.writeln(
format!(
"{} {} for {} {} {{",
quote!(impl#impl_generics),
quote!(impl #impl_generics),
"::mendes::application::IntoResponse<A>",
self.input.ast.ident,
quote!(#orig_ty_generics #where_clause),
@ -283,22 +523,20 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
-> ::mendes::http::Response<A::ResponseBody> {",
)?;
buf.writeln(&format!(
"::askama_mendes::into_response(app, req, &self, {:?})",
self.input.extension()
))?;
buf.writeln("::askama_mendes::into_response(app, req, &self)")?;
buf.writeln("}")?;
buf.writeln("}")?;
Ok(())
}
// Implement Rocket's `Responder`.
#[cfg(feature = "with-rocket")]
fn impl_rocket_responder(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
let lifetime = syn::Lifetime::new("'askama", Span::call_site());
let param = syn::GenericParam::Lifetime(syn::LifetimeDef::new(lifetime));
let lifetime = syn::Lifetime::new("'askama", proc_macro2::Span::call_site());
let param = syn::GenericParam::Lifetime(syn::LifetimeParam::new(lifetime));
self.write_header(
buf,
"::askama_rocket::Responder<'askama>",
"::askama_rocket::Responder<'askama, 'askama>",
Some(vec![param]),
)?;
@ -307,17 +545,15 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
"fn respond_to(self, _: &::askama_rocket::Request) \
-> ::askama_rocket::Result<'askama> {",
)?;
let ext = self.input.extension().unwrap_or("txt");
buf.writeln(&format!("::askama_rocket::respond(&self, {:?})", ext))?;
buf.writeln("::askama_rocket::respond(&self)")?;
buf.writeln("}")?;
buf.writeln("}")?;
Ok(())
}
#[cfg(feature = "with-tide")]
fn impl_tide_integrations(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
let ext = self.input.extension().unwrap_or("txt");
self.write_header(
buf,
"::std::convert::TryInto<::askama_tide::tide::Body>",
@ -328,7 +564,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
#[inline]\n\
fn try_into(self) -> ::askama_tide::askama::Result<::askama_tide::tide::Body> {",
)?;
buf.writeln(&format!("::askama_tide::try_into_body(&self, {:?})", &ext))?;
buf.writeln("::askama_tide::try_into_body(&self)")?;
buf.writeln("}")?;
buf.writeln("}")?;
@ -336,16 +572,16 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
self.write_header(buf, "Into<::askama_tide::tide::Response>", None)?;
buf.writeln("#[inline]")?;
buf.writeln("fn into(self) -> ::askama_tide::tide::Response {")?;
buf.writeln(&format!("::askama_tide::into_response(&self, {:?})", ext))?;
buf.writeln("::askama_tide::into_response(&self)")?;
buf.writeln("}\n}")
}
#[cfg(feature = "with-warp")]
fn impl_warp_reply(&mut self, buf: &mut Buffer) -> Result<(), CompileError> {
self.write_header(buf, "::askama_warp::warp::reply::Reply", None)?;
buf.writeln("#[inline]")?;
buf.writeln("fn into_response(self) -> ::askama_warp::warp::reply::Response {")?;
let ext = self.input.extension().unwrap_or("txt");
buf.writeln(&format!("::askama_warp::reply(&self, {:?})", ext))?;
buf.writeln("::askama_warp::reply(&self)")?;
buf.writeln("}")?;
buf.writeln("}")
}
@ -369,7 +605,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
buf.writeln(
format!(
"{} {} for {}{} {{",
quote!(impl#impl_generics),
quote!(impl #impl_generics),
target,
self.input.ast.ident,
quote!(#orig_ty_generics #where_clause),
@ -406,16 +642,16 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
self.write_let(buf, ws, var, val)?;
}
Node::Cond(ref conds, ws) => {
self.write_cond(ctx, buf, conds, ws)?;
size_hint += self.write_cond(ctx, buf, conds, ws)?;
}
Node::Match(ws1, ref expr, ref arms, ws2) => {
self.write_match(ctx, buf, ws1, expr, arms, ws2)?;
size_hint += self.write_match(ctx, buf, ws1, expr, arms, ws2)?;
}
Node::Loop(ref loop_block) => {
self.write_loop(ctx, buf, loop_block)?;
size_hint += self.write_loop(ctx, buf, loop_block)?;
}
Node::BlockDef(ws1, name, _, ws2) => {
self.write_block(buf, Some(name), Ws(ws1.0, ws2.1))?;
size_hint += self.write_block(buf, Some(name), Ws(ws1.0, ws2.1))?;
}
Node::Include(ws, path) => {
size_hint += self.handle_include(ctx, buf, ws, path)?;
@ -462,6 +698,11 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
}
if AstLevel::Top == level {
// Handle any pending whitespace.
if self.next_ws.is_some() {
self.flush_ws(Ws(Some(self.skip_ws.into()), None));
}
size_hint += self.write_buf_writable(buf)?;
}
Ok(size_hint)
@ -551,7 +792,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
let mut arm_sizes = Vec::new();
let expr_code = self.visit_expr_root(expr)?;
buf.writeln(&format!("match &{} {{", expr_code))?;
buf.writeln(&format!("match &{expr_code} {{"))?;
let mut arm_size = 0;
for (i, arm) in arms.iter().enumerate() {
@ -598,24 +839,24 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
buf.writeln("{")?;
buf.writeln("let mut _did_loop = false;")?;
match loop_block.iter {
Expr::Range(_, _, _) => buf.writeln(&format!("let _iter = {};", expr_code)),
Expr::Array(..) => buf.writeln(&format!("let _iter = {}.iter();", expr_code)),
Expr::Range(_, _, _) => buf.writeln(&format!("let _iter = {expr_code};")),
Expr::Array(..) => buf.writeln(&format!("let _iter = {expr_code}.iter();")),
// If `iter` is a call then we assume it's something that returns
// an iterator. If not then the user can explicitly add the needed
// call without issues.
Expr::Call(..) | Expr::Index(..) => {
buf.writeln(&format!("let _iter = ({}).into_iter();", expr_code))
buf.writeln(&format!("let _iter = ({expr_code}).into_iter();"))
}
// If accessing `self` then it most likely needs to be
// borrowed, to prevent an attempt of moving.
_ if expr_code.starts_with("self.") => {
buf.writeln(&format!("let _iter = (&{}).into_iter();", expr_code))
buf.writeln(&format!("let _iter = (&{expr_code}).into_iter();"))
}
// If accessing a field then it most likely needs to be
// borrowed, to prevent an attempt of moving.
Expr::Attr(..) => buf.writeln(&format!("let _iter = (&{}).into_iter();", expr_code)),
Expr::Attr(..) => buf.writeln(&format!("let _iter = (&{expr_code}).into_iter();")),
// Otherwise, we borrow `iter` assuming that it implements `IntoIterator`.
_ => buf.writeln(&format!("let _iter = ({}).into_iter();", expr_code)),
_ => buf.writeln(&format!("let _iter = ({expr_code}).into_iter();")),
}?;
if let Some(cond) = &loop_block.cond {
self.locals.push();
@ -668,13 +909,14 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
let (def, own_ctx) = match scope {
Some(s) => {
let path = ctx.imports.get(s).ok_or_else(|| {
CompileError::from(format!("no import found for scope {:?}", s))
})?;
let mctx = self.contexts.get(path.as_path()).ok_or_else(|| {
CompileError::from(format!("context for {:?} not found", path))
CompileError::from(format!("no import found for scope {s:?}"))
})?;
let mctx = self
.contexts
.get(path.as_path())
.ok_or_else(|| CompileError::from(format!("context for {path:?} not found")))?;
let def = mctx.macros.get(name).ok_or_else(|| {
CompileError::from(format!("macro {:?} not found in scope {:?}", name, s))
CompileError::from(format!("macro {name:?} not found in scope {s:?}"))
})?;
(def, mctx)
}
@ -682,7 +924,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
let def = ctx
.macros
.get(name)
.ok_or_else(|| CompileError::from(format!("macro {:?} not found", name)))?;
.ok_or_else(|| CompileError::from(format!("macro {name:?} not found")))?;
(def, ctx)
}
};
@ -698,7 +940,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
let mut is_first_variable = true;
for (i, arg) in def.args.iter().enumerate() {
let expr = args.get(i).ok_or_else(|| {
CompileError::from(format!("macro {:?} takes more than {} arguments", name, i))
CompileError::from(format!("macro {name:?} takes more than {i} arguments"))
})?;
match expr {
@ -885,7 +1127,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
(Some(cur_name), None) => (cur_name, 0),
// A block definition contains a block definition of the same name
(Some(cur_name), Some((prev_name, _))) if cur_name == prev_name => {
return Err(format!("cannot define recursive blocks ({})", cur_name).into());
return Err(format!("cannot define recursive blocks ({cur_name})").into());
}
// A block definition contains a definition of another block
(Some(cur_name), Some((_, _))) => (cur_name, 0),
@ -904,7 +1146,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
let (ctx, def) = heritage.blocks[cur.0].get(cur.1).ok_or_else(|| {
CompileError::from(match name {
None => format!("no super() block found for block '{}'", cur.0),
Some(name) => format!("no block found for name '{}'", name),
Some(name) => format!("no block found for name '{name}'"),
})
})?;
@ -983,24 +1225,26 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
),
};
use std::collections::hash_map::Entry;
let id = match expr_cache.entry(expression.clone()) {
Entry::Occupied(e) => *e.get(),
Entry::Vacant(e) => {
Entry::Occupied(e) if s.is_cacheable() => *e.get(),
e => {
let id = self.named;
self.named += 1;
buf_expr.write(&format!("expr{} = ", id));
buf_expr.write(&format!("expr{id} = "));
buf_expr.write("&");
buf_expr.write(&expression);
buf_expr.writeln(",")?;
e.insert(id);
if let Entry::Vacant(e) = e {
e.insert(id);
}
id
}
};
buf_format.write(&format!("{{expr{}}}", id));
buf_format.write(&format!("{{expr{id}}}"));
size_hint += 3;
}
}
@ -1019,17 +1263,25 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
fn visit_lit(&mut self, lws: &'a str, val: &'a str, rws: &'a str) {
assert!(self.next_ws.is_none());
if !lws.is_empty() {
if self.skip_ws {
self.skip_ws = false;
} else if val.is_empty() {
assert!(rws.is_empty());
self.next_ws = Some(lws);
} else {
self.buf_writable.push(Writable::Lit(lws));
match self.skip_ws {
WhitespaceHandling::Suppress => {}
_ if val.is_empty() => {
assert!(rws.is_empty());
self.next_ws = Some(lws);
}
WhitespaceHandling::Preserve => self.buf_writable.push(Writable::Lit(lws)),
WhitespaceHandling::Minimize => {
self.buf_writable
.push(Writable::Lit(match lws.contains('\n') {
true => "\n",
false => " ",
}));
}
}
}
if !val.is_empty() {
self.skip_ws = WhitespaceHandling::Preserve;
self.buf_writable.push(Writable::Lit(val));
}
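// Illustrative: under `Minimize`, a literal's surrounding whitespace run
// collapses to a single character, "\n" if the run contains a newline and
// " " otherwise, as the match above implements.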
@ -1068,7 +1320,9 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
Expr::Filter(name, ref args) => self.visit_filter(buf, name, args)?,
Expr::Unary(op, ref inner) => self.visit_unary(buf, op, inner)?,
Expr::BinOp(op, ref left, ref right) => self.visit_binop(buf, op, left, right)?,
Expr::Range(op, ref left, ref right) => self.visit_range(buf, op, left, right)?,
Expr::Range(op, ref left, ref right) => {
self.visit_range(buf, op, left.as_deref(), right.as_deref())?
}
Expr::Group(ref inner) => self.visit_group(buf, inner)?,
Expr::Call(ref obj, ref args) => self.visit_call(buf, obj, args)?,
Expr::RustMacro(name, args) => self.visit_rust_macro(buf, name, args),
@ -1162,11 +1416,11 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
name = "json";
}
#[cfg(not(feature = "json"))]
#[cfg(not(feature = "serde-json"))]
if name == "json" {
return Err("the `json` filter requires the `serde-json` feature to be enabled".into());
}
#[cfg(not(feature = "yaml"))]
#[cfg(not(feature = "serde-yaml"))]
if name == "yaml" {
return Err("the `yaml` filter requires the `serde-yaml` feature to be enabled".into());
}
@ -1177,10 +1431,10 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
"::askama::filters::{}({}, ",
name, self.input.escaper
));
} else if filters::BUILT_IN_FILTERS.contains(&name) {
buf.write(&format!("::askama::filters::{}(", name));
} else if crate::BUILT_IN_FILTERS.contains(&name) {
buf.write(&format!("::askama::filters::{name}("));
} else {
buf.write(&format!("filters::{}(", name));
buf.write(&format!("filters::{name}("));
}
self._visit_args(buf, args)?;
@ -1210,7 +1464,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
.config
.escapers
.iter()
.find_map(|(escapers, escaper)| escapers.contains(name).then(|| escaper))
.find_map(|(escapers, escaper)| escapers.contains(name).then_some(escaper))
.ok_or_else(|| CompileError::from("invalid escaper for escape filter"))?,
None => self.input.escaper,
};
@ -1384,7 +1638,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
}
_ => return Err("loop.cycle(…) expects exactly one argument".into()),
},
s => return Err(format!("unknown loop method: {:?}", s).into()),
s => return Err(format!("unknown loop method: {s:?}").into()),
},
left => {
match left {
@ -1420,8 +1674,8 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
&mut self,
buf: &mut Buffer,
op: &str,
left: &Option<Box<Expr<'_>>>,
right: &Option<Box<Expr<'_>>>,
left: Option<&Expr<'_>>,
right: Option<&Expr<'_>>,
) -> Result<DisplayWrap, CompileError> {
if let Some(left) = left {
self.visit_expr(buf, left)?;
@ -1441,7 +1695,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
right: &Expr<'_>,
) -> Result<DisplayWrap, CompileError> {
self.visit_expr(buf, left)?;
buf.write(&format!(" {} ", op));
buf.write(&format!(" {op} "));
self.visit_expr(buf, right)?;
Ok(DisplayWrap::Unwrapped)
}
@ -1516,12 +1770,12 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
}
fn visit_str_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap {
buf.write(&format!("\"{}\"", s));
buf.write(&format!("\"{s}\""));
DisplayWrap::Unwrapped
}
fn visit_char_lit(&mut self, buf: &mut Buffer, s: &str) -> DisplayWrap {
buf.write(&format!("'{}'", s));
buf.write(&format!("'{s}'"));
DisplayWrap::Unwrapped
}
@ -1608,15 +1862,43 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
self.prepare_ws(ws);
}
fn should_trim_ws(&self, ws: Option<Whitespace>) -> WhitespaceHandling {
match ws {
Some(Whitespace::Suppress) => WhitespaceHandling::Suppress,
Some(Whitespace::Preserve) => WhitespaceHandling::Preserve,
Some(Whitespace::Minimize) => WhitespaceHandling::Minimize,
None => self.whitespace,
}
}
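// Illustrative sketch, assuming the parser's whitespace markers: a per-tag
// marker overrides the configured default, e.g. "{%- if x %}" suppresses,
// "{%+ if x %}" preserves, and "{%~ if x %}" minimizes adjacent whitespace.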
// If the previous literal left some trailing whitespace in `next_ws` and the
// prefix whitespace handling from the given argument does not suppress it,
// flush that whitespace. In either case, `next_ws` is reset to `None`
// (no trailing whitespace).
fn flush_ws(&mut self, ws: Ws) {
if self.next_ws.is_some() && !ws.0 {
let val = self.next_ws.unwrap();
if !val.is_empty() {
self.buf_writable.push(Writable::Lit(val));
if self.next_ws.is_none() {
return;
}
// If `whitespace` is set to `suppress`, we keep the whitespace characters only if there is
// a `+` character.
match self.should_trim_ws(ws.0) {
WhitespaceHandling::Preserve => {
let val = self.next_ws.unwrap();
if !val.is_empty() {
self.buf_writable.push(Writable::Lit(val));
}
}
WhitespaceHandling::Minimize => {
let val = self.next_ws.unwrap();
if !val.is_empty() {
self.buf_writable
.push(Writable::Lit(match val.contains('\n') {
true => "\n",
false => " ",
}));
}
}
WhitespaceHandling::Suppress => {}
}
self.next_ws = None;
}
@ -1625,7 +1907,7 @@ impl<'a, S: std::hash::BuildHasher> Generator<'a, S> {
// argument, to determine whether to suppress leading whitespace from the
// next literal.
fn prepare_ws(&mut self, ws: Ws) {
self.skip_ws = ws.1;
self.skip_ws = self.should_trim_ws(ws.1);
}
}
@ -1739,10 +2021,9 @@ where
/// Iterates the scopes in reverse and returns `Some(LocalMeta)`
/// from the first scope where `key` exists.
fn get(&self, key: &K) -> Option<&V> {
let scopes = self.scopes.iter().rev();
let mut scopes = self.scopes.iter().rev();
scopes
.filter_map(|set| set.get(key))
.next()
.find_map(|set| set.get(key))
.or_else(|| self.parent.and_then(|set| set.get(key)))
}
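
`MapChain::get` now uses `Iterator::find_map` directly instead of `filter_map(...).next()`; behavior is unchanged, and the innermost scope still shadows outer ones. A self-contained sketch of that lookup order:

    use std::collections::HashMap;

    // Innermost scope (last in the slice) wins, as in `MapChain::get` above.
    fn lookup<'a>(scopes: &'a [HashMap<&'a str, i32>], key: &str) -> Option<&'a i32> {
        scopes.iter().rev().find_map(|scope| scope.get(key))
    }
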
@ -1788,8 +2069,7 @@ impl MapChain<'_, &str, LocalMeta> {
fn resolve_or_self(&self, name: &str) -> String {
let name = normalize_identifier(name);
self.resolve(name)
.unwrap_or_else(|| format!("self.{}", name))
self.resolve(name).unwrap_or_else(|| format!("self.{name}"))
}
}
@ -1802,23 +2082,19 @@ fn median(sizes: &mut [usize]) -> usize {
}
}
#[derive(Clone, PartialEq)]
#[derive(Clone, Copy, PartialEq)]
enum AstLevel {
Top,
Block,
Nested,
}
impl Copy for AstLevel {}
#[derive(Clone)]
#[derive(Clone, Copy)]
enum DisplayWrap {
Wrapped,
Unwrapped,
}
impl Copy for DisplayWrap {}
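
These two hunks fold standalone `impl Copy for … {}` items into the derive list; the expansion is identical, since `Copy` is a marker trait with `Clone` as its supertrait. Sketch (type name hypothetical):

    #[derive(Clone, Copy)] // expands to the same empty `impl Copy` as before
    enum DisplayWrapDemo {
        Wrapped,
        Unwrapped,
    }
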
#[derive(Debug)]
enum Writable<'a> {
Lit(&'a str),


@ -1,18 +1,19 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use crate::parser::{Expr, Loop, Macro, Node};
use crate::{CompileError, Config};
use crate::config::Config;
use crate::parser::{Loop, Macro, Node};
use crate::CompileError;
pub struct Heritage<'a> {
pub root: &'a Context<'a>,
pub blocks: BlockAncestry<'a>,
pub(crate) struct Heritage<'a> {
pub(crate) root: &'a Context<'a>,
pub(crate) blocks: BlockAncestry<'a>,
}
impl Heritage<'_> {
pub fn new<'n, S: std::hash::BuildHasher>(
pub(crate) fn new<'n>(
mut ctx: &'n Context<'n>,
contexts: &'n HashMap<&'n Path, Context<'n>, S>,
contexts: &'n HashMap<&'n Path, Context<'n>>,
) -> Heritage<'n> {
let mut blocks: BlockAncestry<'n> = ctx
.blocks
@ -33,16 +34,16 @@ impl Heritage<'_> {
type BlockAncestry<'a> = HashMap<&'a str, Vec<(&'a Context<'a>, &'a Node<'a>)>>;
pub struct Context<'a> {
pub nodes: &'a [Node<'a>],
pub extends: Option<PathBuf>,
pub blocks: HashMap<&'a str, &'a Node<'a>>,
pub macros: HashMap<&'a str, &'a Macro<'a>>,
pub imports: HashMap<&'a str, PathBuf>,
pub(crate) struct Context<'a> {
pub(crate) nodes: &'a [Node<'a>],
pub(crate) extends: Option<PathBuf>,
pub(crate) blocks: HashMap<&'a str, &'a Node<'a>>,
pub(crate) macros: HashMap<&'a str, &'a Macro<'a>>,
pub(crate) imports: HashMap<&'a str, PathBuf>,
}
impl Context<'_> {
pub fn new<'n>(
pub(crate) fn new<'n>(
config: &Config<'_>,
path: &Path,
nodes: &'n [Node<'n>],
@ -57,7 +58,7 @@ impl Context<'_> {
while let Some(nodes) = nested.pop() {
for n in nodes {
match n {
Node::Extends(Expr::StrLit(extends_path)) if top => match extends {
Node::Extends(extends_path) if top => match extends {
Some(_) => return Err("multiple extend blocks found".into()),
None => {
extends = Some(config.find_template(extends_path, Some(path))?);
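
Two things change in this file: items drop to `pub(crate)` now that the derive macro and its support code live in a single crate, and `Node::Extends` stores the path string literal directly instead of wrapping it in `Expr::StrLit`. A reduced sketch of the simpler match that results (names shortened from the hunk above):

    enum NodeDemo<'a> {
        Extends(&'a str),
        Other,
    }

    fn find_extends<'a>(nodes: &[NodeDemo<'a>]) -> Option<&'a str> {
        nodes.iter().find_map(|n| match n {
            NodeDemo::Extends(path) => Some(*path), // no Expr destructuring needed
            NodeDemo::Other => None,
        })
    }
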


@ -1,163 +1,54 @@
use crate::{CompileError, Config, Syntax};
use crate::config::{Config, Syntax};
use crate::generator::TemplateArgs;
use crate::CompileError;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use mime::Mime;
use quote::ToTokens;
pub struct TemplateInput<'a> {
pub ast: &'a syn::DeriveInput,
pub config: &'a Config<'a>,
pub syntax: &'a Syntax<'a>,
pub source: Source,
pub print: Print,
pub escaper: &'a str,
pub ext: Option<String>,
pub mime_type: String,
pub parent: Option<&'a syn::Type>,
pub path: PathBuf,
pub(crate) struct TemplateInput<'a> {
pub(crate) ast: &'a syn::DeriveInput,
pub(crate) config: &'a Config<'a>,
pub(crate) syntax: &'a Syntax<'a>,
pub(crate) source: Source,
pub(crate) print: Print,
pub(crate) escaper: &'a str,
pub(crate) ext: Option<String>,
pub(crate) mime_type: String,
pub(crate) path: PathBuf,
}
impl TemplateInput<'_> {
/// Extract the template metadata from the `DeriveInput` structure. This
/// mostly recovers the data for the `TemplateInput` fields from the
/// `template()` attribute list fields; it also finds the type of the `_parent`
/// field, if any.
pub fn new<'n>(
/// `template()` attribute list fields.
pub(crate) fn new<'n>(
ast: &'n syn::DeriveInput,
config: &'n Config<'_>,
args: TemplateArgs,
) -> Result<TemplateInput<'n>, CompileError> {
// Check that an attribute called `template()` exists once and that it is
// the proper type (list).
let mut template_args = None;
for attr in &ast.attrs {
let ident = match attr.path.get_ident() {
Some(ident) => ident,
None => continue,
};
if ident == "template" {
if template_args.is_some() {
return Err("duplicated 'template' attribute".into());
}
match attr.parse_meta() {
Ok(syn::Meta::List(syn::MetaList { nested, .. })) => {
template_args = Some(nested);
}
Ok(_) => return Err("'template' attribute must be a list".into()),
Err(e) => return Err(format!("unable to parse attribute: {}", e).into()),
}
}
}
let template_args =
template_args.ok_or_else(|| CompileError::from("no attribute 'template' found"))?;
// Loop over the meta attributes and find everything that we
// understand. Return a CompileError if something is not right.
// `source` contains an enum that can represent `path` or `source`.
let mut source = None;
let mut print = Print::None;
let mut escaping = None;
let mut ext = None;
let mut syntax = None;
for item in template_args {
let pair = match item {
syn::NestedMeta::Meta(syn::Meta::NameValue(ref pair)) => pair,
_ => {
return Err(format!(
"unsupported attribute argument {:?}",
item.to_token_stream()
)
.into())
}
};
let ident = match pair.path.get_ident() {
Some(ident) => ident,
None => unreachable!("not possible in syn::Meta::NameValue(…)"),
};
if ident == "path" {
if let syn::Lit::Str(ref s) = pair.lit {
if source.is_some() {
return Err("must specify 'source' or 'path', not both".into());
}
source = Some(Source::Path(s.value()));
} else {
return Err("template path must be string literal".into());
}
} else if ident == "source" {
if let syn::Lit::Str(ref s) = pair.lit {
if source.is_some() {
return Err("must specify 'source' or 'path', not both".into());
}
source = Some(Source::Source(s.value()));
} else {
return Err("template source must be string literal".into());
}
} else if ident == "print" {
if let syn::Lit::Str(ref s) = pair.lit {
print = s.value().parse()?;
} else {
return Err("print value must be string literal".into());
}
} else if ident == "escape" {
if let syn::Lit::Str(ref s) = pair.lit {
escaping = Some(s.value());
} else {
return Err("escape value must be string literal".into());
}
} else if ident == "ext" {
if let syn::Lit::Str(ref s) = pair.lit {
ext = Some(s.value());
} else {
return Err("ext value must be string literal".into());
}
} else if ident == "syntax" {
if let syn::Lit::Str(ref s) = pair.lit {
syntax = Some(s.value())
} else {
return Err("syntax value must be string literal".into());
}
} else {
return Err(format!("unsupported attribute key {:?} found", ident).into());
}
}
let TemplateArgs {
source,
print,
escaping,
ext,
syntax,
..
} = args;
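
The hand-rolled `syn::Meta` walk above is gone; attribute parsing now happens once, up front, in `TemplateArgs` (defined in generator.rs, not shown here), and `TemplateInput::new` merely destructures the result. For reference, with the syn 2.x this askama version depends on, such parsing would typically use `Attribute::parse_nested_meta`; the helper below is an illustrative sketch, not the crate's actual code:

    use syn::{Attribute, LitStr};

    // Illustrative only: read #[template(path = "...")] with syn 2.x.
    fn parse_template_path(attrs: &[Attribute]) -> syn::Result<Option<String>> {
        let mut path = None;
        for attr in attrs {
            if !attr.path().is_ident("template") {
                continue;
            }
            attr.parse_nested_meta(|meta| {
                if meta.path.is_ident("path") {
                    let s: LitStr = meta.value()?.parse()?;
                    path = Some(s.value());
                    Ok(())
                } else {
                    // Keys this sketch does not model (source, ext, print, ...).
                    Err(meta.error("unsupported template() key in this sketch"))
                }
            })?;
        }
        Ok(path)
    }
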
// Validate the `source` and `ext` value together, since they are
// related. In case `source` was used instead of `path`, the value
// of `ext` is merged into a synthetic `path` value here.
let source = source.expect("template path or source not found in attributes");
let path = match (&source, &ext) {
(&Source::Path(ref path), _) => config.find_template(path, None)?,
(Source::Path(path), _) => config.find_template(path, None)?,
(&Source::Source(_), Some(ext)) => PathBuf::from(format!("{}.{}", ast.ident, ext)),
(&Source::Source(_), None) => {
return Err("must include 'ext' attribute when using 'source' attribute".into())
}
};
// Check to see if a `_parent` field was defined on the context
// struct, and store the type for it for use in the code generator.
let parent = match ast.data {
syn::Data::Struct(syn::DataStruct {
fields: syn::Fields::Named(ref fields),
..
}) => fields
.named
.iter()
.find(|f| f.ident.as_ref().filter(|name| *name == "_parent").is_some())
.map(|f| &f.ty),
_ => None,
};
if parent.is_some() {
eprint!(
" --> in struct {}\n = use of deprecated field '_parent'\n",
ast.ident
);
}
// Validate syntax
let syntax = syntax.map_or_else(
|| Ok(config.syntaxes.get(config.default_syntax).unwrap()),
@ -165,7 +56,7 @@ impl TemplateInput<'_> {
config
.syntaxes
.get(&s)
.ok_or_else(|| CompileError::from(format!("attribute syntax {} does not exist", s)))
.ok_or_else(|| CompileError::from(format!("attribute syntax {s} does not exist")))
},
)?;
@ -187,7 +78,7 @@ impl TemplateInput<'_> {
}
let escaper = escaper.ok_or_else(|| {
CompileError::from(format!("no escaper defined for extension '{}'", escaping))
CompileError::from(format!("no escaper defined for extension '{escaping}'"))
})?;
let mime_type =
@ -203,19 +94,18 @@ impl TemplateInput<'_> {
escaper,
ext,
mime_type,
parent,
path,
})
}
#[inline]
pub fn extension(&self) -> Option<&str> {
pub(crate) fn extension(&self) -> Option<&str> {
ext_default_to_path(self.ext.as_deref(), &self.path)
}
}
#[inline]
pub fn ext_default_to_path<'a>(ext: Option<&'a str>, path: &'a Path) -> Option<&'a str> {
fn ext_default_to_path<'a>(ext: Option<&'a str>, path: &'a Path) -> Option<&'a str> {
ext.or_else(|| extension(path))
}
@ -233,13 +123,13 @@ fn extension(path: &Path) -> Option<&str> {
}
}
pub enum Source {
pub(crate) enum Source {
Path(String),
Source(String),
}
#[derive(PartialEq)]
pub enum Print {
pub(crate) enum Print {
All,
Ast,
Code,
@ -256,13 +146,18 @@ impl FromStr for Print {
"ast" => Ast,
"code" => Code,
"none" => None,
v => return Err(format!("invalid value for print option: {}", v,).into()),
v => return Err(format!("invalid value for print option: {v}",).into()),
})
}
}
#[doc(hidden)]
pub fn extension_to_mime_type(ext: &str) -> Mime {
impl Default for Print {
fn default() -> Self {
Self::None
}
}
pub(crate) fn extension_to_mime_type(ext: &str) -> Mime {
let basic_type = mime_guess::from_ext(ext).first_or_octet_stream();
for (simple, utf_8) in &TEXT_TYPES {
if &basic_type == simple {


@ -2,119 +2,99 @@
#![deny(elided_lifetimes_in_paths)]
#![deny(unreachable_pub)]
use askama_shared::heritage::{Context, Heritage};
use askama_shared::input::{Print, Source, TemplateInput};
use askama_shared::parser::{parse, Expr, Node};
use askama_shared::{
generator, get_template_source, read_config_file, CompileError, Config, Integrations,
};
use proc_macro::TokenStream;
use std::borrow::Cow;
use std::fmt;
use std::collections::HashMap;
use std::path::PathBuf;
use proc_macro::TokenStream;
use proc_macro2::Span;
mod config;
mod generator;
mod heritage;
mod input;
mod parser;
#[proc_macro_derive(Template, attributes(template))]
pub fn derive_template(input: TokenStream) -> TokenStream {
let ast: syn::DeriveInput = syn::parse(input).unwrap();
match build_template(&ast) {
Ok(source) => source.parse().unwrap(),
Err(e) => e.to_compile_error().into(),
}
generator::derive_template(input)
}
/// Takes a `syn::DeriveInput` and generates source code for it
///
/// Reads the metadata from the `template()` attribute to get the template
/// metadata, then fetches the source from the filesystem. The source is
/// parsed, and the parse tree is fed to the code generator. Will print
/// the parse tree and/or generated source according to the `print` key's
/// value as passed to the `template()` attribute.
fn build_template(ast: &syn::DeriveInput) -> Result<String, CompileError> {
let config_toml = read_config_file()?;
let config = Config::new(&config_toml)?;
let input = TemplateInput::new(ast, &config)?;
let source: String = match input.source {
Source::Source(ref s) => s.clone(),
Source::Path(_) => get_template_source(&input.path)?,
};
let mut sources = HashMap::new();
find_used_templates(&input, &mut sources, source)?;
let mut parsed = HashMap::new();
for (path, src) in &sources {
parsed.insert(path.as_path(), parse(src, input.syntax)?);
}
let mut contexts = HashMap::new();
for (path, nodes) in &parsed {
contexts.insert(*path, Context::new(input.config, path, nodes)?);
}
let ctx = &contexts[input.path.as_path()];
let heritage = if !ctx.blocks.is_empty() || ctx.extends.is_some() {
Some(Heritage::new(ctx, &contexts))
} else {
None
};
if input.print == Print::Ast || input.print == Print::All {
eprintln!("{:?}", parsed[input.path.as_path()]);
}
let code = generator::generate(&input, &contexts, heritage.as_ref(), INTEGRATIONS)?;
if input.print == Print::Code || input.print == Print::All {
eprintln!("{}", code);
}
Ok(code)
#[derive(Debug, Clone)]
struct CompileError {
msg: Cow<'static, str>,
span: Span,
}
fn find_used_templates(
input: &TemplateInput<'_>,
map: &mut HashMap<PathBuf, String>,
source: String,
) -> Result<(), CompileError> {
let mut dependency_graph = Vec::new();
let mut check = vec![(input.path.clone(), source)];
while let Some((path, source)) = check.pop() {
for n in parse(&source, input.syntax)? {
match n {
Node::Extends(Expr::StrLit(extends)) => {
let extends = input.config.find_template(extends, Some(&path))?;
let dependency_path = (path.clone(), extends.clone());
if dependency_graph.contains(&dependency_path) {
return Err(format!(
"cyclic dependecy in graph {:#?}",
dependency_graph
.iter()
.map(|e| format!("{:#?} --> {:#?}", e.0, e.1))
.collect::<Vec<String>>()
)
.into());
}
dependency_graph.push(dependency_path);
let source = get_template_source(&extends)?;
check.push((extends, source));
}
Node::Import(_, import, _) => {
let import = input.config.find_template(import, Some(&path))?;
let source = get_template_source(&import)?;
check.push((import, source));
}
_ => {}
}
impl CompileError {
fn new<S: Into<Cow<'static, str>>>(s: S, span: Span) -> Self {
Self {
msg: s.into(),
span,
}
map.insert(path, source);
}
Ok(())
fn into_compile_error(self) -> TokenStream {
syn::Error::new(self.span, self.msg)
.to_compile_error()
.into()
}
}
const INTEGRATIONS: Integrations = Integrations {
actix: cfg!(feature = "actix-web"),
axum: cfg!(feature = "axum"),
gotham: cfg!(feature = "gotham"),
mendes: cfg!(feature = "mendes"),
rocket: cfg!(feature = "rocket"),
tide: cfg!(feature = "tide"),
warp: cfg!(feature = "warp"),
};
impl std::error::Error for CompileError {}
impl fmt::Display for CompileError {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt.write_str(&self.msg)
}
}
impl From<&'static str> for CompileError {
#[inline]
fn from(s: &'static str) -> Self {
Self::new(s, Span::call_site())
}
}
impl From<String> for CompileError {
#[inline]
fn from(s: String) -> Self {
Self::new(s, Span::call_site())
}
}
// This is used by the code generator to decide whether a named filter is part of
// Askama or should refer to a local `filters` module. It should contain all the
// filters shipped with Askama, even the optional ones (since optional inclusion
// in the const vector based on features seems impossible right now).
const BUILT_IN_FILTERS: &[&str] = &[
"abs",
"capitalize",
"center",
"e",
"escape",
"filesizeformat",
"fmt",
"format",
"indent",
"into_f64",
"into_isize",
"join",
"linebreaks",
"linebreaksbr",
"paragraphbreaks",
"lower",
"lowercase",
"safe",
"trim",
"truncate",
"upper",
"uppercase",
"urlencode",
"urlencode_strict",
"wordcount",
// optional features, reserve the names anyway:
"json",
"markdown",
"yaml",
];
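
The derive entry point is now a thin wrapper, and `CompileError` gains a `proc_macro2::Span` so diagnostics can point at the offending tokens instead of the call site. A minimal sketch of that reporting path:

    use proc_macro2::{Span, TokenStream};

    // Turn a message plus span into a compiler diagnostic token stream.
    fn to_compile_error(msg: &str, span: Span) -> TokenStream {
        syn::Error::new(span, msg).to_compile_error()
    }
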


@ -0,0 +1,346 @@
use std::str;
use nom::branch::alt;
use nom::bytes::complete::{tag, take_till};
use nom::character::complete::char;
use nom::combinator::{cut, map, not, opt, peek, recognize};
use nom::multi::{fold_many0, many0, separated_list0, separated_list1};
use nom::sequence::{delimited, pair, preceded, terminated, tuple};
use nom::IResult;
use super::{
bool_lit, char_lit, identifier, nested_parenthesis, not_ws, num_lit, path, str_lit, ws,
};
#[derive(Debug, PartialEq)]
pub(crate) enum Expr<'a> {
BoolLit(&'a str),
NumLit(&'a str),
StrLit(&'a str),
CharLit(&'a str),
Var(&'a str),
Path(Vec<&'a str>),
Array(Vec<Expr<'a>>),
Attr(Box<Expr<'a>>, &'a str),
Index(Box<Expr<'a>>, Box<Expr<'a>>),
Filter(&'a str, Vec<Expr<'a>>),
Unary(&'a str, Box<Expr<'a>>),
BinOp(&'a str, Box<Expr<'a>>, Box<Expr<'a>>),
Range(&'a str, Option<Box<Expr<'a>>>, Option<Box<Expr<'a>>>),
Group(Box<Expr<'a>>),
Tuple(Vec<Expr<'a>>),
Call(Box<Expr<'a>>, Vec<Expr<'a>>),
RustMacro(&'a str, &'a str),
Try(Box<Expr<'a>>),
}
impl Expr<'_> {
pub(super) fn parse(i: &str) -> IResult<&str, Expr<'_>> {
expr_any(i)
}
pub(super) fn parse_arguments(i: &str) -> IResult<&str, Vec<Expr<'_>>> {
arguments(i)
}
/// Returns `true` if enough assumptions can be made
/// to determine that `self` is copyable.
pub(crate) fn is_copyable(&self) -> bool {
self.is_copyable_within_op(false)
}
fn is_copyable_within_op(&self, within_op: bool) -> bool {
use Expr::*;
match self {
BoolLit(_) | NumLit(_) | StrLit(_) | CharLit(_) => true,
Unary(.., expr) => expr.is_copyable_within_op(true),
BinOp(_, lhs, rhs) => {
lhs.is_copyable_within_op(true) && rhs.is_copyable_within_op(true)
}
Range(..) => true,
// The result of a call likely doesn't need to be borrowed,
// since in that case the call most likely returns a
// reference in the first place.
Call(..) | Path(..) => true,
// If the `expr` is within a `Unary` or `BinOp` then
// an assumption can be made that the operand is copy.
// If not, then the value is moved and adding `.clone()`
// will solve that issue. However, if the operand is
// implicitly borrowed, then it's likely not even possible
// to get the template to compile.
_ => within_op && self.is_attr_self(),
}
}
/// Returns `true` if this is an `Attr` where the `obj` is `"self"`.
pub(crate) fn is_attr_self(&self) -> bool {
match self {
Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Var("self")) => true,
Expr::Attr(obj, _) if matches!(obj.as_ref(), Expr::Attr(..)) => obj.is_attr_self(),
_ => false,
}
}
/// Returns `true` if the outcome of this expression may be used multiple times in the same
/// `write!()` call, without evaluating the expression again, i.e. the expression should be
/// side-effect free.
pub(crate) fn is_cacheable(&self) -> bool {
match self {
// Literals are the definition of pure:
Expr::BoolLit(_) => true,
Expr::NumLit(_) => true,
Expr::StrLit(_) => true,
Expr::CharLit(_) => true,
// fmt::Display should have no effects:
Expr::Var(_) => true,
Expr::Path(_) => true,
// Check recursively:
Expr::Array(args) => args.iter().all(|arg| arg.is_cacheable()),
Expr::Attr(lhs, _) => lhs.is_cacheable(),
Expr::Index(lhs, rhs) => lhs.is_cacheable() && rhs.is_cacheable(),
Expr::Filter(_, args) => args.iter().all(|arg| arg.is_cacheable()),
Expr::Unary(_, arg) => arg.is_cacheable(),
Expr::BinOp(_, lhs, rhs) => lhs.is_cacheable() && rhs.is_cacheable(),
Expr::Range(_, lhs, rhs) => {
lhs.as_ref().map_or(true, |v| v.is_cacheable())
&& rhs.as_ref().map_or(true, |v| v.is_cacheable())
}
Expr::Group(arg) => arg.is_cacheable(),
Expr::Tuple(args) => args.iter().all(|arg| arg.is_cacheable()),
// We have too little information to tell if the expression is pure:
Expr::Call(_, _) => false,
Expr::RustMacro(_, _) => false,
Expr::Try(_) => false,
}
}
}
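
`is_cacheable` gates whether the generator may evaluate an expression once and reuse the result within a single `write!()` call; anything with possible side effects (calls, macros, `?`) is excluded. A tiny illustration (hypothetical type) of the property being protected:

    // If `counter.next()` had its result cached and printed twice, the output
    // would differ from evaluating it twice, so calls are never cached.
    struct Counter(u32);
    impl Counter {
        fn next(&mut self) -> u32 {
            self.0 += 1;
            self.0 // returns 1, then 2: not side-effect free
        }
    }
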
fn expr_bool_lit(i: &str) -> IResult<&str, Expr<'_>> {
map(bool_lit, Expr::BoolLit)(i)
}
fn expr_num_lit(i: &str) -> IResult<&str, Expr<'_>> {
map(num_lit, Expr::NumLit)(i)
}
fn expr_array_lit(i: &str) -> IResult<&str, Expr<'_>> {
delimited(
ws(char('[')),
map(separated_list1(ws(char(',')), expr_any), Expr::Array),
ws(char(']')),
)(i)
}
fn expr_str_lit(i: &str) -> IResult<&str, Expr<'_>> {
map(str_lit, Expr::StrLit)(i)
}
fn expr_char_lit(i: &str) -> IResult<&str, Expr<'_>> {
map(char_lit, Expr::CharLit)(i)
}
fn expr_var(i: &str) -> IResult<&str, Expr<'_>> {
map(identifier, Expr::Var)(i)
}
fn expr_path(i: &str) -> IResult<&str, Expr<'_>> {
let (i, path) = path(i)?;
Ok((i, Expr::Path(path)))
}
fn expr_group(i: &str) -> IResult<&str, Expr<'_>> {
let (i, expr) = preceded(ws(char('(')), opt(expr_any))(i)?;
let expr = match expr {
Some(expr) => expr,
None => {
let (i, _) = char(')')(i)?;
return Ok((i, Expr::Tuple(vec![])));
}
};
let (i, comma) = ws(opt(peek(char(','))))(i)?;
if comma.is_none() {
let (i, _) = char(')')(i)?;
return Ok((i, Expr::Group(Box::new(expr))));
}
let mut exprs = vec![expr];
let (i, _) = fold_many0(
preceded(char(','), ws(expr_any)),
|| (),
|_, expr| {
exprs.push(expr);
},
)(i)?;
let (i, _) = pair(ws(opt(char(','))), char(')'))(i)?;
Ok((i, Expr::Tuple(exprs)))
}
fn expr_single(i: &str) -> IResult<&str, Expr<'_>> {
alt((
expr_bool_lit,
expr_num_lit,
expr_str_lit,
expr_char_lit,
expr_path,
expr_rust_macro,
expr_array_lit,
expr_var,
expr_group,
))(i)
}
enum Suffix<'a> {
Attr(&'a str),
Index(Expr<'a>),
Call(Vec<Expr<'a>>),
Try,
}
fn expr_attr(i: &str) -> IResult<&str, Suffix<'_>> {
map(
preceded(
ws(pair(char('.'), not(char('.')))),
cut(alt((num_lit, identifier))),
),
Suffix::Attr,
)(i)
}
fn expr_index(i: &str) -> IResult<&str, Suffix<'_>> {
map(
preceded(ws(char('[')), cut(terminated(expr_any, ws(char(']'))))),
Suffix::Index,
)(i)
}
fn expr_call(i: &str) -> IResult<&str, Suffix<'_>> {
map(arguments, Suffix::Call)(i)
}
fn expr_try(i: &str) -> IResult<&str, Suffix<'_>> {
map(preceded(take_till(not_ws), char('?')), |_| Suffix::Try)(i)
}
fn filter(i: &str) -> IResult<&str, (&str, Option<Vec<Expr<'_>>>)> {
let (i, (_, fname, args)) = tuple((char('|'), ws(identifier), opt(arguments)))(i)?;
Ok((i, (fname, args)))
}
fn expr_filtered(i: &str) -> IResult<&str, Expr<'_>> {
let (i, (obj, filters)) = tuple((expr_prefix, many0(filter)))(i)?;
let mut res = obj;
for (fname, args) in filters {
res = Expr::Filter(fname, {
let mut args = match args {
Some(inner) => inner,
None => Vec::new(),
};
args.insert(0, res);
args
});
}
Ok((i, res))
}
fn expr_prefix(i: &str) -> IResult<&str, Expr<'_>> {
let (i, (ops, mut expr)) = pair(many0(ws(alt((tag("!"), tag("-"))))), expr_suffix)(i)?;
for op in ops.iter().rev() {
expr = Expr::Unary(op, Box::new(expr));
}
Ok((i, expr))
}
fn expr_suffix(i: &str) -> IResult<&str, Expr<'_>> {
let (mut i, mut expr) = expr_single(i)?;
loop {
let (j, suffix) = opt(alt((expr_attr, expr_index, expr_call, expr_try)))(i)?;
i = j;
match suffix {
Some(Suffix::Attr(attr)) => expr = Expr::Attr(expr.into(), attr),
Some(Suffix::Index(index)) => expr = Expr::Index(expr.into(), index.into()),
Some(Suffix::Call(args)) => expr = Expr::Call(expr.into(), args),
Some(Suffix::Try) => expr = Expr::Try(expr.into()),
None => break,
}
}
Ok((i, expr))
}
fn macro_arguments(i: &str) -> IResult<&str, &str> {
delimited(char('('), recognize(nested_parenthesis), char(')'))(i)
}
fn expr_rust_macro(i: &str) -> IResult<&str, Expr<'_>> {
let (i, (mname, _, args)) = tuple((identifier, char('!'), macro_arguments))(i)?;
Ok((i, Expr::RustMacro(mname, args)))
}
macro_rules! expr_prec_layer {
( $name:ident, $inner:ident, $op:expr ) => {
fn $name(i: &str) -> IResult<&str, Expr<'_>> {
let (i, left) = $inner(i)?;
let (i, right) = many0(pair(
ws(tag($op)),
$inner,
))(i)?;
Ok((
i,
right.into_iter().fold(left, |left, (op, right)| {
Expr::BinOp(op, Box::new(left), Box::new(right))
}),
))
}
};
( $name:ident, $inner:ident, $( $op:expr ),+ ) => {
fn $name(i: &str) -> IResult<&str, Expr<'_>> {
let (i, left) = $inner(i)?;
let (i, right) = many0(pair(
ws(alt(($( tag($op) ),+,))),
$inner,
))(i)?;
Ok((
i,
right.into_iter().fold(left, |left, (op, right)| {
Expr::BinOp(op, Box::new(left), Box::new(right))
}),
))
}
}
}
expr_prec_layer!(expr_muldivmod, expr_filtered, "*", "/", "%");
expr_prec_layer!(expr_addsub, expr_muldivmod, "+", "-");
expr_prec_layer!(expr_shifts, expr_addsub, ">>", "<<");
expr_prec_layer!(expr_band, expr_shifts, "&");
expr_prec_layer!(expr_bxor, expr_band, "^");
expr_prec_layer!(expr_bor, expr_bxor, "|");
expr_prec_layer!(expr_compare, expr_bor, "==", "!=", ">=", ">", "<=", "<");
expr_prec_layer!(expr_and, expr_compare, "&&");
expr_prec_layer!(expr_or, expr_and, "||");
fn expr_any(i: &str) -> IResult<&str, Expr<'_>> {
let range_right = |i| pair(ws(alt((tag("..="), tag("..")))), opt(expr_or))(i);
alt((
map(range_right, |(op, right)| {
Expr::Range(op, None, right.map(Box::new))
}),
map(
pair(expr_or, opt(range_right)),
|(left, right)| match right {
Some((op, right)) => Expr::Range(op, Some(Box::new(left)), right.map(Box::new)),
None => left,
},
),
))(i)
}
fn arguments(i: &str) -> IResult<&str, Vec<Expr<'_>>> {
delimited(
ws(char('(')),
separated_list0(char(','), ws(expr_any)),
ws(char(')')),
)(i)
}
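
The `expr_prec_layer!` macro builds one left-associative precedence level per operator tier, each layer delegating to the next-tighter one. A self-contained sketch of a single layer with nom 7 combinators (reduced AST, hypothetical names):

    use nom::branch::alt;
    use nom::bytes::complete::tag;
    use nom::character::complete::digit1;
    use nom::combinator::map;
    use nom::multi::many0;
    use nom::sequence::pair;
    use nom::IResult;

    #[derive(Debug)]
    enum Ast<'a> {
        Num(&'a str),
        BinOp(&'a str, Box<Ast<'a>>, Box<Ast<'a>>),
    }

    fn num(i: &str) -> IResult<&str, Ast<'_>> {
        map(digit1, Ast::Num)(i)
    }

    // One precedence layer: fold left-to-right so "1-2-3" parses as "(1-2)-3".
    fn addsub(i: &str) -> IResult<&str, Ast<'_>> {
        let (i, left) = num(i)?;
        let (i, rest) = many0(pair(alt((tag("+"), tag("-"))), num))(i)?;
        Ok((i, rest.into_iter().fold(left, |l, (op, r)| {
            Ast::BinOp(op, Box::new(l), Box::new(r))
        })))
    }
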


@ -0,0 +1,317 @@
use std::cell::Cell;
use std::str;
use nom::branch::alt;
use nom::bytes::complete::{escaped, is_not, tag, take_till};
use nom::character::complete::char;
use nom::character::complete::{anychar, digit1};
use nom::combinator::{eof, map, not, opt, recognize, value};
use nom::error::ErrorKind;
use nom::multi::separated_list1;
use nom::sequence::{delimited, pair, tuple};
use nom::{error_position, AsChar, IResult, InputTakeAtPosition};
pub(crate) use self::expr::Expr;
pub(crate) use self::node::{Cond, CondTest, Loop, Macro, Node, Target, When, Whitespace, Ws};
use crate::config::Syntax;
use crate::CompileError;
mod expr;
mod node;
#[cfg(test)]
mod tests;
struct State<'a> {
syntax: &'a Syntax<'a>,
loop_depth: Cell<usize>,
}
impl<'a> State<'a> {
fn new(syntax: &'a Syntax<'a>) -> State<'a> {
State {
syntax,
loop_depth: Cell::new(0),
}
}
fn enter_loop(&self) {
self.loop_depth.set(self.loop_depth.get() + 1);
}
fn leave_loop(&self) {
self.loop_depth.set(self.loop_depth.get() - 1);
}
fn is_in_loop(&self) -> bool {
self.loop_depth.get() > 0
}
}
impl From<char> for Whitespace {
fn from(c: char) -> Self {
match c {
'+' => Self::Preserve,
'-' => Self::Suppress,
'~' => Self::Minimize,
_ => panic!("unsupported `Whitespace` conversion"),
}
}
}
pub(crate) fn parse<'a>(
src: &'a str,
syntax: &'a Syntax<'_>,
) -> Result<Vec<Node<'a>>, CompileError> {
match Node::parse(src, &State::new(syntax)) {
Ok((left, res)) => {
if !left.is_empty() {
Err(format!("unable to parse template:\n\n{left:?}").into())
} else {
Ok(res)
}
}
Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
let nom::error::Error { input, .. } = err;
let offset = src.len() - input.len();
let (source_before, source_after) = src.split_at(offset);
let source_after = match source_after.char_indices().enumerate().take(41).last() {
Some((40, (i, _))) => format!("{:?}...", &source_after[..i]),
_ => format!("{source_after:?}"),
};
let (row, last_line) = source_before.lines().enumerate().last().unwrap();
let column = last_line.chars().count();
let msg = format!(
"problems parsing template source at row {}, column {} near:\n{}",
row + 1,
column,
source_after,
);
Err(msg.into())
}
Err(nom::Err::Incomplete(_)) => Err("parsing incomplete".into()),
}
}
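
On a parse failure, the byte offset of the unconsumed input is converted into a row/column pair for the error message. The same arithmetic in isolation (this sketch substitutes `unwrap_or` for the `unwrap` above so an offset of 0 is handled too):

    // 1-based row, 0-based column of `offset` within `src`.
    fn row_col(src: &str, offset: usize) -> (usize, usize) {
        let before = &src[..offset];
        let (row, last_line) = before.lines().enumerate().last().unwrap_or((0, ""));
        (row + 1, last_line.chars().count())
    }
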
fn is_ws(c: char) -> bool {
matches!(c, ' ' | '\t' | '\r' | '\n')
}
fn not_ws(c: char) -> bool {
!is_ws(c)
}
fn ws<'a, O>(
inner: impl FnMut(&'a str) -> IResult<&'a str, O>,
) -> impl FnMut(&'a str) -> IResult<&'a str, O> {
delimited(take_till(not_ws), inner, take_till(not_ws))
}
fn split_ws_parts(s: &str) -> Node<'_> {
let trimmed_start = s.trim_start_matches(is_ws);
let len_start = s.len() - trimmed_start.len();
let trimmed = trimmed_start.trim_end_matches(is_ws);
Node::Lit(&s[..len_start], trimmed, &trimmed_start[trimmed.len()..])
}
/// Skips input until `end` is found, but does not consume it.
/// Returns the tuple that would be returned when parsing `end`.
fn skip_till<'a, O>(
end: impl FnMut(&'a str) -> IResult<&'a str, O>,
) -> impl FnMut(&'a str) -> IResult<&'a str, (&'a str, O)> {
enum Next<O> {
IsEnd(O),
NotEnd(char),
}
let mut next = alt((map(end, Next::IsEnd), map(anychar, Next::NotEnd)));
move |start: &'a str| {
let mut i = start;
loop {
let (j, is_end) = next(i)?;
match is_end {
Next::IsEnd(lookahead) => return Ok((i, (j, lookahead))),
Next::NotEnd(_) => i = j,
}
}
}
}
fn keyword<'a>(k: &'a str) -> impl FnMut(&'a str) -> IResult<&'a str, &'a str> {
move |i: &'a str| -> IResult<&'a str, &'a str> {
let (j, v) = identifier(i)?;
if k == v {
Ok((j, v))
} else {
Err(nom::Err::Error(error_position!(i, ErrorKind::Tag)))
}
}
}
fn identifier(input: &str) -> IResult<&str, &str> {
recognize(pair(identifier_start, opt(identifier_tail)))(input)
}
fn identifier_start(s: &str) -> IResult<&str, &str> {
s.split_at_position1_complete(
|c| !(c.is_alpha() || c == '_' || c >= '\u{0080}'),
nom::error::ErrorKind::Alpha,
)
}
fn identifier_tail(s: &str) -> IResult<&str, &str> {
s.split_at_position1_complete(
|c| !(c.is_alphanum() || c == '_' || c >= '\u{0080}'),
nom::error::ErrorKind::Alpha,
)
}
fn bool_lit(i: &str) -> IResult<&str, &str> {
alt((keyword("false"), keyword("true")))(i)
}
fn num_lit(i: &str) -> IResult<&str, &str> {
recognize(pair(digit1, opt(pair(char('.'), digit1))))(i)
}
fn str_lit(i: &str) -> IResult<&str, &str> {
let (i, s) = delimited(
char('"'),
opt(escaped(is_not("\\\""), '\\', anychar)),
char('"'),
)(i)?;
Ok((i, s.unwrap_or_default()))
}
fn char_lit(i: &str) -> IResult<&str, &str> {
let (i, s) = delimited(
char('\''),
opt(escaped(is_not("\\\'"), '\\', anychar)),
char('\''),
)(i)?;
Ok((i, s.unwrap_or_default()))
}
fn nested_parenthesis(i: &str) -> IResult<&str, ()> {
let mut nested = 0;
let mut last = 0;
let mut in_str = false;
let mut escaped = false;
for (i, b) in i.chars().enumerate() {
if !(b == '(' || b == ')') || !in_str {
match b {
'(' => nested += 1,
')' => {
if nested == 0 {
last = i;
break;
}
nested -= 1;
}
'"' => {
if in_str {
if !escaped {
in_str = false;
}
} else {
in_str = true;
}
}
'\\' => {
escaped = !escaped;
}
_ => (),
}
}
if escaped && b != '\\' {
escaped = false;
}
}
if nested == 0 {
Ok((&i[last..], ()))
} else {
Err(nom::Err::Error(error_position!(
i,
ErrorKind::SeparatedNonEmptyList
)))
}
}
fn path(i: &str) -> IResult<&str, Vec<&str>> {
let root = opt(value("", ws(tag("::"))));
let tail = separated_list1(ws(tag("::")), identifier);
match tuple((root, identifier, ws(tag("::")), tail))(i) {
Ok((i, (root, start, _, rest))) => {
let mut path = Vec::new();
path.extend(root);
path.push(start);
path.extend(rest);
Ok((i, path))
}
Err(err) => {
if let Ok((i, name)) = identifier(i) {
// The returned identifier can be assumed to be a path if it:
// - contains both lowercase and uppercase characters, e.g. a type name like `None`
// - doesn't contain any lowercase characters, i.e. it's a constant
// In short, if it contains any uppercase characters it's a path.
if name.contains(char::is_uppercase) {
return Ok((i, vec![name]));
}
}
// If `identifier()` fails then just return the original error
Err(err)
}
}
}
fn take_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let p_start = alt((
tag(s.syntax.block_start),
tag(s.syntax.comment_start),
tag(s.syntax.expr_start),
));
let (i, _) = not(eof)(i)?;
let (i, content) = opt(recognize(skip_till(p_start)))(i)?;
let (i, content) = match content {
Some("") => {
// {block,comment,expr}_start follows immediately.
return Err(nom::Err::Error(error_position!(i, ErrorKind::TakeUntil)));
}
Some(content) => (i, content),
None => ("", i), // there is no {block,comment,expr}_start: take everything
};
Ok((i, split_ws_parts(content)))
}
fn tag_block_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
tag(s.syntax.block_start)(i)
}
fn tag_block_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
tag(s.syntax.block_end)(i)
}
fn tag_comment_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
tag(s.syntax.comment_start)(i)
}
fn tag_comment_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
tag(s.syntax.comment_end)(i)
}
fn tag_expr_start<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
tag(s.syntax.expr_start)(i)
}
fn tag_expr_end<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
tag(s.syntax.expr_end)(i)
}


@ -0,0 +1,682 @@
use std::str;
use nom::branch::alt;
use nom::bytes::complete::{tag, take_until};
use nom::character::complete::char;
use nom::combinator::{complete, consumed, cut, map, opt, peek, value};
use nom::error::{Error, ErrorKind};
use nom::multi::{fold_many0, many0, many1, separated_list0, separated_list1};
use nom::sequence::{delimited, pair, preceded, terminated, tuple};
use nom::{error_position, IResult};
use super::{
bool_lit, char_lit, identifier, keyword, num_lit, path, skip_till, split_ws_parts, str_lit,
tag_block_end, tag_block_start, tag_comment_end, tag_comment_start, tag_expr_end,
tag_expr_start, take_content, ws, Expr, State,
};
use crate::config::WhitespaceHandling;
#[derive(Debug, PartialEq)]
pub(crate) enum Node<'a> {
Lit(&'a str, &'a str, &'a str),
Comment(Ws),
Expr(Ws, Expr<'a>),
Call(Ws, Option<&'a str>, &'a str, Vec<Expr<'a>>),
LetDecl(Ws, Target<'a>),
Let(Ws, Target<'a>, Expr<'a>),
Cond(Vec<Cond<'a>>, Ws),
Match(Ws, Expr<'a>, Vec<When<'a>>, Ws),
Loop(Loop<'a>),
Extends(&'a str),
BlockDef(Ws, &'a str, Vec<Node<'a>>, Ws),
Include(Ws, &'a str),
Import(Ws, &'a str, &'a str),
Macro(&'a str, Macro<'a>),
Raw(Ws, &'a str, &'a str, &'a str, Ws),
Break(Ws),
Continue(Ws),
}
#[derive(Debug, PartialEq)]
pub(crate) enum Target<'a> {
Name(&'a str),
Tuple(Vec<&'a str>, Vec<Target<'a>>),
Struct(Vec<&'a str>, Vec<(&'a str, Target<'a>)>),
NumLit(&'a str),
StrLit(&'a str),
CharLit(&'a str),
BoolLit(&'a str),
Path(Vec<&'a str>),
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub(crate) enum Whitespace {
Preserve,
Suppress,
Minimize,
}
impl From<WhitespaceHandling> for Whitespace {
fn from(ws: WhitespaceHandling) -> Self {
match ws {
WhitespaceHandling::Suppress => Whitespace::Suppress,
WhitespaceHandling::Preserve => Whitespace::Preserve,
WhitespaceHandling::Minimize => Whitespace::Minimize,
}
}
}
#[derive(Debug, PartialEq)]
pub(crate) struct Loop<'a> {
pub(crate) ws1: Ws,
pub(crate) var: Target<'a>,
pub(crate) iter: Expr<'a>,
pub(crate) cond: Option<Expr<'a>>,
pub(crate) body: Vec<Node<'a>>,
pub(crate) ws2: Ws,
pub(crate) else_block: Vec<Node<'a>>,
pub(crate) ws3: Ws,
}
pub(crate) type When<'a> = (Ws, Target<'a>, Vec<Node<'a>>);
#[derive(Debug, PartialEq)]
pub(crate) struct Macro<'a> {
pub(crate) ws1: Ws,
pub(crate) args: Vec<&'a str>,
pub(crate) nodes: Vec<Node<'a>>,
pub(crate) ws2: Ws,
}
/// First field is the whitespace marker (`-`, `+` or `~`), if any, on the left
/// side of the item.
///
/// Second field is the marker, if any, on the right side of the item.
#[derive(Clone, Copy, Debug, PartialEq)]
pub(crate) struct Ws(pub(crate) Option<Whitespace>, pub(crate) Option<Whitespace>);
pub(crate) type Cond<'a> = (Ws, Option<CondTest<'a>>, Vec<Node<'a>>);
#[derive(Debug, PartialEq)]
pub(crate) struct CondTest<'a> {
pub(crate) target: Option<Target<'a>>,
pub(crate) expr: Expr<'a>,
}
impl Node<'_> {
pub(super) fn parse<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec<Node<'a>>> {
parse_template(i, s)
}
}
impl Target<'_> {
pub(super) fn parse(i: &str) -> IResult<&str, Target<'_>> {
target(i)
}
}
fn expr_handle_ws(i: &str) -> IResult<&str, Whitespace> {
alt((char('-'), char('+'), char('~')))(i).map(|(s, r)| (s, Whitespace::from(r)))
}
fn parameters(i: &str) -> IResult<&str, Vec<&str>> {
delimited(
ws(char('(')),
separated_list0(char(','), ws(identifier)),
ws(char(')')),
)(i)
}
fn block_call(i: &str) -> IResult<&str, Node<'_>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("call")),
cut(tuple((
opt(tuple((ws(identifier), ws(tag("::"))))),
ws(identifier),
ws(Expr::parse_arguments),
opt(expr_handle_ws),
))),
));
let (i, (pws, _, (scope, name, args, nws))) = p(i)?;
let scope = scope.map(|(scope, _)| scope);
Ok((i, Node::Call(Ws(pws, nws), scope, name, args)))
}
fn cond_if(i: &str) -> IResult<&str, CondTest<'_>> {
let mut p = preceded(
ws(keyword("if")),
cut(tuple((
opt(delimited(
ws(alt((keyword("let"), keyword("set")))),
ws(Target::parse),
ws(char('=')),
)),
ws(Expr::parse),
))),
);
let (i, (target, expr)) = p(i)?;
Ok((i, CondTest { target, expr }))
}
fn cond_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Cond<'a>> {
let mut p = tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("else")),
cut(tuple((
opt(cond_if),
opt(expr_handle_ws),
|i| tag_block_end(i, s),
cut(|i| parse_template(i, s)),
))),
));
let (i, (_, pws, _, (cond, nws, _, block))) = p(i)?;
Ok((i, (Ws(pws, nws), cond, block)))
}
fn block_if<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
opt(expr_handle_ws),
cond_if,
cut(tuple((
opt(expr_handle_ws),
|i| tag_block_end(i, s),
cut(tuple((
|i| parse_template(i, s),
many0(|i| cond_block(i, s)),
cut(tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("endif")),
opt(expr_handle_ws),
))),
))),
))),
));
let (i, (pws1, cond, (nws1, _, (block, elifs, (_, pws2, _, nws2))))) = p(i)?;
let mut res = vec![(Ws(pws1, nws1), Some(cond), block)];
res.extend(elifs);
Ok((i, Node::Cond(res, Ws(pws2, nws2))))
}
fn match_else_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, When<'a>> {
let mut p = tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("else")),
cut(tuple((
opt(expr_handle_ws),
|i| tag_block_end(i, s),
cut(|i| parse_template(i, s)),
))),
));
let (i, (_, pws, _, (nws, _, block))) = p(i)?;
Ok((i, (Ws(pws, nws), Target::Name("_"), block)))
}
fn when_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, When<'a>> {
let mut p = tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("when")),
cut(tuple((
ws(Target::parse),
opt(expr_handle_ws),
|i| tag_block_end(i, s),
cut(|i| parse_template(i, s)),
))),
));
let (i, (_, pws, _, (target, nws, _, block))) = p(i)?;
Ok((i, (Ws(pws, nws), target, block)))
}
fn block_match<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("match")),
cut(tuple((
ws(Expr::parse),
opt(expr_handle_ws),
|i| tag_block_end(i, s),
cut(tuple((
ws(many0(ws(value((), |i| block_comment(i, s))))),
many1(|i| when_block(i, s)),
cut(tuple((
opt(|i| match_else_block(i, s)),
cut(tuple((
ws(|i| tag_block_start(i, s)),
opt(expr_handle_ws),
ws(keyword("endmatch")),
opt(expr_handle_ws),
))),
))),
))),
))),
));
let (i, (pws1, _, (expr, nws1, _, (_, arms, (else_arm, (_, pws2, _, nws2)))))) = p(i)?;
let mut arms = arms;
if let Some(arm) = else_arm {
arms.push(arm);
}
Ok((i, Node::Match(Ws(pws1, nws1), expr, arms, Ws(pws2, nws2))))
}
fn block_let(i: &str) -> IResult<&str, Node<'_>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(alt((keyword("let"), keyword("set")))),
cut(tuple((
ws(Target::parse),
opt(tuple((ws(char('=')), ws(Expr::parse)))),
opt(expr_handle_ws),
))),
));
let (i, (pws, _, (var, val, nws))) = p(i)?;
Ok((
i,
if let Some((_, val)) = val {
Node::Let(Ws(pws, nws), var, val)
} else {
Node::LetDecl(Ws(pws, nws), var)
},
))
}
fn parse_loop_content<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec<Node<'a>>> {
s.enter_loop();
let result = parse_template(i, s);
s.leave_loop();
result
}
fn block_for<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let if_cond = preceded(ws(keyword("if")), cut(ws(Expr::parse)));
let else_block = |i| {
let mut p = preceded(
ws(keyword("else")),
cut(tuple((
opt(expr_handle_ws),
delimited(
|i| tag_block_end(i, s),
|i| parse_template(i, s),
|i| tag_block_start(i, s),
),
opt(expr_handle_ws),
))),
);
let (i, (pws, nodes, nws)) = p(i)?;
Ok((i, (pws, nodes, nws)))
};
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("for")),
cut(tuple((
ws(Target::parse),
ws(keyword("in")),
cut(tuple((
ws(Expr::parse),
opt(if_cond),
opt(expr_handle_ws),
|i| tag_block_end(i, s),
cut(tuple((
|i| parse_loop_content(i, s),
cut(tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
opt(else_block),
ws(keyword("endfor")),
opt(expr_handle_ws),
))),
))),
))),
))),
));
let (i, (pws1, _, (var, _, (iter, cond, nws1, _, (body, (_, pws2, else_block, _, nws2)))))) =
p(i)?;
let (nws3, else_block, pws3) = else_block.unwrap_or_default();
Ok((
i,
Node::Loop(Loop {
ws1: Ws(pws1, nws1),
var,
iter,
cond,
body,
ws2: Ws(pws2, nws3),
else_block,
ws3: Ws(pws3, nws2),
}),
))
}
fn block_extends(i: &str) -> IResult<&str, Node<'_>> {
let (i, (_, name)) = tuple((ws(keyword("extends")), ws(str_lit)))(i)?;
Ok((i, Node::Extends(name)))
}
fn block_block<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut start = tuple((
opt(expr_handle_ws),
ws(keyword("block")),
cut(tuple((ws(identifier), opt(expr_handle_ws), |i| {
tag_block_end(i, s)
}))),
));
let (i, (pws1, _, (name, nws1, _))) = start(i)?;
let mut end = cut(tuple((
|i| parse_template(i, s),
cut(tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("endblock")),
cut(tuple((opt(ws(keyword(name))), opt(expr_handle_ws)))),
))),
)));
let (i, (contents, (_, pws2, _, (_, nws2)))) = end(i)?;
Ok((
i,
Node::BlockDef(Ws(pws1, nws1), name, contents, Ws(pws2, nws2)),
))
}
fn block_include(i: &str) -> IResult<&str, Node<'_>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("include")),
cut(pair(ws(str_lit), opt(expr_handle_ws))),
));
let (i, (pws, _, (name, nws))) = p(i)?;
Ok((i, Node::Include(Ws(pws, nws), name)))
}
fn block_import(i: &str) -> IResult<&str, Node<'_>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("import")),
cut(tuple((
ws(str_lit),
ws(keyword("as")),
cut(pair(ws(identifier), opt(expr_handle_ws))),
))),
));
let (i, (pws, _, (name, _, (scope, nws)))) = p(i)?;
Ok((i, Node::Import(Ws(pws, nws), name, scope)))
}
fn block_macro<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut start = tuple((
opt(expr_handle_ws),
ws(keyword("macro")),
cut(tuple((
ws(identifier),
ws(parameters),
opt(expr_handle_ws),
|i| tag_block_end(i, s),
))),
));
let (i, (pws1, _, (name, params, nws1, _))) = start(i)?;
let mut end = cut(tuple((
|i| parse_template(i, s),
cut(tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("endmacro")),
cut(tuple((opt(ws(keyword(name))), opt(expr_handle_ws)))),
))),
)));
let (i, (contents, (_, pws2, _, (_, nws2)))) = end(i)?;
assert_ne!(name, "super", "invalid macro name 'super'");
Ok((
i,
Node::Macro(
name,
Macro {
ws1: Ws(pws1, nws1),
args: params,
nodes: contents,
ws2: Ws(pws2, nws2),
},
),
))
}
fn block_raw<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let endraw = tuple((
|i| tag_block_start(i, s),
opt(expr_handle_ws),
ws(keyword("endraw")),
opt(expr_handle_ws),
peek(|i| tag_block_end(i, s)),
));
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("raw")),
cut(tuple((
opt(expr_handle_ws),
|i| tag_block_end(i, s),
consumed(skip_till(endraw)),
))),
));
let (_, (pws1, _, (nws1, _, (contents, (i, (_, pws2, _, nws2, _)))))) = p(i)?;
let (lws, val, rws) = match split_ws_parts(contents) {
Node::Lit(lws, val, rws) => (lws, val, rws),
_ => unreachable!(),
};
let ws1 = Ws(pws1, nws1);
let ws2 = Ws(pws2, nws2);
Ok((i, Node::Raw(ws1, lws, val, rws, ws2)))
}
fn break_statement<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("break")),
opt(expr_handle_ws),
));
let (j, (pws, _, nws)) = p(i)?;
if !s.is_in_loop() {
return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag)));
}
Ok((j, Node::Break(Ws(pws, nws))))
}
fn continue_statement<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
opt(expr_handle_ws),
ws(keyword("continue")),
opt(expr_handle_ws),
));
let (j, (pws, _, nws)) = p(i)?;
if !s.is_in_loop() {
return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag)));
}
Ok((j, Node::Continue(Ws(pws, nws))))
}
fn block_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
|i| tag_block_start(i, s),
alt((
block_call,
block_let,
|i| block_if(i, s),
|i| block_for(i, s),
|i| block_match(i, s),
block_extends,
block_include,
block_import,
|i| block_block(i, s),
|i| block_macro(i, s),
|i| block_raw(i, s),
|i| break_statement(i, s),
|i| continue_statement(i, s),
)),
cut(|i| tag_block_end(i, s)),
));
let (i, (_, contents, _)) = p(i)?;
Ok((i, contents))
}
fn block_comment_body<'a>(mut i: &'a str, s: &State<'_>) -> IResult<&'a str, &'a str> {
let mut level = 0;
loop {
let (end, tail) = take_until(s.syntax.comment_end)(i)?;
match take_until::<_, _, Error<_>>(s.syntax.comment_start)(i) {
Ok((start, _)) if start.as_ptr() < end.as_ptr() => {
level += 1;
i = &start[2..];
}
_ if level > 0 => {
level -= 1;
i = &end[2..];
}
_ => return Ok((end, tail)),
}
}
}
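
`block_comment_body` tracks a nesting counter so `{# outer {# inner #} #}` consumes through the matching end marker, comparing pointer positions to decide whether the next `{#` opens before the next `#}` closes. A reduced sketch of the same idea on hypothetical `/*`/`*/` delimiters:

    // Length of the comment body, honouring nested /* ... */ pairs.
    fn comment_body_len(s: &str) -> Option<usize> {
        let mut depth = 0usize;
        let mut i = 0;
        while i < s.len() {
            if s[i..].starts_with("/*") {
                depth += 1;
                i += 2;
            } else if s[i..].starts_with("*/") {
                if depth == 0 {
                    return Some(i); // body ends before this close delimiter
                }
                depth -= 1;
                i += 2;
            } else {
                i += s[i..].chars().next().map_or(1, char::len_utf8);
            }
        }
        None // unbalanced: no matching close delimiter
    }
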
fn block_comment<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
|i| tag_comment_start(i, s),
cut(tuple((
opt(expr_handle_ws),
|i| block_comment_body(i, s),
|i| tag_comment_end(i, s),
))),
));
let (i, (_, (pws, tail, _))) = p(i)?;
let nws = if tail.ends_with('-') {
Some(Whitespace::Suppress)
} else if tail.ends_with('+') {
Some(Whitespace::Preserve)
} else if tail.ends_with('~') {
Some(Whitespace::Minimize)
} else {
None
};
Ok((i, Node::Comment(Ws(pws, nws))))
}
fn expr_node<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Node<'a>> {
let mut p = tuple((
|i| tag_expr_start(i, s),
cut(tuple((
opt(expr_handle_ws),
ws(Expr::parse),
opt(expr_handle_ws),
|i| tag_expr_end(i, s),
))),
));
let (i, (_, (pws, expr, nws, _))) = p(i)?;
Ok((i, Node::Expr(Ws(pws, nws), expr)))
}
fn parse_template<'a>(i: &'a str, s: &State<'_>) -> IResult<&'a str, Vec<Node<'a>>> {
many0(alt((
complete(|i| take_content(i, s)),
complete(|i| block_comment(i, s)),
complete(|i| expr_node(i, s)),
complete(|i| block_node(i, s)),
)))(i)
}
fn variant_lit(i: &str) -> IResult<&str, Target<'_>> {
alt((
map(str_lit, Target::StrLit),
map(char_lit, Target::CharLit),
map(num_lit, Target::NumLit),
map(bool_lit, Target::BoolLit),
))(i)
}
fn target(i: &str) -> IResult<&str, Target<'_>> {
let mut opt_opening_paren = map(opt(ws(char('('))), |o| o.is_some());
let mut opt_closing_paren = map(opt(ws(char(')'))), |o| o.is_some());
let mut opt_opening_brace = map(opt(ws(char('{'))), |o| o.is_some());
let (i, lit) = opt(variant_lit)(i)?;
if let Some(lit) = lit {
return Ok((i, lit));
}
// match tuples and unused parentheses
let (i, target_is_tuple) = opt_opening_paren(i)?;
if target_is_tuple {
let (i, is_empty_tuple) = opt_closing_paren(i)?;
if is_empty_tuple {
return Ok((i, Target::Tuple(Vec::new(), Vec::new())));
}
let (i, first_target) = target(i)?;
let (i, is_unused_paren) = opt_closing_paren(i)?;
if is_unused_paren {
return Ok((i, first_target));
}
let mut targets = vec![first_target];
let (i, _) = cut(tuple((
fold_many0(
preceded(ws(char(',')), target),
|| (),
|_, target| {
targets.push(target);
},
),
opt(ws(char(','))),
ws(cut(char(')'))),
)))(i)?;
return Ok((i, Target::Tuple(Vec::new(), targets)));
}
// match structs
let (i, path) = opt(path)(i)?;
if let Some(path) = path {
let i_before_matching_with = i;
let (i, _) = opt(ws(keyword("with")))(i)?;
let (i, is_unnamed_struct) = opt_opening_paren(i)?;
if is_unnamed_struct {
let (i, targets) = alt((
map(char(')'), |_| Vec::new()),
terminated(
cut(separated_list1(ws(char(',')), target)),
pair(opt(ws(char(','))), ws(cut(char(')')))),
),
))(i)?;
return Ok((i, Target::Tuple(path, targets)));
}
let (i, is_named_struct) = opt_opening_brace(i)?;
if is_named_struct {
let (i, targets) = alt((
map(char('}'), |_| Vec::new()),
terminated(
cut(separated_list1(ws(char(',')), named_target)),
pair(opt(ws(char(','))), ws(cut(char('}')))),
),
))(i)?;
return Ok((i, Target::Struct(path, targets)));
}
return Ok((i_before_matching_with, Target::Path(path)));
}
// neither literal nor struct nor path
map(identifier, Target::Name)(i)
}
fn named_target(i: &str) -> IResult<&str, (&str, Target<'_>)> {
let (i, (src, target)) = pair(identifier, opt(preceded(ws(char(':')), target)))(i)?;
Ok((i, (src, target.unwrap_or(Target::Name(src)))))
}


@ -0,0 +1,668 @@
use crate::config::Syntax;
use crate::parser::{Expr, Node, Whitespace, Ws};
fn check_ws_split(s: &str, res: &(&str, &str, &str)) {
match super::split_ws_parts(s) {
Node::Lit(lws, s, rws) => {
assert_eq!(lws, res.0);
assert_eq!(s, res.1);
assert_eq!(rws, res.2);
}
_ => {
panic!("fail");
}
}
}
#[test]
fn test_ws_splitter() {
check_ws_split("", &("", "", ""));
check_ws_split("a", &("", "a", ""));
check_ws_split("\ta", &("\t", "a", ""));
check_ws_split("b\n", &("", "b", "\n"));
check_ws_split(" \t\r\n", &(" \t\r\n", "", ""));
}
#[test]
#[should_panic]
fn test_invalid_block() {
super::parse("{% extend \"blah\" %}", &Syntax::default()).unwrap();
}
#[test]
fn test_parse_filter() {
use Expr::*;
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ strvar|e }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Filter("e", vec![Var("strvar")]),)],
);
assert_eq!(
super::parse("{{ 2|abs }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Filter("abs", vec![NumLit("2")]),)],
);
assert_eq!(
super::parse("{{ -2|abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Filter("abs", vec![Unary("-", NumLit("2").into())]),
)],
);
assert_eq!(
super::parse("{{ (1 - 2)|abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Filter(
"abs",
vec![Group(
BinOp("-", NumLit("1").into(), NumLit("2").into()).into()
)]
),
)],
);
}
#[test]
fn test_parse_numbers() {
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ 2 }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::NumLit("2"),)],
);
assert_eq!(
super::parse("{{ 2.5 }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::NumLit("2.5"),)],
);
}
#[test]
fn test_parse_var() {
let s = Syntax::default();
assert_eq!(
super::parse("{{ foo }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Var("foo"))],
);
assert_eq!(
super::parse("{{ foo_bar }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Var("foo_bar"))],
);
assert_eq!(
super::parse("{{ none }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Var("none"))],
);
}
#[test]
fn test_parse_const() {
let s = Syntax::default();
assert_eq!(
super::parse("{{ FOO }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Path(vec!["FOO"]))],
);
assert_eq!(
super::parse("{{ FOO_BAR }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Path(vec!["FOO_BAR"]))],
);
assert_eq!(
super::parse("{{ NONE }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Path(vec!["NONE"]))],
);
}
#[test]
fn test_parse_path() {
let s = Syntax::default();
assert_eq!(
super::parse("{{ None }}", &s).unwrap(),
vec![Node::Expr(Ws(None, None), Expr::Path(vec!["None"]))],
);
assert_eq!(
super::parse("{{ Some(123) }}", &s).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(
Box::new(Expr::Path(vec!["Some"])),
vec![Expr::NumLit("123")]
),
)],
);
assert_eq!(
super::parse("{{ Ok(123) }}", &s).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(Box::new(Expr::Path(vec!["Ok"])), vec![Expr::NumLit("123")]),
)],
);
assert_eq!(
super::parse("{{ Err(123) }}", &s).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(Box::new(Expr::Path(vec!["Err"])), vec![Expr::NumLit("123")]),
)],
);
}
#[test]
fn test_parse_var_call() {
assert_eq!(
super::parse("{{ function(\"123\", 3) }}", &Syntax::default()).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(
Box::new(Expr::Var("function")),
vec![Expr::StrLit("123"), Expr::NumLit("3")]
),
)],
);
}
#[test]
fn test_parse_path_call() {
let s = Syntax::default();
assert_eq!(
super::parse("{{ Option::None }}", &s).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Path(vec!["Option", "None"])
)],
);
assert_eq!(
super::parse("{{ Option::Some(123) }}", &s).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(
Box::new(Expr::Path(vec!["Option", "Some"])),
vec![Expr::NumLit("123")],
),
)],
);
assert_eq!(
super::parse("{{ self::function(\"123\", 3) }}", &s).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(
Box::new(Expr::Path(vec!["self", "function"])),
vec![Expr::StrLit("123"), Expr::NumLit("3")],
),
)],
);
}
#[test]
fn test_parse_root_path() {
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ std::string::String::new() }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(
Box::new(Expr::Path(vec!["std", "string", "String", "new"])),
vec![]
),
)],
);
assert_eq!(
super::parse("{{ ::std::string::String::new() }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Expr::Call(
Box::new(Expr::Path(vec!["", "std", "string", "String", "new"])),
vec![]
),
)],
);
}
#[test]
fn change_delimiters_parse_filter() {
let syntax = Syntax {
expr_start: "{=",
expr_end: "=}",
..Syntax::default()
};
super::parse("{= strvar|e =}", &syntax).unwrap();
}
#[test]
fn test_precedence() {
use Expr::*;
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ a + b == c }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"==",
BinOp("+", Var("a").into(), Var("b").into()).into(),
Var("c").into(),
)
)],
);
assert_eq!(
super::parse("{{ a + b * c - d / e }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"-",
BinOp(
"+",
Var("a").into(),
BinOp("*", Var("b").into(), Var("c").into()).into(),
)
.into(),
BinOp("/", Var("d").into(), Var("e").into()).into(),
)
)],
);
assert_eq!(
super::parse("{{ a * (b + c) / -d }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"/",
BinOp(
"*",
Var("a").into(),
Group(BinOp("+", Var("b").into(), Var("c").into()).into()).into()
)
.into(),
Unary("-", Var("d").into()).into()
)
)],
);
assert_eq!(
super::parse("{{ a || b && c || d && e }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"||",
BinOp(
"||",
Var("a").into(),
BinOp("&&", Var("b").into(), Var("c").into()).into(),
)
.into(),
BinOp("&&", Var("d").into(), Var("e").into()).into(),
)
)],
);
}
#[test]
fn test_associativity() {
use Expr::*;
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ a + b + c }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"+",
BinOp("+", Var("a").into(), Var("b").into()).into(),
Var("c").into()
)
)],
);
assert_eq!(
super::parse("{{ a * b * c }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"*",
BinOp("*", Var("a").into(), Var("b").into()).into(),
Var("c").into()
)
)],
);
assert_eq!(
super::parse("{{ a && b && c }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"&&",
BinOp("&&", Var("a").into(), Var("b").into()).into(),
Var("c").into()
)
)],
);
assert_eq!(
super::parse("{{ a + b - c + d }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"+",
BinOp(
"-",
BinOp("+", Var("a").into(), Var("b").into()).into(),
Var("c").into()
)
.into(),
Var("d").into()
)
)],
);
assert_eq!(
super::parse("{{ a == b != c > d > e == f }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"==",
BinOp(
">",
BinOp(
">",
BinOp(
"!=",
BinOp("==", Var("a").into(), Var("b").into()).into(),
Var("c").into()
)
.into(),
Var("d").into()
)
.into(),
Var("e").into()
)
.into(),
Var("f").into()
)
)],
);
}
#[test]
fn test_odd_calls() {
use Expr::*;
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ a[b](c) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Call(
Box::new(Index(Box::new(Var("a")), Box::new(Var("b")))),
vec![Var("c")],
),
)],
);
assert_eq!(
super::parse("{{ (a + b)(c) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Call(
Box::new(Group(Box::new(BinOp(
"+",
Box::new(Var("a")),
Box::new(Var("b"))
)))),
vec![Var("c")],
),
)],
);
assert_eq!(
super::parse("{{ a + b(c) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"+",
Box::new(Var("a")),
Box::new(Call(Box::new(Var("b")), vec![Var("c")])),
),
)],
);
assert_eq!(
super::parse("{{ (-a)(b) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Call(
Box::new(Group(Box::new(Unary("-", Box::new(Var("a")))))),
vec![Var("b")],
),
)],
);
assert_eq!(
super::parse("{{ -a(b) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Unary("-", Box::new(Call(Box::new(Var("a")), vec![Var("b")])),),
)],
);
}
#[test]
fn test_parse_comments() {
let s = &Syntax::default();
assert_eq!(
super::parse("{##}", s).unwrap(),
vec![Node::Comment(Ws(None, None))],
);
assert_eq!(
super::parse("{#- #}", s).unwrap(),
vec![Node::Comment(Ws(Some(Whitespace::Suppress), None))],
);
assert_eq!(
super::parse("{# -#}", s).unwrap(),
vec![Node::Comment(Ws(None, Some(Whitespace::Suppress)))],
);
assert_eq!(
super::parse("{#--#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Suppress),
Some(Whitespace::Suppress)
))],
);
assert_eq!(
super::parse("{#- foo\n bar -#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Suppress),
Some(Whitespace::Suppress)
))],
);
assert_eq!(
super::parse("{#- foo\n {#- bar\n -#} baz -#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Suppress),
Some(Whitespace::Suppress)
))],
);
assert_eq!(
super::parse("{#+ #}", s).unwrap(),
vec![Node::Comment(Ws(Some(Whitespace::Preserve), None))],
);
assert_eq!(
super::parse("{# +#}", s).unwrap(),
vec![Node::Comment(Ws(None, Some(Whitespace::Preserve)))],
);
assert_eq!(
super::parse("{#++#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Preserve),
Some(Whitespace::Preserve)
))],
);
assert_eq!(
super::parse("{#+ foo\n bar +#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Preserve),
Some(Whitespace::Preserve)
))],
);
assert_eq!(
super::parse("{#+ foo\n {#+ bar\n +#} baz -+#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Preserve),
Some(Whitespace::Preserve)
))],
);
assert_eq!(
super::parse("{#~ #}", s).unwrap(),
vec![Node::Comment(Ws(Some(Whitespace::Minimize), None))],
);
assert_eq!(
super::parse("{# ~#}", s).unwrap(),
vec![Node::Comment(Ws(None, Some(Whitespace::Minimize)))],
);
assert_eq!(
super::parse("{#~~#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Minimize),
Some(Whitespace::Minimize)
))],
);
assert_eq!(
super::parse("{#~ foo\n bar ~#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Minimize),
Some(Whitespace::Minimize)
))],
);
assert_eq!(
super::parse("{#~ foo\n {#~ bar\n ~#} baz -~#}", s).unwrap(),
vec![Node::Comment(Ws(
Some(Whitespace::Minimize),
Some(Whitespace::Minimize)
))],
);
assert_eq!(
super::parse("{# foo {# bar #} {# {# baz #} qux #} #}", s).unwrap(),
vec![Node::Comment(Ws(None, None))],
);
}
#[test]
fn test_parse_tuple() {
use super::Expr::*;
let syntax = Syntax::default();
assert_eq!(
super::parse("{{ () }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Tuple(vec![]),)],
);
assert_eq!(
super::parse("{{ (1) }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Group(Box::new(NumLit("1"))),)],
);
assert_eq!(
super::parse("{{ (1,) }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)],
);
assert_eq!(
super::parse("{{ (1, ) }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)],
);
assert_eq!(
super::parse("{{ (1 ,) }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)],
);
assert_eq!(
super::parse("{{ (1 , ) }}", &syntax).unwrap(),
vec![Node::Expr(Ws(None, None), Tuple(vec![NumLit("1")]),)],
);
assert_eq!(
super::parse("{{ (1, 2) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Tuple(vec![NumLit("1"), NumLit("2")]),
)],
);
assert_eq!(
super::parse("{{ (1, 2,) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Tuple(vec![NumLit("1"), NumLit("2")]),
)],
);
assert_eq!(
super::parse("{{ (1, 2, 3) }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Tuple(vec![NumLit("1"), NumLit("2"), NumLit("3")]),
)],
);
assert_eq!(
super::parse("{{ ()|abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Filter("abs", vec![Tuple(vec![])]),
)],
);
assert_eq!(
super::parse("{{ () | abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp("|", Box::new(Tuple(vec![])), Box::new(Var("abs"))),
)],
);
assert_eq!(
super::parse("{{ (1)|abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Filter("abs", vec![Group(Box::new(NumLit("1")))]),
)],
);
assert_eq!(
super::parse("{{ (1) | abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"|",
Box::new(Group(Box::new(NumLit("1")))),
Box::new(Var("abs"))
),
)],
);
assert_eq!(
super::parse("{{ (1,)|abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Filter("abs", vec![Tuple(vec![NumLit("1")])]),
)],
);
assert_eq!(
super::parse("{{ (1,) | abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"|",
Box::new(Tuple(vec![NumLit("1")])),
Box::new(Var("abs"))
),
)],
);
assert_eq!(
super::parse("{{ (1, 2)|abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
Filter("abs", vec![Tuple(vec![NumLit("1"), NumLit("2")])]),
)],
);
assert_eq!(
super::parse("{{ (1, 2) | abs }}", &syntax).unwrap(),
vec![Node::Expr(
Ws(None, None),
BinOp(
"|",
Box::new(Tuple(vec![NumLit("1"), NumLit("2")])),
Box::new(Var("abs"))
),
)],
);
}
#[test]
fn test_missing_space_after_kw() {
let syntax = Syntax::default();
let err = super::parse("{%leta=b%}", &syntax).unwrap_err();
assert!(matches!(
&*err.msg,
"unable to parse template:\n\n\"{%leta=b%}\""
));
}


@ -1 +0,0 @@
{"files":{"Cargo.toml":"d843e6077028802df1970bc4934bb5bd517bd028a1892a610f8a984a084a641c","LICENSE-APACHE":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","LICENSE-MIT":"df20e0180764bf5bd76f74d47bc9e8c0069a666401629c390003a1d5eba99c92","README.md":"dbea023a90feb38fd85bb365b453e919b3990d2b1413396b00d46b70c4a855e8","src/error.rs":"e0337c3fb6e9c8babe42d07888c23d2a5ba7fa08670e833c900e41e6d131020d","src/filters/json.rs":"ce662c9835d82dfce51e7a61216f5c527b31592686b7853f72eafc60e82a4651","src/filters/mod.rs":"4115fc70613750a3a5a957ee2700f866793a940f30ce6a06a409e83534b78baf","src/filters/yaml.rs":"90b69e1d29dbed5fccb40c2f868ebf1deb1f7dbb3ced2fcab9bf244a52924e1d","src/generator.rs":"557b10f7aa2567771479243ad46ba885a72c9d97564e2792bbba15225a634b8b","src/helpers/mod.rs":"76e0422acd4ccba7b1735d6ab7622a93f6ec5a2fa89531111d877266784d5334","src/heritage.rs":"a363ef47b061c642d258b849ce7d1644f2c94376a49d6999d955abb0c8f7a685","src/input.rs":"7ccac91c5fa48da23e8ca142a5b392cae73e0e02940c25d9dda0733106f95bc9","src/lib.rs":"bac88b35b3ebd9aa3f1e9f761b1f0c6fc9155c6a714fbd4e81d2dfc34a542645","src/parser.rs":"4604fc4a18ab5e73d6da17e46486ac555907e3874ace372e3aa0c6dba8107fc0","templates/a.html":"b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c","templates/b.html":"7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730","templates/sub/b.html":"7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730","templates/sub/c.html":"bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c","templates/sub/sub1/d.html":"86b0c5a1e2b73b08fd54c727f4458649ed9fe3ad1b6e8ac9460c070113509a1e"},"package":"bf722b94118a07fcbc6640190f247334027685d4e218b794dbfe17c32bf38ed0"}


@ -1,83 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
name = "askama_shared"
version = "0.12.2"
description = "Shared code for Askama"
homepage = "https://github.com/djc/askama"
readme = "README.md"
license = "MIT/Apache-2.0"
repository = "https://github.com/djc/askama"
[package.metadata.docs.rs]
features = ["config", "humansize", "num-traits", "json", "yaml", "percent-encoding"]
[dependencies.askama_escape]
version = "0.10.3"
[dependencies.comrak]
version = "0.12"
optional = true
default-features = false
[dependencies.humansize]
version = "1.1.0"
optional = true
[dependencies.mime]
version = "0.3"
[dependencies.mime_guess]
version = "2"
[dependencies.nom]
version = "7"
[dependencies.num-traits]
version = "0.2.6"
optional = true
[dependencies.percent-encoding]
version = "2.1.0"
optional = true
[dependencies.proc-macro2]
version = "1"
[dependencies.quote]
version = "1"
[dependencies.serde]
version = "1.0"
features = ["derive"]
optional = true
[dependencies.serde_json]
version = "1.0"
optional = true
[dependencies.serde_yaml]
version = "0.8"
optional = true
[dependencies.syn]
version = "1"
[dependencies.toml]
version = "0.5"
optional = true
[features]
config = ["serde", "toml"]
default = ["config", "humansize", "num-traits", "percent-encoding"]
json = ["serde", "serde_json", "askama_escape/json"]
markdown = ["comrak"]
yaml = ["serde", "serde_yaml"]


@ -1,9 +0,0 @@
# askama_shared: shared code for the Askama templating engine
[![Documentation](https://docs.rs/askama_shared/badge.svg)](https://docs.rs/askama_shared/)
[![Latest version](https://img.shields.io/crates/v/askama_shared.svg)](https://crates.io/crates/askama_shared)
[![Build Status](https://github.com/djc/askama/workflows/CI/badge.svg)](https://github.com/djc/askama/actions?query=workflow%3ACI)
[![Chat](https://badges.gitter.im/gitterHQ/gitter.svg)](https://gitter.im/djc/askama)
This crate contains helper code used by the [Askama](https://github.com/djc/askama)
templating engine.

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

third_party/rust/basic-toml/Cargo.lock generated vendored Normal file

@ -0,0 +1,100 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "basic-toml"
version = "0.1.2"
dependencies = [
"semver",
"serde",
"serde_derive",
"serde_json",
]
[[package]]
name = "itoa"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
[[package]]
name = "proc-macro2"
version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
dependencies = [
"proc-macro2",
]
[[package]]
name = "ryu"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
[[package]]
name = "semver"
version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a"
dependencies = [
"serde",
]
[[package]]
name = "serde"
version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "syn"
version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"

third_party/rust/basic-toml/Cargo.toml vendored Normal file

@ -0,0 +1,56 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2021"
name = "basic-toml"
version = "0.1.2"
authors = [
"Alex Crichton <alex@alexcrichton.com>",
"David Tolnay <dtolnay@gmail.com>",
]
description = "Minimal TOML library with few dependencies"
documentation = "https://docs.rs/basic-toml"
readme = "README.md"
keywords = [
"toml",
"serde",
]
categories = [
"config",
"encoding",
"parser-implementations",
]
license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/basic-toml"
[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
[lib]
doc-scrape-examples = false
[dependencies.serde]
version = "1.0.97"
[dev-dependencies.semver]
version = "1.0"
features = ["serde"]
[dev-dependencies.serde]
version = "1.0"
features = ["derive"]
[dev-dependencies.serde_derive]
version = "1.0"
[dev-dependencies.serde_json]
version = "1.0"


@ -0,0 +1,176 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS


@ -1,5 +1,3 @@
Copyright (c) 2017-2020 Dirkjan Ochtman
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the

third_party/rust/basic-toml/README.md vendored Normal file

@ -0,0 +1,36 @@
# basic-toml
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/basic--toml-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/basic-toml)
[<img alt="crates.io" src="https://img.shields.io/crates/v/basic-toml.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/basic-toml)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-basic--toml-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/basic-toml)
[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/basic-toml/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/basic-toml/actions?query=branch%3Amaster)
A library for parsing and producing data in [TOML] format using [Serde].
This crate is a stripped down fork of version 0.5 of the `toml` crate (from
before the `toml_edit` rewrite).
[TOML]: https://toml.io
[Serde]: https://serde.rs
```toml
[dependencies]
basic-toml = "0.1"
```
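
For illustration (not from the upstream README), a minimal round trip; it assumes `serde_json::Value` as a generic target type, which the crate's own tests also use:

```rust
use serde_json::Value;

fn main() {
    // Parse arbitrary TOML into a generic value...
    let value: Value = basic_toml::from_str("answer = 42").unwrap();
    assert_eq!(value["answer"], 42);

    // ...and serialize the string-keyed map back out as TOML.
    let toml = basic_toml::to_string(&value).unwrap();
    assert_eq!(toml, "answer = 42\n");
}
```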
<br>
#### License
<sup>
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
</sup>
<br>
<sub>
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.
</sub>


@ -0,0 +1,54 @@
//! An example showing off the usage of `Deserialize` to automatically decode
//! TOML into a Rust `struct`
#![deny(warnings)]
#![allow(dead_code)]
use serde_derive::Deserialize;
/// This is what we're going to decode into. Each field is optional, meaning
/// that it doesn't have to be present in TOML.
#[derive(Debug, Deserialize)]
struct Config {
global_string: Option<String>,
global_integer: Option<u64>,
server: Option<ServerConfig>,
peers: Option<Vec<PeerConfig>>,
}
/// Sub-structs are decoded from tables, so this will decode from the `[server]`
/// table.
///
/// Again, each field is optional, meaning they don't have to be present.
#[derive(Debug, Deserialize)]
struct ServerConfig {
ip: Option<String>,
port: Option<u64>,
}
#[derive(Debug, Deserialize)]
struct PeerConfig {
ip: Option<String>,
port: Option<u64>,
}
fn main() {
let toml_str = r#"
global_string = "test"
global_integer = 5
[server]
ip = "127.0.0.1"
port = 80
[[peers]]
ip = "127.0.0.1"
port = 8080
[[peers]]
ip = "127.0.0.1"
"#;
let decoded: Config = basic_toml::from_str(toml_str).unwrap();
println!("{:#?}", decoded);
}

third_party/rust/basic-toml/src/de.rs vendored Normal file

File diff suppressed because it is too large


@ -0,0 +1,54 @@
use std::fmt::{self, Debug, Display};
/// Errors that can occur when serializing or deserializing TOML.
pub struct Error(Box<ErrorInner>);
pub(crate) enum ErrorInner {
Ser(crate::ser::Error),
De(crate::de::Error),
}
impl Error {
/// Produces a (line, column) pair of the position of the error if
/// available.
///
/// All indexes are 0-based.
pub fn line_col(&self) -> Option<(usize, usize)> {
match &*self.0 {
ErrorInner::Ser(_) => None,
ErrorInner::De(error) => error.line_col(),
}
}
}
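// Illustrative only (not part of the vendored source): deserialization
// errors generally carry a position while serialization errors never do,
// so the following is expected to hold:
//
//     let err = basic_toml::from_str::<serde_json::Value>("x = ").unwrap_err();
//     assert!(err.line_col().is_some());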
impl From<crate::ser::Error> for Error {
fn from(error: crate::ser::Error) -> Self {
Error(Box::new(ErrorInner::Ser(error)))
}
}
impl From<crate::de::Error> for Error {
fn from(error: crate::de::Error) -> Self {
Error(Box::new(ErrorInner::De(error)))
}
}
impl Display for Error {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match &*self.0 {
ErrorInner::Ser(error) => Display::fmt(error, formatter),
ErrorInner::De(error) => Display::fmt(error, formatter),
}
}
}
impl Debug for Error {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match &*self.0 {
ErrorInner::Ser(error) => Debug::fmt(error, formatter),
ErrorInner::De(error) => Debug::fmt(error, formatter),
}
}
}
impl std::error::Error for Error {}

third_party/rust/basic-toml/src/lib.rs vendored Normal file

@ -0,0 +1,141 @@
//! [![github]](https://github.com/dtolnay/basic-toml)&ensp;[![crates-io]](https://crates.io/crates/basic-toml)&ensp;[![docs-rs]](https://docs.rs/basic-toml)
//!
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
//!
//! <br>
//!
//! A library for parsing and producing data in [TOML] format using [Serde].
//!
//! TOML is designed to be "a config file format for humans": minimal and easy
//! to read due to obvious semantics.
//!
//! ```toml
//! [package]
//! name = "basic-toml"
#![doc = concat!("version = \"", env!("CARGO_PKG_VERSION_MAJOR"), ".", env!("CARGO_PKG_VERSION_MINOR"), ".", env!("CARGO_PKG_VERSION_PATCH"), "\"")]
//! authors = ["Alex Crichton <alex@alexcrichton.com>"]
//!
//! [dependencies]
//! serde = "1.0"
//! ```
//!
//! The TOML format is widely used throughout the Rust community for
//! configuration, notably being used by [Cargo], Rust's package manager.
//!
//! [TOML]: https://toml.io
//! [Serde]: https://serde.rs
//! [Cargo]: https://crates.io
//!
//! # Deserialization
//!
//! ```
//! use semver::{Version, VersionReq};
//! use serde_derive::Deserialize;
//! use std::collections::BTreeMap as Map;
//!
//! #[derive(Deserialize)]
//! struct Manifest {
//! package: Package,
//! #[serde(default)]
//! dependencies: Map<String, VersionReq>,
//! }
//!
//! #[derive(Deserialize)]
//! struct Package {
//! name: String,
//! version: Version,
//! #[serde(default)]
//! authors: Vec<String>,
//! }
//!
//! fn main() {
//! let manifest: Manifest = basic_toml::from_str(r#"
//! [package]
//! name = "basic-toml"
#![doc = concat!(" version = \"", env!("CARGO_PKG_VERSION_MAJOR"), ".", env!("CARGO_PKG_VERSION_MINOR"), ".", env!("CARGO_PKG_VERSION_PATCH"), "\"")]
//! authors = ["Alex Crichton <alex@alexcrichton.com>"]
//!
//! [dependencies]
//! serde = "^1.0"
//! "#).unwrap();
//!
//! assert_eq!(manifest.package.name, "basic-toml");
#![doc = concat!(" assert_eq!(manifest.package.version, Version::new(", env!("CARGO_PKG_VERSION_MAJOR"), ", ", env!("CARGO_PKG_VERSION_MINOR"), ", ", env!("CARGO_PKG_VERSION_PATCH"), "));")]
//! assert_eq!(manifest.package.authors, ["Alex Crichton <alex@alexcrichton.com>"]);
//! assert_eq!(manifest.dependencies["serde"].to_string(), "^1.0");
//! }
//! ```
//!
//! # Serialization
//!
//! ```
//! use semver::{Version, VersionReq};
//! use serde_derive::Serialize;
//! use std::collections::BTreeMap as Map;
//!
//! #[derive(Serialize)]
//! struct Manifest {
//! package: Package,
//! dependencies: Map<String, VersionReq>,
//! }
//!
//! #[derive(Serialize)]
//! struct Package {
//! name: String,
//! version: Version,
//! authors: Vec<String>,
//! }
//!
//! fn main() {
//! let manifest = Manifest {
//! package: Package {
//! name: "basic-toml".to_owned(),
#![doc = concat!(" version: Version::new(", env!("CARGO_PKG_VERSION_MAJOR"), ", ", env!("CARGO_PKG_VERSION_MINOR"), ", ", env!("CARGO_PKG_VERSION_PATCH"), "),")]
//! authors: vec!["Alex Crichton <alex@alexcrichton.com>".to_owned()],
//! },
//! dependencies: {
//! let mut dependencies = Map::new();
//! dependencies.insert("serde".to_owned(), "^1.0".parse().unwrap());
//! dependencies
//! },
//! };
//!
//! let toml = basic_toml::to_string(&manifest).unwrap();
//! print!("{}", toml);
//! }
//! ```
//!
//! # Spec compatibility
//!
//! TOML v0.5.0.
//!
//! TOML's date and time syntax are not supported.
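//!
//! For illustration (not in the upstream docs), a datetime therefore fails to
//! parse instead of deserializing to some value:
//!
//! ```
//! let err = basic_toml::from_str::<serde_json::Value>("d = 1979-05-27").unwrap_err();
//! assert!(err.to_string().contains("invalid number"));
//! ```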
#![doc(html_root_url = "https://docs.rs/basic-toml/0.1.2")]
#![deny(missing_docs)]
#![allow(
clippy::bool_to_int_with_if,
clippy::let_underscore_untyped,
clippy::manual_let_else,
clippy::manual_range_contains,
clippy::match_like_matches_macro,
clippy::missing_errors_doc,
clippy::must_use_candidate,
clippy::needless_doctest_main,
clippy::needless_pass_by_value,
clippy::similar_names,
clippy::type_complexity,
clippy::uninlined_format_args,
clippy::unwrap_or_else_default
)]
mod de;
mod error;
mod ser;
mod tokens;
pub use crate::de::{from_slice, from_str};
pub use crate::error::Error;
pub use crate::ser::to_string;

third_party/rust/basic-toml/src/ser.rs vendored Normal file

@ -0,0 +1,838 @@
use serde::ser::{self, Serialize};
use std::cell::Cell;
use std::error;
use std::fmt::{self, Display, Write};
/// Serialize the given data structure as a String of TOML.
///
/// Serialization can fail if `T`'s implementation of `Serialize` decides to
/// fail, if `T` contains a map with non-string keys, or if `T` attempts to
/// serialize an unsupported datatype such as an enum, tuple, or tuple struct.
pub fn to_string<T: ?Sized>(value: &T) -> Result<String, crate::Error>
where
T: Serialize,
{
let mut dst = String::with_capacity(128);
value.serialize(&mut Serializer::new(&mut dst))?;
Ok(dst)
}
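// Illustrative only (not part of the vendored source): two of the failure
// modes described above.
//
//     assert!(to_string(&()).is_err()); // a unit has no TOML form: UnsupportedType
//     let m = std::collections::BTreeMap::from([(1, "x")]);
//     assert!(to_string(&m).is_err()); // integer map keys: KeyNotString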
#[derive(Debug)]
pub(crate) enum Error {
/// Indicates that a Rust type was requested to be serialized but it was not
/// supported.
///
/// Currently the TOML format does not support serializing types such as
/// enums, tuples and tuple structs.
UnsupportedType,
/// The key of all TOML maps must be strings, but serialization was
/// attempted where the key of a map was not a string.
KeyNotString,
/// All values in a TOML table must be emitted before further tables are
/// emitted. If a value is emitted *after* a table then this error is
/// generated.
ValueAfterTable,
/// None was attempted to be serialized, but it's not supported.
UnsupportedNone,
/// A custom error which could be generated when serializing a particular
/// type.
Custom(String),
}
struct Serializer<'a> {
dst: &'a mut String,
state: State<'a>,
}
#[derive(Debug, Copy, Clone)]
enum ArrayState {
Started,
StartedAsATable,
}
#[derive(Debug, Clone)]
enum State<'a> {
Table {
key: &'a str,
parent: &'a State<'a>,
first: &'a Cell<bool>,
table_emitted: &'a Cell<bool>,
},
Array {
parent: &'a State<'a>,
first: &'a Cell<bool>,
type_: &'a Cell<Option<ArrayState>>,
len: Option<usize>,
},
End,
}
struct SerializeSeq<'a, 'b> {
ser: &'b mut Serializer<'a>,
first: Cell<bool>,
type_: Cell<Option<ArrayState>>,
len: Option<usize>,
}
struct SerializeTable<'a, 'b> {
ser: &'b mut Serializer<'a>,
key: String,
first: Cell<bool>,
table_emitted: Cell<bool>,
}
impl<'a> Serializer<'a> {
fn new(dst: &'a mut String) -> Serializer<'a> {
Serializer {
dst,
state: State::End,
}
}
fn display<T: Display>(&mut self, t: T, type_: ArrayState) -> Result<(), Error> {
self.emit_key(type_)?;
write!(self.dst, "{}", t).map_err(ser::Error::custom)?;
if let State::Table { .. } = self.state {
self.dst.push('\n');
}
Ok(())
}
fn emit_key(&mut self, type_: ArrayState) -> Result<(), Error> {
self.array_type(type_);
let state = self.state.clone();
self._emit_key(&state)
}
// recursive implementation of `emit_key` above
fn _emit_key(&mut self, state: &State) -> Result<(), Error> {
match *state {
State::End => Ok(()),
State::Array {
parent,
first,
type_,
len,
} => {
assert!(type_.get().is_some());
if first.get() {
self._emit_key(parent)?;
}
self.emit_array(first, len);
Ok(())
}
State::Table {
parent,
first,
table_emitted,
key,
} => {
if table_emitted.get() {
return Err(Error::ValueAfterTable);
}
if first.get() {
self.emit_table_header(parent)?;
first.set(false);
}
self.escape_key(key)?;
self.dst.push_str(" = ");
Ok(())
}
}
}
fn emit_array(&mut self, first: &Cell<bool>, _len: Option<usize>) {
if first.get() {
self.dst.push('[');
} else {
self.dst.push_str(", ");
}
}
fn array_type(&mut self, type_: ArrayState) {
let prev = match self.state {
State::Array { type_, .. } => type_,
_ => return,
};
if prev.get().is_none() {
prev.set(Some(type_));
}
}
fn escape_key(&mut self, key: &str) -> Result<(), Error> {
let ok = !key.is_empty()
&& key.chars().all(|c| match c {
'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_' => true,
_ => false,
});
if ok {
write!(self.dst, "{}", key).map_err(ser::Error::custom)?;
} else {
self.emit_str(key)?;
}
Ok(())
}
fn emit_str(&mut self, value: &str) -> Result<(), Error> {
self.dst.push('"');
for ch in value.chars() {
match ch {
'\u{8}' => self.dst.push_str("\\b"),
'\u{9}' => self.dst.push_str("\\t"),
'\u{a}' => self.dst.push_str("\\n"),
'\u{c}' => self.dst.push_str("\\f"),
'\u{d}' => self.dst.push_str("\\r"),
'\u{22}' => self.dst.push_str("\\\""),
'\u{5c}' => self.dst.push_str("\\\\"),
c if c <= '\u{1f}' || c == '\u{7f}' => {
write!(self.dst, "\\u{:04X}", ch as u32).map_err(ser::Error::custom)?;
}
ch => self.dst.push(ch),
}
}
self.dst.push('"');
Ok(())
}
fn emit_table_header(&mut self, state: &State) -> Result<(), Error> {
let array_of_tables = match *state {
State::End => return Ok(()),
State::Array { .. } => true,
State::Table { .. } => false,
};
// Unlike [..]s, we can't omit [[..]] ancestors, so be sure to emit
// table headers for them.
let mut p = state;
if let State::Array { first, parent, .. } = *state {
if first.get() {
p = parent;
}
}
while let State::Table { first, parent, .. } = *p {
p = parent;
if !first.get() {
break;
}
if let State::Array {
parent: &State::Table { .. },
..
} = *parent
{
self.emit_table_header(parent)?;
break;
}
}
match *state {
State::Table { first, .. } => {
if !first.get() {
// Newline if we are a table that is not the first table in
// the document.
self.dst.push('\n');
}
}
State::Array { parent, first, .. } => {
if !first.get() {
// Always newline if we are not the first item in the
// table-array
self.dst.push('\n');
} else if let State::Table { first, .. } = *parent {
if !first.get() {
// Newline if we are not the first item in the document
self.dst.push('\n');
}
}
}
State::End => {}
}
self.dst.push('[');
if array_of_tables {
self.dst.push('[');
}
self.emit_key_part(state)?;
if array_of_tables {
self.dst.push(']');
}
self.dst.push_str("]\n");
Ok(())
}
fn emit_key_part(&mut self, key: &State) -> Result<bool, Error> {
match *key {
State::Array { parent, .. } => self.emit_key_part(parent),
State::End => Ok(true),
State::Table {
key,
parent,
table_emitted,
..
} => {
table_emitted.set(true);
let first = self.emit_key_part(parent)?;
if !first {
self.dst.push('.');
}
self.escape_key(key)?;
Ok(false)
}
}
}
}
macro_rules! serialize_float {
($this:expr, $v:expr) => {{
$this.emit_key(ArrayState::Started)?;
match ($v.is_sign_negative(), $v.is_nan(), $v == 0.0) {
(true, true, _) => write!($this.dst, "-nan"),
(false, true, _) => write!($this.dst, "nan"),
(true, false, true) => write!($this.dst, "-0.0"),
(false, false, true) => write!($this.dst, "0.0"),
(_, false, false) => write!($this.dst, "{}", $v).and_then(|_| {
if $v % 1.0 == 0.0 {
write!($this.dst, ".0")
} else {
Ok(())
}
}),
}
.map_err(ser::Error::custom)?;
if let State::Table { .. } = $this.state {
$this.dst.push_str("\n");
}
return Ok(());
}};
}
impl<'a, 'b> ser::Serializer for &'b mut Serializer<'a> {
type Ok = ();
type Error = Error;
type SerializeSeq = SerializeSeq<'a, 'b>;
type SerializeTuple = SerializeSeq<'a, 'b>;
type SerializeTupleStruct = SerializeSeq<'a, 'b>;
type SerializeTupleVariant = ser::Impossible<(), Error>;
type SerializeMap = SerializeTable<'a, 'b>;
type SerializeStruct = SerializeTable<'a, 'b>;
type SerializeStructVariant = ser::Impossible<(), Error>;
fn serialize_bool(self, v: bool) -> Result<(), Self::Error> {
self.display(v, ArrayState::Started)
}
fn serialize_i8(self, v: i8) -> Result<(), Self::Error> {
self.display(v, ArrayState::Started)
}
fn serialize_i16(self, v: i16) -> Result<(), Self::Error> {
self.display(v, ArrayState::Started)
}
fn serialize_i32(self, v: i32) -> Result<(), Self::Error> {
self.display(v, ArrayState::Started)
}
fn serialize_i64(self, v: i64) -> Result<(), Self::Error> {
self.display(v, ArrayState::Started)
}
fn serialize_u8(self, v: u8) -> Result<(), Self::Error> {
self.display(v, ArrayState::Started)
}
fn serialize_u16(self, v: u16) -> Result<(), Self::Error> {
self.display(v, ArrayState::Started)
}
fn serialize_u32(self, v: u32) -> Result<(), Self::Error> {
self.display(v, ArrayState::Started)
}
fn serialize_u64(self, v: u64) -> Result<(), Self::Error> {
self.display(v, ArrayState::Started)
}
fn serialize_f32(self, v: f32) -> Result<(), Self::Error> {
serialize_float!(self, v)
}
fn serialize_f64(self, v: f64) -> Result<(), Self::Error> {
serialize_float!(self, v)
}
fn serialize_char(self, v: char) -> Result<(), Self::Error> {
let mut buf = [0; 4];
self.serialize_str(v.encode_utf8(&mut buf))
}
fn serialize_str(self, value: &str) -> Result<(), Self::Error> {
self.emit_key(ArrayState::Started)?;
self.emit_str(value)?;
if let State::Table { .. } = self.state {
self.dst.push('\n');
}
Ok(())
}
fn serialize_bytes(self, value: &[u8]) -> Result<(), Self::Error> {
value.serialize(self)
}
fn serialize_none(self) -> Result<(), Self::Error> {
Err(Error::UnsupportedNone)
}
fn serialize_some<T: ?Sized>(self, value: &T) -> Result<(), Self::Error>
where
T: Serialize,
{
value.serialize(self)
}
fn serialize_unit(self) -> Result<(), Self::Error> {
Err(Error::UnsupportedType)
}
fn serialize_unit_struct(self, _name: &'static str) -> Result<(), Self::Error> {
Err(Error::UnsupportedType)
}
fn serialize_unit_variant(
self,
_name: &'static str,
_variant_index: u32,
variant: &'static str,
) -> Result<(), Self::Error> {
self.serialize_str(variant)
}
fn serialize_newtype_struct<T: ?Sized>(
self,
_name: &'static str,
value: &T,
) -> Result<(), Self::Error>
where
T: Serialize,
{
value.serialize(self)
}
fn serialize_newtype_variant<T: ?Sized>(
self,
_name: &'static str,
_variant_index: u32,
_variant: &'static str,
_value: &T,
) -> Result<(), Self::Error>
where
T: Serialize,
{
Err(Error::UnsupportedType)
}
fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
self.array_type(ArrayState::Started);
Ok(SerializeSeq {
ser: self,
first: Cell::new(true),
type_: Cell::new(None),
len,
})
}
fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
self.serialize_seq(Some(len))
}
fn serialize_tuple_struct(
self,
_name: &'static str,
len: usize,
) -> Result<Self::SerializeTupleStruct, Self::Error> {
self.serialize_seq(Some(len))
}
fn serialize_tuple_variant(
self,
_name: &'static str,
_variant_index: u32,
_variant: &'static str,
_len: usize,
) -> Result<Self::SerializeTupleVariant, Self::Error> {
Err(Error::UnsupportedType)
}
fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
self.array_type(ArrayState::StartedAsATable);
Ok(SerializeTable {
ser: self,
key: String::new(),
first: Cell::new(true),
table_emitted: Cell::new(false),
})
}
fn serialize_struct(
self,
_name: &'static str,
_len: usize,
) -> Result<Self::SerializeStruct, Self::Error> {
self.array_type(ArrayState::StartedAsATable);
Ok(SerializeTable {
ser: self,
key: String::new(),
first: Cell::new(true),
table_emitted: Cell::new(false),
})
}
fn serialize_struct_variant(
self,
_name: &'static str,
_variant_index: u32,
_variant: &'static str,
_len: usize,
) -> Result<Self::SerializeStructVariant, Self::Error> {
Err(Error::UnsupportedType)
}
}
impl<'a, 'b> ser::SerializeSeq for SerializeSeq<'a, 'b> {
type Ok = ();
type Error = Error;
fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
where
T: Serialize,
{
value.serialize(&mut Serializer {
dst: &mut *self.ser.dst,
state: State::Array {
parent: &self.ser.state,
first: &self.first,
type_: &self.type_,
len: self.len,
},
})?;
self.first.set(false);
Ok(())
}
fn end(self) -> Result<(), Error> {
match self.type_.get() {
Some(ArrayState::StartedAsATable) => return Ok(()),
Some(ArrayState::Started) => self.ser.dst.push(']'),
None => {
assert!(self.first.get());
self.ser.emit_key(ArrayState::Started)?;
self.ser.dst.push_str("[]");
}
}
if let State::Table { .. } = self.ser.state {
self.ser.dst.push('\n');
}
Ok(())
}
}
impl<'a, 'b> ser::SerializeTuple for SerializeSeq<'a, 'b> {
type Ok = ();
type Error = Error;
fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
where
T: Serialize,
{
ser::SerializeSeq::serialize_element(self, value)
}
fn end(self) -> Result<(), Error> {
ser::SerializeSeq::end(self)
}
}
impl<'a, 'b> ser::SerializeTupleStruct for SerializeSeq<'a, 'b> {
type Ok = ();
type Error = Error;
fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
where
T: Serialize,
{
ser::SerializeSeq::serialize_element(self, value)
}
fn end(self) -> Result<(), Error> {
ser::SerializeSeq::end(self)
}
}
impl<'a, 'b> ser::SerializeMap for SerializeTable<'a, 'b> {
type Ok = ();
type Error = Error;
fn serialize_key<T: ?Sized>(&mut self, input: &T) -> Result<(), Error>
where
T: Serialize,
{
self.key = input.serialize(StringExtractor)?;
Ok(())
}
fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
where
T: Serialize,
{
let res = value.serialize(&mut Serializer {
dst: &mut *self.ser.dst,
state: State::Table {
key: &self.key,
parent: &self.ser.state,
first: &self.first,
table_emitted: &self.table_emitted,
},
});
match res {
Ok(()) => self.first.set(false),
Err(Error::UnsupportedNone) => {}
Err(e) => return Err(e),
}
Ok(())
}
fn end(self) -> Result<(), Error> {
if self.first.get() {
let state = self.ser.state.clone();
self.ser.emit_table_header(&state)?;
}
Ok(())
}
}
impl<'a, 'b> ser::SerializeStruct for SerializeTable<'a, 'b> {
type Ok = ();
type Error = Error;
fn serialize_field<T: ?Sized>(&mut self, key: &'static str, value: &T) -> Result<(), Error>
where
T: Serialize,
{
let res = value.serialize(&mut Serializer {
dst: &mut *self.ser.dst,
state: State::Table {
key,
parent: &self.ser.state,
first: &self.first,
table_emitted: &self.table_emitted,
},
});
match res {
Ok(()) => self.first.set(false),
Err(Error::UnsupportedNone) => {}
Err(e) => return Err(e),
}
Ok(())
}
fn end(self) -> Result<(), Error> {
if self.first.get() {
let state = self.ser.state.clone();
self.ser.emit_table_header(&state)?;
}
Ok(())
}
}
struct StringExtractor;
impl ser::Serializer for StringExtractor {
type Ok = String;
type Error = Error;
type SerializeSeq = ser::Impossible<String, Error>;
type SerializeTuple = ser::Impossible<String, Error>;
type SerializeTupleStruct = ser::Impossible<String, Error>;
type SerializeTupleVariant = ser::Impossible<String, Error>;
type SerializeMap = ser::Impossible<String, Error>;
type SerializeStruct = ser::Impossible<String, Error>;
type SerializeStructVariant = ser::Impossible<String, Error>;
fn serialize_bool(self, _v: bool) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_i8(self, _v: i8) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_i16(self, _v: i16) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_i32(self, _v: i32) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_i64(self, _v: i64) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_u8(self, _v: u8) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_u16(self, _v: u16) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_u32(self, _v: u32) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_u64(self, _v: u64) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_f32(self, _v: f32) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_f64(self, _v: f64) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_char(self, _v: char) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_str(self, value: &str) -> Result<String, Self::Error> {
Ok(value.to_string())
}
fn serialize_bytes(self, _value: &[u8]) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_none(self) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_some<T: ?Sized>(self, _value: &T) -> Result<String, Self::Error>
where
T: Serialize,
{
Err(Error::KeyNotString)
}
fn serialize_unit(self) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_unit_struct(self, _name: &'static str) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_unit_variant(
self,
_name: &'static str,
_variant_index: u32,
_variant: &'static str,
) -> Result<String, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_newtype_struct<T: ?Sized>(
self,
_name: &'static str,
value: &T,
) -> Result<String, Self::Error>
where
T: Serialize,
{
value.serialize(self)
}
fn serialize_newtype_variant<T: ?Sized>(
self,
_name: &'static str,
_variant_index: u32,
_variant: &'static str,
_value: &T,
) -> Result<String, Self::Error>
where
T: Serialize,
{
Err(Error::KeyNotString)
}
fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_tuple_struct(
self,
_name: &'static str,
_len: usize,
) -> Result<Self::SerializeTupleStruct, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_tuple_variant(
self,
_name: &'static str,
_variant_index: u32,
_variant: &'static str,
_len: usize,
) -> Result<Self::SerializeTupleVariant, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_struct(
self,
_name: &'static str,
_len: usize,
) -> Result<Self::SerializeStruct, Self::Error> {
Err(Error::KeyNotString)
}
fn serialize_struct_variant(
self,
_name: &'static str,
_variant_index: u32,
_variant: &'static str,
_len: usize,
) -> Result<Self::SerializeStructVariant, Self::Error> {
Err(Error::KeyNotString)
}
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::UnsupportedType => "unsupported Rust type".fmt(f),
Error::KeyNotString => "map key was not a string".fmt(f),
Error::ValueAfterTable => "values must be emitted before tables".fmt(f),
Error::UnsupportedNone => "unsupported None value".fmt(f),
Error::Custom(ref s) => s.fmt(f),
}
}
}
impl error::Error for Error {}
impl ser::Error for Error {
fn custom<T: Display>(msg: T) -> Error {
Error::Custom(msg.to_string())
}
}


@ -0,0 +1,546 @@
use std::borrow::Cow;
use std::char;
use std::str;
/// A span, designating a range of bytes where a token is located.
#[derive(Eq, PartialEq, Debug, Clone, Copy)]
pub struct Span {
/// The start of the range.
pub start: usize,
/// The end of the range (exclusive).
pub end: usize,
}
impl From<Span> for (usize, usize) {
fn from(Span { start, end }: Span) -> (usize, usize) {
(start, end)
}
}
#[derive(Eq, PartialEq, Debug)]
pub enum Token<'a> {
Whitespace(&'a str),
Newline,
Comment(&'a str),
Equals,
Period,
Comma,
Colon,
Plus,
LeftBrace,
RightBrace,
LeftBracket,
RightBracket,
Keylike(&'a str),
String {
src: &'a str,
val: Cow<'a, str>,
multiline: bool,
},
}
#[derive(Eq, PartialEq, Debug)]
pub enum Error {
InvalidCharInString(usize, char),
InvalidEscape(usize, char),
InvalidHexEscape(usize, char),
InvalidEscapeValue(usize, u32),
NewlineInString(usize),
Unexpected(usize, char),
UnterminatedString(usize),
NewlineInTableKey(usize),
MultilineStringKey(usize),
Wanted {
at: usize,
expected: &'static str,
found: &'static str,
},
}
#[derive(Clone)]
pub struct Tokenizer<'a> {
input: &'a str,
chars: CrlfFold<'a>,
}
#[derive(Clone)]
struct CrlfFold<'a> {
chars: str::CharIndices<'a>,
}
#[derive(Debug)]
enum MaybeString {
NotEscaped(usize),
Owned(String),
}
impl<'a> Tokenizer<'a> {
pub fn new(input: &'a str) -> Tokenizer<'a> {
let mut t = Tokenizer {
input,
chars: CrlfFold {
chars: input.char_indices(),
},
};
// Eat utf-8 BOM
t.eatc('\u{feff}');
t
}
pub fn next(&mut self) -> Result<Option<(Span, Token<'a>)>, Error> {
let (start, token) = match self.one() {
Some((start, '\n')) => (start, Token::Newline),
Some((start, ' ' | '\t')) => (start, self.whitespace_token(start)),
Some((start, '#')) => (start, self.comment_token(start)),
Some((start, '=')) => (start, Token::Equals),
Some((start, '.')) => (start, Token::Period),
Some((start, ',')) => (start, Token::Comma),
Some((start, ':')) => (start, Token::Colon),
Some((start, '+')) => (start, Token::Plus),
Some((start, '{')) => (start, Token::LeftBrace),
Some((start, '}')) => (start, Token::RightBrace),
Some((start, '[')) => (start, Token::LeftBracket),
Some((start, ']')) => (start, Token::RightBracket),
Some((start, '\'')) => {
return self
.literal_string(start)
.map(|t| Some((self.step_span(start), t)))
}
Some((start, '"')) => {
return self
.basic_string(start)
.map(|t| Some((self.step_span(start), t)))
}
Some((start, ch)) if is_keylike(ch) => (start, self.keylike(start)),
Some((start, ch)) => return Err(Error::Unexpected(start, ch)),
None => return Ok(None),
};
let span = self.step_span(start);
Ok(Some((span, token)))
}
pub fn peek(&mut self) -> Result<Option<(Span, Token<'a>)>, Error> {
self.clone().next()
}
pub fn eat(&mut self, expected: Token<'a>) -> Result<bool, Error> {
self.eat_spanned(expected).map(|s| s.is_some())
}
/// Eat a value, returning its span if it was consumed.
pub fn eat_spanned(&mut self, expected: Token<'a>) -> Result<Option<Span>, Error> {
let span = match self.peek()? {
Some((span, ref found)) if expected == *found => span,
Some(_) | None => return Ok(None),
};
drop(self.next());
Ok(Some(span))
}
pub fn expect(&mut self, expected: Token<'a>) -> Result<(), Error> {
// ignore span
let _ = self.expect_spanned(expected)?;
Ok(())
}
/// Expect the given token returning its span.
pub fn expect_spanned(&mut self, expected: Token<'a>) -> Result<Span, Error> {
let current = self.current();
match self.next()? {
Some((span, found)) => {
if expected == found {
Ok(span)
} else {
Err(Error::Wanted {
at: current,
expected: expected.describe(),
found: found.describe(),
})
}
}
None => Err(Error::Wanted {
at: self.input.len(),
expected: expected.describe(),
found: "eof",
}),
}
}
pub fn table_key(&mut self) -> Result<(Span, Cow<'a, str>), Error> {
let current = self.current();
match self.next()? {
Some((span, Token::Keylike(k))) => Ok((span, k.into())),
Some((
span,
Token::String {
src,
val,
multiline,
},
)) => {
let offset = self.substr_offset(src);
if multiline {
return Err(Error::MultilineStringKey(offset));
}
match src.find('\n') {
None => Ok((span, val)),
Some(i) => Err(Error::NewlineInTableKey(offset + i)),
}
}
Some((_, other)) => Err(Error::Wanted {
at: current,
expected: "a table key",
found: other.describe(),
}),
None => Err(Error::Wanted {
at: self.input.len(),
expected: "a table key",
found: "eof",
}),
}
}
pub fn eat_whitespace(&mut self) {
while self.eatc(' ') || self.eatc('\t') {
// ...
}
}
pub fn eat_comment(&mut self) -> Result<bool, Error> {
if !self.eatc('#') {
return Ok(false);
}
drop(self.comment_token(0));
self.eat_newline_or_eof().map(|()| true)
}
pub fn eat_newline_or_eof(&mut self) -> Result<(), Error> {
let current = self.current();
match self.next()? {
None | Some((_, Token::Newline)) => Ok(()),
Some((_, other)) => Err(Error::Wanted {
at: current,
expected: "newline",
found: other.describe(),
}),
}
}
pub fn skip_to_newline(&mut self) {
loop {
match self.one() {
Some((_, '\n')) | None => break,
_ => {}
}
}
}
fn eatc(&mut self, ch: char) -> bool {
match self.chars.clone().next() {
Some((_, ch2)) if ch == ch2 => {
self.one();
true
}
_ => false,
}
}
pub fn current(&mut self) -> usize {
match self.chars.clone().next() {
Some(i) => i.0,
None => self.input.len(),
}
}
fn whitespace_token(&mut self, start: usize) -> Token<'a> {
while self.eatc(' ') || self.eatc('\t') {
// ...
}
Token::Whitespace(&self.input[start..self.current()])
}
fn comment_token(&mut self, start: usize) -> Token<'a> {
while let Some((_, ch)) = self.chars.clone().next() {
if ch != '\t' && (ch < '\u{20}' || ch > '\u{10ffff}') {
break;
}
self.one();
}
Token::Comment(&self.input[start..self.current()])
}
fn read_string(
&mut self,
delim: char,
start: usize,
new_ch: &mut dyn FnMut(
&mut Tokenizer,
&mut MaybeString,
bool,
usize,
char,
) -> Result<(), Error>,
) -> Result<Token<'a>, Error> {
let mut multiline = false;
if self.eatc(delim) {
if self.eatc(delim) {
multiline = true;
} else {
return Ok(Token::String {
src: &self.input[start..start + 2],
val: Cow::Borrowed(""),
multiline: false,
});
}
}
let mut val = MaybeString::NotEscaped(self.current());
let mut n = 0;
loop {
n += 1;
match self.one() {
Some((i, '\n')) => {
if multiline {
if self.input.as_bytes()[i] == b'\r' {
val.make_owned(&self.input[..i]);
}
if n == 1 {
val = MaybeString::NotEscaped(self.current());
} else {
val.push('\n');
}
} else {
return Err(Error::NewlineInString(i));
}
}
Some((mut i, ch)) if ch == delim => {
if multiline {
if !self.eatc(delim) {
val.push(delim);
continue;
}
if !self.eatc(delim) {
val.push(delim);
val.push(delim);
continue;
}
if self.eatc(delim) {
val.push(delim);
i += 1;
}
if self.eatc(delim) {
val.push(delim);
i += 1;
}
}
return Ok(Token::String {
src: &self.input[start..self.current()],
val: val.into_cow(&self.input[..i]),
multiline,
});
}
Some((i, c)) => new_ch(self, &mut val, multiline, i, c)?,
None => return Err(Error::UnterminatedString(start)),
}
}
}
fn literal_string(&mut self, start: usize) -> Result<Token<'a>, Error> {
self.read_string('\'', start, &mut |_me, val, _multi, i, ch| {
if ch == '\u{09}' || ('\u{20}' <= ch && ch <= '\u{10ffff}' && ch != '\u{7f}') {
val.push(ch);
Ok(())
} else {
Err(Error::InvalidCharInString(i, ch))
}
})
}
fn basic_string(&mut self, start: usize) -> Result<Token<'a>, Error> {
self.read_string('"', start, &mut |me, val, multi, i, ch| match ch {
'\\' => {
val.make_owned(&me.input[..i]);
match me.chars.next() {
Some((_, '"')) => val.push('"'),
Some((_, '\\')) => val.push('\\'),
Some((_, 'b')) => val.push('\u{8}'),
Some((_, 'f')) => val.push('\u{c}'),
Some((_, 'n')) => val.push('\n'),
Some((_, 'r')) => val.push('\r'),
Some((_, 't')) => val.push('\t'),
Some((i, c @ ('u' | 'U'))) => {
let len = if c == 'u' { 4 } else { 8 };
val.push(me.hex(start, i, len)?);
}
Some((i, c @ (' ' | '\t' | '\n'))) if multi => {
if c != '\n' {
while let Some((_, ch)) = me.chars.clone().next() {
match ch {
' ' | '\t' => {
me.chars.next();
continue;
}
'\n' => {
me.chars.next();
break;
}
_ => return Err(Error::InvalidEscape(i, c)),
}
}
}
while let Some((_, ch)) = me.chars.clone().next() {
match ch {
' ' | '\t' | '\n' => {
me.chars.next();
}
_ => break,
}
}
}
Some((i, c)) => return Err(Error::InvalidEscape(i, c)),
None => return Err(Error::UnterminatedString(start)),
}
Ok(())
}
ch if ch == '\u{09}' || ('\u{20}' <= ch && ch <= '\u{10ffff}' && ch != '\u{7f}') => {
val.push(ch);
Ok(())
}
_ => Err(Error::InvalidCharInString(i, ch)),
})
}
fn hex(&mut self, start: usize, i: usize, len: usize) -> Result<char, Error> {
let mut buf = String::with_capacity(len);
for _ in 0..len {
match self.one() {
Some((_, ch)) if ch as u32 <= 0x7F && ch.is_ascii_hexdigit() => buf.push(ch),
Some((i, ch)) => return Err(Error::InvalidHexEscape(i, ch)),
None => return Err(Error::UnterminatedString(start)),
}
}
let val = u32::from_str_radix(&buf, 16).unwrap();
match char::from_u32(val) {
Some(ch) => Ok(ch),
None => Err(Error::InvalidEscapeValue(i, val)),
}
}
fn keylike(&mut self, start: usize) -> Token<'a> {
while let Some((_, ch)) = self.peek_one() {
if !is_keylike(ch) {
break;
}
self.one();
}
Token::Keylike(&self.input[start..self.current()])
}
pub fn substr_offset(&self, s: &'a str) -> usize {
assert!(s.len() <= self.input.len());
let a = self.input.as_ptr() as usize;
let b = s.as_ptr() as usize;
assert!(a <= b);
b - a
}
/// Calculate the span of a single character.
fn step_span(&mut self, start: usize) -> Span {
let end = match self.peek_one() {
Some(t) => t.0,
None => self.input.len(),
};
Span { start, end }
}
/// Peek one char without consuming it.
fn peek_one(&mut self) -> Option<(usize, char)> {
self.chars.clone().next()
}
/// Take one char.
pub fn one(&mut self) -> Option<(usize, char)> {
self.chars.next()
}
}
impl<'a> Iterator for CrlfFold<'a> {
type Item = (usize, char);
fn next(&mut self) -> Option<(usize, char)> {
self.chars.next().map(|(i, c)| {
if c == '\r' {
let mut attempt = self.chars.clone();
if let Some((_, '\n')) = attempt.next() {
self.chars = attempt;
return (i, '\n');
}
}
(i, c)
})
}
}
impl MaybeString {
fn push(&mut self, ch: char) {
match *self {
MaybeString::NotEscaped(..) => {}
MaybeString::Owned(ref mut s) => s.push(ch),
}
}
fn make_owned(&mut self, input: &str) {
match *self {
MaybeString::NotEscaped(start) => {
*self = MaybeString::Owned(input[start..].to_owned());
}
MaybeString::Owned(..) => {}
}
}
fn into_cow(self, input: &str) -> Cow<str> {
match self {
MaybeString::NotEscaped(start) => Cow::Borrowed(&input[start..]),
MaybeString::Owned(s) => Cow::Owned(s),
}
}
}
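/// Returns true for characters permitted in a bare (unquoted) key:
/// ASCII alphanumerics, `-`, and `_`.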
fn is_keylike(ch: char) -> bool {
('A' <= ch && ch <= 'Z')
|| ('a' <= ch && ch <= 'z')
|| ('0' <= ch && ch <= '9')
|| ch == '-'
|| ch == '_'
}
impl<'a> Token<'a> {
pub fn describe(&self) -> &'static str {
match *self {
Token::Keylike(_) => "an identifier",
Token::Equals => "an equals",
Token::Period => "a period",
Token::Comment(_) => "a comment",
Token::Newline => "a newline",
Token::Whitespace(_) => "whitespace",
Token::Comma => "a comma",
Token::RightBrace => "a right brace",
Token::LeftBrace => "a left brace",
Token::RightBracket => "a right bracket",
Token::LeftBracket => "a left bracket",
Token::String { multiline, .. } => {
if multiline {
"a multiline string"
} else {
"a string"
}
}
Token::Colon => "a colon",
Token::Plus => "a plus",
}
}
}

View file

@ -0,0 +1 @@
These tests are taken from https://github.com/BurntSushi/toml-test

View file

@ -0,0 +1,142 @@
use serde_json::Value;
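// Asserts that `$toml` fails to parse and that the error message matches
// `$msg` exactly.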
macro_rules! bad {
($toml:expr, $msg:expr) => {
match basic_toml::from_str::<Value>($toml) {
Ok(s) => panic!("parsed to: {:#?}", s),
Err(e) => assert_eq!(e.to_string(), $msg),
}
};
}
#[test]
fn times() {
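// Checks the input in each of its spelling variants: `T` replaced by a
// space or lowercase `t`, and `Z` replaced by lowercase `z`.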
fn multi_bad(s: &str, msg: &str) {
bad!(s, msg);
bad!(&s.replace('T', " "), msg);
bad!(&s.replace('T', "t"), msg);
bad!(&s.replace('Z', "z"), msg);
}
multi_bad(
"foo = 1997-09-09T09:09:09Z",
"invalid number at line 1 column 7",
);
multi_bad(
"foo = 1997-09-09T09:09:09+09:09",
"invalid number at line 1 column 7",
);
multi_bad(
"foo = 1997-09-09T09:09:09-09:09",
"invalid number at line 1 column 7",
);
multi_bad(
"foo = 1997-09-09T09:09:09",
"invalid number at line 1 column 7",
);
multi_bad("foo = 1997-09-09", "invalid number at line 1 column 7");
bad!("foo = 1997-09-09 ", "invalid number at line 1 column 7");
bad!(
"foo = 1997-09-09 # comment",
"invalid number at line 1 column 7"
);
multi_bad("foo = 09:09:09", "invalid number at line 1 column 8");
multi_bad(
"foo = 1997-09-09T09:09:09.09Z",
"invalid number at line 1 column 7",
);
multi_bad(
"foo = 1997-09-09T09:09:09.09+09:09",
"invalid number at line 1 column 7",
);
multi_bad(
"foo = 1997-09-09T09:09:09.09-09:09",
"invalid number at line 1 column 7",
);
multi_bad(
"foo = 1997-09-09T09:09:09.09",
"invalid number at line 1 column 7",
);
multi_bad("foo = 09:09:09.09", "invalid number at line 1 column 8");
}
#[test]
fn bad_times() {
bad!("foo = 199-09-09", "invalid number at line 1 column 7");
bad!("foo = 199709-09", "invalid number at line 1 column 7");
bad!("foo = 1997-9-09", "invalid number at line 1 column 7");
bad!("foo = 1997-09-9", "invalid number at line 1 column 7");
bad!(
"foo = 1997-09-0909:09:09",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-09-09T09:09:09.",
"invalid number at line 1 column 7"
);
bad!(
"foo = T",
"invalid TOML value, did you mean to use a quoted string? at line 1 column 7"
);
bad!(
"foo = T.",
"invalid TOML value, did you mean to use a quoted string? at line 1 column 7"
);
bad!(
"foo = TZ",
"invalid TOML value, did you mean to use a quoted string? at line 1 column 7"
);
bad!(
"foo = 1997-09-09T09:09:09.09+",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-09-09T09:09:09.09+09",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-09-09T09:09:09.09+09:9",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-09-09T09:09:09.09+0909",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-09-09T09:09:09.09-",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-09-09T09:09:09.09-09",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-09-09T09:09:09.09-09:9",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-09-09T09:09:09.09-0909",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-00-09T09:09:09.09Z",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-09-00T09:09:09.09Z",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-09-09T30:09:09.09Z",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-09-09T12:69:09.09Z",
"invalid number at line 1 column 7"
);
bad!(
"foo = 1997-09-09T12:09:69.09Z",
"invalid number at line 1 column 7"
);
}

View file

@ -0,0 +1,350 @@
#![allow(clippy::too_many_lines)]
use serde::{de, Deserialize};
use std::fmt;
macro_rules! bad {
($toml:expr, $ty:ty, $msg:expr) => {
match basic_toml::from_str::<$ty>($toml) {
Ok(s) => panic!("parsed to: {:#?}", s),
Err(e) => assert_eq!(e.to_string(), $msg),
}
};
}
#[derive(Debug, Deserialize, PartialEq)]
struct Parent<T> {
p_a: T,
p_b: Vec<Child<T>>,
}
#[derive(Debug, Deserialize, PartialEq)]
#[serde(deny_unknown_fields)]
struct Child<T> {
c_a: T,
c_b: T,
}
#[derive(Debug, PartialEq)]
enum CasedString {
Lowercase(String),
Uppercase(String),
}
impl<'de> de::Deserialize<'de> for CasedString {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
{
struct CasedStringVisitor;
impl<'de> de::Visitor<'de> for CasedStringVisitor {
type Value = CasedString;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a string")
}
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
if s.is_empty() {
Err(de::Error::invalid_length(0, &"a non-empty string"))
} else if s.chars().all(|x| x.is_ascii_lowercase()) {
Ok(CasedString::Lowercase(s.to_string()))
} else if s.chars().all(|x| x.is_ascii_uppercase()) {
Ok(CasedString::Uppercase(s.to_string()))
} else {
Err(de::Error::invalid_value(
de::Unexpected::Str(s),
&"all lowercase or all uppercase",
))
}
}
}
deserializer.deserialize_any(CasedStringVisitor)
}
}
#[test]
fn custom_errors() {
basic_toml::from_str::<Parent<CasedString>>(
"
p_a = 'a'
p_b = [{c_a = 'a', c_b = 'c'}]
",
)
.unwrap();
// Custom error at p_b value.
bad!(
"
p_a = ''
# ^
",
Parent<CasedString>,
"invalid length 0, expected a non-empty string for key `p_a` at line 2 column 19"
);
// Missing field in table.
bad!(
"
p_a = 'a'
# ^
",
Parent<CasedString>,
"missing field `p_b` at line 1 column 1"
);
// Invalid type in p_b.
bad!(
"
p_a = 'a'
p_b = 1
# ^
",
Parent<CasedString>,
"invalid type: integer `1`, expected a sequence for key `p_b` at line 3 column 19"
);
// Sub-table in Vec is missing a field.
bad!(
"
p_a = 'a'
p_b = [
{c_a = 'a'}
# ^
]
",
Parent<CasedString>,
"missing field `c_b` for key `p_b` at line 4 column 17"
);
// Sub-table in Vec has a field with a bad value.
bad!(
"
p_a = 'a'
p_b = [
{c_a = 'a', c_b = '*'}
# ^
]
",
Parent<CasedString>,
"invalid value: string \"*\", expected all lowercase or all uppercase for key `p_b` at line 4 column 35"
);
// Sub-table in Vec is missing a field.
bad!(
"
p_a = 'a'
p_b = [
{c_a = 'a', c_b = 'b'},
{c_a = 'aa'}
# ^
]
",
Parent<CasedString>,
"missing field `c_b` for key `p_b` at line 5 column 17"
);
// Sub-table in the middle of a Vec is missing a field.
bad!(
"
p_a = 'a'
p_b = [
{c_a = 'a', c_b = 'b'},
{c_a = 'aa'},
# ^
{c_a = 'aaa', c_b = 'bbb'},
]
",
Parent<CasedString>,
"missing field `c_b` for key `p_b` at line 5 column 17"
);
// Sub-table in the middle of a Vec has a field with a bad value.
bad!(
"
p_a = 'a'
p_b = [
{c_a = 'a', c_b = 'b'},
{c_a = 'aa', c_b = 1},
# ^
{c_a = 'aaa', c_b = 'bbb'},
]
",
Parent<CasedString>,
"invalid type: integer `1`, expected a string for key `p_b` at line 5 column 36"
);
// Sub-table in the middle of a Vec has an extra field.
// FIXME: This location could be better.
bad!(
"
p_a = 'a'
p_b = [
{c_a = 'a', c_b = 'b'},
{c_a = 'aa', c_b = 'bb', c_d = 'd'},
# ^
{c_a = 'aaa', c_b = 'bbb'},
{c_a = 'aaaa', c_b = 'bbbb'},
]
",
Parent<CasedString>,
"unknown field `c_d`, expected `c_a` or `c_b` for key `p_b` at line 5 column 17"
);
// Sub-table in the middle of a Vec is missing a field.
// FIXME: This location is pretty off.
bad!(
"
p_a = 'a'
[[p_b]]
c_a = 'a'
c_b = 'b'
[[p_b]]
c_a = 'aa'
# c_b = 'bb' # <- missing field
[[p_b]]
c_a = 'aaa'
c_b = 'bbb'
[[p_b]]
# ^
c_a = 'aaaa'
c_b = 'bbbb'
",
Parent<CasedString>,
"missing field `c_b` for key `p_b` at line 12 column 13"
);
// Sub-table in the middle of a Vec has a field with a bad value.
bad!(
"
p_a = 'a'
[[p_b]]
c_a = 'a'
c_b = 'b'
[[p_b]]
c_a = 'aa'
c_b = '*'
# ^
[[p_b]]
c_a = 'aaa'
c_b = 'bbb'
",
Parent<CasedString>,
"invalid value: string \"*\", expected all lowercase or all uppercase for key `p_b.c_b` at line 8 column 19"
);
// Sub-table in the middle of a Vec has an extra field.
// FIXME: This location is pretty off.
bad!(
"
p_a = 'a'
[[p_b]]
c_a = 'a'
c_b = 'b'
[[p_b]]
c_a = 'aa'
c_d = 'dd' # unknown field
[[p_b]]
c_a = 'aaa'
c_b = 'bbb'
[[p_b]]
# ^
c_a = 'aaaa'
c_b = 'bbbb'
",
Parent<CasedString>,
"unknown field `c_d`, expected `c_a` or `c_b` for key `p_b` at line 12 column 13"
);
}
#[test]
fn serde_derive_deserialize_errors() {
bad!(
"
p_a = ''
# ^
",
Parent<String>,
"missing field `p_b` at line 1 column 1"
);
bad!(
"
p_a = ''
p_b = [
{c_a = ''}
# ^
]
",
Parent<String>,
"missing field `c_b` for key `p_b` at line 4 column 17"
);
bad!(
"
p_a = ''
p_b = [
{c_a = '', c_b = 1}
# ^
]
",
Parent<String>,
"invalid type: integer `1`, expected a string for key `p_b` at line 4 column 34"
);
// FIXME: This location could be better.
bad!(
"
p_a = ''
p_b = [
{c_a = '', c_b = '', c_d = ''},
# ^
]
",
Parent<String>,
"unknown field `c_d`, expected `c_a` or `c_b` for key `p_b` at line 4 column 17"
);
bad!(
"
p_a = 'a'
p_b = [
{c_a = '', c_b = 1, c_d = ''},
# ^
]
",
Parent<String>,
"invalid type: integer `1`, expected a string for key `p_b` at line 4 column 34"
);
}
#[test]
fn error_handles_crlf() {
bad!(
"\r\n\
[t1]\r\n\
[t2]\r\n\
a = 1\r\n\
. = 2\r\n\
",
serde_json::Value,
"expected a table key, found a period at line 5 column 1"
);
// Should be the same as above.
bad!(
"\n\
[t1]\n\
[t2]\n\
a = 1\n\
. = 2\n\
",
serde_json::Value,
"expected a table key, found a period at line 5 column 1"
);
}

View file

@ -0,0 +1,53 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize)]
pub struct Recipe {
pub name: String,
pub description: Option<String>,
#[serde(default)]
pub modules: Vec<Modules>,
#[serde(default)]
pub packages: Vec<Packages>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Modules {
pub name: String,
pub version: Option<String>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Packages {
pub name: String,
pub version: Option<String>,
}
#[test]
fn both_ends() {
let recipe_works = basic_toml::from_str::<Recipe>(
r#"
name = "testing"
description = "example"
modules = []
[[packages]]
name = "base"
"#,
)
.unwrap();
basic_toml::to_string(&recipe_works).unwrap();
let recipe_fails = basic_toml::from_str::<Recipe>(
r#"
name = "testing"
description = "example"
packages = []
[[modules]]
name = "base"
"#,
)
.unwrap();
let err = basic_toml::to_string(&recipe_fails).unwrap_err();
assert_eq!(err.to_string(), "values must be emitted before tables");
}

View file

@ -0,0 +1,30 @@
#![allow(clippy::wildcard_imports)]
use serde::Deserialize;
#[derive(Debug, Deserialize, PartialEq)]
struct Struct {
value: Enum,
}
#[derive(Debug, Deserialize, PartialEq)]
enum Enum {
Variant,
}
#[test]
fn unknown_variant() {
let error = basic_toml::from_str::<Struct>("value = \"NonExistent\"").unwrap_err();
assert_eq!(
error.to_string(),
"unknown variant `NonExistent`, expected `Variant` for key `value` at line 1 column 1"
);
}
#[test]
fn from_str() {
let s = basic_toml::from_str::<Struct>("value = \"Variant\"").unwrap();
assert_eq!(Enum::Variant, s.value);
}

View file

@ -0,0 +1,81 @@
#![allow(clippy::float_cmp)]
use serde::{Deserialize, Serialize};
use serde_json::Value;
#[rustfmt::skip] // rustfmt appears to hit a bug and fails to converge on this macro
macro_rules! float_inf_tests {
($ty:ty) => {{
#[derive(Serialize, Deserialize)]
struct S {
sf1: $ty,
sf2: $ty,
sf3: $ty,
sf4: $ty,
sf5: $ty,
sf6: $ty,
sf7: $ty,
sf8: $ty,
}
let inf: S = basic_toml::from_str(
r"
# infinity
sf1 = inf # positive infinity
sf2 = +inf # positive infinity
sf3 = -inf # negative infinity
# not a number
sf4 = nan # actual sNaN/qNaN encoding is implementation specific
sf5 = +nan # same as `nan`
sf6 = -nan # valid, actual encoding is implementation specific
# zero
sf7 = +0.0
sf8 = -0.0
",
)
.expect("Parse infinities.");
assert!(inf.sf1.is_infinite());
assert!(inf.sf1.is_sign_positive());
assert!(inf.sf2.is_infinite());
assert!(inf.sf2.is_sign_positive());
assert!(inf.sf3.is_infinite());
assert!(inf.sf3.is_sign_negative());
assert!(inf.sf4.is_nan());
assert!(inf.sf4.is_sign_positive());
assert!(inf.sf5.is_nan());
assert!(inf.sf5.is_sign_positive());
assert!(inf.sf6.is_nan());
assert!(inf.sf6.is_sign_negative());
assert_eq!(inf.sf7, 0.0);
assert!(inf.sf7.is_sign_positive());
assert_eq!(inf.sf8, 0.0);
assert!(inf.sf8.is_sign_negative());
let s = basic_toml::to_string(&inf).unwrap();
assert_eq!(
s,
"\
sf1 = inf
sf2 = inf
sf3 = -inf
sf4 = nan
sf5 = nan
sf6 = -nan
sf7 = 0.0
sf8 = -0.0
"
);
basic_toml::from_str::<Value>(&s).expect("roundtrip");
}};
}
#[test]
fn float_inf() {
float_inf_tests!(f32);
float_inf_tests!(f64);
}

View file

@ -0,0 +1,53 @@
use basic_toml::to_string;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
struct User {
pub name: String,
pub surname: String,
}
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
struct Users {
pub user: Vec<User>,
}
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
struct TwoUsers {
pub user0: User,
pub user1: User,
}
#[test]
fn no_unnecessary_newlines_array() {
assert!(!to_string(&Users {
user: vec![
User {
name: "John".to_string(),
surname: "Doe".to_string(),
},
User {
name: "Jane".to_string(),
surname: "Dough".to_string(),
},
],
})
.unwrap()
.starts_with('\n'));
}
#[test]
fn no_unnecessary_newlines_table() {
assert!(!to_string(&TwoUsers {
user0: User {
name: "John".to_string(),
surname: "Doe".to_string(),
},
user1: User {
name: "Jane".to_string(),
surname: "Dough".to_string(),
},
})
.unwrap()
.starts_with('\n'));
}

View file

@ -0,0 +1,15 @@
{
"ints-and-floats": {
"type": "array",
"value": [
{
"type": "integer",
"value": "1"
},
{
"type": "float",
"value": "1.1"
}
]
}
}

View file

@ -0,0 +1,48 @@
use serde_json::Value;
macro_rules! bad {
($toml:expr, $msg:expr) => {
match basic_toml::from_str::<Value>($toml) {
Ok(s) => panic!("parsed to: {:#?}", s),
Err(e) => assert_eq!(e.to_string(), $msg),
}
};
}
#[test]
fn bad() {
bad!("a = 01", "invalid number at line 1 column 6");
bad!("a = 1__1", "invalid number at line 1 column 5");
bad!("a = 1_", "invalid number at line 1 column 5");
bad!("''", "expected an equals, found eof at line 1 column 3");
bad!("a = 9e99999", "invalid number at line 1 column 5");
bad!(
"a = \"\u{7f}\"",
"invalid character in string: `\\u{7f}` at line 1 column 6"
);
bad!(
"a = '\u{7f}'",
"invalid character in string: `\\u{7f}` at line 1 column 6"
);
bad!("a = -0x1", "invalid number at line 1 column 5");
bad!("a = 0x-1", "invalid number at line 1 column 7");
// Dotted keys.
bad!(
"a.b.c = 1
a.b = 2
",
"duplicate key: `b` for key `a` at line 2 column 12"
);
bad!(
"a = 1
a.b = 2",
"dotted key attempted to extend non-table type at line 1 column 5"
);
bad!(
"a = {k1 = 1, k1.name = \"joe\"}",
"dotted key attempted to extend non-table type at line 1 column 11"
);
}

View file

@ -0,0 +1,226 @@
use serde_json::Value;
macro_rules! bad {
($toml:expr, $msg:expr) => {
match basic_toml::from_str::<Value>($toml) {
Ok(s) => panic!("parsed to: {:#?}", s),
Err(e) => assert_eq!(e.to_string(), $msg),
}
};
}
macro_rules! test( ($name:ident, $s:expr, $msg:expr) => (
#[test]
fn $name() { bad!($s, $msg); }
) );
test!(
datetime_malformed_no_leads,
include_str!("invalid/datetime-malformed-no-leads.toml"),
"invalid number at line 1 column 12"
);
test!(
datetime_malformed_no_secs,
include_str!("invalid/datetime-malformed-no-secs.toml"),
"invalid number at line 1 column 11"
);
test!(
datetime_malformed_no_t,
include_str!("invalid/datetime-malformed-no-t.toml"),
"invalid number at line 1 column 8"
);
test!(
datetime_malformed_with_milli,
include_str!("invalid/datetime-malformed-with-milli.toml"),
"invalid number at line 1 column 14"
);
test!(
duplicate_key_table,
include_str!("invalid/duplicate-key-table.toml"),
"duplicate key: `type` for key `fruit` at line 4 column 8"
);
test!(
duplicate_keys,
include_str!("invalid/duplicate-keys.toml"),
"duplicate key: `dupe` at line 2 column 1"
);
test!(
duplicate_table,
include_str!("invalid/duplicate-table.toml"),
"redefinition of table `dependencies` for key `dependencies` at line 7 column 1"
);
test!(
duplicate_tables,
include_str!("invalid/duplicate-tables.toml"),
"redefinition of table `a` for key `a` at line 2 column 1"
);
test!(
empty_implicit_table,
include_str!("invalid/empty-implicit-table.toml"),
"expected a table key, found a period at line 1 column 10"
);
test!(
empty_table,
include_str!("invalid/empty-table.toml"),
"expected a table key, found a right bracket at line 1 column 2"
);
test!(
float_no_leading_zero,
include_str!("invalid/float-no-leading-zero.toml"),
"expected a value, found a period at line 1 column 10"
);
test!(
float_no_suffix,
include_str!("invalid/float-no-suffix.toml"),
"invalid number at line 1 column 5"
);
test!(
float_no_trailing_digits,
include_str!("invalid/float-no-trailing-digits.toml"),
"invalid number at line 1 column 12"
);
test!(
key_after_array,
include_str!("invalid/key-after-array.toml"),
"expected newline, found an identifier at line 1 column 14"
);
test!(
key_after_table,
include_str!("invalid/key-after-table.toml"),
"expected newline, found an identifier at line 1 column 11"
);
test!(
key_empty,
include_str!("invalid/key-empty.toml"),
"expected a table key, found an equals at line 1 column 2"
);
test!(
key_hash,
include_str!("invalid/key-hash.toml"),
"expected an equals, found a comment at line 1 column 2"
);
test!(
key_newline,
include_str!("invalid/key-newline.toml"),
"expected an equals, found a newline at line 1 column 2"
);
test!(
key_open_bracket,
include_str!("invalid/key-open-bracket.toml"),
"expected a right bracket, found an equals at line 1 column 6"
);
test!(
key_single_open_bracket,
include_str!("invalid/key-single-open-bracket.toml"),
"expected a table key, found eof at line 1 column 2"
);
test!(
key_space,
include_str!("invalid/key-space.toml"),
"expected an equals, found an identifier at line 1 column 3"
);
test!(
key_start_bracket,
include_str!("invalid/key-start-bracket.toml"),
"expected a right bracket, found an equals at line 2 column 6"
);
test!(
key_two_equals,
include_str!("invalid/key-two-equals.toml"),
"expected a value, found an equals at line 1 column 6"
);
test!(
string_bad_byte_escape,
include_str!("invalid/string-bad-byte-escape.toml"),
"invalid escape character in string: `x` at line 1 column 13"
);
test!(
string_bad_escape,
include_str!("invalid/string-bad-escape.toml"),
"invalid escape character in string: `a` at line 1 column 42"
);
test!(
string_bad_line_ending_escape,
include_str!("invalid/string-bad-line-ending-escape.toml"),
"invalid escape character in string: ` ` at line 2 column 79"
);
test!(
string_byte_escapes,
include_str!("invalid/string-byte-escapes.toml"),
"invalid escape character in string: `x` at line 1 column 12"
);
test!(
string_no_close,
include_str!("invalid/string-no-close.toml"),
"newline in string found at line 1 column 42"
);
test!(
table_array_implicit,
include_str!("invalid/table-array-implicit.toml"),
"table redefined as array for key `albums` at line 13 column 1"
);
test!(
table_array_malformed_bracket,
include_str!("invalid/table-array-malformed-bracket.toml"),
"expected a right bracket, found a newline at line 1 column 10"
);
test!(
table_array_malformed_empty,
include_str!("invalid/table-array-malformed-empty.toml"),
"expected a table key, found a right bracket at line 1 column 3"
);
test!(
table_empty,
include_str!("invalid/table-empty.toml"),
"expected a table key, found a right bracket at line 1 column 2"
);
test!(
table_nested_brackets_close,
include_str!("invalid/table-nested-brackets-close.toml"),
"expected newline, found an identifier at line 1 column 4"
);
test!(
table_nested_brackets_open,
include_str!("invalid/table-nested-brackets-open.toml"),
"expected a right bracket, found a left bracket at line 1 column 3"
);
test!(
table_whitespace,
include_str!("invalid/table-whitespace.toml"),
"expected a right bracket, found an identifier at line 1 column 10"
);
test!(
table_with_pound,
include_str!("invalid/table-with-pound.toml"),
"expected a right bracket, found a comment at line 1 column 5"
);
test!(
text_after_array_entries,
include_str!("invalid/text-after-array-entries.toml"),
"invalid TOML value, did you mean to use a quoted string? at line 2 column 46"
);
test!(
text_after_integer,
include_str!("invalid/text-after-integer.toml"),
"expected newline, found an identifier at line 1 column 13"
);
test!(
text_after_string,
include_str!("invalid/text-after-string.toml"),
"expected newline, found an identifier at line 1 column 41"
);
test!(
text_after_table,
include_str!("invalid/text-after-table.toml"),
"expected newline, found an identifier at line 1 column 9"
);
test!(
text_before_array_separator,
include_str!("invalid/text-before-array-separator.toml"),
"expected a right bracket, found an identifier at line 2 column 46"
);
test!(
text_in_array,
include_str!("invalid/text-in-array.toml"),
"invalid TOML value, did you mean to use a quoted string? at line 3 column 3"
);

View file

@ -0,0 +1 @@
no-leads = 1987-7-05T17:45:00Z

View file

@ -0,0 +1 @@
no-secs = 1987-07-05T17:45Z

View file

@ -0,0 +1 @@
no-t = 1987-07-0517:45:00Z

View file

@ -0,0 +1 @@
with-milli = 1987-07-5T17:45:00.12Z

View file

@ -0,0 +1,5 @@
[fruit]
type = "apple"
[fruit.type]
apple = "yes"

View file

@ -0,0 +1,2 @@
dupe = false
dupe = true

View file

@ -0,0 +1,8 @@
[dependencies.openssl-sys]
version = "0.5.2"
[dependencies]
libc = "0.1"
[dependencies]
bitflags = "0.1.1"

View file

@ -0,0 +1,2 @@
[a]
[a]

View file

@ -0,0 +1 @@
[naughty..naughty]

View file

@ -0,0 +1 @@
[]

View file

@ -0,0 +1,2 @@
answer = .12345
neganswer = -.12345

View file

@ -0,0 +1 @@
a = 1.2f

View file

@ -0,0 +1,2 @@
answer = 1.
neganswer = -1.

View file

@ -0,0 +1 @@
[[agencies]] owner = "S Cjelli"

View file

@ -0,0 +1 @@
[history] guard = "sleeping"

View file

@ -0,0 +1 @@
= 1

View file

@ -0,0 +1 @@
a# = 1

View file

@ -0,0 +1,2 @@
a
= 1

View file

@ -0,0 +1 @@
[abc = 1

View file

@ -0,0 +1 @@
[

View file

@ -0,0 +1 @@
a b = 1

View file

@ -0,0 +1,3 @@
[a]
[xyz = 5
[b]

View file

@ -0,0 +1 @@
key= = 1

View file

@ -0,0 +1 @@
naughty = "\xAg"

View file

@ -0,0 +1 @@
invalid-escape = "This string has a bad \a escape character."

View file

@ -0,0 +1,3 @@
invalid-escape = """\
This string has a non whitespace-character after the line ending escape. \ a
"""

View file

@ -0,0 +1 @@
answer = "\x33"

View file

@ -0,0 +1 @@
no-ending-quote = "One time, at band camp

View file

@ -0,0 +1,14 @@
# This test is a bit tricky. It should fail because the first use of
# `[[albums.songs]]` without first declaring `albums` implies that `albums`
# must be a table. The alternative would be quite weird. Namely, it wouldn't
# comply with the TOML spec: "Each double-bracketed sub-table will belong to
# the most *recently* defined table element *above* it."
#
# This is in contrast to the *valid* test, table-array-implicit, where
# `[[albums.songs]]` works by itself, so long as `[[albums]]` isn't declared
# later. (Although, `[albums]` could be.)
[[albums.songs]]
name = "Glory Days"
[[albums]]
name = "Born in the USA"

View file

@ -0,0 +1,2 @@
[[albums]
name = "Born to Run"

View file

@ -0,0 +1,2 @@
[[]]
name = "Born to Run"

View file

@ -0,0 +1 @@
[]

View file

@ -0,0 +1,2 @@
[a]b]
zyx = 42

View file

@ -0,0 +1,2 @@
[a[b]
zyx = 42

View file

@ -0,0 +1 @@
[invalid key]

View file

@ -0,0 +1,2 @@
[key#group]
answer = 42

View file

@ -0,0 +1,4 @@
array = [
"Is there life after an array separator?", No
"Entry"
]

View file

@ -0,0 +1 @@
answer = 42 the ultimate answer?

Some files were not shown because too many files have changed in this diff