Bug 1788040 - Update to Glean v51.2.0. r=chutten,supply-chain-reviewers
Differential Revision: https://phabricator.services.mozilla.com/D156937
parent a0bba4b6f3
commit f6a6e37ef5
155 changed files with 7709 additions and 3879 deletions
Cargo.lock (generated): 78 lines changed

@@ -2215,9 +2215,9 @@ dependencies = [
 
 [[package]]
 name = "glean"
-version = "51.1.0"
+version = "51.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d187c24d9b68cf4604253a38a15c5490f8089fa0abbd5d3a75a7dab6496c452c"
+checksum = "692dfb4494ad83161b7d596656c3e1f08b06bc2fa62a0eb8a3f4b7f83594025e"
 dependencies = [
  "chrono",
  "crossbeam-channel",

@@ -2235,9 +2235,9 @@ dependencies = [
 
 [[package]]
 name = "glean-core"
-version = "51.1.0"
+version = "51.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ff1c7b9f39201bd6983e178a6356597dce2ba2dd2ee1e9da41e36680cd4a99bf"
+checksum = "0e26d3c442090135439a6c6618f60653b139ee5a89116605eab947d18b5d64e7"
 dependencies = [
  "android_logger",
  "bincode",

@@ -2315,9 +2315,16 @@ dependencies = [
 
 [[package]]
 name = "goblin"
-version = "0.1.3"
+version = "0.1.99"
+dependencies = [
+ "goblin 0.5.4",
+]
+
+[[package]]
+name = "goblin"
+version = "0.5.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3081214398d39e4bd7f2c1975f0488ed04614ffdd976c6fc7a0708278552c0da"
+checksum = "a7666983ed0dd8d21a6f6576ee00053ca0926fb281a5522577a4dbd0f1b54143"
 dependencies = [
  "log",
  "plain",

@@ -3273,7 +3280,7 @@ version = "0.1.0"
 source = "git+https://github.com/rust-minidump/minidump-writer.git?rev=75ada456c92a429704691a85e1cb42fef8cafc0d#75ada456c92a429704691a85e1cb42fef8cafc0d"
 dependencies = [
  "byteorder",
- "goblin",
+ "goblin 0.1.99",
  "libc",
  "memmap2 0.2.999",
  "memoffset 0.5.6",

@@ -4598,18 +4605,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 
 [[package]]
 name = "scroll"
-version = "0.10.2"
+version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fda28d4b4830b807a8b43f7b0e6b5df875311b3e7621d84577188c175b6ec1ec"
+checksum = "04c565b551bafbef4157586fa379538366e4385d42082f255bfd96e4fe8519da"
 dependencies = [
  "scroll_derive",
 ]

 [[package]]
 name = "scroll_derive"
-version = "0.10.5"
+version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aaaae8f38bb311444cfb7f1979af0bc9240d95795f75f9ceddf6a59b79ceffa0"
+checksum = "bdbda6ac5cd1321e724fa9cee216f3a61885889b896f073b8f82322789c5250e"
 dependencies = [
  "proc-macro2",
  "quote",

@@ -5606,19 +5613,20 @@ checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04"
 
 [[package]]
 name = "uniffi"
-version = "0.19.3"
+version = "0.19.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc1de33ad46ce00bc9a31cea44e80ef69175d3a23007335216fe3996880a310d"
+checksum = "ea179ddeb64c249977c165b1d9f94af9c0a12a4f623e5986f553276612ea8796"
 dependencies = [
  "anyhow",
  "bytes 1.2.1",
  "camino",
  "cargo_metadata",
  "lazy_static",
  "log",
  "once_cell",
  "paste",
  "static_assertions",
  "uniffi_bindgen",
  "uniffi_macros",
 ]

 [[package]]

@@ -5704,29 +5712,32 @@ dependencies = [
 
 [[package]]
 name = "uniffi_bindgen"
-version = "0.19.3"
+version = "0.19.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b18e05c55840ddd690ba211f72bb1f2f6ca8c50bfeb7d7211ea5ee60b0f9be07"
+checksum = "8849753c67126dd7e4f309d09b696378481bfe9b3f5cec0a0c2b8e27391789c8"
 dependencies = [
  "anyhow",
  "askama",
+ "bincode",
  "camino",
  "cargo_metadata",
  "clap",
  "fs-err",
+ "goblin 0.5.4",
  "heck",
  "lazy_static",
  "once_cell",
  "paste",
  "serde",
  "serde_json",
  "toml",
+ "uniffi_meta",
  "weedle2",
 ]

 [[package]]
 name = "uniffi_build"
-version = "0.19.3"
+version = "0.19.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8fff0860625e4e621f0317e5f6ac9e79966262bd86a6cfb2049e8425df23afbd"
+checksum = "8d6c6fedf97b345227270837fcfd8d9a799f202af7fa843298c788fb943b159f"
 dependencies = [
  "anyhow",
  "camino",

@@ -5735,16 +5746,30 @@ dependencies = [
 
 [[package]]
 name = "uniffi_macros"
-version = "0.19.3"
+version = "0.19.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7956a6c1fb12bff15e537028ea2174f000f90dd4f87912233b276ea782d420f2"
+checksum = "090e5a993b51dc02faa0dc135ce94f02f050791bda283a5ae9f40fc9a4389a39"
 dependencies = [
+ "bincode",
+ "camino",
- "glob",
+ "fs-err",
  "once_cell",
  "proc-macro2",
  "quote",
+ "serde",
  "syn",
+ "toml",
  "uniffi_build",
+ "uniffi_meta",
 ]

+[[package]]
+name = "uniffi_meta"
+version = "0.19.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10d099cea0c721294ec11ae6c39d661c96d13d8994247ffadaf1576b065e38e6"
+dependencies = [
+ "serde",
+]
+
 [[package]]

@@ -6103,12 +6128,11 @@ dependencies = [
 
 [[package]]
 name = "weedle2"
-version = "3.0.0"
+version = "4.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d730d941cf471131c40a64cf2e8a595822009f51e64c05c5afdbc85af155857"
+checksum = "2e79c5206e1f43a2306fd64bdb95025ee4228960f2e6c5a8b173f3caaf807741"
 dependencies = [
- "fs-err",
- "nom 6.1.2",
+ "nom 7.1.1",
 ]

 [[package]]
Cargo.toml: 13 lines changed

@@ -119,6 +119,9 @@ tinyvec = { path = "build/rust/tinyvec" }
 # Patch wasi 0.10 to 0.11
 wasi = { path = "build/rust/wasi" }
 
+# Patch goblin 0.1 to 0.5
+goblin = { path = "build/rust/goblin" }
+
 # Patch hashbrown 0.11 to 0.12
 hashbrown = { path = "build/rust/hashbrown" }

@@ -165,8 +168,8 @@ path = "third_party/rust/mio-0.6.23"
 # duplicate crates.
 
 [patch."https://github.com/mozilla/uniffi-rs.git"]
-uniffi = "=0.19.3"
-uniffi_bindgen = "=0.19.3"
-uniffi_build = "=0.19.3"
-uniffi_macros = "=0.19.3"
-weedle2 = "=3.0.0"
+uniffi = "=0.19.6"
+uniffi_bindgen = "=0.19.6"
+uniffi_build = "=0.19.6"
+uniffi_macros = "=0.19.6"
+weedle2 = "=4.0.0"
build.gradle

@@ -36,7 +36,7 @@ allprojects {
 topsrcdir = gradle.mozconfig.topsrcdir
 topobjdir = gradle.mozconfig.topobjdir
 
-gleanVersion = "51.1.0"
+gleanVersion = "51.2.0"
 if (gleanVersion != getRustVersionFor("glean")) {
     throw new StopExecutionException("Mismatched Glean version, expected: ${gleanVersion}," +
         " found ${getRustVersionFor("glean")}")
build/rust/goblin/Cargo.toml (new file): 11 lines

@@ -0,0 +1,11 @@
+[package]
+name = "goblin"
+version = "0.1.99"
+edition = "2018"
+license = "MPL-2.0"
+
+[lib]
+path = "lib.rs"
+
+[dependencies.goblin]
+version = "0.5.3"
build/rust/goblin/lib.rs (new file): 5 lines

@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+pub use goblin::*;
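The two new files above are the usual mozilla-central compatibility-shim pattern: a local crate that advertises a semver-compatible version of the old goblin (0.1.99) but contains nothing except a re-export of goblin 0.5, and that the `goblin = { path = "build/rust/goblin" }` patch entry added to the top-level Cargo.toml substitutes for the crates.io 0.1 release. Consumers that still declare `goblin = "0.1"` (minidump-writer in this lockfile) therefore keep compiling, now against 0.5. A rough sketch of such consumer code, hypothetical and not taken from this commit:

    // Hypothetical downstream code written against the goblin 0.1 API.
    // With the shim patched in, `goblin` here resolves to the 0.1.99 wrapper,
    // whose `pub use goblin::*;` re-exports the real goblin 0.5 crate.
    use goblin::Object;

    fn object_kind(bytes: &[u8]) -> &'static str {
        match Object::parse(bytes) {
            Ok(Object::Elf(_)) => "ELF",
            Ok(Object::PE(_)) => "PE",
            Ok(Object::Mach(_)) => "Mach-O",
            Ok(Object::Archive(_)) => "archive",
            _ => "unknown",
        }
    }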
gfx/wr/Cargo.lock (generated): 109 lines changed

@@ -139,11 +139,10 @@ checksum = "88ceb0d16c4fd0e42876e298d7d3ce3780dd9ebdcbe4199816a32c77e08597ff"
 
 [[package]]
 name = "bincode"
-version = "1.2.1"
+version = "1.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5753e2a71534719bf3f4e57006c3a4f0d2c672a4b676eec84161f763eca87dbf"
+checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
 dependencies = [
- "byteorder",
  "serde",
 ]

@@ -866,9 +865,9 @@ dependencies = [
 
 [[package]]
 name = "glean"
-version = "51.1.0"
+version = "51.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d187c24d9b68cf4604253a38a15c5490f8089fa0abbd5d3a75a7dab6496c452c"
+checksum = "692dfb4494ad83161b7d596656c3e1f08b06bc2fa62a0eb8a3f4b7f83594025e"
 dependencies = [
  "chrono",
  "crossbeam-channel",

@@ -886,9 +885,9 @@ dependencies = [
 
 [[package]]
 name = "glean-core"
-version = "51.1.0"
+version = "51.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ff1c7b9f39201bd6983e178a6356597dce2ba2dd2ee1e9da41e36680cd4a99bf"
+checksum = "0e26d3c442090135439a6c6618f60653b139ee5a89116605eab947d18b5d64e7"
 dependencies = [
  "android_logger",
  "bincode",

@@ -912,12 +911,6 @@ dependencies = [
  "zeitstempel",
 ]
 
-[[package]]
-name = "glob"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
-
 [[package]]
 name = "glsl"
 version = "6.0.1"

@@ -1009,6 +1002,17 @@ dependencies = [
  "gl_generator 0.14.0",
 ]
 
+[[package]]
+name = "goblin"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7666983ed0dd8d21a6f6576ee00053ca0926fb281a5522577a4dbd0f1b54143"
+dependencies = [
+ "log",
+ "plain",
+ "scroll",
+]
+
 [[package]]
 name = "hashbrown"
 version = "0.11.2"

@@ -1529,9 +1533,9 @@ dependencies = [
 
 [[package]]
 name = "once_cell"
-version = "1.10.0"
+version = "1.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
+checksum = "074864da206b4973b84eb91683020dbefd6a8c3f0f38e054d93954e891935e4e"
 
 [[package]]
 name = "ordered-float"

@@ -1654,6 +1658,12 @@ version = "0.3.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "05da548ad6865900e60eaba7f589cc0783590a92e940c26953ff81ddbab2d677"
 
+[[package]]
+name = "plain"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
+
 [[package]]
 name = "plane-split"
 version = "0.17.1"

@@ -1901,6 +1911,26 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 
+[[package]]
+name = "scroll"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04c565b551bafbef4157586fa379538366e4385d42082f255bfd96e4fe8519da"
+dependencies = [
+ "scroll_derive",
+]
+
+[[package]]
+name = "scroll_derive"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bdbda6ac5cd1321e724fa9cee216f3a61885889b896f073b8f82322789c5250e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "semver"
 version = "1.0.12"

@@ -2192,45 +2222,49 @@ checksum = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
 
 [[package]]
 name = "uniffi"
-version = "0.19.3"
+version = "0.19.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc1de33ad46ce00bc9a31cea44e80ef69175d3a23007335216fe3996880a310d"
+checksum = "ea179ddeb64c249977c165b1d9f94af9c0a12a4f623e5986f553276612ea8796"
 dependencies = [
  "anyhow",
  "bytes",
  "camino",
  "cargo_metadata",
  "lazy_static",
  "log",
  "once_cell",
  "paste 1.0.7",
  "static_assertions",
  "uniffi_macros",
 ]

 [[package]]
 name = "uniffi_bindgen"
-version = "0.19.3"
+version = "0.19.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b18e05c55840ddd690ba211f72bb1f2f6ca8c50bfeb7d7211ea5ee60b0f9be07"
+checksum = "8849753c67126dd7e4f309d09b696378481bfe9b3f5cec0a0c2b8e27391789c8"
 dependencies = [
  "anyhow",
  "askama",
+ "bincode",
  "camino",
  "cargo_metadata",
  "clap",
  "fs-err",
+ "goblin",
  "heck",
  "lazy_static",
  "once_cell",
  "paste 1.0.7",
  "serde",
  "serde_json",
  "toml",
+ "uniffi_meta",
  "weedle2",
 ]

 [[package]]
 name = "uniffi_build"
-version = "0.19.3"
+version = "0.19.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8fff0860625e4e621f0317e5f6ac9e79966262bd86a6cfb2049e8425df23afbd"
+checksum = "8d6c6fedf97b345227270837fcfd8d9a799f202af7fa843298c788fb943b159f"
 dependencies = [
  "anyhow",
  "camino",

@@ -2239,16 +2273,30 @@ dependencies = [
 
 [[package]]
 name = "uniffi_macros"
-version = "0.19.3"
+version = "0.19.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7956a6c1fb12bff15e537028ea2174f000f90dd4f87912233b276ea782d420f2"
+checksum = "090e5a993b51dc02faa0dc135ce94f02f050791bda283a5ae9f40fc9a4389a39"
 dependencies = [
+ "bincode",
+ "camino",
- "glob",
+ "fs-err",
  "once_cell",
  "proc-macro2",
  "quote",
+ "serde",
  "syn",
+ "toml",
  "uniffi_build",
+ "uniffi_meta",
 ]

+[[package]]
+name = "uniffi_meta"
+version = "0.19.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10d099cea0c721294ec11ae6c39d661c96d13d8994247ffadaf1576b065e38e6"
+dependencies = [
+ "serde",
+]
+
 [[package]]

@@ -2537,12 +2585,11 @@ dependencies = [
 
 [[package]]
 name = "weedle2"
-version = "3.0.0"
+version = "4.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d730d941cf471131c40a64cf2e8a595822009f51e64c05c5afdbc85af155857"
+checksum = "2e79c5206e1f43a2306fd64bdb95025ee4228960f2e6c5a8b173f3caaf807741"
 dependencies = [
- "fs-err",
- "nom 6.2.1",
+ "nom 7.1.1",
 ]

 [[package]]
gfx/wr/webrender/Cargo.toml

@@ -51,7 +51,7 @@ svg_fmt = "0.4"
 tracy-rs = "0.1.2"
 derive_more = { version = "0.99", default-features = false, features = ["add_assign"] }
 etagere = "0.2.6"
-glean = "51.1.0"
+glean = "51.2.0"
 fog = { version = "0.1.0", optional = true }
 swgl = { path = "../swgl", optional = true }
 topological-sort = "0.1"
python/sites/mach.txt

@@ -130,7 +130,7 @@ pth:xpcom/geckoprocesstypes_generator
 pth:xpcom/idl-parser
 # glean-sdk may not be installable if a wheel isn't available
 # and it has to be built from source.
-pypi-optional:glean-sdk==51.1.0:telemetry will not be collected
+pypi-optional:glean-sdk==51.2.0:telemetry will not be collected
 # Mach gracefully handles the case where `psutil` is unavailable.
 # We aren't (yet) able to pin packages in automation, so we have to
 # support down to the oldest locally-installed version (5.4.2).
supply-chain/audits.toml

@@ -469,6 +469,12 @@ criteria = "safe-to-deploy"
 delta = "50.1.3 -> 51.0.1"
 notes = "Maintained by the Glean team at Mozilla"
 
+[[audits.glean]]
+who = "Jan-Erik Rediger <jrediger@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "51.1.0 -> 51.2.0"
+notes = "Maintained by the Glean team at Mozilla"
+
 [[audits.glean-core]]
 who = "Jan-Erik Rediger <jrediger@mozilla.com>"
 criteria = "safe-to-deploy"

@@ -499,6 +505,18 @@ criteria = "safe-to-deploy"
 delta = "50.1.3 -> 51.0.1"
 notes = "Maintained by the Glean team at Mozilla"
 
+[[audits.glean-core]]
+who = "Jan-Erik Rediger <jrediger@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "51.1.0 -> 51.2.0"
+notes = "Bug fix release with minimal changes, changes done by myself"
+
+[[audits.goblin]]
+who = "Jan-Erik Rediger <jrediger@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "0.1.3 -> 0.5.4"
+notes = "Several bugfixes since 2019. This version is also in use by Mozilla's crash reporting tooling, e.g. minidump-writer"
+
 [[audits.gpu-descriptor]]
 who = "Mike Hommey <mh+mozilla@glandium.org>"
 criteria = "safe-to-deploy"

@@ -884,6 +902,18 @@ who = "Mike Hommey <mh+mozilla@glandium.org>"
 criteria = "safe-to-deploy"
 delta = "1.0.10 -> 1.0.11"
 
+[[audits.scroll]]
+who = "Jan-Erik Rediger <jrediger@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "0.10.2 -> 0.11.0"
+notes = "Small changes to exposed traits, that look reasonable and have additional buffer boundary checks. No unsafe code touched."
+
+[[audits.scroll_derive]]
+who = "Jan-Erik Rediger <jrediger@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "0.10.5 -> 0.11.0"
+notes = "No code changes. Tagged together with its parent crate scroll."
+
 [[audits.selectors]]
 who = "Emilio Cobos Álvarez <emilio@crisal.io>"
 criteria = "safe-to-deploy"

@@ -1108,24 +1138,54 @@ criteria = "safe-to-deploy"
 version = "0.19.3"
 notes = "Maintained by the Glean and Application Services teams"
 
+[[audits.uniffi]]
+who = "Jan-Erik Rediger <jrediger@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "0.19.3 -> 0.19.6"
+notes = "Maintained by the Glean and Application Services team."
+
 [[audits.uniffi_bindgen]]
 who = "Travis Long <tlong@mozilla.com>"
 criteria = "safe-to-deploy"
 version = "0.19.3"
 notes = "Maintained by the Glean and Application Services teams."
 
+[[audits.uniffi_bindgen]]
+who = "Jan-Erik Rediger <jrediger@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "0.19.3 -> 0.19.6"
+notes = "Maintained by the Glean and Application Services team."
+
 [[audits.uniffi_build]]
 who = "Travis Long <tlong@mozilla.com>"
 criteria = "safe-to-deploy"
 version = "0.19.3"
 notes = "Maintained by the Glean and Application Services teams."
 
+[[audits.uniffi_build]]
+who = "Jan-Erik Rediger <jrediger@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "0.19.3 -> 0.19.6"
+notes = "Maintained by the Glean and Application Services team."
+
 [[audits.uniffi_macros]]
 who = "Travis Long <tlong@mozilla.com>"
 criteria = "safe-to-deploy"
 version = "0.19.3"
 notes = "Maintained by the Glean and Application Services teams."
 
+[[audits.uniffi_macros]]
+who = "Jan-Erik Rediger <jrediger@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "0.19.3 -> 0.19.6"
+notes = "Maintained by the Glean and Application Services team."
+
+[[audits.uniffi_meta]]
+who = "Jan-Erik Rediger <jrediger@mozilla.com>"
+criteria = "safe-to-deploy"
+version = "0.19.6"
+notes = "Maintained by the Glean and Application Services team."
+
 [[audits.void]]
 who = "Bobby Holley <bobbyholley@gmail.com>"
 criteria = "safe-to-deploy"

@@ -1194,6 +1254,12 @@ criteria = "safe-to-deploy"
 version = "3.0.0"
 notes = "Maintained by the Glean and Application Services teams."
 
+[[audits.weedle2]]
+who = "Jan-Erik Rediger <jrediger@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "3.0.0 -> 4.0.0"
+notes = "Maintained by the Glean and Application Services team."
+
 [[audits.wgpu-core]]
 who = "Dzmitry Malyshau <kvark@fastmail.com>"
 criteria = "safe-to-deploy"
File diff suppressed because one or more lines are too long
third_party/rust/glean-core/Cargo.toml (vendored): 8 lines changed

@@ -12,7 +12,7 @@
 [package]
 edition = "2018"
 name = "glean-core"
-version = "51.1.0"
+version = "51.2.0"
 authors = [
     "Jan-Erik Rediger <jrediger@mozilla.com>",
     "The Glean Team <glean-team@mozilla.com>",

@@ -73,10 +73,10 @@ version = "1.0.4"
 version = "0.1.40"
 
 [dependencies.uniffi]
-version = "0.19.3"
+version = "0.19.6"
 
 [dependencies.uniffi_macros]
-version = "0.19.3"
+version = "0.19.6"
 
 [dependencies.uuid]
 version = "0.8.1"

@@ -107,7 +107,7 @@ version = "0.4"
 version = "3.1.0"
 
 [build-dependencies.uniffi_build]
-version = "0.19.3"
+version = "0.19.6"
 features = ["builtin-bindgen"]
 
 [target."cfg(not(target_os = \"android\"))".dependencies.env_logger]
third_party/rust/glean-core/src/core/mod.rs (vendored): 10 lines changed

@@ -13,7 +13,7 @@ use crate::internal_pings::InternalPings;
 use crate::metrics::{self, ExperimentMetric, Metric, MetricType, PingType, RecordedExperiment};
 use crate::ping::PingMaker;
 use crate::storage::{StorageManager, INTERNAL_STORAGE};
-use crate::upload::{PingUploadManager, PingUploadTask, UploadResult};
+use crate::upload::{PingUploadManager, PingUploadTask, UploadResult, UploadTaskAction};
 use crate::util::{local_now_with_offset, sanitize_application_id};
 use crate::{
     scheduler, system, CommonMetricData, ErrorKind, InternalConfiguration, Lifetime, Result,

@@ -565,9 +565,13 @@ impl Glean {
     ///
     /// * `uuid` - The UUID of the ping in question.
     /// * `status` - The upload result.
-    pub fn process_ping_upload_response(&self, uuid: &str, status: UploadResult) {
+    pub fn process_ping_upload_response(
+        &self,
+        uuid: &str,
+        status: UploadResult,
+    ) -> UploadTaskAction {
         self.upload_manager
-            .process_ping_upload_response(self, uuid, status);
+            .process_ping_upload_response(self, uuid, status)
     }
 
     /// Takes a snapshot for the given store and optionally clear it.
third_party/rust/glean-core/src/glean.udl (vendored): 15 lines changed

@@ -45,7 +45,7 @@ namespace glean {
     void glean_set_dirty_flag(boolean flag);
 
     PingUploadTask glean_get_upload_task();
-    void glean_process_ping_upload_response(string uuid, UploadResult result);
+    UploadTaskAction glean_process_ping_upload_response(string uuid, UploadResult result);
 };
 
 // The Glean configuration.

@@ -179,6 +179,19 @@ interface UploadResult {
     //
     // * code: The HTTP status code
     HttpStatus(i32 code);
+
+    // Signal that this uploader is done with work
+    // and won't accept new work.
+    Done(i8 unused);
 };
 
+// Communicating back whether the uploader loop should continue.
+enum UploadTaskAction {
+    // Instruct the caller to continue with work.
+    "Next",
+
+    // Instruct the caller to end work.
+    "End",
+};
+
 // The supported metrics' lifetimes.
third_party/rust/glean-core/src/lib.rs (vendored): 9 lines changed

@@ -63,7 +63,7 @@ pub use crate::metrics::{
     StringListMetric, StringMetric, TextMetric, TimeUnit, TimerId, TimespanMetric,
     TimingDistributionMetric, UrlMetric, UuidMetric,
 };
-pub use crate::upload::{PingRequest, PingUploadTask, UploadResult};
+pub use crate::upload::{PingRequest, PingUploadTask, UploadResult, UploadTaskAction};
 
 const GLEAN_VERSION: &str = env!("CARGO_PKG_VERSION");
 const GLEAN_SCHEMA_VERSION: u32 = 1;

@@ -496,8 +496,9 @@ fn initialize_core_metrics(glean: &Glean, client_info: &ClientInfoMetrics) {
     if let Some(app_channel) = client_info.channel.as_ref() {
         core_metrics::internal_metrics::app_channel.set_sync(glean, app_channel);
     }
-    core_metrics::internal_metrics::os_version.set_sync(glean, system::get_os_version());
-    core_metrics::internal_metrics::architecture.set_sync(glean, system::ARCH.to_string());
+
+    core_metrics::internal_metrics::os_version.set_sync(glean, &client_info.os_version);
+    core_metrics::internal_metrics::architecture.set_sync(glean, &client_info.architecture);
 
     if let Some(android_sdk_version) = client_info.android_sdk_version.as_ref() {
         core_metrics::internal_metrics::android_sdk_version.set_sync(glean, android_sdk_version);

@@ -851,7 +852,7 @@ pub fn glean_get_upload_task() -> PingUploadTask {
 }
 
 /// Processes the response from an attempt to upload a ping.
-pub fn glean_process_ping_upload_response(uuid: String, result: UploadResult) {
+pub fn glean_process_ping_upload_response(uuid: String, result: UploadResult) -> UploadTaskAction {
     core::with_glean(|glean| glean.process_ping_upload_response(&uuid, result))
 }
 
third_party/rust/glean-core/src/metrics/uuid.rs (vendored)

@@ -58,8 +58,8 @@ impl UuidMetric {
 
         let value = value.into();
 
-        if uuid::Uuid::parse_str(&value).is_ok() {
-            let value = Metric::Uuid(value);
+        if let Ok(uuid) = uuid::Uuid::parse_str(&value) {
+            let value = Metric::Uuid(uuid.to_hyphenated().to_string());
             glean.storage().record(glean, &self.meta, &value)
         } else {
             let msg = format!("Unexpected UUID value '{}'", value);
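Beyond validating the input, the new code re-serializes the parsed UUID with `to_hyphenated()`, so glean-core now stores one canonical hyphenated form regardless of how the caller formatted the value. A small sketch of that normalization using the `uuid` 0.8 crate pinned in the vendored Cargo.toml; the helper function is illustrative, not part of Glean:

    // Sketch only (uuid 0.8 API): parse_str accepts both the plain 32-digit
    // and the hyphenated form; to_hyphenated() renders one canonical string.
    fn normalize_uuid(value: &str) -> Option<String> {
        uuid::Uuid::parse_str(value)
            .ok()
            .map(|u| u.to_hyphenated().to_string())
    }

    fn main() {
        assert_eq!(
            normalize_uuid("550e8400e29b41d4a716446655440000").as_deref(),
            Some("550e8400-e29b-41d4-a716-446655440000")
        );
        // Invalid input is rejected, which corresponds to the error branch above.
        assert_eq!(normalize_uuid("not-a-uuid"), None);
    }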
third_party/rust/glean-core/src/system.rs (vendored): 64 lines changed

@@ -1,7 +1,8 @@
 // Copyright (c) 2017 The Rust Project Developers
+// Copyright (c) 2018-2020 The Rust Secure Code Working Group
 // Licensed under the MIT License.
 // Original license:
-// https://github.com/RustSec/platforms-crate/blob/ebbd3403243067ba3096f31684557285e352b639/LICENSE-MIT
+// https://github.com/rustsec/rustsec/blob/2a080f173ad9d8ac7fa260f0a3a6aebf0000de06/platforms/LICENSE-MIT
 //
 // Permission is hereby granted, free of charge, to any
 // person obtaining a copy of this software and associated

@@ -29,7 +30,7 @@
 
 //! Detect and expose `target_os` as a constant.
 //!
-//! Code adopted from the "platforms" crate: <https://github.com/RustSec/platforms-crate>.
+//! Code adopted from the "platforms" crate: <https://github.com/rustsec/rustsec/tree/2a080f173ad9d8ac7fa260f0a3a6aebf0000de06/platforms>.
 
 #[cfg(target_os = "android")]
 /// `target_os` when building this crate: `android`

@@ -79,62 +80,3 @@ pub const OS: &str = "Solaris";
     target_os = "solaris",
 )))]
 pub const OS: &str = "Unknown";
-
-#[cfg(target_arch = "aarch64")]
-/// `target_arch` when building this crate: `aarch64`
-pub const ARCH: &str = "aarch64";
-
-#[cfg(target_arch = "arm")]
-/// `target_arch` when building this crate: `arm`
-pub const ARCH: &str = "arm";
-
-#[cfg(target_arch = "x86")]
-/// `target_arch` when building this crate: `x86`
-pub const ARCH: &str = "x86";
-
-#[cfg(target_arch = "x86_64")]
-/// `target_arch` when building this crate: `x86_64`
-pub const ARCH: &str = "x86_64";
-
-#[cfg(not(any(
-    target_arch = "aarch64",
-    target_arch = "arm",
-    target_arch = "x86",
-    target_arch = "x86_64"
-)))]
-/// `target_arch` when building this crate: unknown!
-pub const ARCH: &str = "Unknown";
-
-#[cfg(any(target_os = "macos", target_os = "windows"))]
-/// Returns Darwin kernel version for MacOS, or NT Kernel version for Windows
-pub fn get_os_version() -> String {
-    whatsys::kernel_version().unwrap_or_else(|| "Unknown".to_owned())
-}
-
-#[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "windows")))]
-/// Returns "Unknown" for platforms other than Linux, MacOS or Windows
-pub fn get_os_version() -> String {
-    "Unknown".to_owned()
-}
-
-#[cfg(target_os = "linux")]
-/// Returns Linux kernel version, in the format of <Major>.<Minor> e.g. 5.8
-pub fn get_os_version() -> String {
-    parse_linux_os_string(whatsys::kernel_version().unwrap_or_else(|| "Unknown".to_owned()))
-}
-
-#[cfg(target_os = "linux")]
-fn parse_linux_os_string(os_str: String) -> String {
-    os_str.split('.').take(2).collect::<Vec<&str>>().join(".")
-}
-
-#[test]
-#[cfg(target_os = "linux")]
-fn parse_fixed_linux_os_string() {
-    let alpine_os_string = "4.12.0-rc6-g48ec1f0-dirty".to_owned();
-    assert_eq!(parse_linux_os_string(alpine_os_string), "4.12");
-    let centos_os_string = "3.10.0-514.16.1.el7.x86_64".to_owned();
-    assert_eq!(parse_linux_os_string(centos_os_string), "3.10");
-    let ubuntu_os_string = "5.8.0-44-generic".to_owned();
-    assert_eq!(parse_linux_os_string(ubuntu_os_string), "5.8");
-}
third_party/rust/glean-core/src/upload/mod.rs (vendored): 23 lines changed

@@ -25,7 +25,7 @@ use crate::{internal_metrics::UploadMetrics, Glean};
 use directory::{PingDirectoryManager, PingPayloadsByDirectory};
 use policy::Policy;
 pub use request::{HeaderMap, PingRequest};
-pub use result::UploadResult;
+pub use result::{UploadResult, UploadTaskAction};
 
 mod directory;
 mod policy;

@@ -655,7 +655,7 @@ impl PingUploadManager {
         glean: &Glean,
         document_id: &str,
         status: UploadResult,
-    ) {
+    ) -> UploadTaskAction {
         use UploadResult::*;
 
         if let Some(label) = status.get_label() {

@@ -688,7 +688,14 @@ impl PingUploadManager {
                 self.recoverable_failure_count
                     .fetch_add(1, Ordering::SeqCst);
             }
+
+            Done { .. } => {
+                log::debug!("Uploader signaled Done. Exiting.");
+                return UploadTaskAction::End;
+            }
         };
+
+        UploadTaskAction::Next
     }
 }
 

@@ -1191,11 +1198,13 @@ mod test {
         // Return the max recoverable error failures in a row
        for _ in 0..max_recoverable_failures {
             match upload_manager.get_upload_task(&glean, false) {
-                PingUploadTask::Upload { request } => upload_manager.process_ping_upload_response(
-                    &glean,
-                    &request.document_id,
-                    UploadResult::recoverable_failure(),
-                ),
+                PingUploadTask::Upload { request } => {
+                    upload_manager.process_ping_upload_response(
+                        &glean,
+                        &request.document_id,
+                        UploadResult::recoverable_failure(),
+                    );
+                }
                 _ => panic!("Expected upload manager to return the next request!"),
             }
         }
third_party/rust/glean-core/src/upload/result.rs (vendored): 23 lines changed

@@ -32,6 +32,14 @@ pub enum UploadResult {
         /// The HTTP status code
         code: i32,
     },
+
+    /// Signal that this uploader is done with work
+    /// and won't accept new work.
+    Done {
+        #[doc(hidden)]
+        /// Unused field. Required because UniFFI can't handle variants without fields.
+        unused: i8,
+    },
 }
 
 impl UploadResult {

@@ -47,6 +55,7 @@ impl UploadResult {
             UploadResult::HttpStatus { .. } => Some("status_code_unknown"),
             UploadResult::UnrecoverableFailure { .. } => Some("unrecoverable"),
             UploadResult::RecoverableFailure { .. } => Some("recoverable"),
+            UploadResult::Done { .. } => None,
         }
     }
 

@@ -72,4 +81,18 @@ impl UploadResult {
     pub fn http_status(code: i32) -> Self {
         Self::HttpStatus { code }
     }
+
+    /// This uploader is done.
+    pub fn done() -> Self {
+        Self::Done { unused: 0 }
+    }
 }
+
+/// Communication back whether the uploader loop should continue.
+#[derive(Debug)]
+pub enum UploadTaskAction {
+    /// Instruct the caller to continue with work.
+    Next,
+    /// Instruct the caller to end work.
+    End,
+}
third_party/rust/glean/.cargo-checksum.json (vendored): 2 lines changed

@@ -1 +1 @@
{"files":{"Cargo.toml":"ec66c8a001c3fc6282887bace97d01db96ca04115b64c94b580e17ae4fe06ff5","LICENSE":"1f256ecad192880510e84ad60474eab7589218784b9a50bc7ceee34c2b91f1d5","README.md":"5bc5b1c46695f628e1023662752272e938a963b535d5686bd1ecc433f9e018c4","src/common_test.rs":"68f6d408cb7b683fa32c8b38a4df1e6c45bfd77c0c90ca35976ea7548bbc4b2f","src/configuration.rs":"37ad5b3e7d4e31dd04a7d6690179168b5f2768d87dd36056dee5d08bdbe20fb2","src/core_metrics.rs":"76ac5350cb6f82d9a193d519b085a08f138dceba77da3514bd0c636bcdefefca","src/lib.rs":"e342d497d60abceca3c84d35523a54d187b0282220a112da53e4ab1cf76da205","src/net/http_uploader.rs":"43812a70d19a38e8d7a093c8076c2b6345372c3c861b0f3511428762700a65e0","src/net/mod.rs":"86cbcb0b46f9d13923a20db9e482b65da49d7daa4e335a3f3092f1d760f572b0","src/private/event.rs":"02bbebf545695812e5055741cc0b5f3c99eda2039e684e26fcdd5f087ed15fe3","src/private/mod.rs":"0364ecf5f0439443a5b209583f4ff2c474b79f7c253c981ab0b7cdc528368698","src/private/ping.rs":"cbdc57f41fc9d46e56b4dfff91ac683753d1f8b3ecd0aa9bc3419e3595b8b81b","src/system.rs":"ff23a5b94f52dab484342dfed702412bc29ab1bbfd5af326033d8e07e7b9075f","src/test.rs":"d70c6aaadd529bec2fa3029c4138193865e598ad054ec636a4b5baae48177f99","tests/common/mod.rs":"37cd4c48e140c793b852ae09fb3e812da28a4412977295015bcbffd632fcf294","tests/init_fails.rs":"9b78226a4e3220de5b64a205a97b8d5778d1700391b5b71c7819b6cdd120747e","tests/never_init.rs":"1f33b8ce7ca3514b57b48cc16d98408974c85cf8aa7d13257ffc2ad878ebb295","tests/no_time_to_init.rs":"494dcddce49f279c6508f484ee59cf8bb83e7324de07bdbc1142f2a066b7f6a1","tests/overflowing_preinit.rs":"396206d5078b7e6c148bbf2aecb0f963cfaa4d7eff3fc7bf6590125076ee6113","tests/persist_ping_lifetime.rs":"2297d4b208e14188e6dcca2d4806b805cfc7dd824d21bd143a7803b95e0709f4","tests/persist_ping_lifetime_nopanic.rs":"06f1f3ca3b8a6c8b7fc4d6fc48d0e1d2ccffd32139f080db0a95003e9edd507d","tests/schema.rs":"a96089f828928b6be1fad7815e3269f5693af1b773e570312b357a29af28122a","tests/simple.rs":"a1d72af899293390bb955ca379baafb89c29bb746630409f8c51f453d222dbad"},"package":"d187c24d9b68cf4604253a38a15c5490f8089fa0abbd5d3a75a7dab6496c452c"}
{"files":{"Cargo.toml":"182756cc94cd25fab16c27a7207b58d869746561f36f744b009813d1b9d55978","LICENSE":"1f256ecad192880510e84ad60474eab7589218784b9a50bc7ceee34c2b91f1d5","README.md":"5bc5b1c46695f628e1023662752272e938a963b535d5686bd1ecc433f9e018c4","src/common_test.rs":"68f6d408cb7b683fa32c8b38a4df1e6c45bfd77c0c90ca35976ea7548bbc4b2f","src/configuration.rs":"37ad5b3e7d4e31dd04a7d6690179168b5f2768d87dd36056dee5d08bdbe20fb2","src/core_metrics.rs":"76ac5350cb6f82d9a193d519b085a08f138dceba77da3514bd0c636bcdefefca","src/lib.rs":"e342d497d60abceca3c84d35523a54d187b0282220a112da53e4ab1cf76da205","src/net/http_uploader.rs":"43812a70d19a38e8d7a093c8076c2b6345372c3c861b0f3511428762700a65e0","src/net/mod.rs":"e05e61860f5828caa529c3ea75a2fff7371bfa3dce057077a74c09baf41a568a","src/private/event.rs":"02bbebf545695812e5055741cc0b5f3c99eda2039e684e26fcdd5f087ed15fe3","src/private/mod.rs":"0364ecf5f0439443a5b209583f4ff2c474b79f7c253c981ab0b7cdc528368698","src/private/ping.rs":"cbdc57f41fc9d46e56b4dfff91ac683753d1f8b3ecd0aa9bc3419e3595b8b81b","src/system.rs":"4e0ec743f6d06a9c83e46c95d0286d5745f4642398c942fce8ae7a1ea5202d37","src/test.rs":"1d9a01fa6befdc04e97caeb58ccebd67c840965ff0417b6b2ba9e53aa108a069","tests/common/mod.rs":"37cd4c48e140c793b852ae09fb3e812da28a4412977295015bcbffd632fcf294","tests/init_fails.rs":"9b78226a4e3220de5b64a205a97b8d5778d1700391b5b71c7819b6cdd120747e","tests/never_init.rs":"1f33b8ce7ca3514b57b48cc16d98408974c85cf8aa7d13257ffc2ad878ebb295","tests/no_time_to_init.rs":"494dcddce49f279c6508f484ee59cf8bb83e7324de07bdbc1142f2a066b7f6a1","tests/overflowing_preinit.rs":"396206d5078b7e6c148bbf2aecb0f963cfaa4d7eff3fc7bf6590125076ee6113","tests/persist_ping_lifetime.rs":"2297d4b208e14188e6dcca2d4806b805cfc7dd824d21bd143a7803b95e0709f4","tests/persist_ping_lifetime_nopanic.rs":"06f1f3ca3b8a6c8b7fc4d6fc48d0e1d2ccffd32139f080db0a95003e9edd507d","tests/schema.rs":"a96089f828928b6be1fad7815e3269f5693af1b773e570312b357a29af28122a","tests/simple.rs":"a1d72af899293390bb955ca379baafb89c29bb746630409f8c51f453d222dbad"},"package":"692dfb4494ad83161b7d596656c3e1f08b06bc2fa62a0eb8a3f4b7f83594025e"}
third_party/rust/glean/Cargo.toml (vendored): 4 lines changed

@@ -12,7 +12,7 @@
 [package]
 edition = "2018"
 name = "glean"
-version = "51.1.0"
+version = "51.2.0"
 authors = [
     "Jan-Erik Rediger <jrediger@mozilla.com>",
     "The Glean Team <glean-team@mozilla.com>",

@@ -41,7 +41,7 @@ features = ["serde"]
 version = "0.5"
 
 [dependencies.glean-core]
-version = "51.1.0"
+version = "51.2.0"
 
 [dependencies.inherent]
 version = "1"
third_party/rust/glean/src/net/mod.rs (vendored): 21 lines changed

@@ -15,7 +15,7 @@ use std::thread;
 use std::time::Duration;
 
 use glean_core::upload::PingUploadTask;
-pub use glean_core::upload::{PingRequest, UploadResult};
+pub use glean_core::upload::{PingRequest, UploadResult, UploadTaskAction};
 
 pub use http_uploader::*;
 

@@ -87,31 +87,38 @@ impl UploadManager {
         thread::Builder::new()
             .name("glean.upload".into())
             .spawn(move || {
+                log::trace!("Started glean.upload thread");
                 loop {
                     let incoming_task = glean_core::glean_get_upload_task();
+
+                    log::trace!("Received upload task: {:?}", incoming_task);
                     match incoming_task {
                         PingUploadTask::Upload { request } => {
-                            log::trace!("Received upload task with request {:?}", request);
                             let doc_id = request.document_id.clone();
                             let upload_url = format!("{}{}", inner.server_endpoint, request.path);
                             let headers: Vec<(String, String)> =
                                 request.headers.into_iter().collect();
                             let result = inner.uploader.upload(upload_url, request.body, headers);
                             // Process the upload response.
-                            glean_core::glean_process_ping_upload_response(doc_id, result);
+                            match glean_core::glean_process_ping_upload_response(doc_id, result) {
+                                UploadTaskAction::Next => continue,
+                                UploadTaskAction::End => break,
+                            }
                         }
                         PingUploadTask::Wait { time } => {
                             log::trace!("Instructed to wait for {:?}ms", time);
                             thread::sleep(Duration::from_millis(time));
                         }
                         PingUploadTask::Done { .. } => {
-                            // Nothing to do here, break out of the loop and clear the
-                            // running flag.
-                            inner.thread_running.store(false, Ordering::SeqCst);
-                            return;
+                            log::trace!("Received PingUploadTask::Done. Exiting.");
+                            // Nothing to do here, break out of the loop.
+                            break;
                         }
                     }
                 }
+
+                // Clear the running flag to signal that this thread is done.
+                inner.thread_running.store(false, Ordering::SeqCst);
             })
             .expect("Failed to spawn Glean's uploader thread");
     }
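An application that supplies its own uploader can now use the Done/UploadTaskAction pair to stop this thread from the uploader side: returning `UploadResult::done()` makes glean-core answer `UploadTaskAction::End`, and the loop above exits. A minimal, illustrative sketch; the struct and its shutdown flag are invented for the example, while the trait shape matches the FakeUploader in the test further down:

    // Illustrative only. `PingUploader`, `UploadResult::done()` and
    // `UploadResult::http_status()` are the items shown in this diff;
    // the struct name and shutdown flag are made up.
    use std::sync::atomic::{AtomicBool, Ordering};

    use glean::net::{PingUploader, UploadResult};

    #[derive(Debug)]
    pub struct ShutdownAwareUploader {
        shutting_down: AtomicBool,
    }

    impl PingUploader for ShutdownAwareUploader {
        fn upload(&self, url: String, body: Vec<u8>, headers: Vec<(String, String)>) -> UploadResult {
            if self.shutting_down.load(Ordering::SeqCst) {
                // glean-core will answer UploadTaskAction::End and the
                // glean.upload thread exits its loop.
                return UploadResult::done();
            }
            // ... perform the real HTTP request with url/body/headers here ...
            let _ = (url, body, headers);
            UploadResult::http_status(200)
        }
    }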
third_party/rust/glean/src/system.rs (vendored): 3 lines changed

@@ -1,7 +1,8 @@
 // Copyright (c) 2017 The Rust Project Developers
+// Copyright (c) 2018-2020 The Rust Secure Code Working Group
 // Licensed under the MIT License.
 // Original license:
-// https://github.com/RustSec/platforms-crate/blob/ebbd3403243067ba3096f31684557285e352b639/LICENSE-MIT
+// https://github.com/rustsec/rustsec/blob/2a080f173ad9d8ac7fa260f0a3a6aebf0000de06/platforms/LICENSE-MIT
 //
 // Permission is hereby granted, free of charge, to any
 // person obtaining a copy of this software and associated
third_party/rust/glean/src/test.rs (vendored): 84 lines changed

@@ -2,6 +2,9 @@
 // License, v. 2.0. If a copy of the MPL was not distributed with this
 // file, You can obtain one at https://mozilla.org/MPL/2.0/.
 
+use std::sync::{Arc, Barrier, Mutex};
+use std::thread::{self, ThreadId};
+
 use crate::private::PingType;
 use crate::private::{BooleanMetric, CounterMetric, EventMetric, StringMetric};
 

@@ -1131,3 +1134,84 @@ fn test_boolean_get_num_errors() {
 
     assert_eq!(result, 0);
 }
+
+#[test]
+fn signaling_done() {
+    let _lock = lock_test();
+
+    // Define a fake uploader that reports back the submission URL
+    // using a crossbeam channel.
+    #[derive(Debug)]
+    pub struct FakeUploader {
+        barrier: Arc<Barrier>,
+        counter: Arc<Mutex<HashMap<ThreadId, u32>>>,
+    }
+    impl net::PingUploader for FakeUploader {
+        fn upload(
+            &self,
+            _url: String,
+            _body: Vec<u8>,
+            _headers: Vec<(String, String)>,
+        ) -> net::UploadResult {
+            let mut map = self.counter.lock().unwrap();
+            *map.entry(thread::current().id()).or_insert(0) += 1;
+
+            // Wait for the sync.
+            self.barrier.wait();
+
+            // Signal that this uploader thread is done.
+            net::UploadResult::done()
+        }
+    }
+
+    // Create a custom configuration to use a fake uploader.
+    let dir = tempfile::tempdir().unwrap();
+    let tmpname = dir.path().to_path_buf();
+
+    // We use a barrier to sync this test thread with the uploader thread.
+    let barrier = Arc::new(Barrier::new(2));
+    // We count how many times `upload` was invoked per thread.
+    let call_count = Arc::new(Mutex::default());
+
+    let cfg = Configuration {
+        data_path: tmpname,
+        application_id: GLOBAL_APPLICATION_ID.into(),
+        upload_enabled: true,
+        max_events: None,
+        delay_ping_lifetime_io: false,
+        server_endpoint: Some("invalid-test-host".into()),
+        uploader: Some(Box::new(FakeUploader {
+            barrier: Arc::clone(&barrier),
+            counter: Arc::clone(&call_count),
+        })),
+        use_core_mps: false,
+    };
+
+    let _t = new_glean(Some(cfg), true);
+
+    // Define a new ping and submit it.
+    const PING_NAME: &str = "test-ping";
+    let custom_ping = private::PingType::new(PING_NAME, true, true, vec![]);
+    custom_ping.submit(None);
+    custom_ping.submit(None);
+
+    // Sync up with the upload thread.
+    barrier.wait();
+
+    // Submit another ping and wait for it to do work.
+    custom_ping.submit(None);
+
+    // Sync up with the upload thread again.
+    // This will not be the same thread as the one before (hopefully).
+    barrier.wait();
+
+    // No one's ever gonna wait for the uploader thread (the RLB doesn't store the handle to it),
+    // so all we can do is hope it finishes within time.
+    std::thread::sleep(std::time::Duration::from_millis(100));
+
+    let map = call_count.lock().unwrap();
+    assert_eq!(2, map.len(), "should have launched 2 uploader threads");
+    for &count in map.values() {
+        assert_eq!(1, count, "each thread should call upload only once");
+    }
+}
third_party/rust/goblin/.cargo-checksum.json (vendored): 2 lines changed
File diff suppressed because one or more lines are too long
third_party/rust/goblin/CHANGELOG.md (vendored): 126 lines changed

@@ -3,7 +3,129 @@ All notable changes to this project will be documented in this file.
 
 Before 1.0, this project does not adhere to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
 
-Goblin is now 0.1, which means we will try our best to ease breaking changes. Tracking issue is here: https://github.com/m4b/goblin/issues/97
+Goblin is now 0.5, which means we will try our best to ease breaking changes. Tracking issue is here: https://github.com/m4b/goblin/issues/97
+
+## [0.5.4] - 2022-8-14
+### Fixed
+pe: fix regression in PE binary parsing, thanks @SquareMan: https://github.com/m4b/goblin/pull/321
+
+## [0.5.3] - 2022-7-16
+### Fixed
+elf: fix elf strtab parsing, thanks @tux3: https://github.com/m4b/goblin/pull/316
+### Added
+elf: implement plain for note headers, thanks @mkroening: https://github.com/m4b/goblin/pull/317
+
+## [0.5.2] - 2022-6-5
+### Fixed
+elf: fix arithmetic overflows in `file_range()` and `vm_range()`, thanks @alessandron: https://github.com/m4b/goblin/pull/306
+pe: fix string table containing empty strings, thanks @track-5: https://github.com/m4b/goblin/pull/310
+pe: remove check on debug directory size, thanks @lzybkr: https://github.com/m4b/goblin/pull/313
+### Added
+elf: expose more of programheader impl regardless of alloc feature flag, thanks @dancrossnyc: https://github.com/m4b/goblin/pull/308
+mach.parse: Handle DyldExportsTrie, thanks @apalm: https://github.com/m4b/goblin/pull/303
+
+## [0.5.1] - 2022-2-13
+### BREAKING
+goblin: guard all capacity allocations with bounds checks, this is breaking because we introduced a new error enum, which is now marked as non_exhaustive, thanks @Swatinem: https://github.com/m4b/goblin/pull/298
+pe: support exports without an offset, thanks @dureuill: https://github.com/m4b/goblin/pull/293
+### Fixed
+mach: fix overflow panics, thanks @Swatinem: https://github.com/m4b/goblin/pull/302
+pe: add signature header check, thanks @skdltmxn: https://github.com/m4b/goblin/pull/286
+elf: improve parsing `SHT_SYMTAB` complexity from O(N^2) to O(N), thanks @Lichsto: https://github.com/m4b/goblin/pull/297
+### Added
+elf: clarify documentation on strtab behavior better, and add nice doc example, thanks @n01e0: https://github.com/m4b/goblin/pull/301
+elf: add rpaths and runpath to elf, thanks @messense: https://github.com/m4b/goblin/pull/294
+elf: complete elf OSABI constants, thanks @messense: https://github.com/m4b/goblin/pull/295
+elf: fill out more elf constants, thanks @n01e0: https://github.com/m4b/goblin/pull/296
+
+## [0.5.0] - 2022-2-13
+YANKED, see 0.5.1
+
+## [0.4.3] - 2021-9-18
+### Added
+- elf: add initial versioned symbols support, thanks @johannst: https://github.com/m4b/goblin/pull/280
+- elf: add some missing constants, `PF_MASKOS` and `PF_MASKPROC`, thanks @npmccallum: https://github.com/m4b/goblin/pull/281
+
+## [0.4.2] - 2021-7-4
+### Added
+- strtab: preparses the string table to prevent certain class of DoS attacks, thanks @Lichtsto: https://github.com/m4b/goblin/pull/275
+
+## [0.4.1] - 2021-5-30
+### Fixed
+- elf: fix error when alloc, but not endian, thanks @dancrossnyc: https://github.com/m4b/goblin/pull/273
+
+## [0.4.0] - 2021-4-11
+### BREAKING
+- elf: fix returning invalid ranges for SH_NOBIT sections,
+  method changed to return optional range instead, thanks @Tiwalun: https://github.com/m4b/goblin/pull/253
+### Fixed
+pe: pass parse opts correctly in pe parser in lookup table, fixes some issues loading and parsing pe libraries: https://github.com/m4b/goblin/pull/268
+elf: remove unnecessary unsafe blocks, thanks @nico-abram: https://github.com/m4b/goblin/pull/261
+elf: replace pub type with pub use, thanks @sollyucko: https://github.com/m4b/goblin/pull/259
+### Added
+elf: add a lazy parse example, thanks @jesseui: https://github.com/m4b/goblin/pull/258
+elf: add a new fuzzing harness + fix overflows in hash functions and note data iterator construction, thanks @Mrmaxmeier: https://github.com/m4b/goblin/pull/260
+
+## [0.3.4] - 2021-1-31
+### Added
+- elf: introduce "lazy" parsing of elf structure with new lazy_parse function, which allows user to fill in parts of the ELF struct they need later on; new example provided, as well as some tests, thanks @jessehui: https://github.com/m4b/goblin/pull/254
+- elf: also add new `Elf::parse_header` convenience function, which allows to parse elf header from bytes without e.g., explicitly depending on scroll, etc.
+
+## [0.3.3] - 2021-1-31
+### Fixed
+- mach: fix debug print panic, thanks @messense: https://github.com/m4b/goblin/pull/251
+### Added
+- pe: allow pe virtual memory resolve to be optional, allowing memory/process dump parsing, thanks @ko1n (as well as patience for very long time to merge PR!): https://github.com/m4b/goblin/pull/188
+
+## [0.3.2] - 2021-1-29
+### Fixed
+- elf: overflow panic when note name is 0, thanks @glandium: https://github.com/m4b/goblin/pull/256
+
+## [0.3.1] - 2021-1-18
+### Added
+- mach: add rpaths, thanks @keith: https://github.com/m4b/goblin/pull/248
+### Fixed
+- elf: fix regression parsing binaries like busybox (https://github.com/m4b/bingrep/issues/28), thanks @jan-auer: https://github.com/m4b/goblin/pull/249
+
+## [0.3.0] - 2020-11-26
+### BREAKING
+- mach: add missing load commands, and fixup minversion enum and api, thanks @woodruffw !: https://github.com/m4b/goblin/pull/240
+### Fixed
+- elf: prevent overflow in bad section sizes, thanks @jackcmay: https://github.com/m4b/goblin/pull/243
+- `Object::parse` no longer needs `std`! thanks @Evian-Zhang: https://github.com/m4b/goblin/pull/235
+- test: remove hardcoded CommandLineTools path in macos test, thanks @quake: https://github.com/m4b/goblin/pull/238
+- build: Resolve clippy lints, thanks @connorkuehl: https://github.com/m4b/goblin/pull/225
+### Added
+- elf: add the x86-64 unwind processor specific section header type https://github.com/m4b/goblin/pull/224
+- elf: Add ability to get archive members by index https://github.com/m4b/goblin/pull/225
+
+## [0.2.3] - 2020-5-10
+### Fixed
+- pe: remove unwrap on coffheader strtab parsing, thanks @ExPixel: https://github.com/m4b/goblin/pull/222
+### Added
+- pe: add more machine constants, thanks @ExPixel: https://github.com/m4b/goblin/pull/223
+
+## [0.2.2] - 2020-5-08
+### Fixed
+- elf: protect against out of memory when parsing, thanks @jackcmay: https://github.com/m4b/goblin/pull/219
+- pe: fix panic when parsing unwind info, thanks @jan-auer: https://github.com/m4b/goblin/pull/218
+
+## [0.2.1] - 2020-3-14
+### Added
+- elf: add more robust debug printing to various elf data structures, thanks @connorkuehl, e.g.: https://github.com/m4b/goblin/pull/211
+- elf: derive PartialEq for DynamicInfo, thanks @connorkuehl: https://github.com/m4b/goblin/pull/209
+
+## [0.2.0] - 2020-1-20
+### Changed
+- BREAKING: Changes in `elf::gnu_hash::GnuHash`:
+  + `new(*const u32, usize, &[sym::Sym]) -> Self`
+    to `from_raw_table(&[u8], &[Sym]) -> Result<Self, &str>`
+  + `find(&self, &str, u32, &Strtab) -> Option<&Sym>`
+    to `find(&self, &str, &Strtab) -> Option<&Sym>`.
+- BREAKING: mach: fix generic relocation constants, @philipc: https://github.com/m4b/goblin/pull/204/files
+### Added
+- elf: add more elf note values, thanks @xcoldhandsx: https://github.com/m4b/goblin/pull/201
+- Finally rustfmt'd entire repo :D
 
 ## [0.1.3] - 2019-12-28
 ### Removed

@@ -25,9 +147,11 @@ elf: Don't fail entire elf parse when interpreter is malformed string, @jsgf htt
 ## [0.1.0] - 2019-11-3
 ### Added
 - update to scroll 0.10 api
 
 ### Changed
 - BREAKING: rename export to lib in Reexport::DLLOrdinal from @lzybkr
+- pe: only parse ExceptionData for machine X86_64, thanks @wyxloading
 
 ### Fixed
 pe: Fix resolution of redirect unwind info, thanks @jan-auer https://github.com/m4b/goblin/pull/183
+pe: fix reexport dll and ordinal, thanks @lzybkr: d62889f469846af0cceb789b415f1e14f5f9e402
third_party/rust/goblin/Cargo.lock (generated, vendored): 88 lines changed

@@ -1,88 +0,0 @@
-# This file is automatically @generated by Cargo.
-# It is not intended for manual editing.
-[[package]]
-name = "cfg-if"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "goblin"
-version = "0.1.3"
-dependencies = [
- "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "plain 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "scroll 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "log"
-version = "0.4.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "plain"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "proc-macro2"
-version = "1.0.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "quote"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "scroll"
-version = "0.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "scroll_derive 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "scroll_derive"
-version = "0.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "syn"
-version = "1.0.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "unicode-xid"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[metadata]
-"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
-"checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7"
-"checksum plain 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
-"checksum proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9c9e470a8dc4aeae2dee2f335e8f533e2d4b347e1434e5671afc49b054592f27"
-"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
-"checksum scroll 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "abb2332cb595d33f7edd5700f4cbf94892e680c7f0ae56adab58a35190b66cb1"
-"checksum scroll_derive 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f8584eea9b9ff42825b46faf46a8c24d2cff13ec152fa2a50df788b87c07ee28"
-"checksum syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "dff0acdb207ae2fe6d5976617f887eb1e35a2ba52c13c7234c790960cdad9238"
-"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
90 third_party/rust/goblin/Cargo.toml (vendored)

@@ -3,26 +3,49 @@
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2018"
edition = "2021"
rust-version = "1.56.0"
name = "goblin"
version = "0.1.3"
authors = ["m4b <m4b.github.io@gmail.com>", "seu <seu@panopticon.re>", "Will Glynn <will@willglynn.com>", "Philip Craig <philipjcraig@gmail.com>"]
include = ["src/**/*", "Cargo.toml", "CHANGELOG.md", "LICENSE", "README.md", "etc/*", "examples/*", "tests/*", "fuzz/**/*"]
version = "0.5.4"
authors = [
    "m4b <m4b.github.io@gmail.com>",
    "seu <seu@panopticon.re>",
    "Will Glynn <will@willglynn.com>",
    "Philip Craig <philipjcraig@gmail.com>",
    "Lzu Tao <taolzu@gmail.com>",
]
include = [
    "src",
    "CHANGELOG.md",
    "Cargo.toml",
    "LICENSE",
    "README.md",
]
description = "An impish, cross-platform, ELF, Mach-o, and PE binary parsing and loading crate"
documentation = "https://docs.rs/goblin"
readme = "README.md"
keywords = ["binary", "elf", "mach", "pe", "archive"]
categories = ["parsing", "development-tools::debugging"]
keywords = [
    "binary",
    "elf",
    "mach",
    "pe",
    "archive",
]
categories = [
    "parsing",
    "development-tools::debugging",
]
license = "MIT"
repository = "https://github.com/m4b/goblin"
resolver = "2"

[dependencies.log]
version = "0.4"
optional = true

@@ -32,21 +55,50 @@ default-features = false
version = "0.2.3"

[dependencies.scroll]
version = "0.10"
version = "0.11"
default_features = false

[features]
alloc = ["scroll/derive", "log"]
alloc = [
    "scroll/derive",
    "log",
]
archive = ["alloc"]
default = ["std", "elf32", "elf64", "mach32", "mach64", "pe32", "pe64", "archive", "endian_fd"]
default = [
    "std",
    "elf32",
    "elf64",
    "mach32",
    "mach64",
    "pe32",
    "pe64",
    "archive",
    "endian_fd",
]
elf32 = []
elf64 = []
endian_fd = ["alloc"]
mach32 = ["alloc", "endian_fd"]
mach64 = ["alloc", "endian_fd"]
pe32 = ["alloc", "endian_fd"]
pe64 = ["alloc", "endian_fd"]
std = ["alloc", "scroll/std"]
mach32 = [
    "alloc",
    "endian_fd",
]
mach64 = [
    "alloc",
    "endian_fd",
]
pe32 = [
    "alloc",
    "endian_fd",
]
pe64 = [
    "alloc",
    "endian_fd",
]
std = [
    "alloc",
    "scroll/std",
]

[badges.travis-ci]
branch = "master"
repository = "m4b/goblin"

78 third_party/rust/goblin/README.md (vendored)

@@ -1,9 +1,12 @@
# libgoblin [![Build status][travis-badge]][travis-url] [![crates.io version][crates-goblin-badge]][crates-goblin]
# libgoblin

[![Actions][actions-badge]][actions-url]
[![crates.io version][crates-goblin-badge]][crates-goblin]

<!-- Badges' links -->

[travis-badge]: https://travis-ci.org/m4b/goblin.svg?branch=master
[travis-url]: https://travis-ci.org/m4b/goblin
[actions-badge]: https://github.com/m4b/goblin/workflows/CI/badge.svg?branch=master
[actions-url]: https://github.com/m4b/goblin/actions
[crates-goblin-badge]: https://img.shields.io/crates/v/goblin.svg
[crates-goblin]: https://crates.io/crates/goblin

@@ -17,13 +20,13 @@ https://docs.rs/goblin/

### Usage

Goblin requires `rustc` 1.36.0.
Goblin requires `rustc` 1.56.0 (Rust 2021 edition).

Add to your `Cargo.toml`

```toml
[dependencies]
goblin = "0.1"
goblin = "0.5"
```

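For orientation (this is not part of the upstream diff), here is a minimal sketch of what using the crate looks like after this update; it mirrors the `goblin::Object::parse` call from the `examples/rdr.rs` file removed later in this commit, and the command-line handling is a made-up assumption:

```rust
use std::{env, fs};

fn main() -> Result<(), goblin::error::Error> {
    // Parse whatever binary is named on the command line; goblin guesses the
    // format (ELF, Mach-O, PE, or archive) from the bytes themselves.
    let path = env::args().nth(1).expect("usage: inspect <binary>");
    let buffer = fs::read(path)?;
    let object = goblin::Object::parse(&buffer)?;
    println!("{:#?}", object);
    Ok(())
}
```
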
### Features
|
||||
@@ -71,6 +74,9 @@ Here are some things you could do with this crate (or help to implement so they

<!-- Related projects -->

[PyO3/maturin]: https://github.com/PyO3/maturin
[occlum]: https://github.com/occlum/occlum
[memflow]: https://github.com/memflow/memflow
[cargo-sym]: https://github.com/m4b/cargo-sym
[bingrep]: https://github.com/m4b/bingrep
[faerie]: https://github.com/m4b/faerie

@ -101,67 +107,129 @@ Thank you all :heart: !
|
|||
|
||||
In lexicographic order:
|
||||
|
||||
- [@2vg]
|
||||
- [@alessandrod]
|
||||
- [@amanieu]
|
||||
- [@apalm]
|
||||
- [@burjui]
|
||||
- [@connorkuehl]
|
||||
- [@dancrossnyc]
|
||||
- [@dureuill]
|
||||
- [@Evian-Zhang]
|
||||
- [@ExPixel]
|
||||
- [@flanfly]
|
||||
- [@glandium]
|
||||
- [@ibabushkin]
|
||||
- [@jackcmay]
|
||||
- [@jan-auer]
|
||||
- [@jessehui]
|
||||
- [@jdub]
|
||||
- [@johannst]
|
||||
- [@jrmuizel]
|
||||
- [@jsgf]
|
||||
- [@keith]
|
||||
- [@kjempelodott]
|
||||
- [@ko1n]
|
||||
- [@le-jzr]
|
||||
- [@Lichtso]
|
||||
- [@lion128]
|
||||
- [@llogiq]
|
||||
- [@lzutao]
|
||||
- [@lzybkr]
|
||||
- [@m4b]
|
||||
- [@messense]
|
||||
- [@mitsuhiko]
|
||||
- [@mkroening]
|
||||
- [@mre]
|
||||
- [@Mrmaxmeier]
|
||||
- [n01e0]
|
||||
- [@nico-abram]
|
||||
- [@npmccallum]
|
||||
- [@pchickey]
|
||||
- [@philipc]
|
||||
- [@Pzixel]
|
||||
- [@quake]
|
||||
- [@raindev]
|
||||
- [@rocallahan]
|
||||
- [@sanxiyn]
|
||||
- [@skdltmxn]
|
||||
- [@sollyucko]
|
||||
- [@Swatinem]
|
||||
- [@SquareMan]
|
||||
- [@tathanhdinh]
|
||||
- [@Techno-coder]
|
||||
- [@ticki]
|
||||
- [@Timmmm]
|
||||
- [@Tiwalun]
|
||||
- [@track-5]
|
||||
- [@tux3]
|
||||
- [@wickerwacka]
|
||||
- [@willglynn]
|
||||
- [@woodruffw]
|
||||
- [@wyxloading]
|
||||
- [@xcoldhandsx]
|
||||
|
||||
<!-- Contributors -->
|
||||
|
||||
[@2vg]: https://github.com/2vg
|
||||
[@alessandrod]: https://github.com/alessandrod
|
||||
[@amanieu]: https://github.com/amanieu
|
||||
[@apalm]: https://github.com/apalm
|
||||
[@burjui]: https://github.com/burjui
|
||||
[@connorkuehl]: https://github.com/connorkuehl
|
||||
[@dancrossnyc]: https://github.com/dancrossnyc
|
||||
[@dureuill]: https://github.com/dureuill
|
||||
[@Evian-Zhang]: https://github.com/Evian-Zhang
|
||||
[@ExPixel]: https://github.com/ExPixel
|
||||
[@flanfly]: https://github.com/flanfly
|
||||
[@glandium]: https://github.com/glandium
|
||||
[@ibabushkin]: https://github.com/ibabushkin
|
||||
[@jackcmay]: https://github.com/jackcmay
|
||||
[@jan-auer]: https://github.com/jan-auer
|
||||
[@jessehui]: https://github.com/jessehui
|
||||
[@johannst]: https://github.com/johannst
|
||||
[@jdub]: https://github.com/jdub
|
||||
[@jrmuizel]: https://github.com/jrmuizel
|
||||
[@jsgf]: https://github.com/jsgf
|
||||
[@keith]: https://github.com/keith
|
||||
[@kjempelodott]: https://github.com/kjempelodott
|
||||
[@ko1N]: https://github.com/ko1N
|
||||
[@le-jzr]: https://github.com/le-jzr
|
||||
[@Lichtso]: https://github.com/Lichtso
|
||||
[@lion128]: https://github.com/lion128
|
||||
[@llogiq]: https://github.com/llogiq
|
||||
[@lzutao]: https://github.com/lzutao
|
||||
[@lzybkr]: https://github.com/lzybkr
|
||||
[@m4b]: https://github.com/m4b
|
||||
[@messense]: https://github.com/messense
|
||||
[@mitsuhiko]: https://github.com/mitsuhiko
|
||||
[@mkroening]: https://github.com/mkroening
|
||||
[@mre]: https://github.com/mre
|
||||
[@Mrmaxmeier]: https://github.com/Mrmaxmeier
|
||||
[n01e0]: https://github.com/n01e0
|
||||
[@nico-abram]: https://github.com/nico-abram
|
||||
[@npmccallum]: https://github.com/npmccallum
|
||||
[@pchickey]: https://github.com/pchickey
|
||||
[@philipc]: https://github.com/philipc
|
||||
[@Pzixel]: https://github.com/Pzixel
|
||||
[@quake]: https://github.com/quake
|
||||
[@raindev]: https://github.com/raindev
|
||||
[@rocallahan]: https://github.com/rocallahan
|
||||
[@sanxiyn]: https://github.com/sanxiyn
|
||||
[@skdltmxn]: https://github.com/skdltmxn
|
||||
[@sollyucko]: https://github.com/sollyucko
|
||||
[@Swatinem]: https://github.com/Swatinem
|
||||
[@SquareMan]: https://github.com/SquareMan
|
||||
[@tathanhdinh]: https://github.com/tathanhdinh
|
||||
[@Techno-coder]: https://github.com/Techno-coder
|
||||
[@ticki]: https://github.com/ticki
|
||||
[@Timmmm]: https://github.com/Timmmm
|
||||
[@Tiwalun]: https://github.com/Tiwalun
|
||||
[@track-5]: https://github.com/track-5
|
||||
[@tux3]: https://github.com/tux3
|
||||
[@wickerwacka]: https://github.com/wickerwaka
|
||||
[@willglynn]: https://github.com/willglynn
|
||||
[@woodruffw]: https://github.com/woodruffw
|
||||
[@wyxloading]: https://github.com/wyxloading
|
||||
[@xcoldhandsx]: https://github.com/xcoldhandsx
|
||||
|
||||
|
|
|
|||
1 third_party/rust/goblin/etc/crt1.rs (vendored): file diff suppressed because one or more lines are too long
1 third_party/rust/goblin/etc/crt132.rs (vendored): file diff suppressed because one or more lines are too long
1 third_party/rust/goblin/etc/crt1a.rs (vendored): file diff suppressed because one or more lines are too long

45 third_party/rust/goblin/examples/ar.rs (vendored)

@@ -1,45 +0,0 @@
|
|||
//cargo run --example=ar -- crt1.a
|
||||
|
||||
use goblin::elf;
|
||||
use goblin::archive;
|
||||
use std::env;
|
||||
use std::path::Path;
|
||||
use std::fs::File;
|
||||
use std::io::Read;
|
||||
|
||||
pub fn main () {
|
||||
let len = env::args().len();
|
||||
if len <= 2 {
|
||||
println!("usage: ar <path to archive> member")
|
||||
} else {
|
||||
let mut path = String::default();
|
||||
let mut member = String::default();
|
||||
for (i, arg) in env::args().enumerate() {
|
||||
if i == 1 {
|
||||
path = arg.as_str().to_owned();
|
||||
} else if i == 2 {
|
||||
member = arg.as_str().to_owned();
|
||||
}
|
||||
}
|
||||
let path = Path::new(&path);
|
||||
let buffer = { let mut v = Vec::new(); let mut f = File::open(&path).unwrap(); f.read_to_end(&mut v).unwrap(); v};
|
||||
match archive::Archive::parse(&buffer) {
|
||||
Ok(archive) => {
|
||||
println!("{:#?}", &archive);
|
||||
println!("start: {:?}", archive.member_of_symbol("_start"));
|
||||
match archive.extract(&member, &buffer) {
|
||||
Ok(bytes) => {
|
||||
match elf::Elf::parse(&bytes) {
|
||||
Ok(elf) => {
|
||||
println!("got elf: {:#?}", elf);
|
||||
},
|
||||
Err(err) => println!("Err: {:?}", err)
|
||||
}
|
||||
},
|
||||
Err(err) => println!("Extraction Error: {:?}", err)
|
||||
}
|
||||
},
|
||||
Err(err) => println!("Err: {:?}", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
23 third_party/rust/goblin/examples/automagic.rs (vendored)

@@ -1,23 +0,0 @@
|
|||
use std::default::Default;
|
||||
|
||||
// demonstrates "automagical" elf32/64 switches via cfg on arch and pub use hacks.
|
||||
// SIZEOF_* will change depending on whether it's an x86_64 system or 32-bit x86, or really any cfg you can think of.
|
||||
// similarly the printers will be different, since they have different impls. #typepuns4life
|
||||
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
pub use goblin::elf64 as elf;
|
||||
|
||||
#[cfg(target_pointer_width = "32")]
|
||||
pub use goblin::elf32 as elf;
|
||||
|
||||
#[cfg(any(target_pointer_width = "64", target_pointer_width = "32"))]
|
||||
use crate::elf::{header, sym};
|
||||
|
||||
#[cfg(any(target_pointer_width = "64", target_pointer_width = "32"))]
|
||||
fn main() {
|
||||
let header: header::Header = Default::default();
|
||||
let sym: sym::Sym = Default::default();
|
||||
println!("header: {:?}, sym: {:?}", header, sym);
|
||||
println!("sizeof header: {}", header::SIZEOF_EHDR);
|
||||
println!("sizeof sym: {}", sym::SIZEOF_SYM);
|
||||
}
|
||||
|
|
@ -1,75 +0,0 @@
|
|||
/// Demonstrates how to read additional metadata (i.e. .Net runtime ones) from PE context
|
||||
|
||||
use goblin::container::Endian;
|
||||
use goblin::pe::data_directories::DataDirectory;
|
||||
use goblin::pe::PE;
|
||||
use goblin::pe::utils::get_data;
|
||||
use scroll::ctx::TryFromCtx;
|
||||
use scroll::Pread;
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Pread)]
|
||||
pub struct CliHeader {
|
||||
pub cb: u32,
|
||||
pub major_version: u16,
|
||||
pub minor_version: u16,
|
||||
pub metadata: DataDirectory,
|
||||
pub flags: u32,
|
||||
pub entry_point_token: u32,
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Debug)]
|
||||
struct MetadataRoot<'a> {
|
||||
pub signature: u32,
|
||||
pub major_version: u16,
|
||||
pub minor_version: u16,
|
||||
_reserved: u32,
|
||||
pub length: u32,
|
||||
pub version: &'a str,
|
||||
}
|
||||
|
||||
impl<'a> TryFromCtx<'a, Endian> for MetadataRoot<'a> {
|
||||
type Error = scroll::Error;
|
||||
fn try_from_ctx(src: &'a [u8], endian: Endian) -> Result<(Self, usize), Self::Error> {
|
||||
let offset = &mut 0;
|
||||
let signature = src.gread_with(offset, endian)?;
|
||||
let major_version = src.gread_with(offset, endian)?;
|
||||
let minor_version = src.gread_with(offset, endian)?;
|
||||
let reserved = src.gread_with(offset, endian)?;
|
||||
let length = src.gread_with(offset, endian)?;
|
||||
let version = src.gread(offset)?;
|
||||
Ok((
|
||||
Self {
|
||||
signature,
|
||||
major_version,
|
||||
minor_version,
|
||||
_reserved: reserved,
|
||||
length,
|
||||
version,
|
||||
},
|
||||
*offset,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let file = include_bytes!("../assets/dotnet_executable_example.dll");
|
||||
let file = &file[..];
|
||||
let pe = PE::parse(file).unwrap();
|
||||
if pe.header.coff_header.machine != 0x14c {
|
||||
panic!("Is not a .Net executable");
|
||||
}
|
||||
let optional_header = pe.header.optional_header.expect("No optional header");
|
||||
let file_alignment = optional_header.windows_fields.file_alignment;
|
||||
let cli_header = optional_header
|
||||
.data_directories
|
||||
.get_clr_runtime_header()
|
||||
.expect("No CLI header");
|
||||
let sections = &pe.sections;
|
||||
|
||||
let cli_header_value: CliHeader = get_data(file, sections, cli_header, file_alignment).unwrap();
|
||||
println!("{:#?}", cli_header_value);
|
||||
let metadata_root: MetadataRoot = get_data(file, sections, cli_header_value.metadata, file_alignment).unwrap();
|
||||
println!("{:#?}", metadata_root);
|
||||
}
|
||||
162 third_party/rust/goblin/examples/dyldinfo.rs (vendored)

@@ -1,162 +0,0 @@
|
|||
use goblin::mach;
|
||||
use std::env;
|
||||
use std::process;
|
||||
use std::path::Path;
|
||||
use std::fs::File;
|
||||
use std::io::Read;
|
||||
use std::borrow::Cow;
|
||||
|
||||
fn usage() -> ! {
|
||||
println!("usage: dyldinfo <options> <mach-o file>");
|
||||
println!(" -bind print binds as seen by macho::imports()");
|
||||
println!(" -lazy_bind print lazy binds as seen by macho::imports()");
|
||||
process::exit(1);
|
||||
}
|
||||
|
||||
fn name_to_str(name: &[u8; 16]) -> Cow<'_, str> {
|
||||
for i in 0..16 {
|
||||
if name[i] == 0 {
|
||||
return String::from_utf8_lossy(&name[0..i])
|
||||
}
|
||||
}
|
||||
String::from_utf8_lossy(&name[..])
|
||||
}
|
||||
|
||||
fn dylib_name(name: &str) -> &str {
|
||||
// observed behavior:
|
||||
// "/usr/lib/libc++.1.dylib" => "libc++"
|
||||
// "/usr/lib/libSystem.B.dylib" => "libSystem"
|
||||
// "/System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation" => "CoreFoundation"
|
||||
name
|
||||
.rsplit('/').next().unwrap()
|
||||
.split('.').next().unwrap()
|
||||
}
|
||||
|
||||
fn print_binds(sections: &[mach::segment::Section], imports: &[mach::imports::Import]) {
|
||||
println!("bind information:");
|
||||
|
||||
println!(
|
||||
"{:7} {:16} {:14} {:7} {:6} {:16} symbol",
|
||||
"segment",
|
||||
"section",
|
||||
"address",
|
||||
"type",
|
||||
"addend",
|
||||
"dylib",
|
||||
);
|
||||
|
||||
for import in imports.iter().filter(|i| !i.is_lazy) {
|
||||
// find the section that imported this symbol
|
||||
let section = sections.iter()
|
||||
.find(|s| import.address >= s.addr && import.address < (s.addr + s.size));
|
||||
|
||||
// get &strs for its name
|
||||
let (segname, sectname) = section
|
||||
.map(|sect| (name_to_str(§.segname), name_to_str(§.sectname)))
|
||||
.unwrap_or((Cow::Borrowed("?"), Cow::Borrowed("?")));
|
||||
|
||||
println!(
|
||||
"{:7} {:16} 0x{:<12X} {:7} {:6} {:16} {}{}",
|
||||
segname,
|
||||
sectname,
|
||||
import.address,
|
||||
"pointer",
|
||||
import.addend,
|
||||
dylib_name(import.dylib),
|
||||
import.name,
|
||||
if import.is_weak { " (weak import)" } else { "" }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn print_lazy_binds(sections: &[mach::segment::Section], imports: &[mach::imports::Import]) {
|
||||
println!("lazy binding information (from lazy_bind part of dyld info):");
|
||||
|
||||
println!(
|
||||
"{:7} {:16} {:10} {:6} {:16} symbol",
|
||||
"segment",
|
||||
"section",
|
||||
"address",
|
||||
"index",
|
||||
"dylib",
|
||||
);
|
||||
|
||||
for import in imports.iter().filter(|i| i.is_lazy) {
|
||||
// find the section that imported this symbol
|
||||
let section = sections.iter()
|
||||
.find(|s| import.address >= s.addr && import.address < (s.addr + s.size));
|
||||
|
||||
// get &strs for its name
|
||||
let (segname, sectname) = section
|
||||
.map(|sect| (name_to_str(§.segname), name_to_str(§.sectname)))
|
||||
.unwrap_or((Cow::Borrowed("?"), Cow::Borrowed("?")));
|
||||
|
||||
println!(
|
||||
"{:7} {:16} 0x{:<8X} {:<06} {:16} {}",
|
||||
segname,
|
||||
sectname,
|
||||
import.address,
|
||||
format!("0x{:04X}", import.start_of_sequence_offset),
|
||||
dylib_name(import.dylib),
|
||||
import.name
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn main () {
|
||||
let len = env::args().len();
|
||||
|
||||
let mut bind = false;
|
||||
let mut lazy_bind = false;
|
||||
|
||||
if len <= 2 {
|
||||
usage();
|
||||
} else {
|
||||
// parse flags
|
||||
{
|
||||
let mut flags = env::args().collect::<Vec<_>>();
|
||||
flags.pop();
|
||||
flags.remove(0);
|
||||
for option in flags {
|
||||
match option.as_str() {
|
||||
"-bind" => { bind = true }
|
||||
"-lazy_bind" => { lazy_bind = true }
|
||||
other => {
|
||||
println!("unknown flag: {}", other);
|
||||
println!();
|
||||
usage();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// open the file
|
||||
let path = env::args_os().last().unwrap();
|
||||
let path = Path::new(&path);
|
||||
let buffer = { let mut v = Vec::new(); let mut f = File::open(&path).unwrap(); f.read_to_end(&mut v).unwrap(); v};
|
||||
match mach::MachO::parse(&buffer, 0) {
|
||||
Ok(macho) => {
|
||||
// collect sections and sort by address
|
||||
let mut sections: Vec<mach::segment::Section> = Vec::new();
|
||||
for sects in macho.segments.sections() {
|
||||
sections.extend(sects.map(|r| r.expect("section").0));
|
||||
}
|
||||
sections.sort_by_key(|s| s.addr);
|
||||
|
||||
// get the imports
|
||||
let imports = macho.imports().expect("imports");
|
||||
|
||||
if bind {
|
||||
print_binds(§ions, &imports);
|
||||
}
|
||||
if lazy_bind {
|
||||
print_lazy_binds(§ions, &imports);
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
println!("err: {:?}", err);
|
||||
process::exit(2);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
68 third_party/rust/goblin/examples/lipo.rs (vendored)

@@ -1,68 +0,0 @@
|
|||
use goblin::mach::{self, Mach};
|
||||
use std::env;
|
||||
use std::process;
|
||||
use std::path::Path;
|
||||
use std::fs::File;
|
||||
use std::io::{Read, Write};
|
||||
|
||||
fn usage() -> ! {
|
||||
println!("usage: lipo <options> <mach-o fat file>");
|
||||
println!(" -m64 Extracts and writes the 64-bit binary in this fat container, if any");
|
||||
process::exit(1);
|
||||
}
|
||||
|
||||
fn main () {
|
||||
let len = env::args().len();
|
||||
|
||||
if len <= 1 {
|
||||
usage();
|
||||
} else {
|
||||
let mut m64 = false;
|
||||
{
|
||||
let mut flags = env::args().collect::<Vec<_>>();
|
||||
flags.pop();
|
||||
flags.remove(0);
|
||||
for option in flags {
|
||||
match option.as_str() {
|
||||
"-m64" => { m64 = true }
|
||||
other => {
|
||||
println!("unknown flag: {}", other);
|
||||
println!();
|
||||
usage();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let path_name = env::args_os().last().unwrap();
|
||||
let path = Path::new(&path_name);
|
||||
let buffer = { let mut v = Vec::new(); let mut f = File::open(&path).unwrap(); f.read_to_end(&mut v).unwrap(); v};
|
||||
match mach::Mach::parse(&buffer) {
|
||||
Ok(Mach::Binary(_macho)) => {
|
||||
println!("Already a single arch binary");
|
||||
process::exit(2);
|
||||
},
|
||||
Ok(Mach::Fat(fat)) => {
|
||||
for (i, arch) in fat.iter_arches().enumerate() {
|
||||
let arch = arch.unwrap();
|
||||
let name = format!("{}.{}", &path_name.to_string_lossy(), i);
|
||||
let path = Path::new(&name);
|
||||
if arch.is_64() && m64 {
|
||||
let bytes = &buffer[arch.offset as usize..][..arch.size as usize];
|
||||
let mut file = File::create(path).unwrap();
|
||||
file.write_all(bytes).unwrap();
|
||||
break;
|
||||
} else if !m64 {
|
||||
let bytes = &buffer[arch.offset as usize..][..arch.size as usize];
|
||||
let mut file = File::create(path).unwrap();
|
||||
file.write_all(bytes).unwrap();
|
||||
}
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
println!("err: {:?}", err);
|
||||
process::exit(2);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
25 third_party/rust/goblin/examples/rdr.rs (vendored)

@@ -1,25 +0,0 @@
|
|||
use goblin::error;
|
||||
use std::path::Path;
|
||||
use std::env;
|
||||
use std::fs::File;
|
||||
use std::io::Read;
|
||||
|
||||
fn run () -> error::Result<()> {
|
||||
for (i, arg) in env::args().enumerate() {
|
||||
if i == 1 {
|
||||
let path = Path::new(arg.as_str());
|
||||
let mut fd = File::open(path)?;
|
||||
let buffer = { let mut v = Vec::new(); fd.read_to_end(&mut v).unwrap(); v};
|
||||
let res = goblin::Object::parse(&buffer)?;
|
||||
println!("{:#?}", res);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn main () {
|
||||
match run() {
|
||||
Ok(()) => (),
|
||||
Err(err) => println!("{:#}", err)
|
||||
}
|
||||
}
|
||||
31 third_party/rust/goblin/examples/scroll.rs (vendored)

@@ -1,31 +0,0 @@
|
|||
/// Demonstrates the magical powers of scroll + goblin
|
||||
/// Goblin implements `TryFromCtx` for the header type
|
||||
/// which means downstream crates/clients can just "parse" headers out of
|
||||
/// arbitrary buffers, without learning new crate specific function names
|
||||
/// I.e., all you need are Types + Pread = Happiness
|
||||
|
||||
use goblin::{error, elf64, elf};
|
||||
use scroll::{Pwrite, Pread};
|
||||
|
||||
fn run () -> error::Result<()> {
|
||||
let crt1: Vec<u8> = include!("../etc/crt1.rs");
|
||||
let header: elf64::header::Header = crt1.pread(0)?;
|
||||
assert_eq!(header.e_type, elf64::header::ET_REL);
|
||||
println!("header: {:?}", &header);
|
||||
// now lets write the header into some bytes
|
||||
let mut bytes = [0u8; elf64::header::SIZEOF_EHDR];
|
||||
bytes.pwrite(header, 0)?;
|
||||
// read it back out
|
||||
let header2: elf64::header::Header = bytes.pread(0)?;
|
||||
// they're the same
|
||||
assert_eq!(header, header2);
|
||||
let elf: elf::Elf = crt1.pread(0)?;
|
||||
println!("elf: {:#?}", &elf);
|
||||
let elf = elf::Elf::parse(&crt1)?;
|
||||
println!("elf: {:#?}", &elf);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn main() {
|
||||
run().unwrap();
|
||||
}
|
||||
185 third_party/rust/goblin/src/archive/mod.rs (vendored)

@@ -8,12 +8,12 @@
|
|||
|
||||
use scroll::{Pread, Pwrite, SizeWith};
|
||||
|
||||
use crate::error::{Error, Result};
|
||||
use crate::strtab;
|
||||
use crate::error::{Result, Error};
|
||||
|
||||
use core::usize;
|
||||
use alloc::collections::btree_map::BTreeMap;
|
||||
use alloc::vec::Vec;
|
||||
use core::usize;
|
||||
|
||||
pub const SIZEOF_MAGIC: usize = 8;
|
||||
/// The magic number of a Unix Archive
|
||||
|
|
@ -68,12 +68,22 @@ pub const SIZEOF_HEADER: usize = SIZEOF_FILE_IDENTIFER + 12 + 6 + 6 + 8 + SIZEOF
|
|||
|
||||
impl MemberHeader {
|
||||
pub fn name(&self) -> Result<&str> {
|
||||
Ok(self.identifier.pread_with::<&str>(0, ::scroll::ctx::StrCtx::Length(SIZEOF_FILE_IDENTIFER))?)
|
||||
Ok(self
|
||||
.identifier
|
||||
.pread_with::<&str>(0, ::scroll::ctx::StrCtx::Length(SIZEOF_FILE_IDENTIFER))?)
|
||||
}
|
||||
pub fn size(&self) -> Result<usize> {
|
||||
match usize::from_str_radix(self.file_size.pread_with::<&str>(0, ::scroll::ctx::StrCtx::Length(self.file_size.len()))?.trim_end(), 10) {
|
||||
match usize::from_str_radix(
|
||||
self.file_size
|
||||
.pread_with::<&str>(0, ::scroll::ctx::StrCtx::Length(self.file_size.len()))?
|
||||
.trim_end(),
|
||||
10,
|
||||
) {
|
||||
Ok(file_size) => Ok(file_size),
|
||||
Err(err) => Err(Error::Malformed(format!("{:?} Bad file_size in header: {:?}", err, self)))
|
||||
Err(err) => Err(Error::Malformed(format!(
|
||||
"{:?} Bad file_size in header: {:?}",
|
||||
err, self
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -99,9 +109,15 @@ impl<'a> Member<'a> {
|
|||
/// This is because just like members in the archive, the data section is 2-byte aligned.
|
||||
pub fn parse(buffer: &'a [u8], offset: &mut usize) -> Result<Member<'a>> {
|
||||
let header_offset = *offset;
|
||||
let name = buffer.pread_with::<&str>(*offset, ::scroll::ctx::StrCtx::Length(SIZEOF_FILE_IDENTIFER))?;
|
||||
let name = buffer.pread_with::<&str>(
|
||||
*offset,
|
||||
::scroll::ctx::StrCtx::Length(SIZEOF_FILE_IDENTIFER),
|
||||
)?;
|
||||
let archive_header = buffer.gread::<MemberHeader>(offset)?;
|
||||
let mut header = Header { name, size: archive_header.size()? };
|
||||
let mut header = Header {
|
||||
name,
|
||||
size: archive_header.size()?,
|
||||
};
|
||||
|
||||
// skip newline padding if we're on an uneven byte boundary
|
||||
if *offset & 1 == 1 {
|
||||
|
|
@ -110,7 +126,10 @@ impl<'a> Member<'a> {
|
|||
|
||||
let bsd_name = if let Some(len) = Self::bsd_filename_length(name) {
|
||||
// there's a filename of length `len` right after the header
|
||||
let name = buffer.pread_with::<&str>(header_offset + SIZEOF_HEADER, ::scroll::ctx::StrCtx::Length(len))?;
|
||||
let name = buffer.pread_with::<&str>(
|
||||
header_offset + SIZEOF_HEADER,
|
||||
::scroll::ctx::StrCtx::Length(len),
|
||||
)?;
|
||||
|
||||
// adjust the offset and size accordingly
|
||||
*offset = header_offset + SIZEOF_HEADER + len;
|
||||
|
|
@ -169,7 +188,6 @@ impl<'a> Member<'a> {
|
|||
pub fn raw_name(&self) -> &'a str {
|
||||
self.header.name
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
|
|
@ -198,13 +216,18 @@ impl<'a> Index<'a> {
|
|||
pub fn parse_sysv_index(buffer: &'a [u8]) -> Result<Self> {
|
||||
let offset = &mut 0;
|
||||
let sizeof_table = buffer.gread_with::<u32>(offset, scroll::BE)? as usize;
|
||||
|
||||
if sizeof_table > buffer.len() / 4 {
|
||||
return Err(Error::BufferTooShort(sizeof_table, "indices"));
|
||||
}
|
||||
|
||||
let mut indexes = Vec::with_capacity(sizeof_table);
|
||||
for _ in 0..sizeof_table {
|
||||
indexes.push(buffer.gread_with::<u32>(offset, scroll::BE)?);
|
||||
}
|
||||
let sizeof_strtab = buffer.len() - ((sizeof_table * 4) + 4);
|
||||
let strtab = strtab::Strtab::parse(buffer, *offset, sizeof_strtab, 0x0)?;
|
||||
Ok (Index {
|
||||
Ok(Index {
|
||||
size: sizeof_table,
|
||||
symbol_indexes: indexes,
|
||||
strtab: strtab.to_vec()?, // because i'm lazy
|
||||
|
|
@ -252,6 +275,10 @@ impl<'a> Index<'a> {
|
|||
let strtab_bytes = buffer.pread_with::<u32>(entries_bytes + 4, scroll::LE)? as usize;
|
||||
let strtab = strtab::Strtab::parse(buffer, entries_bytes + 8, strtab_bytes, 0x0)?;
|
||||
|
||||
if entries_bytes > buffer.len() {
|
||||
return Err(Error::BufferTooShort(entries, "entries"));
|
||||
}
|
||||
|
||||
// build the index
|
||||
let mut indexes = Vec::with_capacity(entries);
|
||||
let mut strings = Vec::with_capacity(entries);
|
||||
|
|
@ -262,18 +289,21 @@ impl<'a> Index<'a> {
|
|||
// Therefore, the `i`th entry starts at offset `(i*8)+4`. The first u32 is at that
|
||||
// address, and the second u32 follows 4 bytes later.
|
||||
let string_offset: u32 = buffer.pread_with(i * 8 + 4, scroll::LE)?;
|
||||
let archive_member: u32 = buffer.pread_with(i * 8 + 8, scroll::LE)?;
|
||||
let archive_member: u32 = buffer.pread_with(i * 8 + 8, scroll::LE)?;
|
||||
|
||||
let string = match strtab.get(string_offset as usize) {
|
||||
Some(result) => result,
|
||||
None => Err(Error::Malformed(format!("{} entry {} has string offset {}, which is out of bounds", BSD_SYMDEF_NAME, i, string_offset)))
|
||||
let string = match strtab.get_at(string_offset as usize) {
|
||||
Some(result) => Ok(result),
|
||||
None => Err(Error::Malformed(format!(
|
||||
"{} entry {} has string offset {}, which is out of bounds",
|
||||
BSD_SYMDEF_NAME, i, string_offset
|
||||
))),
|
||||
}?;
|
||||
|
||||
indexes.push(archive_member);
|
||||
strings.push(string);
|
||||
}
|
||||
|
||||
Ok (Index {
|
||||
Ok(Index {
|
||||
size: entries,
|
||||
symbol_indexes: indexes,
|
||||
strtab: strings,
|
||||
|
|
@ -290,14 +320,26 @@ impl<'a> Index<'a> {
|
|||
pub fn parse_windows_linker_member(buffer: &'a [u8]) -> Result<Self> {
|
||||
let offset = &mut 0;
|
||||
let members = buffer.gread_with::<u32>(offset, scroll::LE)? as usize;
|
||||
|
||||
if members > buffer.len() / 4 {
|
||||
return Err(Error::BufferTooShort(members, "members"));
|
||||
}
|
||||
|
||||
let mut member_offsets = Vec::with_capacity(members);
|
||||
for _ in 0..members {
|
||||
member_offsets.push(buffer.gread_with::<u32>(offset, scroll::LE)?);
|
||||
}
|
||||
|
||||
let symbols = buffer.gread_with::<u32>(offset, scroll::LE)? as usize;
|
||||
|
||||
if symbols > buffer.len() / 2 {
|
||||
return Err(Error::BufferTooShort(symbols, "symbols"));
|
||||
}
|
||||
|
||||
let mut symbol_offsets = Vec::with_capacity(symbols);
|
||||
for _ in 0..symbols {
|
||||
symbol_offsets.push(member_offsets[buffer.gread_with::<u16>(offset, scroll::LE)? as usize - 1]);
|
||||
symbol_offsets
|
||||
.push(member_offsets[buffer.gread_with::<u16>(offset, scroll::LE)? as usize - 1]);
|
||||
}
|
||||
let strtab = strtab::Strtab::parse(buffer, *offset, buffer.len() - *offset, 0x0)?;
|
||||
Ok(Index {
|
||||
|
|
@ -313,7 +355,7 @@ impl<'a> Index<'a> {
|
|||
/// of the archive.
|
||||
#[derive(Debug, Default)]
|
||||
struct NameIndex<'a> {
|
||||
strtab: strtab::Strtab<'a>
|
||||
strtab: strtab::Strtab<'a>,
|
||||
}
|
||||
|
||||
impl<'a> NameIndex<'a> {
|
||||
|
|
@ -321,32 +363,37 @@ impl<'a> NameIndex<'a> {
|
|||
// This is a total hack, because strtab returns "" if idx == 0, need to change
|
||||
// but previous behavior might rely on this, as ELF strtab's have "" at 0th index...
|
||||
let hacked_size = size + 1;
|
||||
let strtab = strtab::Strtab::parse(buffer, *offset-1, hacked_size, b'\n')?;
|
||||
let strtab = strtab::Strtab::parse(buffer, *offset - 1, hacked_size, b'\n')?;
|
||||
// precious time was lost when refactoring because strtab::parse doesn't update the mutable seek...
|
||||
*offset += hacked_size - 2;
|
||||
Ok (NameIndex {
|
||||
strtab
|
||||
})
|
||||
Ok(NameIndex { strtab })
|
||||
}
|
||||
|
||||
pub fn get(&self, name: &str) -> Result<&'a str> {
|
||||
let idx = name.trim_start_matches('/').trim_end();
|
||||
match usize::from_str_radix(idx, 10) {
|
||||
Ok(idx) => {
|
||||
let name = match self.strtab.get(idx+1) {
|
||||
Some(result) => result,
|
||||
None => Err(Error::Malformed(format!("Name {} is out of range in archive NameIndex", name)))
|
||||
let name = match self.strtab.get_at(idx + 1) {
|
||||
Some(result) => Ok(result),
|
||||
None => Err(Error::Malformed(format!(
|
||||
"Name {} is out of range in archive NameIndex",
|
||||
name
|
||||
))),
|
||||
}?;
|
||||
|
||||
if name != "" {
|
||||
Ok(name.trim_end_matches('/'))
|
||||
} else {
|
||||
Err(Error::Malformed(format!("Could not find {:?} in index", name)))
|
||||
} else {
|
||||
Err(Error::Malformed(format!(
|
||||
"Could not find {:?} in index",
|
||||
name
|
||||
)))
|
||||
}
|
||||
},
|
||||
Err (_) => {
|
||||
Err(Error::Malformed(format!("Bad name index {:?} in index", name)))
|
||||
}
|
||||
Err(_) => Err(Error::Malformed(format!(
|
||||
"Bad name index {:?} in index",
|
||||
name
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -371,23 +418,17 @@ pub enum IndexType {
|
|||
#[derive(Debug)]
|
||||
/// An in-memory representation of a parsed Unix Archive
|
||||
pub struct Archive<'a> {
|
||||
// we can chuck this because the symbol index is a better representation, but we keep for
|
||||
// debugging
|
||||
index: Index<'a>,
|
||||
sysv_name_index: NameIndex<'a>,
|
||||
// the array of members, which are indexed by the members hash and symbol index
|
||||
// The array of members, which are indexed by the members hash and symbol index.
|
||||
// These are in the same order they are found in the file.
|
||||
member_array: Vec<Member<'a>>,
|
||||
// file name -> member
|
||||
members: BTreeMap<&'a str, usize>,
|
||||
// symbol -> member
|
||||
symbol_index: BTreeMap<&'a str, usize>,
|
||||
/// Type of the symbol index that was found in the archive.
|
||||
index_type: IndexType,
|
||||
}
|
||||
|
||||
|
||||
impl<'a> Archive<'a> {
|
||||
pub fn parse(buffer: &'a [u8]) -> Result<Archive<'a>> {
|
||||
|
||||
let mut magic = [0u8; SIZEOF_MAGIC];
|
||||
let offset = &mut 0usize;
|
||||
buffer.gread_inout(offset, &mut magic)?;
|
||||
|
|
@ -416,28 +457,33 @@ impl<'a> Archive<'a> {
|
|||
IndexType::None => {
|
||||
index_type = IndexType::SysV;
|
||||
Index::parse_sysv_index(data)?
|
||||
},
|
||||
}
|
||||
IndexType::SysV => {
|
||||
index_type = IndexType::Windows;
|
||||
// second symbol index is Microsoft's extension of SysV format
|
||||
Index::parse_windows_linker_member(data)?
|
||||
},
|
||||
IndexType::BSD => return Err(Error::Malformed("SysV index occurs after BSD index".into())),
|
||||
IndexType::Windows => return Err(Error::Malformed("More than two Windows Linker members".into())),
|
||||
}
|
||||
IndexType::BSD => {
|
||||
return Err(Error::Malformed("SysV index occurs after BSD index".into()))
|
||||
}
|
||||
IndexType::Windows => {
|
||||
return Err(Error::Malformed(
|
||||
"More than two Windows Linker members".into(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
} else if member.bsd_name == Some(BSD_SYMDEF_NAME) || member.bsd_name == Some(BSD_SYMDEF_SORTED_NAME) {
|
||||
} else if member.bsd_name == Some(BSD_SYMDEF_NAME)
|
||||
|| member.bsd_name == Some(BSD_SYMDEF_SORTED_NAME)
|
||||
{
|
||||
if index_type != IndexType::None {
|
||||
return Err(Error::Malformed("BSD index occurs after SysV index".into()));
|
||||
}
|
||||
index_type = IndexType::BSD;
|
||||
let data: &[u8] = buffer.pread_with(member.offset as usize, member.size())?;
|
||||
index = Index::parse_bsd_symdef(data)?;
|
||||
|
||||
} else if name == NAME_INDEX_NAME {
|
||||
let mut name_index_offset: usize = member.offset as usize;
|
||||
sysv_name_index = NameIndex::parse(buffer, &mut name_index_offset, member.size())?;
|
||||
|
||||
} else {
|
||||
// record this as an archive member
|
||||
member_array.push(member);
|
||||
|
|
@ -464,23 +510,26 @@ impl<'a> Archive<'a> {
|
|||
// build the symbol index, translating symbol names into member indexes
|
||||
let mut symbol_index: BTreeMap<&str, usize> = BTreeMap::new();
|
||||
for (member_offset, name) in index.symbol_indexes.iter().zip(index.strtab.iter()) {
|
||||
let member_index = *member_index_by_offset.get(member_offset)
|
||||
.ok_or(Error::Malformed(format!("Could not get member {:?} at offset: {}", name, member_offset)))?;
|
||||
let member_index = *member_index_by_offset.get(member_offset).ok_or_else(|| {
|
||||
Error::Malformed(format!(
|
||||
"Could not get member {:?} at offset: {}",
|
||||
name, member_offset
|
||||
))
|
||||
})?;
|
||||
symbol_index.insert(&name, member_index);
|
||||
}
|
||||
|
||||
Ok(Archive {
|
||||
index,
|
||||
member_array,
|
||||
sysv_name_index,
|
||||
members,
|
||||
symbol_index,
|
||||
index_type,
|
||||
})
|
||||
}
|
||||
|
||||
/// Get the member named `member` in this archive, if any
|
||||
pub fn get (&self, member: &str) -> Option<&Member> {
|
||||
/// Get the member named `member` in this archive, if any. If there are
|
||||
/// multiple files in the archive with the same name it only returns one
|
||||
/// of them.
|
||||
pub fn get(&self, member: &str) -> Option<&Member> {
|
||||
if let Some(idx) = self.members.get(member) {
|
||||
Some(&self.member_array[*idx])
|
||||
} else {
|
||||
|
|
@ -488,23 +537,36 @@ impl<'a> Archive<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Get the member at position `index` in this archive, if any.
|
||||
pub fn get_at(&self, index: usize) -> Option<&Member> {
|
||||
self.member_array.get(index)
|
||||
}
|
||||
|
||||
/// Return the number of archive members.
|
||||
pub fn len(&self) -> usize {
|
||||
self.member_array.len()
|
||||
}
|
||||
|
||||
/// Returns a slice of the raw bytes for the given `member` in the scrollable `buffer`
|
||||
pub fn extract<'b>(&self, member: &str, buffer: &'b [u8]) -> Result<&'b [u8]> {
|
||||
if let Some(member) = self.get(member) {
|
||||
let bytes = buffer.pread_with(member.offset as usize, member.size())?;
|
||||
Ok(bytes)
|
||||
} else {
|
||||
Err(Error::Malformed(format!("Cannot extract member {:?}", member)))
|
||||
Err(Error::Malformed(format!(
|
||||
"Cannot extract member {:?}",
|
||||
member
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets a summary of this archive, returning a list of membername, the member, and the list of symbols the member contains
|
||||
pub fn summarize(&self) -> Vec<(&str, &Member, Vec<&'a str>)> {
|
||||
// build a result array, with indexes matching the member indexes
|
||||
let mut result = self.member_array.iter()
|
||||
.map(|ref member| {
|
||||
(member.extended_name(), *member, Vec::new())
|
||||
})
|
||||
let mut result = self
|
||||
.member_array
|
||||
.iter()
|
||||
.map(|ref member| (member.extended_name(), *member, Vec::new()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// walk the symbol index once, adding each symbol to the appropriate result Vec
|
||||
|
|
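As an aside, a small sketch (not part of the diff) of how the archive API touched by these hunks is typically driven, following the same calls as the `examples/ar.rs` file removed earlier in this commit; the archive path and member name here are made-up assumptions:

```rust
use goblin::archive::Archive;
use std::{env, fs};

fn main() -> goblin::error::Result<()> {
    // Hypothetical invocation: `ar-demo libfoo.a foo.o`
    let mut args = env::args().skip(1);
    let path = args.next().expect("path to archive");
    let member = args.next().expect("member name");

    let buffer = fs::read(path)?;
    let archive = Archive::parse(&buffer)?;

    // Which member defines `_start`, if any?
    println!("_start lives in: {:?}", archive.member_of_symbol("_start"));

    // Slice the raw bytes of one member back out of the original buffer.
    let bytes = archive.extract(&member, &buffer)?;
    println!("{} is {} bytes", member, bytes.len());
    Ok(())
}
```
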
@ -516,12 +578,17 @@ impl<'a> Archive<'a> {
|
|||
}
|
||||
|
||||
/// Get the list of member names in this archive
|
||||
///
|
||||
/// This returns members in alphabetical order, not in the order they
|
||||
/// occurred in the archive. If there are multiple files with the same
|
||||
/// name, the size of the returned array will be less than the size of
|
||||
/// `len()`.
|
||||
pub fn members(&self) -> Vec<&'a str> {
|
||||
self.members.keys().cloned().collect()
|
||||
}
|
||||
|
||||
/// Returns the member's name which contains the given `symbol`, if it is in the archive
|
||||
pub fn member_of_symbol (&self, symbol: &str) -> Option<&'a str> {
|
||||
pub fn member_of_symbol(&self, symbol: &str) -> Option<&'a str> {
|
||||
if let Some(idx) = self.symbol_index.get(symbol) {
|
||||
Some(self.member_array[*idx].extended_name())
|
||||
} else {
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ macro_rules! elf_compression_header {
|
|||
.finish()
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// ZLIB/DEFLATE algorithm.
|
||||
|
|
@ -27,59 +27,60 @@ pub const ELFCOMPRESS_LOPROC: u32 = 0x7000_0000;
|
|||
/// End of processor-specific.
|
||||
pub const ELFCOMPRESS_HIPROC: u32 = 0x7fff_ffff;
|
||||
|
||||
macro_rules! elf_compression_header_std_impl { ($size:ty) => {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
#[test]
|
||||
fn size_of() {
|
||||
assert_eq!(::std::mem::size_of::<CompressionHeader>(), SIZEOF_CHDR);
|
||||
}
|
||||
}
|
||||
|
||||
if_alloc! {
|
||||
use crate::elf::compression_header::CompressionHeader as ElfCompressionHeader;
|
||||
|
||||
use plain::Plain;
|
||||
|
||||
if_std! {
|
||||
use crate::error::Result;
|
||||
|
||||
use std::fs::File;
|
||||
use std::io::{Read, Seek};
|
||||
use std::io::SeekFrom::Start;
|
||||
macro_rules! elf_compression_header_std_impl {
|
||||
($size:ty) => {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
#[test]
|
||||
fn size_of() {
|
||||
assert_eq!(::std::mem::size_of::<CompressionHeader>(), SIZEOF_CHDR);
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CompressionHeader> for ElfCompressionHeader {
|
||||
fn from(ch: CompressionHeader) -> Self {
|
||||
ElfCompressionHeader {
|
||||
ch_type: ch.ch_type,
|
||||
ch_size: u64::from(ch.ch_size),
|
||||
ch_addralign: u64::from(ch.ch_addralign),
|
||||
if_alloc! {
|
||||
use crate::elf::compression_header::CompressionHeader as ElfCompressionHeader;
|
||||
|
||||
use plain::Plain;
|
||||
|
||||
if_std! {
|
||||
use crate::error::Result;
|
||||
|
||||
use std::fs::File;
|
||||
use std::io::{Read, Seek};
|
||||
use std::io::SeekFrom::Start;
|
||||
}
|
||||
|
||||
impl From<CompressionHeader> for ElfCompressionHeader {
|
||||
fn from(ch: CompressionHeader) -> Self {
|
||||
ElfCompressionHeader {
|
||||
ch_type: ch.ch_type,
|
||||
ch_size: u64::from(ch.ch_size),
|
||||
ch_addralign: u64::from(ch.ch_addralign),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CompressionHeader {
|
||||
pub fn from_bytes(bytes: &[u8]) -> CompressionHeader {
|
||||
let mut chdr = CompressionHeader::default();
|
||||
chdr.copy_from_bytes(bytes).expect("buffer is too short for header");
|
||||
chdr
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
pub fn from_fd(fd: &mut File, offset: u64) -> Result<CompressionHeader> {
|
||||
let mut chdr = CompressionHeader::default();
|
||||
fd.seek(Start(offset))?;
|
||||
unsafe {
|
||||
fd.read_exact(plain::as_mut_bytes(&mut chdr))?;
|
||||
impl CompressionHeader {
|
||||
pub fn from_bytes(bytes: &[u8]) -> CompressionHeader {
|
||||
let mut chdr = CompressionHeader::default();
|
||||
chdr.copy_from_bytes(bytes).expect("buffer is too short for header");
|
||||
chdr
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
pub fn from_fd(fd: &mut File, offset: u64) -> Result<CompressionHeader> {
|
||||
let mut chdr = CompressionHeader::default();
|
||||
fd.seek(Start(offset))?;
|
||||
unsafe {
|
||||
fd.read_exact(plain::as_mut_bytes(&mut chdr))?;
|
||||
}
|
||||
Ok(chdr)
|
||||
}
|
||||
Ok(chdr)
|
||||
}
|
||||
}
|
||||
} // end if_alloc
|
||||
};}
|
||||
} // end if_alloc
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(feature = "alloc")]
|
||||
use scroll::{Pread, Pwrite, SizeWith};
|
||||
|
|
@ -119,7 +120,6 @@ pub mod compression_header32 {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
pub mod compression_header64 {
|
||||
pub use crate::elf::compression_header::*;
|
||||
|
||||
|
|
|
|||
|
|
@ -333,7 +333,9 @@ pub const EM_BA2: u16 = 202;
|
|||
pub const EM_XCORE: u16 = 203;
|
||||
/// Microchip 8-bit PIC(r)
|
||||
pub const EM_MCHP_PIC: u16 = 204;
|
||||
// reserved 205-209
|
||||
/// Intel Graphics Technology
|
||||
pub const EM_INTELGT: u16 = 205;
|
||||
// reserved 206-209
|
||||
/// KM211 KM32
|
||||
pub const EM_KM32: u16 = 210;
|
||||
/// KM211 KMX32
|
||||
|
|
@ -371,6 +373,9 @@ pub const EM_RISCV: u16 = 243;
|
|||
/// Linux BPF -- in-kernel virtual machine
|
||||
pub const EM_BPF: u16 = 247;
|
||||
|
||||
/// C-SKY
|
||||
pub const EM_CSKY: u16 = 252;
|
||||
|
||||
pub const EM_NUM: u16 = 248;
|
||||
|
||||
/// Convert machine to str representation
|
||||
|
|
|
|||
144 third_party/rust/goblin/src/elf/dynamic.rs (vendored)

@@ -225,6 +225,17 @@ pub const DF_BIND_NOW: u64 = 0x0000_0008;
|
|||
/// Module uses the static TLS model.
|
||||
pub const DF_STATIC_TLS: u64 = 0x0000_0010;
|
||||
|
||||
pub fn df_tag_to_str(tag: u64) -> &'static str {
|
||||
match tag {
|
||||
DF_ORIGIN => "DF_ORIGIN",
|
||||
DF_SYMBOLIC => "DF_SYMBOLIC",
|
||||
DF_TEXTREL => "DF_TEXTREL",
|
||||
DF_BIND_NOW => "DF_BIND_NOW",
|
||||
DF_STATIC_TLS => "DF_STATIC_TLS",
|
||||
_ => "UNKNOWN_TAG",
|
||||
}
|
||||
}
|
||||
|
||||
/// === State flags ===
|
||||
/// selectable in the `d_un.d_val` element of the DT_FLAGS_1 entry in the dynamic section.
|
||||
///
|
||||
|
|
@ -275,6 +286,41 @@ pub const DF_1_SYMINTPOSE: u64 = 0x0080_0000;
|
|||
pub const DF_1_GLOBAUDIT: u64 = 0x0100_0000;
|
||||
/// Singleton dyn are used.
|
||||
pub const DF_1_SINGLETON: u64 = 0x0200_0000;
|
||||
/// Object is a Position Independent Executable (PIE).
|
||||
pub const DF_1_PIE: u64 = 0x0800_0000;
|
||||
|
||||
pub fn df_1_tag_to_str(tag: u64) -> &'static str {
|
||||
match tag {
|
||||
DF_1_NOW => "DF_1_NOW",
|
||||
DF_1_GLOBAL => "DF_1_GLOBAL",
|
||||
DF_1_GROUP => "DF_1_GROUP",
|
||||
DF_1_NODELETE => "DF_1_NODELETE",
|
||||
DF_1_LOADFLTR => "DF_1_LOADFLTR",
|
||||
DF_1_INITFIRST => "DF_1_INITFIRST",
|
||||
DF_1_NOOPEN => "DF_1_NOOPEN",
|
||||
DF_1_ORIGIN => "DF_1_ORIGIN",
|
||||
DF_1_DIRECT => "DF_1_DIRECT",
|
||||
DF_1_TRANS => "DF_1_TRANS",
|
||||
DF_1_INTERPOSE => "DF_1_INTERPOSE",
|
||||
DF_1_NODEFLIB => "DF_1_NODEFLIB",
|
||||
DF_1_NODUMP => "DF_1_NODUMP",
|
||||
DF_1_CONFALT => "DF_1_CONFALT",
|
||||
DF_1_ENDFILTEE => "DF_1_ENDFILTEE",
|
||||
DF_1_DISPRELDNE => "DF_1_DISPRELDNE",
|
||||
DF_1_DISPRELPND => "DF_1_DISPRELPND",
|
||||
DF_1_NODIRECT => "DF_1_NODIRECT",
|
||||
DF_1_IGNMULDEF => "DF_1_IGNMULDEF",
|
||||
DF_1_NOKSYMS => "DF_1_NOKSYMS",
|
||||
DF_1_NOHDR => "DF_1_NOHDR",
|
||||
DF_1_EDITED => "DF_1_EDITED",
|
||||
DF_1_NORELOC => "DF_1_NORELOC",
|
||||
DF_1_SYMINTPOSE => "DF_1_SYMINTPOSE",
|
||||
DF_1_GLOBAUDIT => "DF_1_GLOBAUDIT",
|
||||
DF_1_SINGLETON => "DF_1_SINGLETON",
|
||||
DF_1_PIE => "DF_1_PIE",
|
||||
_ => "UNKNOWN_TAG",
|
||||
}
|
||||
}
|
||||
|
||||
if_alloc! {
|
||||
use core::fmt;
|
||||
|
|
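To make the intent of the two helpers added above concrete, here is a small standalone sketch (not part of the diff) of the filter-and-name pattern that the `DynamicInfo` Debug impl later in this file applies to `DT_FLAGS`; only the two flag constants visible in this hunk are used:

```rust
// Values copied from the constants shown above.
const DF_BIND_NOW: u64 = 0x0000_0008;
const DF_STATIC_TLS: u64 = 0x0000_0010;

fn df_tag_to_str(tag: u64) -> &'static str {
    match tag {
        DF_BIND_NOW => "DF_BIND_NOW",
        DF_STATIC_TLS => "DF_STATIC_TLS",
        _ => "UNKNOWN_TAG",
    }
}

fn main() {
    // A DT_FLAGS value with both bits set.
    let flags: u64 = DF_BIND_NOW | DF_STATIC_TLS;

    // Keep only the bits that are present, then map each to its name,
    // the same shape used when formatting `DynamicInfo`.
    let names: Vec<&'static str> = [DF_BIND_NOW, DF_STATIC_TLS]
        .iter()
        .filter(|f| flags & **f != 0)
        .map(|f| df_tag_to_str(*f))
        .collect();

    assert_eq!(names, ["DF_BIND_NOW", "DF_STATIC_TLS"]);
}
```
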
@ -357,7 +403,6 @@ if_alloc! {
|
|||
pub struct Dynamic {
|
||||
pub dyns: Vec<Dyn>,
|
||||
pub info: DynamicInfo,
|
||||
count: usize,
|
||||
}
|
||||
|
||||
impl Dynamic {
|
||||
|
|
@ -381,6 +426,7 @@ if_alloc! {
|
|||
&[]
|
||||
};
|
||||
let size = Dyn::size_with(&ctx);
|
||||
// the validity of `count` was implicitly checked by reading `bytes`.
|
||||
let count = filesz / size;
|
||||
let mut dyns = Vec::with_capacity(count);
|
||||
let mut offset = 0;
|
||||
|
|
@ -394,8 +440,7 @@ if_alloc! {
|
|||
for dynamic in &dyns {
|
||||
info.update(phdrs, dynamic);
|
||||
}
|
||||
let count = dyns.len();
|
||||
return Ok(Some(Dynamic { dyns: dyns, info: info, count: count }));
|
||||
return Ok(Some(Dynamic { dyns: dyns, info: info, }));
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
|
|
@ -403,11 +448,11 @@ if_alloc! {
|
|||
|
||||
pub fn get_libraries<'a>(&self, strtab: &Strtab<'a>) -> Vec<&'a str> {
|
||||
use log::warn;
|
||||
let count = self.info.needed_count;
|
||||
let count = self.info.needed_count.min(self.dyns.len());
|
||||
let mut needed = Vec::with_capacity(count);
|
||||
for dynamic in &self.dyns {
|
||||
if dynamic.d_tag as u64 == DT_NEEDED {
|
||||
if let Some(Ok(lib)) = strtab.get(dynamic.d_val as usize) {
|
||||
if let Some(lib) = strtab.get_at(dynamic.d_val as usize) {
|
||||
needed.push(lib)
|
||||
} else {
|
||||
warn!("Invalid DT_NEEDED {}", dynamic.d_val)
|
||||
|
|
@ -519,7 +564,7 @@ macro_rules! elf_dyn_std_impl {
|
|||
|
||||
/// Gets the needed libraries from the `_DYNAMIC` array, with the str slices lifetime tied to the dynamic array/strtab's lifetime(s)
|
||||
pub unsafe fn get_needed<'a>(dyns: &[Dyn], strtab: *const Strtab<'a>, count: usize) -> Vec<&'a str> {
|
||||
let mut needed = Vec::with_capacity(count);
|
||||
let mut needed = Vec::with_capacity(count.min(dyns.len()));
|
||||
for dynamic in dyns {
|
||||
if u64::from(dynamic.d_tag) == DT_NEEDED {
|
||||
let lib = &(*strtab)[dynamic.d_val as usize];
|
||||
|
|
@ -540,7 +585,7 @@ macro_rules! elf_dynamic_info_std_impl {
|
|||
if address >= ph.p_vaddr {
|
||||
let offset = address - ph.p_vaddr;
|
||||
if offset < ph.p_memsz {
|
||||
return ph.p_offset.checked_add(offset );
|
||||
return ph.p_offset.checked_add(offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -548,7 +593,7 @@ macro_rules! elf_dynamic_info_std_impl {
|
|||
}
|
||||
|
||||
/// Important dynamic linking info generated via a single pass through the `_DYNAMIC` array
|
||||
#[derive(Default)]
|
||||
#[derive(Default, PartialEq)]
|
||||
pub struct DynamicInfo {
|
||||
pub rela: usize,
|
||||
pub relasz: usize,
|
||||
|
|
@ -568,6 +613,8 @@ macro_rules! elf_dynamic_info_std_impl {
|
|||
pub pltrelsz: usize,
|
||||
pub pltrel: $size,
|
||||
pub jmprel: usize,
|
||||
pub verdef: $size,
|
||||
pub verdefnum: $size,
|
||||
pub verneed: $size,
|
||||
pub verneednum: $size,
|
||||
pub versym: $size,
|
||||
|
|
@ -598,22 +645,34 @@ macro_rules! elf_dynamic_info_std_impl {
|
|||
DT_RELCOUNT => self.relcount = dynamic.d_val as usize,
|
||||
DT_GNU_HASH => self.gnu_hash = vm_to_offset(phdrs, dynamic.d_val),
|
||||
DT_HASH => self.hash = vm_to_offset(phdrs, dynamic.d_val),
|
||||
DT_STRTAB => self.strtab = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0) as usize,
|
||||
DT_STRTAB => {
|
||||
self.strtab = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0) as usize
|
||||
}
|
||||
DT_STRSZ => self.strsz = dynamic.d_val as usize,
|
||||
DT_SYMTAB => self.symtab = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0) as usize,
|
||||
DT_SYMTAB => {
|
||||
self.symtab = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0) as usize
|
||||
}
|
||||
DT_SYMENT => self.syment = dynamic.d_val as usize,
|
||||
DT_PLTGOT => self.pltgot = vm_to_offset(phdrs, dynamic.d_val),
|
||||
DT_PLTRELSZ => self.pltrelsz = dynamic.d_val as usize,
|
||||
DT_PLTREL => self.pltrel = dynamic.d_val as _,
|
||||
DT_JMPREL => self.jmprel = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0) as usize, // .rela.plt
|
||||
DT_JMPREL => {
|
||||
self.jmprel = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0) as usize
|
||||
} // .rela.plt
|
||||
DT_VERDEF => self.verdef = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0),
|
||||
DT_VERDEFNUM => self.verdefnum = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0),
|
||||
DT_VERNEED => self.verneed = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0),
|
||||
DT_VERNEEDNUM => self.verneednum = dynamic.d_val as _,
|
||||
DT_VERSYM => self.versym = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0),
|
||||
DT_INIT => self.init = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0),
|
||||
DT_FINI => self.fini = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0),
|
||||
DT_INIT_ARRAY => self.init_array = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0),
|
||||
DT_INIT_ARRAY => {
|
||||
self.init_array = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0)
|
||||
}
|
||||
DT_INIT_ARRAYSZ => self.init_arraysz = dynamic.d_val as _,
|
||||
DT_FINI_ARRAY => self.fini_array = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0),
|
||||
DT_FINI_ARRAY => {
|
||||
self.fini_array = vm_to_offset(phdrs, dynamic.d_val).unwrap_or(0)
|
||||
}
|
||||
DT_FINI_ARRAYSZ => self.fini_arraysz = dynamic.d_val as _,
|
||||
DT_NEEDED => self.needed_count += 1,
|
||||
DT_FLAGS => self.flags = dynamic.d_val as _,
|
||||
|
|
@ -638,6 +697,47 @@ macro_rules! elf_dynamic_info_std_impl {
|
|||
let gnu_hash = self.gnu_hash.unwrap_or(0);
|
||||
let hash = self.hash.unwrap_or(0);
|
||||
let pltgot = self.pltgot.unwrap_or(0);
|
||||
|
||||
let flags: Vec<&'static str> = [DF_ORIGIN, DF_SYMBOLIC, DF_TEXTREL, DF_BIND_NOW, DF_STATIC_TLS,][..]
|
||||
.iter()
|
||||
.filter(|f| (self.flags as u64 & *f) != 0)
|
||||
.map(|f| df_tag_to_str(*f))
|
||||
.collect();
|
||||
|
||||
let flags_1: Vec<&'static str> = [
|
||||
DF_1_NOW,
|
||||
DF_1_GLOBAL,
|
||||
DF_1_GROUP,
|
||||
DF_1_NODELETE,
|
||||
DF_1_LOADFLTR,
|
||||
DF_1_INITFIRST,
|
||||
DF_1_NOOPEN,
|
||||
DF_1_ORIGIN,
|
||||
DF_1_DIRECT,
|
||||
DF_1_TRANS,
|
||||
DF_1_INTERPOSE,
|
||||
DF_1_NODEFLIB,
|
||||
DF_1_NODUMP,
|
||||
DF_1_CONFALT,
|
||||
DF_1_ENDFILTEE,
|
||||
DF_1_DISPRELDNE,
|
||||
DF_1_DISPRELPND,
|
||||
DF_1_NODIRECT,
|
||||
DF_1_IGNMULDEF,
|
||||
DF_1_NOKSYMS,
|
||||
DF_1_NOHDR,
|
||||
DF_1_EDITED,
|
||||
DF_1_NORELOC,
|
||||
DF_1_SYMINTPOSE,
|
||||
DF_1_GLOBAUDIT,
|
||||
DF_1_SINGLETON,
|
||||
DF_1_PIE,
|
||||
][..]
|
||||
.iter()
|
||||
.filter(|f| (self.flags_1 as u64 & *f) != 0)
|
||||
.map(|f| df_1_tag_to_str(*f))
|
||||
.collect();
|
||||
|
||||
f.debug_struct("DynamicInfo")
|
||||
.field("rela", &format_args!("0x{:x}", self.rela))
|
||||
.field("relasz", &self.relasz)
|
||||
|
|
@ -653,12 +753,20 @@ macro_rules! elf_dynamic_info_std_impl {
|
|||
.field("pltrelsz", &self.pltrelsz)
|
||||
.field("pltrel", &self.pltrel)
|
||||
.field("jmprel", &format_args!("0x{:x}", self.jmprel))
|
||||
.field("verdef", &format_args!("0x{:x}", self.verdef))
|
||||
.field("verdefnum", &self.verdefnum)
|
||||
.field("verneed", &format_args!("0x{:x}", self.verneed))
|
||||
.field("verneednum", &self.verneednum)
|
||||
.field("versym", &format_args!("0x{:x}", self.versym))
|
||||
.field("init", &format_args!("0x{:x}", self.init))
|
||||
.field("fini", &format_args!("0x{:x}", self.fini))
|
||||
.field("init_array", &format_args!("{:#x}", self.init_array))
|
||||
.field("init_arraysz", &self.init_arraysz)
|
||||
.field("needed_count", &self.needed_count)
|
||||
.field("flags", &format_args!("{:#0width$x} {:?}", self.flags, flags, width = core::mem::size_of_val(&self.flags)))
|
||||
.field("flags_1", &format_args!("{:#0width$x} {:?}", self.flags_1, flags_1, width = core::mem::size_of_val(&self.flags_1)))
|
||||
.field("soname", &self.soname)
|
||||
.field("textrel", &self.textrel)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
|
@ -678,7 +786,10 @@ pub mod dyn32 {
|
|||
pub const SIZEOF_DYN: usize = 8;
|
||||
|
||||
elf_dyn_std_impl!(u32, crate::elf32::program_header::ProgramHeader);
|
||||
elf_dynamic_info_std_impl!(u32, crate::elf::program_header::program_header32::ProgramHeader);
|
||||
elf_dynamic_info_std_impl!(
|
||||
u32,
|
||||
crate::elf::program_header::program_header32::ProgramHeader
|
||||
);
|
||||
}
|
||||
|
||||
pub mod dyn64 {
|
||||
|
|
@ -689,5 +800,8 @@ pub mod dyn64 {
|
|||
pub const SIZEOF_DYN: usize = 16;
|
||||
|
||||
elf_dyn_std_impl!(u64, crate::elf64::program_header::ProgramHeader);
|
||||
elf_dynamic_info_std_impl!(u64, crate::elf::program_header::program_header64::ProgramHeader);
|
||||
elf_dynamic_info_std_impl!(
|
||||
u64,
|
||||
crate::elf::program_header::program_header64::ProgramHeader
|
||||
);
|
||||
}
|
||||
|
|
|
|||
271
third_party/rust/goblin/src/elf/gnu_hash.rs
vendored
|
|
@ -1,29 +1,33 @@
|
|||
//! A Gnu Hash table has 4 sections:
|
||||
//!
|
||||
//! 1. Header
|
||||
//! 2. Bloom Filter
|
||||
//! 3. Hash Buckets
|
||||
//! 4. Hash Values
|
||||
//! 1. Header
|
||||
//! 2. Bloom Filter
|
||||
//! 3. Hash Buckets
|
||||
//! 4. Chains
|
||||
//!
|
||||
//! The header has is an array of four (4) u32s:
|
||||
//! The header is an array of four `u32`s:
|
||||
//!
|
||||
//! 1. nbuckets
|
||||
//! 2. symndx
|
||||
//! 3. maskwords
|
||||
//! 4. shift2
|
||||
//! 1. nbuckets
|
||||
//! 2. symndx
|
||||
//! 3. maskwords
|
||||
//! 4. shift2
|
||||
//!
|
||||
//! See: https://blogs.oracle.com/solaris/gnu-hash-elf-sections-v2
|
||||
//! See more:
|
||||
//! * http://www.linker-aliens.org/blogs/ali/entry/gnu_hash_elf_sections
|
||||
//! or https://blogs.oracle.com/solaris/gnu-hash-elf-sections-v2
|
||||
//! * https://flapenguin.me/2017/05/10/elf-lookup-dt-gnu-hash/
|
||||
|
||||
/// GNU hash function: takes a string and returns the u32 hash of that string
|
||||
/// GNU hash function: accepts a symbol name and returns a value that may be
|
||||
/// used to compute a bucket index.
|
||||
///
|
||||
/// Consequently, if the hashing function returns the value `x` for some name,
|
||||
/// `buckets[x % nbuckets]` gives an index, `y`, into both the symbol table
|
||||
/// and the chain table.
|
||||
pub fn hash(symbol: &str) -> u32 {
|
||||
const HASH_SEED: u32 = 5381;
|
||||
let mut hash = HASH_SEED;
|
||||
for b in symbol.as_bytes() {
|
||||
hash = hash
|
||||
.wrapping_mul(33)
|
||||
.wrapping_add(u32::from(*b));
|
||||
}
|
||||
hash
|
||||
symbol.bytes().fold(HASH_SEED, |hash, b| {
|
||||
hash.wrapping_mul(33).wrapping_add(u32::from(b))
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
@ -31,113 +35,186 @@ mod tests {
|
|||
use super::hash;
|
||||
#[test]
|
||||
fn test_hash() {
|
||||
assert_eq!(hash("") , 0x0000_1505);
|
||||
assert_eq!(hash("printf") , 0x156b_2bb8);
|
||||
assert_eq!(hash("exit") , 0x7c96_7e3f);
|
||||
assert_eq!(hash("syscall") , 0xbac2_12a0);
|
||||
assert_eq!(hash(""), 0x0000_1505);
|
||||
assert_eq!(hash("printf"), 0x156b_2bb8);
|
||||
assert_eq!(hash("exit"), 0x7c96_7e3f);
|
||||
assert_eq!(hash("syscall"), 0xbac2_12a0);
|
||||
assert_eq!(hash("flapenguin.me"), 0x8ae9_f18e);
|
||||
}
|
||||
}
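// Editor's note: a minimal, standalone sketch (not part of this diff) of the GNU
// symbol hash implemented above: djb2-style, seed 5381, h = h * 33 + byte with
// wrapping arithmetic. It should reproduce the vectors in the `tests` module above.
fn gnu_hash_sketch(symbol: &str) -> u32 {
    symbol
        .bytes()
        .fold(5381u32, |h, b| h.wrapping_mul(33).wrapping_add(u32::from(b)))
}

fn main() {
    assert_eq!(gnu_hash_sketch(""), 0x0000_1505);
    assert_eq!(gnu_hash_sketch("printf"), 0x156b_2bb8);
    // The bucket index for a symbol is then `hash as usize % nbuckets`.
    println!("hash(\"printf\") = {:#x}", gnu_hash_sketch("printf"));
}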
|
||||
|
||||
macro_rules! elf_gnu_hash_impl {
|
||||
($size:ty) => {
|
||||
|
||||
use core::slice;
|
||||
use core::mem;
|
||||
($IntTy:ty) => {
|
||||
use crate::elf::sym::Sym;
|
||||
use crate::strtab::Strtab;
|
||||
use super::sym;
|
||||
use core::fmt;
|
||||
use core::mem;
|
||||
use core::slice;
|
||||
|
||||
pub struct GnuHash<'process> {
|
||||
nbuckets: u32,
|
||||
symindex: usize,
|
||||
const INT_SIZE: usize = mem::size_of::<$IntTy>();
|
||||
const U32_SIZE: usize = mem::size_of::<u32>();
|
||||
/// Size of a bits mask in bloom filter
|
||||
const ELFCLASS_BITS: u32 = INT_SIZE as u32 * 8;
|
||||
|
||||
/// A better hash table for the ELF used by GNU systems in GNU-compatible software.
|
||||
pub struct GnuHash<'a> {
|
||||
/// Index of the first symbol in the `.dynsym` table which is accessible with
|
||||
/// the hash table
|
||||
symindex: u32,
|
||||
/// Shift count used in the bloom filter
|
||||
shift2: u32,
|
||||
maskbits: u32,
|
||||
bloomwords: &'process [$size], // either 32 or 64 bit masks, depending on platform
|
||||
maskwords_bitmask: u32,
|
||||
buckets: &'process [u32],
|
||||
hashvalues: &'process [u32],
|
||||
symtab: &'process [sym::Sym],
|
||||
/// 2 bit bloom filter on `chains`
|
||||
// Either 32 or 64-bit depending on the class of object
|
||||
bloom_filter: &'a [$IntTy],
|
||||
/// GNU hash table bucket array; indexes start at 0. This array holds symbol
|
||||
/// table indexes and contains the index of hashes in `chains`
|
||||
buckets: &'a [u32],
|
||||
/// Hash values; indexes start at 0. This array holds symbol table indexes.
|
||||
chains: &'a [u32], // => chains[dynsyms.len() - symindex]
|
||||
dynsyms: &'a [Sym],
|
||||
}
|
||||
|
||||
impl<'process> GnuHash<'process> {
|
||||
pub unsafe fn new(hashtab: *const u32, total_dynsyms: usize, symtab: &'process [sym::Sym]) -> GnuHash<'process> {
|
||||
let nbuckets = *hashtab;
|
||||
let symindex = *hashtab.add(1) as usize;
|
||||
let maskwords = *hashtab.add(2) as usize; // how many words our bloom filter mask has
|
||||
let shift2 = *hashtab.add(3);
|
||||
let bloomwords_ptr = hashtab.add(4) as *const $size;
|
||||
let buckets_ptr = bloomwords_ptr.add(maskwords) as *const u32;
|
||||
impl fmt::Debug for GnuHash<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.debug_struct("GnuHash")
|
||||
.field("nbuckets", &self.buckets.len())
|
||||
.field("symindex", &self.symindex)
|
||||
.field("maskwords", &(self.bloom_filter.len() - 1))
|
||||
.field("shift2", &self.shift2)
|
||||
.field("bloom_filter", &self.bloom_filter.as_ptr())
|
||||
.field("bucket", &self.buckets.as_ptr())
|
||||
.field("chains", &self.chains.as_ptr())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> GnuHash<'a> {
|
||||
/// Initialize a GnuHash from a pointer to `.hash` (or `.gnu.hash`) section
|
||||
/// and total number of dynamic symbols.
|
||||
/// # Safety
|
||||
///
|
||||
/// This function creates a `GnuHash` directly from a raw pointer
|
||||
pub unsafe fn from_raw_table(
|
||||
hashtab: &'a [u8],
|
||||
dynsyms: &'a [Sym],
|
||||
) -> Result<Self, &'static str> {
|
||||
if hashtab.as_ptr() as usize % INT_SIZE != 0 {
|
||||
return Err("hashtab is not aligned with 64-bit");
|
||||
}
|
||||
|
||||
if hashtab.len() <= 16 {
|
||||
return Err("failed to read in number of buckets");
|
||||
}
|
||||
|
||||
let [nbuckets, symindex, maskwords, shift2] =
|
||||
(hashtab.as_ptr() as *const u32 as *const [u32; 4]).read();
|
||||
|
||||
if !maskwords.is_power_of_two() {
|
||||
return Err("maskwords must be a power of two");
|
||||
}
|
||||
|
||||
let hashtab = &hashtab[16..];
|
||||
{
|
||||
// SAFETY: Condition to check for an overflow
|
||||
// size_of(chains) + size_of(buckets) + size_of(bloom_filter) == size_of(hashtab)
|
||||
|
||||
if dynsyms.len() <= symindex as usize {
|
||||
return Err("symindex must be smaller than dynsyms.len()");
|
||||
}
|
||||
let chains_size = (dynsyms.len() - symindex as usize).checked_mul(U32_SIZE);
|
||||
let buckets_size = (nbuckets as usize).checked_mul(U32_SIZE);
|
||||
let bloom_size = (maskwords as usize).checked_mul(INT_SIZE);
|
||||
|
||||
let total_size = match (chains_size, buckets_size, bloom_size) {
|
||||
(Some(a), Some(b), Some(c)) => {
|
||||
a.checked_add(b).and_then(|t| t.checked_add(c))
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
match total_size {
|
||||
Some(size) if size == hashtab.len() => {}
|
||||
_ => return Err("index out of bound or non-complete hash section"),
|
||||
}
|
||||
}
|
||||
|
||||
let bloom_filter_ptr = hashtab.as_ptr() as *const $IntTy;
|
||||
let buckets_ptr = bloom_filter_ptr.add(maskwords as usize) as *const u32;
|
||||
let chains_ptr = buckets_ptr.add(nbuckets as usize);
|
||||
let bloom_filter = slice::from_raw_parts(bloom_filter_ptr, maskwords as usize);
|
||||
let buckets = slice::from_raw_parts(buckets_ptr, nbuckets as usize);
|
||||
let hashvalues_ptr = buckets_ptr.add(nbuckets as usize);
|
||||
let hashvalues = slice::from_raw_parts(hashvalues_ptr, total_dynsyms - symindex);
|
||||
let bloomwords = slice::from_raw_parts(bloomwords_ptr, maskwords);
|
||||
GnuHash {
|
||||
nbuckets,
|
||||
let chains = slice::from_raw_parts(chains_ptr, dynsyms.len() - symindex as usize);
|
||||
Ok(Self {
|
||||
symindex,
|
||||
shift2,
|
||||
maskbits: mem::size_of::<usize>() as u32,
|
||||
bloomwords,
|
||||
hashvalues,
|
||||
bloom_filter,
|
||||
buckets,
|
||||
maskwords_bitmask: ((maskwords as i32) - 1) as u32,
|
||||
symtab,
|
||||
}
|
||||
chains,
|
||||
dynsyms,
|
||||
})
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn lookup(&self,
|
||||
symbol: &str,
|
||||
hash: u32,
|
||||
strtab: &Strtab)
|
||||
-> Option<&sym::Sym> {
|
||||
let mut idx = self.buckets[(hash % self.nbuckets) as usize] as usize;
|
||||
// println!("lookup idx = buckets[hash % nbuckets] = {}", idx);
|
||||
if idx == 0 {
|
||||
/// Locate the hash chain, and corresponding hash value element.
|
||||
#[cold]
|
||||
fn lookup(&self, symbol: &str, hash: u32, dynstrtab: &Strtab) -> Option<&'a Sym> {
|
||||
const MASK_LOWEST_BIT: u32 = 0xffff_fffe;
|
||||
let bucket = self.buckets[hash as usize % self.buckets.len()];
|
||||
|
||||
// Empty hash chain, symbol not present
|
||||
if bucket < self.symindex {
|
||||
return None;
|
||||
}
|
||||
let mut hash_idx = idx - self.symindex;
|
||||
let hash = hash & !1;
|
||||
// TODO: replace this with an iterator
|
||||
loop {
|
||||
let symbol_ = &self.symtab[idx];
|
||||
let h2 = self.hashvalues[hash_idx];
|
||||
idx += 1;
|
||||
hash_idx += 1;
|
||||
let name = &strtab[symbol_.st_name as usize];
|
||||
// println!("{}: h2 0x{:x} resolves to: {}", i, h2, name);
|
||||
if hash == (h2 & !1) && name == symbol {
|
||||
// println!("lookup match for {} at: 0x{:x}", symbol, symbol_.st_value);
|
||||
return Some(symbol_);
|
||||
// Walk the chain until the symbol is found or the chain is exhausted.
|
||||
let chain_idx = bucket - self.symindex;
|
||||
let hash = hash & MASK_LOWEST_BIT;
|
||||
let chains = &self.chains.get((chain_idx as usize)..)?;
|
||||
let dynsyms = &self.dynsyms.get((bucket as usize)..)?;
|
||||
for (hash2, symb) in chains.iter().zip(dynsyms.iter()) {
|
||||
if (hash == (hash2 & MASK_LOWEST_BIT))
|
||||
&& (symbol == &dynstrtab[symb.st_name as usize])
|
||||
{
|
||||
return Some(symb);
|
||||
}
|
||||
if h2 & 1 == 1 {
|
||||
// Chain ends with an element with the lowest bit set to 1.
|
||||
if hash2 & 1 == 1 {
|
||||
break;
|
||||
} // end of chain
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn filter(&self, hash: u32) -> bool {
|
||||
let bloom_idx = (hash / self.maskbits) & self.maskwords_bitmask;
|
||||
let h2 = hash >> self.shift2;
|
||||
let bitmask = (1u64 << (hash % self.maskbits)) | (1u64 << (h2 % self.maskbits));
|
||||
// println!("lookup: maskwords: {} bloom_idx: {} bitmask: {} shift2: {}", self.maskwords, bloom_idx, bitmask, self.shift2);
|
||||
let filter = self.bloomwords[bloom_idx as usize] as usize; // FIXME: verify this is safe ;)
|
||||
filter & (bitmask as usize) != (bitmask as usize) // if true, def _don't have_
|
||||
/// Check if symbol maybe is in the hash table, or definitely not in it.
|
||||
#[inline]
|
||||
fn check_maybe_match(&self, hash: u32) -> bool {
|
||||
const MASK: u32 = ELFCLASS_BITS - 1;
|
||||
let hash2 = hash >> self.shift2;
|
||||
// `x & (N - 1)` is equivalent to `x % N` iff `N = 2^y`.
|
||||
let bitmask: $IntTy = 1 << (hash & (MASK)) | 1 << (hash2 & MASK);
|
||||
let bloom_idx = (hash / ELFCLASS_BITS) & (self.bloom_filter.len() as u32 - 1);
|
||||
let bitmask_word = self.bloom_filter[bloom_idx as usize];
|
||||
(bitmask_word & bitmask) == bitmask
|
||||
}
|
||||
|
||||
/// Given a name, a hash of that name, a strtab to cross-reference names, maybe returns a Sym
|
||||
pub fn find(&self,
|
||||
name: &str,
|
||||
hash: u32,
|
||||
strtab: &Strtab)
|
||||
-> Option<&sym::Sym> {
|
||||
if self.filter(hash) {
|
||||
None
|
||||
/// Given a symbol, a hash of that symbol, a dynamic string table and
|
||||
/// a `dynstrtab` to cross-reference names, maybe returns a Sym.
|
||||
pub fn find(&self, symbol: &str, dynstrtab: &Strtab) -> Option<&'a Sym> {
|
||||
let hash = self::hash(symbol);
|
||||
self.find_with_hash(symbol, hash, dynstrtab)
|
||||
}
|
||||
|
||||
/// This function will not check if the passed `hash` is really
|
||||
/// the hash of `symbol`
|
||||
pub fn find_with_hash(
|
||||
&self,
|
||||
symbol: &str,
|
||||
hash: u32,
|
||||
dynstrtab: &Strtab,
|
||||
) -> Option<&'a Sym> {
|
||||
if self.check_maybe_match(hash) {
|
||||
self.lookup(symbol, hash, dynstrtab)
|
||||
} else {
|
||||
self.lookup(name, hash, strtab)
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
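// Editor's note: a standalone sketch (not from this diff) of the two-bit Bloom-filter
// probe that `check_maybe_match` above performs, specialised to a 64-bit ELF class.
// The filter contents and the probed hashes below are made-up example values.
fn bloom_maybe_match(bloom_filter: &[u64], shift2: u32, hash: u32) -> bool {
    const ELFCLASS_BITS: u32 = 64;
    let hash2 = hash >> shift2;
    // Two bits per symbol: one derived from `hash`, one from `hash >> shift2`.
    let bitmask: u64 = (1 << (hash % ELFCLASS_BITS)) | (1 << (hash2 % ELFCLASS_BITS));
    // `maskwords` is a power of two, so `& (len - 1)` is the same as `% len`.
    let word = bloom_filter[((hash / ELFCLASS_BITS) as usize) & (bloom_filter.len() - 1)];
    // `false` means the symbol is definitely absent; `true` only means "maybe present".
    (word & bitmask) == bitmask
}

fn main() {
    // An all-ones filter can only ever answer "maybe present".
    assert!(bloom_maybe_match(&[u64::MAX, u64::MAX], 6, 0x156b_2bb8));
    // An all-zero filter rejects every probe outright.
    assert!(!bloom_maybe_match(&[0, 0], 6, 0x156b_2bb8));
}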
|
||||
|
|
|
|||
78
third_party/rust/goblin/src/elf/header.rs
vendored
|
|
@ -69,7 +69,7 @@ macro_rules! elf_header {
|
|||
.finish()
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// No file type.
|
||||
|
|
@ -84,6 +84,14 @@ pub const ET_DYN: u16 = 3;
|
|||
pub const ET_CORE: u16 = 4;
|
||||
/// Number of defined types.
|
||||
pub const ET_NUM: u16 = 5;
|
||||
/// OS-specific range start
|
||||
pub const ET_LOOS: u16 = 0xfe00;
|
||||
/// OS-specific range end
|
||||
pub const ET_HIOS: u16 = 0xfeff;
|
||||
/// Processor-specific range start
|
||||
pub const ET_LOPROC: u16 = 0xff00;
|
||||
/// Processor-specific range end
|
||||
pub const ET_HIPROC: u16 = 0xffff;
|
||||
|
||||
/// The ELF magic number.
|
||||
pub const ELFMAG: &[u8; 4] = b"\x7FELF";
|
||||
|
|
@ -119,6 +127,40 @@ pub const EV_CURRENT: u8 = 1;
|
|||
pub const EI_OSABI: usize = 7;
|
||||
/// UNIX System V ABI.
|
||||
pub const ELFOSABI_NONE: u8 = 0;
|
||||
/// UNIX System V ABI.
|
||||
///
|
||||
/// Alias.
|
||||
pub const ELFOSABI_SYSV: u8 = ELFOSABI_NONE;
|
||||
/// HP-UX.
|
||||
pub const ELFOSABI_HPUX: u8 = 1;
|
||||
/// NetBSD.
|
||||
pub const ELFOSABI_NETBSD: u8 = 2;
|
||||
/// Object uses GNU ELF extensions.
|
||||
pub const ELFOSABI_GNU: u8 = 3;
|
||||
/// Object uses GNU ELF extensions.
|
||||
///
|
||||
/// Alias.
|
||||
pub const ELFOSABI_LINUX: u8 = ELFOSABI_GNU;
|
||||
/// Sun Solaris.
|
||||
pub const ELFOSABI_SOLARIS: u8 = 6;
|
||||
/// IBM AIX.
|
||||
pub const ELFOSABI_AIX: u8 = 7;
|
||||
/// SGI Irix.
|
||||
pub const ELFOSABI_IRIX: u8 = 8;
|
||||
/// FreeBSD
|
||||
pub const ELFOSABI_FREEBSD: u8 = 9;
|
||||
/// Compaq TRU64 UNIX.
|
||||
pub const ELFOSABI_TRU64: u8 = 10;
|
||||
/// Novell Modesto.
|
||||
pub const ELFOSABI_MODESTO: u8 = 11;
|
||||
/// OpenBSD.
|
||||
pub const ELFOSABI_OPENBSD: u8 = 12;
|
||||
/// ARM EABI.
|
||||
pub const ELFOSABI_ARM_AEABI: u8 = 64;
|
||||
/// ARM.
|
||||
pub const ELFOSABI_ARM: u8 = 97;
|
||||
/// Standalone (embedded) application.
|
||||
pub const ELFOSABI_STANDALONE: u8 = 255;
|
||||
|
||||
/// ABI version byte index.
|
||||
pub const EI_ABIVERSION: usize = 8;
|
||||
|
|
@ -516,23 +558,22 @@ macro_rules! elf_header_test {
|
|||
($class:expr) => {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use scroll::{Pwrite, Pread};
|
||||
use crate::elf::header::Header as ElfHeader;
|
||||
use super::*;
|
||||
use crate::container::{Ctx, Container};
|
||||
use crate::container::{Container, Ctx};
|
||||
use crate::elf::header::Header as ElfHeader;
|
||||
use alloc::vec::Vec;
|
||||
use scroll::{Pread, Pwrite};
|
||||
#[test]
|
||||
fn size_of() {
|
||||
assert_eq!(::std::mem::size_of::<Header>(), SIZEOF_EHDR);
|
||||
}
|
||||
#[test]
|
||||
fn header_read_write () {
|
||||
let crt1: Vec<u8> =
|
||||
if $class == ELFCLASS64 {
|
||||
include!("../../etc/crt1.rs")
|
||||
} else {
|
||||
include!("../../etc/crt132.rs")
|
||||
};
|
||||
fn header_read_write() {
|
||||
let crt1: Vec<u8> = if $class == ELFCLASS64 {
|
||||
include!("../../etc/crt1.rs")
|
||||
} else {
|
||||
include!("../../etc/crt132.rs")
|
||||
};
|
||||
let header: Header = crt1.pread(0).unwrap();
|
||||
assert_eq!(header.e_type, ET_REL);
|
||||
println!("header: {:?}", &header);
|
||||
|
|
@ -542,13 +583,12 @@ macro_rules! elf_header_test {
|
|||
assert_eq!(header, header2);
|
||||
}
|
||||
#[test]
|
||||
fn elfheader_read_write () {
|
||||
let (container, crt1): (Container, Vec<u8>) =
|
||||
if $class == ELFCLASS64 {
|
||||
(Container::Big, include!("../../etc/crt1.rs"))
|
||||
} else {
|
||||
(Container::Little, include!("../../etc/crt132.rs"))
|
||||
};
|
||||
fn elfheader_read_write() {
|
||||
let (container, crt1): (Container, Vec<u8>) = if $class == ELFCLASS64 {
|
||||
(Container::Big, include!("../../etc/crt1.rs"))
|
||||
} else {
|
||||
(Container::Little, include!("../../etc/crt132.rs"))
|
||||
};
|
||||
let header: Header = crt1.pread(0).unwrap();
|
||||
assert_eq!(header.e_type, ET_REL);
|
||||
println!("header: {:?}", &header);
|
||||
|
|
@ -564,7 +604,7 @@ macro_rules! elf_header_test {
|
|||
bytes.pwrite(header, 0).unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub mod header32 {
|
||||
|
|
|
|||
217
third_party/rust/goblin/src/elf/mod.rs
vendored
|
|
@ -43,16 +43,18 @@ pub(crate) mod gnu_hash;
|
|||
// These are shareable values for the 32/64 bit implementations.
|
||||
//
|
||||
// They are publicly re-exported by the pub-using module
|
||||
pub mod compression_header;
|
||||
pub mod header;
|
||||
pub mod program_header;
|
||||
pub mod section_header;
|
||||
pub mod compression_header;
|
||||
#[macro_use]
|
||||
pub mod sym;
|
||||
pub mod dynamic;
|
||||
#[macro_use]
|
||||
pub mod reloc;
|
||||
pub mod note;
|
||||
#[cfg(all(any(feature = "elf32", feature = "elf64"), feature = "alloc"))]
|
||||
pub mod symver;
|
||||
|
||||
macro_rules! if_sylvan {
|
||||
($($i:item)*) => ($(
|
||||
|
|
@ -69,15 +71,16 @@ if_sylvan! {
|
|||
use alloc::vec::Vec;
|
||||
use core::cmp;
|
||||
|
||||
pub type Header = header::Header;
|
||||
pub type ProgramHeader = program_header::ProgramHeader;
|
||||
pub type SectionHeader = section_header::SectionHeader;
|
||||
pub type Symtab<'a> = sym::Symtab<'a>;
|
||||
pub type Sym = sym::Sym;
|
||||
pub type Dyn = dynamic::Dyn;
|
||||
pub type Dynamic = dynamic::Dynamic;
|
||||
pub type Reloc = reloc::Reloc;
|
||||
pub type RelocSection<'a> = reloc::RelocSection<'a>;
|
||||
pub use header::Header;
|
||||
pub use program_header::ProgramHeader;
|
||||
pub use section_header::SectionHeader;
|
||||
pub use sym::Symtab;
|
||||
pub use sym::Sym;
|
||||
pub use dynamic::Dyn;
|
||||
pub use dynamic::Dynamic;
|
||||
pub use reloc::Reloc;
|
||||
pub use reloc::RelocSection;
|
||||
pub use symver::{VersymSection, VerdefSection, VerneedSection};
|
||||
|
||||
pub type ProgramHeaders = Vec<ProgramHeader>;
|
||||
pub type SectionHeaders = Vec<SectionHeader>;
|
||||
|
|
@ -122,6 +125,13 @@ if_sylvan! {
|
|||
pub interpreter: Option<&'a str>,
|
||||
/// A list of this binary's dynamic libraries it uses, if there are any
|
||||
pub libraries: Vec<&'a str>,
|
||||
/// A list of runtime search paths for this binary's dynamic libraries it uses, if there
|
||||
/// are any. (deprecated)
|
||||
pub rpaths: Vec<&'a str>,
|
||||
/// A list of runtime search paths for this binary's dynamic libraries it uses, if there
|
||||
/// are any.
|
||||
pub runpaths: Vec<&'a str>,
|
||||
/// Whether this is a 64-bit elf or not
|
||||
pub is_64: bool,
|
||||
/// Whether this is a shared object or not
|
||||
pub is_lib: bool,
|
||||
|
|
@ -129,6 +139,15 @@ if_sylvan! {
|
|||
pub entry: u64,
|
||||
/// Whether the binary is little endian or not
|
||||
pub little_endian: bool,
|
||||
/// Contains the symbol version information from the optional section
|
||||
/// [`SHT_GNU_VERSYM`][section_header::SHT_GNU_VERSYM] (GNU extension).
|
||||
pub versym : Option<VersymSection<'a>>,
|
||||
/// Contains the version definition information from the optional section
|
||||
/// [`SHT_GNU_VERDEF`][section_header::SHT_GNU_VERDEF] (GNU extension).
|
||||
pub verdef : Option<VerdefSection<'a>>,
|
||||
/// Contains the version needed information from the optional section
|
||||
/// [`SHT_GNU_VERNEED`][section_header::SHT_GNU_VERNEED] (GNU extension).
|
||||
pub verneed : Option<VerneedSection<'a>>,
|
||||
ctx: Ctx,
|
||||
}
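// Editor's note: a hedged usage sketch (not part of this diff) for the new `versym`
// field on `Elf` above, written against the goblin 0.5 API that this update vendors
// (`VersymSection::get_at`, `Versym::version` / `is_hidden`). Assumes `bytes` holds
// a complete ELF image and that the `goblin` crate is a dependency.
use goblin::elf::Elf;
use goblin::error::Error;

fn print_symbol_versions(bytes: &[u8]) -> Result<(), Error> {
    let binary = Elf::parse(bytes)?;
    if let Some(versym) = &binary.versym {
        for (idx, sym) in binary.dynsyms.iter().enumerate() {
            if let Some(v) = versym.get_at(idx) {
                println!(
                    "{:?}: version index {} (hidden: {})",
                    binary.dynstrtab.get_at(sym.st_name),
                    v.version(),
                    v.is_hidden()
                );
            }
        }
    }
    Ok(())
}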
|
||||
|
||||
|
|
@ -144,7 +163,7 @@ if_sylvan! {
|
|||
iters.push(note::NoteDataIterator {
|
||||
data,
|
||||
offset,
|
||||
size: offset + phdr.p_filesz as usize,
|
||||
size: offset.saturating_add(phdr.p_filesz as usize),
|
||||
ctx: (alignment, self.ctx)
|
||||
});
|
||||
}
|
||||
|
|
@ -173,9 +192,7 @@ if_sylvan! {
|
|||
continue;
|
||||
}
|
||||
|
||||
if section_name.is_some() && !self.shdr_strtab
|
||||
.get(sect.sh_name)
|
||||
.map_or(false, |r| r.ok() == section_name) {
|
||||
if section_name.is_some() && self.shdr_strtab.get_at(sect.sh_name) != section_name {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
|
@ -184,7 +201,7 @@ if_sylvan! {
|
|||
iters.push(note::NoteDataIterator {
|
||||
data,
|
||||
offset,
|
||||
size: offset + sect.sh_size as usize,
|
||||
size: offset.saturating_add(sect.sh_size as usize),
|
||||
ctx: (alignment, self.ctx)
|
||||
});
|
||||
}
|
||||
|
|
@ -201,22 +218,51 @@ if_sylvan! {
|
|||
pub fn is_object_file(&self) -> bool {
|
||||
self.header.e_type == header::ET_REL
|
||||
}
|
||||
|
||||
/// Parses the contents to get the Header only. This `bytes` buffer should contain at least the length for parsing Header.
|
||||
pub fn parse_header(bytes: &'a [u8]) -> error::Result<Header> {
|
||||
bytes.pread::<Header>(0)
|
||||
}
|
||||
|
||||
/// Lazy parse the ELF contents. This function mainly just assembles an Elf struct. Once we have the struct, we can choose to parse whatever we want.
|
||||
pub fn lazy_parse(header: Header) -> error::Result<Self> {
|
||||
let misc = parse_misc(&header)?;
|
||||
|
||||
Ok(Elf {
|
||||
header,
|
||||
program_headers: vec![],
|
||||
section_headers: Default::default(),
|
||||
shdr_strtab: Default::default(),
|
||||
dynamic: None,
|
||||
dynsyms: Default::default(),
|
||||
dynstrtab: Strtab::default(),
|
||||
syms: Default::default(),
|
||||
strtab: Default::default(),
|
||||
dynrelas: Default::default(),
|
||||
dynrels: Default::default(),
|
||||
pltrelocs: Default::default(),
|
||||
shdr_relocs: Default::default(),
|
||||
soname: None,
|
||||
interpreter: None,
|
||||
libraries: vec![],
|
||||
rpaths: vec![],
|
||||
runpaths: vec![],
|
||||
is_64: misc.is_64,
|
||||
is_lib: misc.is_lib,
|
||||
entry: misc.entry,
|
||||
little_endian: misc.little_endian,
|
||||
ctx: misc.ctx,
|
||||
versym: None,
|
||||
verdef: None,
|
||||
verneed: None,
|
||||
})
|
||||
}
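// Editor's note: a sketch (not from this diff) of how the new `parse_header` /
// `lazy_parse` pair above can be used to inspect an ELF without paying for a full
// parse. Assumes `bytes` holds the whole file and that `goblin` is a dependency.
use goblin::elf::{header, Elf};
use goblin::error::Error;

fn quick_type_check(bytes: &[u8]) -> Result<(), Error> {
    // Decodes only the fixed-size ELF header.
    let hdr = Elf::parse_header(bytes)?;
    if hdr.e_type == header::ET_DYN {
        // Assembles an `Elf` with empty tables; callers parse further pieces on demand.
        let lazy = Elf::lazy_parse(hdr)?;
        println!("shared object, 64-bit: {}", lazy.is_64);
    } else {
        // Full parse when program headers, symbols, relocations, etc. are needed.
        let full = Elf::parse(bytes)?;
        println!("entry point: {:#x}", full.entry);
    }
    Ok(())
}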
|
||||
|
||||
/// Parses the contents of the byte stream in `bytes`, and maybe returns a unified binary
|
||||
pub fn parse(bytes: &'a [u8]) -> error::Result<Self> {
|
||||
let header = bytes.pread::<Header>(0)?;
|
||||
let entry = header.e_entry as usize;
|
||||
let is_lib = header.e_type == header::ET_DYN;
|
||||
let is_lsb = header.e_ident[header::EI_DATA] == header::ELFDATA2LSB;
|
||||
let endianness = scroll::Endian::from(is_lsb);
|
||||
let class = header.e_ident[header::EI_CLASS];
|
||||
if class != header::ELFCLASS64 && class != header::ELFCLASS32 {
|
||||
return Err(error::Error::Malformed(format!("Unknown values in ELF ident header: class: {} endianness: {}",
|
||||
class,
|
||||
header.e_ident[header::EI_DATA])));
|
||||
}
|
||||
let is_64 = class == header::ELFCLASS64;
|
||||
let container = if is_64 { Container::Big } else { Container::Little };
|
||||
let ctx = Ctx::new(container, endianness);
|
||||
let header = Self::parse_header(bytes)?;
|
||||
let misc = parse_misc(&header)?;
|
||||
let ctx = misc.ctx;
|
||||
|
||||
let program_headers = ProgramHeader::parse(bytes, header.e_phoff as usize, header.e_phnum as usize, ctx)?;
|
||||
|
||||
|
|
@ -231,7 +277,14 @@ if_sylvan! {
|
|||
|
||||
let section_headers = SectionHeader::parse(bytes, header.e_shoff as usize, header.e_shnum as usize, ctx)?;
|
||||
|
||||
let get_strtab = |section_headers: &[SectionHeader], section_idx: usize| {
|
||||
let get_strtab = |section_headers: &[SectionHeader], mut section_idx: usize| {
|
||||
if section_idx == section_header::SHN_XINDEX as usize {
|
||||
if section_headers.is_empty() {
|
||||
return Ok(Strtab::default())
|
||||
}
|
||||
section_idx = section_headers[0].sh_link as usize;
|
||||
}
|
||||
|
||||
if section_idx >= section_headers.len() {
|
||||
// FIXME: warn! here
|
||||
Ok(Strtab::default())
|
||||
|
|
@ -247,17 +300,17 @@ if_sylvan! {
|
|||
|
||||
let mut syms = Symtab::default();
|
||||
let mut strtab = Strtab::default();
|
||||
for shdr in &section_headers {
|
||||
if shdr.sh_type as u32 == section_header::SHT_SYMTAB {
|
||||
let size = shdr.sh_entsize;
|
||||
let count = if size == 0 { 0 } else { shdr.sh_size / size };
|
||||
syms = Symtab::parse(bytes, shdr.sh_offset as usize, count as usize, ctx)?;
|
||||
strtab = get_strtab(&section_headers, shdr.sh_link as usize)?;
|
||||
}
|
||||
if let Some(shdr) = section_headers.iter().rfind(|shdr| shdr.sh_type as u32 == section_header::SHT_SYMTAB) {
|
||||
let size = shdr.sh_entsize;
|
||||
let count = if size == 0 { 0 } else { shdr.sh_size / size };
|
||||
syms = Symtab::parse(bytes, shdr.sh_offset as usize, count as usize, ctx)?;
|
||||
strtab = get_strtab(&section_headers, shdr.sh_link as usize)?;
|
||||
}
|
||||
|
||||
let mut soname = None;
|
||||
let mut libraries = vec![];
|
||||
let mut rpaths = vec![];
|
||||
let mut runpaths = vec![];
|
||||
let mut dynsyms = Symtab::default();
|
||||
let mut dynrelas = RelocSection::default();
|
||||
let mut dynrels = RelocSection::default();
|
||||
|
|
@ -273,11 +326,22 @@ if_sylvan! {
|
|||
|
||||
if dyn_info.soname != 0 {
|
||||
// FIXME: warn! here
|
||||
soname = match dynstrtab.get(dyn_info.soname) { Some(Ok(soname)) => Some(soname), _ => None };
|
||||
soname = dynstrtab.get_at(dyn_info.soname);
|
||||
}
|
||||
if dyn_info.needed_count > 0 {
|
||||
libraries = dynamic.get_libraries(&dynstrtab);
|
||||
}
|
||||
for dyn_ in &dynamic.dyns {
|
||||
if dyn_.d_tag == dynamic::DT_RPATH {
|
||||
if let Some(path) = dynstrtab.get_at(dyn_.d_val as usize) {
|
||||
rpaths.push(path);
|
||||
}
|
||||
} else if dyn_.d_tag == dynamic::DT_RUNPATH {
|
||||
if let Some(path) = dynstrtab.get_at(dyn_.d_val as usize) {
|
||||
runpaths.push(path);
|
||||
}
|
||||
}
|
||||
}
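// Editor's note: a hedged usage sketch (not part of this diff) for the new `rpaths`
// and `runpaths` fields that the DT_RPATH / DT_RUNPATH loop above populates.
// Assumes `goblin` is a dependency; entries may themselves be colon-separated lists.
use goblin::elf::Elf;
use goblin::error::Error;

fn show_search_paths(bytes: &[u8]) -> Result<(), Error> {
    let binary = Elf::parse(bytes)?;
    for lib in &binary.libraries {
        println!("NEEDED   {}", lib);
    }
    for rpath in &binary.rpaths {
        println!("RPATH    {}", rpath); // legacy: consulted before LD_LIBRARY_PATH
    }
    for runpath in &binary.runpaths {
        println!("RUNPATH  {}", runpath); // consulted after LD_LIBRARY_PATH
    }
    Ok(())
}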
|
||||
// parse the dynamic relocations
|
||||
dynrelas = RelocSection::parse(bytes, dyn_info.rela, dyn_info.relasz, true, ctx)?;
|
||||
dynrels = RelocSection::parse(bytes, dyn_info.rel, dyn_info.relsz, false, ctx)?;
|
||||
|
|
@ -311,6 +375,10 @@ if_sylvan! {
|
|||
}
|
||||
}
|
||||
|
||||
let versym = symver::VersymSection::parse(bytes, &section_headers, ctx)?;
|
||||
let verdef = symver::VerdefSection::parse(bytes, &section_headers, ctx)?;
|
||||
let verneed = symver::VerneedSection::parse(bytes, &section_headers, ctx)?;
|
||||
|
||||
Ok(Elf {
|
||||
header,
|
||||
program_headers,
|
||||
|
|
@ -328,11 +396,16 @@ if_sylvan! {
|
|||
soname,
|
||||
interpreter,
|
||||
libraries,
|
||||
is_64,
|
||||
is_lib,
|
||||
entry: entry as u64,
|
||||
little_endian: is_lsb,
|
||||
ctx,
|
||||
rpaths,
|
||||
runpaths,
|
||||
is_64: misc.is_64,
|
||||
is_lib: misc.is_lib,
|
||||
entry: misc.entry,
|
||||
little_endian: misc.little_endian,
|
||||
ctx: ctx,
|
||||
versym,
|
||||
verdef,
|
||||
verneed,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -381,12 +454,44 @@ if_sylvan! {
|
|||
fn hash_len(bytes: &[u8], offset: usize, machine: u16, ctx: Ctx) -> error::Result<usize> {
|
||||
// Based on readelf code.
|
||||
let nchain = if (machine == header::EM_FAKE_ALPHA || machine == header::EM_S390) && ctx.container.is_big() {
|
||||
bytes.pread_with::<u64>(offset + 4, ctx.le)? as usize
|
||||
bytes.pread_with::<u64>(offset.saturating_add(4), ctx.le)? as usize
|
||||
} else {
|
||||
bytes.pread_with::<u32>(offset + 4, ctx.le)? as usize
|
||||
bytes.pread_with::<u32>(offset.saturating_add(4), ctx.le)? as usize
|
||||
};
|
||||
Ok(nchain)
|
||||
}
|
||||
|
||||
struct Misc {
|
||||
is_64: bool,
|
||||
is_lib: bool,
|
||||
entry: u64,
|
||||
little_endian: bool,
|
||||
ctx: Ctx,
|
||||
}
|
||||
|
||||
fn parse_misc(header: &Header) -> error::Result<Misc> {
|
||||
let entry = header.e_entry as usize;
|
||||
let is_lib = header.e_type == header::ET_DYN;
|
||||
let is_lsb = header.e_ident[header::EI_DATA] == header::ELFDATA2LSB;
|
||||
let endianness = scroll::Endian::from(is_lsb);
|
||||
let class = header.e_ident[header::EI_CLASS];
|
||||
if class != header::ELFCLASS64 && class != header::ELFCLASS32 {
|
||||
return Err(error::Error::Malformed(format!("Unknown values in ELF ident header: class: {} endianness: {}",
|
||||
class,
|
||||
header.e_ident[header::EI_DATA])));
|
||||
}
|
||||
let is_64 = class == header::ELFCLASS64;
|
||||
let container = if is_64 { Container::Big } else { Container::Little };
|
||||
let ctx = Ctx::new(container, endianness);
|
||||
|
||||
Ok(Misc{
|
||||
is_64,
|
||||
is_lib,
|
||||
entry: entry as u64,
|
||||
little_endian:is_lsb,
|
||||
ctx,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
@ -397,7 +502,7 @@ mod tests {
|
|||
fn parse_crt1_64bit() {
|
||||
let crt1: Vec<u8> = include!("../../etc/crt1.rs");
|
||||
match Elf::parse(&crt1) {
|
||||
Ok (binary) => {
|
||||
Ok(binary) => {
|
||||
assert!(binary.is_64);
|
||||
assert!(!binary.is_lib);
|
||||
assert_eq!(binary.entry, 0);
|
||||
|
|
@ -414,8 +519,8 @@ mod tests {
|
|||
}
|
||||
}
|
||||
assert!(!syms.is_empty());
|
||||
},
|
||||
Err (err) => {
|
||||
}
|
||||
Err(err) => {
|
||||
panic!("failed: {}", err);
|
||||
}
|
||||
}
|
||||
|
|
@ -425,7 +530,7 @@ mod tests {
|
|||
fn parse_crt1_32bit() {
|
||||
let crt1: Vec<u8> = include!("../../etc/crt132.rs");
|
||||
match Elf::parse(&crt1) {
|
||||
Ok (binary) => {
|
||||
Ok(binary) => {
|
||||
assert!(!binary.is_64);
|
||||
assert!(!binary.is_lib);
|
||||
assert_eq!(binary.entry, 0);
|
||||
|
|
@ -442,10 +547,20 @@ mod tests {
|
|||
}
|
||||
}
|
||||
assert!(!syms.is_empty());
|
||||
},
|
||||
Err (err) => {
|
||||
}
|
||||
Err(err) => {
|
||||
panic!("failed: {}", err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// See https://github.com/m4b/goblin/issues/257
|
||||
#[test]
|
||||
#[allow(unused)]
|
||||
fn no_use_statement_conflict() {
|
||||
use crate::elf::section_header::*;
|
||||
use crate::elf::*;
|
||||
|
||||
fn f(_: SectionHeader) {}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
26
third_party/rust/goblin/src/elf/note.rs
vendored
|
|
@ -3,7 +3,7 @@
|
|||
#[cfg(feature = "log")]
|
||||
use log::debug;
|
||||
#[cfg(feature = "alloc")]
|
||||
use scroll::{Pread, Pwrite, IOread, IOwrite, SizeWith};
|
||||
use scroll::{IOread, IOwrite, Pread, Pwrite, SizeWith};
|
||||
|
||||
/// ABI information.
|
||||
///
|
||||
|
|
@ -42,6 +42,18 @@ pub const NT_GNU_BUILD_ID: u32 = 3;
|
|||
/// Version note generated by GNU gold containing a version string.
|
||||
pub const NT_GNU_GOLD_VERSION: u32 = 4;
|
||||
|
||||
///Contains copy of prstatus struct.
|
||||
pub const NT_PRSTATUS: u32 = 1;
|
||||
|
||||
///Contains copy of prpsinfo struct.
|
||||
pub const NT_PRPSINFO: u32 = 3;
|
||||
|
||||
///Fields of siginfo_t.
|
||||
pub const NT_SIGINFO: u32 = 0x5349_4749;
|
||||
|
||||
///Description of mapped files.
|
||||
pub const NT_FILE: u32 = 0x4649_4c45;
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
#[cfg_attr(feature = "alloc", derive(Pread, Pwrite, IOread, IOwrite, SizeWith))]
|
||||
#[repr(C)]
|
||||
|
|
@ -56,6 +68,9 @@ pub struct Nhdr32 {
|
|||
pub n_type: u32,
|
||||
}
|
||||
|
||||
// Declare that this is a plain type.
|
||||
unsafe impl plain::Plain for Nhdr32 {}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
#[cfg_attr(feature = "alloc", derive(Pread, Pwrite, IOread, IOwrite, SizeWith))]
|
||||
#[repr(C)]
|
||||
|
|
@ -70,6 +85,9 @@ pub struct Nhdr64 {
|
|||
pub n_type: u64,
|
||||
}
|
||||
|
||||
// Declare that this is a plain type.
|
||||
unsafe impl plain::Plain for Nhdr64 {}
|
||||
|
||||
if_alloc! {
|
||||
use crate::error;
|
||||
use crate::container;
|
||||
|
|
@ -193,8 +211,10 @@ if_alloc! {
|
|||
};
|
||||
debug!("{:?} - {:#x}", header, *offset);
|
||||
// -1 because includes \0 terminator
|
||||
let name = bytes.gread_with::<&'a str>(offset, ctx::StrCtx::Length(header.n_namesz - 1))?;
|
||||
*offset += 1;
|
||||
let name = bytes.gread_with::<&'a str>(offset, ctx::StrCtx::Length(header.n_namesz.saturating_sub(1)))?;
|
||||
if header.n_namesz > 0 {
|
||||
*offset += 1;
|
||||
}
|
||||
align(alignment, offset);
|
||||
debug!("note name {} - {:#x}", name, *offset);
|
||||
let desc = bytes.gread_with::<&'a [u8]>(offset, header.n_descsz)?;
|
||||
|
|
|
|||
153
third_party/rust/goblin/src/elf/program_header.rs
vendored
|
|
@ -1,6 +1,6 @@
|
|||
/* Legal values for p_type (segment type). */
|
||||
|
||||
/// Program header table entry unused
|
||||
/// Programg header table entry unused
|
||||
pub const PT_NULL: u32 = 0;
|
||||
/// Loadable program segment
|
||||
pub const PT_LOAD: u32 = 1;
|
||||
|
|
@ -51,6 +51,10 @@ pub const PF_X: u32 = 1;
|
|||
pub const PF_W: u32 = 1 << 1;
|
||||
/// Segment is readable
|
||||
pub const PF_R: u32 = 1 << 2;
|
||||
/// Bits reserved for OS-specific usage
|
||||
pub const PF_MASKOS: u32 = 0x0ff0_0000;
|
||||
/// Bits reserved for processor-specific usage
|
||||
pub const PF_MASKPROC: u32 = 0xf000_0000;
|
||||
|
||||
pub fn pt_to_str(pt: u32) -> &'static str {
|
||||
match pt {
|
||||
|
|
@ -121,11 +125,11 @@ if_alloc! {
|
|||
}
|
||||
/// Returns this program header's file offset range
|
||||
pub fn file_range(&self) -> Range<usize> {
|
||||
(self.p_offset as usize..self.p_offset as usize + self.p_filesz as usize)
|
||||
self.p_offset as usize..self.p_offset.saturating_add(self.p_filesz) as usize
|
||||
}
|
||||
/// Returns this program header's virtual memory range
|
||||
pub fn vm_range(&self) -> Range<usize> {
|
||||
(self.p_vaddr as usize..self.p_vaddr as usize + self.p_memsz as usize)
|
||||
self.p_vaddr as usize..self.p_vaddr.saturating_add(self.p_memsz) as usize
|
||||
}
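// Editor's note: a small standalone sketch (not from this diff) of why the range
// helpers above switched to `saturating_add`: with untrusted headers, a plain
// `p_offset + p_filesz` can wrap around and produce a nonsensical (and, in debug
// builds, panicking) range. Assumes a 64-bit host so `usize` can hold a `u64`.
fn file_range(p_offset: u64, p_filesz: u64) -> core::ops::Range<usize> {
    p_offset as usize..p_offset.saturating_add(p_filesz) as usize
}

fn main() {
    // Well-formed header: an ordinary range.
    assert_eq!(file_range(0x1000, 0x200), 0x1000..0x1200);
    // Malformed header: the end clamps at the maximum instead of wrapping below the start.
    let r = file_range(u64::MAX - 1, 0x200);
    assert_eq!(r.end, usize::MAX);
}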
|
||||
/// Sets the executable flag
|
||||
pub fn executable(&mut self) {
|
||||
|
|
@ -154,6 +158,10 @@ if_alloc! {
|
|||
#[cfg(feature = "endian_fd")]
|
||||
pub fn parse(bytes: &[u8], mut offset: usize, count: usize, ctx: Ctx) -> crate::error::Result<Vec<ProgramHeader>> {
|
||||
use scroll::Pread;
|
||||
// Sanity check to avoid OOM
|
||||
if count > bytes.len() / Self::size(ctx) {
|
||||
return Err(crate::error::Error::BufferTooShort(count, "program headers"));
|
||||
}
|
||||
let mut program_headers = Vec::with_capacity(count);
|
||||
for _ in 0..count {
|
||||
let phdr = bytes.gread_with(&mut offset, ctx)?;
|
||||
|
|
@ -225,63 +233,65 @@ if_alloc! {
|
|||
}
|
||||
} // end if_alloc
|
||||
|
||||
macro_rules! elf_program_header_std_impl { ($size:ty) => {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
#[test]
|
||||
fn size_of() {
|
||||
assert_eq!(::std::mem::size_of::<ProgramHeader>(), SIZEOF_PHDR);
|
||||
macro_rules! elf_program_header_std_impl {
|
||||
($size:ty) => {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
#[test]
|
||||
fn size_of() {
|
||||
assert_eq!(::std::mem::size_of::<ProgramHeader>(), SIZEOF_PHDR);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if_alloc! {
|
||||
if_alloc! {
|
||||
|
||||
use crate::elf::program_header::ProgramHeader as ElfProgramHeader;
|
||||
#[cfg(any(feature = "std", feature = "endian_fd"))]
|
||||
use crate::error::Result;
|
||||
|
||||
use core::slice;
|
||||
use crate::elf::program_header::ProgramHeader as ElfProgramHeader;
|
||||
#[cfg(any(feature = "std", feature = "endian_fd"))]
|
||||
use crate::error::Result;
|
||||
|
||||
use plain::Plain;
|
||||
|
||||
if_std! {
|
||||
use std::fs::File;
|
||||
use std::io::{Seek, Read};
|
||||
use std::io::SeekFrom::Start;
|
||||
}
|
||||
|
||||
impl From<ProgramHeader> for ElfProgramHeader {
|
||||
fn from(ph: ProgramHeader) -> Self {
|
||||
ElfProgramHeader {
|
||||
p_type : ph.p_type,
|
||||
p_flags : ph.p_flags,
|
||||
p_offset : u64::from(ph.p_offset),
|
||||
p_vaddr : u64::from(ph.p_vaddr),
|
||||
p_paddr : u64::from(ph.p_paddr),
|
||||
p_filesz : u64::from(ph.p_filesz),
|
||||
p_memsz : u64::from(ph.p_memsz),
|
||||
p_align : u64::from(ph.p_align),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ElfProgramHeader> for ProgramHeader {
|
||||
fn from(ph: ElfProgramHeader) -> Self {
|
||||
ProgramHeader {
|
||||
p_type : ph.p_type,
|
||||
p_flags : ph.p_flags,
|
||||
p_offset : ph.p_offset as $size,
|
||||
p_vaddr : ph.p_vaddr as $size,
|
||||
p_paddr : ph.p_paddr as $size,
|
||||
p_filesz : ph.p_filesz as $size,
|
||||
p_memsz : ph.p_memsz as $size,
|
||||
p_align : ph.p_align as $size,
|
||||
}
|
||||
}
|
||||
}
|
||||
} // end if_alloc
|
||||
|
||||
use core::fmt;
|
||||
|
||||
use plain::Plain;
|
||||
|
||||
if_std! {
|
||||
use std::fs::File;
|
||||
use std::io::{Seek, Read};
|
||||
use std::io::SeekFrom::Start;
|
||||
}
|
||||
|
||||
impl From<ProgramHeader> for ElfProgramHeader {
|
||||
fn from(ph: ProgramHeader) -> Self {
|
||||
ElfProgramHeader {
|
||||
p_type : ph.p_type,
|
||||
p_flags : ph.p_flags,
|
||||
p_offset : u64::from(ph.p_offset),
|
||||
p_vaddr : u64::from(ph.p_vaddr),
|
||||
p_paddr : u64::from(ph.p_paddr),
|
||||
p_filesz : u64::from(ph.p_filesz),
|
||||
p_memsz : u64::from(ph.p_memsz),
|
||||
p_align : u64::from(ph.p_align),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ElfProgramHeader> for ProgramHeader {
|
||||
fn from(ph: ElfProgramHeader) -> Self {
|
||||
ProgramHeader {
|
||||
p_type : ph.p_type,
|
||||
p_flags : ph.p_flags,
|
||||
p_offset : ph.p_offset as $size,
|
||||
p_vaddr : ph.p_vaddr as $size,
|
||||
p_paddr : ph.p_paddr as $size,
|
||||
p_filesz : ph.p_filesz as $size,
|
||||
p_memsz : ph.p_memsz as $size,
|
||||
p_align : ph.p_align as $size,
|
||||
}
|
||||
}
|
||||
}
|
||||
use core::slice;
|
||||
|
||||
impl fmt::Debug for ProgramHeader {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
|
|
@ -300,7 +310,12 @@ macro_rules! elf_program_header_std_impl { ($size:ty) => {
|
|||
|
||||
impl ProgramHeader {
|
||||
#[cfg(feature = "endian_fd")]
|
||||
pub fn parse(bytes: &[u8], mut offset: usize, count: usize, ctx: ::scroll::Endian) -> Result<Vec<ProgramHeader>> {
|
||||
pub fn parse(
|
||||
bytes: &[u8],
|
||||
mut offset: usize,
|
||||
count: usize,
|
||||
ctx: ::scroll::Endian,
|
||||
) -> Result<Vec<ProgramHeader>> {
|
||||
use scroll::Pread;
|
||||
let mut program_headers = vec![ProgramHeader::default(); count];
|
||||
let offset = &mut offset;
|
||||
|
|
@ -308,15 +323,22 @@ macro_rules! elf_program_header_std_impl { ($size:ty) => {
|
|||
Ok(program_headers)
|
||||
}
|
||||
|
||||
#[cfg(feature = "alloc")]
|
||||
pub fn from_bytes(bytes: &[u8], phnum: usize) -> Vec<ProgramHeader> {
|
||||
let mut phdrs = vec![ProgramHeader::default(); phnum];
|
||||
phdrs.copy_from_bytes(bytes).expect("buffer is too short for given number of entries");
|
||||
phdrs
|
||||
.copy_from_bytes(bytes)
|
||||
.expect("buffer is too short for given number of entries");
|
||||
phdrs
|
||||
}
|
||||
|
||||
pub unsafe fn from_raw_parts<'a>(phdrp: *const ProgramHeader,
|
||||
phnum: usize)
|
||||
-> &'a [ProgramHeader] {
|
||||
/// # Safety
|
||||
///
|
||||
/// This function creates a `ProgramHeader` directly from a raw pointer
|
||||
pub unsafe fn from_raw_parts<'a>(
|
||||
phdrp: *const ProgramHeader,
|
||||
phnum: usize,
|
||||
) -> &'a [ProgramHeader] {
|
||||
slice::from_raw_parts(phdrp, phnum)
|
||||
}
|
||||
|
||||
|
|
@ -330,8 +352,8 @@ macro_rules! elf_program_header_std_impl { ($size:ty) => {
|
|||
Ok(phdrs)
|
||||
}
|
||||
}
|
||||
} // end if_alloc
|
||||
};}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(feature = "alloc")]
|
||||
use scroll::{Pread, Pwrite, SizeWith};
|
||||
|
|
@ -342,7 +364,7 @@ pub mod program_header32 {
|
|||
#[repr(C)]
|
||||
#[derive(Copy, Clone, PartialEq, Default)]
|
||||
#[cfg_attr(feature = "alloc", derive(Pread, Pwrite, SizeWith))]
|
||||
/// A 64-bit ProgramHeader typically specifies how to map executable and data segments into memory
|
||||
/// A 32-bit ProgramHeader typically specifies how to map executable and data segments into memory
|
||||
pub struct ProgramHeader {
|
||||
/// Segment type
|
||||
pub p_type: u32,
|
||||
|
|
@ -364,21 +386,19 @@ pub mod program_header32 {
|
|||
|
||||
pub const SIZEOF_PHDR: usize = 32;
|
||||
|
||||
use plain;
|
||||
// Declare that this is a plain type.
|
||||
unsafe impl plain::Plain for ProgramHeader {}
|
||||
|
||||
elf_program_header_std_impl!(u32);
|
||||
}
|
||||
|
||||
|
||||
pub mod program_header64 {
|
||||
pub use crate::elf::program_header::*;
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Copy, Clone, PartialEq, Default)]
|
||||
#[cfg_attr(feature = "alloc", derive(Pread, Pwrite, SizeWith))]
|
||||
/// A 32-bit ProgramHeader typically specifies how to map executable and data segments into memory
|
||||
/// A 64-bit ProgramHeader typically specifies how to map executable and data segments into memory
|
||||
pub struct ProgramHeader {
|
||||
/// Segment type
|
||||
pub p_type: u32,
|
||||
|
|
@ -400,7 +420,6 @@ pub mod program_header64 {
|
|||
|
||||
pub const SIZEOF_PHDR: usize = 56;
|
||||
|
||||
use plain;
|
||||
// Declare that this is a plain type.
|
||||
unsafe impl plain::Plain for ProgramHeader {}
|
||||
|
||||
|
|
|
|||
9
third_party/rust/goblin/src/elf/reloc.rs
vendored
|
|
@ -126,9 +126,9 @@ macro_rules! elf_reloc {
|
|||
};
|
||||
}
|
||||
|
||||
macro_rules! elf_rela_std_impl { ($size:ident, $isize:ty) => {
|
||||
|
||||
if_alloc! {
|
||||
macro_rules! elf_rela_std_impl {
|
||||
($size:ident, $isize:ty) => {
|
||||
if_alloc! {
|
||||
use crate::elf::reloc::Reloc;
|
||||
|
||||
use core::slice;
|
||||
|
|
@ -218,7 +218,6 @@ macro_rules! elf_rela_std_impl { ($size:ident, $isize:ty) => {
|
|||
};
|
||||
}
|
||||
|
||||
|
||||
pub mod reloc32 {
|
||||
|
||||
pub use crate::elf::reloc::*;
|
||||
|
|
@ -246,7 +245,6 @@ pub mod reloc32 {
|
|||
elf_rela_std_impl!(u32, i32);
|
||||
}
|
||||
|
||||
|
||||
pub mod reloc64 {
|
||||
pub use crate::elf::reloc::*;
|
||||
|
||||
|
|
@ -282,7 +280,6 @@ if_alloc! {
|
|||
use core::fmt;
|
||||
use core::result;
|
||||
use crate::container::{Ctx, Container};
|
||||
#[cfg(feature = "endian_fd")]
|
||||
use alloc::vec::Vec;
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Default)]
|
||||
|
|
|
|||
|
|
@ -147,6 +147,8 @@ pub const SHT_HISUNW: u32 = 0x6fff_ffff;
|
|||
pub const SHT_HIOS: u32 = 0x6fff_ffff;
|
||||
/// Start of processor-specific.
|
||||
pub const SHT_LOPROC: u32 = 0x7000_0000;
|
||||
/// X86-64 unwind information.
|
||||
pub const SHT_X86_64_UNWIND: u32 = 0x7000_0001;
|
||||
/// End of processor-specific.
|
||||
pub const SHT_HIPROC: u32 = 0x7fff_ffff;
|
||||
/// Start of application-specific.
|
||||
|
|
@ -185,8 +187,8 @@ pub const SHF_MASKPROC: u32 = 0xf000_0000;
|
|||
pub const SHF_ORDERED: u32 = 1 << 30;
|
||||
/// Number of "regular" section header flags
|
||||
pub const SHF_NUM_REGULAR_FLAGS: usize = 12;
|
||||
// /// Section is excluded unless referenced or allocated (Solaris).
|
||||
// pub const SHF_EXCLUDE: u32 = 1U << 31;
|
||||
/// Section is excluded unless referenced or allocated (Solaris).
|
||||
pub const SHF_EXCLUDE: u32 = 0x80000000; // 1U << 31
|
||||
|
||||
pub const SHF_FLAGS: [u32; SHF_NUM_REGULAR_FLAGS] = [
|
||||
SHF_WRITE,
|
||||
|
|
@ -235,6 +237,7 @@ pub fn sht_to_str(sht: u32) -> &'static str {
|
|||
SHT_GNU_VERNEED => "SHT_GNU_VERNEED",
|
||||
SHT_GNU_VERSYM => "SHT_GNU_VERSYM",
|
||||
SHT_LOPROC => "SHT_LOPROC",
|
||||
SHT_X86_64_UNWIND => "SHT_X86_64_UNWIND",
|
||||
SHT_HIPROC => "SHT_HIPROC",
|
||||
SHT_LOUSER => "SHT_LOUSER",
|
||||
SHT_HIUSER => "SHT_HIUSER",
|
||||
|
|
@ -257,7 +260,7 @@ pub fn shf_to_str(shf: u32) -> &'static str {
|
|||
SHF_COMPRESSED => "SHF_COMPRESSED",
|
||||
//SHF_MASKOS..SHF_MASKPROC => "SHF_OSFLAG",
|
||||
SHF_ORDERED => "SHF_ORDERED",
|
||||
_ => "SHF_UNKNOWN"
|
||||
_ => "SHF_UNKNOWN",
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -341,7 +344,6 @@ macro_rules! elf_section_header_std_impl { ($size:ty) => {
|
|||
} // end if_alloc
|
||||
};}
|
||||
|
||||
|
||||
pub mod section_header32 {
|
||||
pub use crate::elf::section_header::*;
|
||||
|
||||
|
|
@ -352,7 +354,6 @@ pub mod section_header32 {
|
|||
elf_section_header_std_impl!(u32);
|
||||
}
|
||||
|
||||
|
||||
pub mod section_header64 {
|
||||
|
||||
pub use crate::elf::section_header::*;
|
||||
|
|
@ -425,13 +426,20 @@ if_alloc! {
|
|||
sh_entsize: 0,
|
||||
}
|
||||
}
|
||||
/// Returns this section header's file offset range
|
||||
pub fn file_range(&self) -> Range<usize> {
|
||||
(self.sh_offset as usize..self.sh_offset as usize + self.sh_size as usize)
|
||||
/// Returns this section header's file offset range,
|
||||
/// if the section occupies space in the file.
|
||||
pub fn file_range(&self) -> Option<Range<usize>> {
|
||||
// Sections with type SHT_NOBITS have no data in the file itself,
|
||||
// they only exist in memory.
|
||||
if self.sh_type == SHT_NOBITS {
|
||||
None
|
||||
} else {
|
||||
Some(self.sh_offset as usize..(self.sh_offset as usize).saturating_add(self.sh_size as usize))
|
||||
}
|
||||
}
|
||||
/// Returns this section header's virtual memory range
|
||||
pub fn vm_range(&self) -> Range<usize> {
|
||||
(self.sh_addr as usize..self.sh_addr as usize + self.sh_size as usize)
|
||||
self.sh_addr as usize..(self.sh_addr as usize).saturating_add(self.sh_size as usize)
|
||||
}
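// Editor's note: a hedged sketch (not part of this diff) of consuming the new
// Option-returning `SectionHeader::file_range` above: SHT_NOBITS sections such as
// `.bss` occupy no bytes in the file, so they now yield `None` instead of a range
// that merely points at unrelated data. Assumes `goblin` is a dependency.
use goblin::elf::Elf;
use goblin::error::Error;

fn dump_section_sizes(bytes: &[u8]) -> Result<(), Error> {
    let binary = Elf::parse(bytes)?;
    for shdr in &binary.section_headers {
        let name = binary.shdr_strtab.get_at(shdr.sh_name).unwrap_or("<unnamed>");
        match shdr.file_range() {
            Some(range) => println!("{}: {} bytes in the file", name, range.len()),
            None => println!("{}: no file contents (SHT_NOBITS)", name),
        }
    }
    Ok(())
}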
|
||||
/// Parse `count` section headers from `bytes` at `offset`, using the given `ctx`
|
||||
#[cfg(feature = "endian_fd")]
|
||||
|
|
@ -448,6 +456,11 @@ if_alloc! {
|
|||
// case the count is stored in the sh_size field of the null section header.
|
||||
count = empty_sh.sh_size as usize;
|
||||
}
|
||||
|
||||
// Sanity check to avoid OOM
|
||||
if count > bytes.len() / Self::size(ctx) {
|
||||
return Err(error::Error::BufferTooShort(count, "section headers"));
|
||||
}
|
||||
let mut section_headers = Vec::with_capacity(count);
|
||||
section_headers.push(empty_sh);
|
||||
for _ in 1..count {
|
||||
|
|
@ -466,6 +479,12 @@ if_alloc! {
|
|||
self.sh_name, self.sh_offset, self.sh_size, overflow);
|
||||
return Err(error::Error::Malformed(message));
|
||||
}
|
||||
let (_, overflow) = self.sh_addr.overflowing_add(self.sh_size);
|
||||
if overflow {
|
||||
let message = format!("Section {} size ({}) + addr ({}) is out of bounds. Overflowed: {}",
|
||||
self.sh_name, self.sh_addr, self.sh_size, overflow);
|
||||
return Err(error::Error::Malformed(message));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
pub fn is_relocation(&self) -> bool {
|
||||
|
|
|
|||
97
third_party/rust/goblin/src/elf/sym.rs
vendored
|
|
@ -150,10 +150,8 @@ pub fn visibility_to_str(typ: u8) -> &'static str {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
macro_rules! elf_sym_std_impl {
|
||||
($size:ty) => {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
|
@ -186,12 +184,12 @@ macro_rules! elf_sym_std_impl {
|
|||
#[inline]
|
||||
fn from(sym: Sym) -> Self {
|
||||
ElfSym {
|
||||
st_name: sym.st_name as usize,
|
||||
st_info: sym.st_info,
|
||||
st_other: sym.st_other,
|
||||
st_shndx: sym.st_shndx as usize,
|
||||
st_value: u64::from(sym.st_value),
|
||||
st_size: u64::from(sym.st_size),
|
||||
st_name: sym.st_name as usize,
|
||||
st_info: sym.st_info,
|
||||
st_other: sym.st_other,
|
||||
st_shndx: sym.st_shndx as usize,
|
||||
st_value: u64::from(sym.st_value),
|
||||
st_size: u64::from(sym.st_size),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -200,12 +198,12 @@ macro_rules! elf_sym_std_impl {
|
|||
#[inline]
|
||||
fn from(sym: ElfSym) -> Self {
|
||||
Sym {
|
||||
st_name: sym.st_name as u32,
|
||||
st_info: sym.st_info,
|
||||
st_other: sym.st_other,
|
||||
st_shndx: sym.st_shndx as u16,
|
||||
st_value: sym.st_value as $size,
|
||||
st_size: sym.st_size as $size,
|
||||
st_name: sym.st_name as u32,
|
||||
st_info: sym.st_info,
|
||||
st_other: sym.st_other,
|
||||
st_shndx: sym.st_shndx as u16,
|
||||
st_value: sym.st_value as $size,
|
||||
st_size: sym.st_size as $size,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -219,13 +217,27 @@ macro_rules! elf_sym_std_impl {
|
|||
.field("st_name", &self.st_name)
|
||||
.field("st_value", &format_args!("{:x}", self.st_value))
|
||||
.field("st_size", &self.st_size)
|
||||
.field("st_info", &format_args!("{:x} {} {}", self.st_info, bind_to_str(bind), type_to_str(typ)))
|
||||
.field("st_other", &format_args!("{} {}", self.st_other, visibility_to_str(vis)))
|
||||
.field(
|
||||
"st_info",
|
||||
&format_args!(
|
||||
"{:x} {} {}",
|
||||
self.st_info,
|
||||
bind_to_str(bind),
|
||||
type_to_str(typ)
|
||||
),
|
||||
)
|
||||
.field(
|
||||
"st_other",
|
||||
&format_args!("{} {}", self.st_other, visibility_to_str(vis)),
|
||||
)
|
||||
.field("st_shndx", &self.st_shndx)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
///
|
||||
/// This function creates a `Sym` slice directly from a raw pointer
|
||||
#[inline]
|
||||
pub unsafe fn from_raw<'a>(symp: *const Sym, count: usize) -> &'a [Sym] {
|
||||
slice::from_raw_parts(symp, count)
|
||||
|
|
@ -315,25 +327,24 @@ pub mod sym64 {
|
|||
elf_sym_std_impl!(u64);
|
||||
}
|
||||
|
||||
use scroll::ctx;
|
||||
use scroll::ctx::SizeWith;
|
||||
use core::fmt::{self, Debug};
|
||||
use core::result;
|
||||
use crate::container::{Ctx, Container};
|
||||
use crate::container::{Container, Ctx};
|
||||
#[cfg(feature = "alloc")]
|
||||
use crate::error::Result;
|
||||
#[cfg(feature = "alloc")]
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt;
|
||||
use scroll::ctx;
|
||||
use scroll::ctx::SizeWith;
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Default)]
|
||||
/// A unified Sym definition - convertible to and from 32-bit and 64-bit variants
|
||||
pub struct Sym {
|
||||
pub st_name: usize,
|
||||
pub st_info: u8,
|
||||
pub st_other: u8,
|
||||
pub st_shndx: usize,
|
||||
pub st_value: u64,
|
||||
pub st_size: u64,
|
||||
pub st_name: usize,
|
||||
pub st_info: u8,
|
||||
pub st_other: u8,
|
||||
pub st_shndx: usize,
|
||||
pub st_value: u64,
|
||||
pub st_size: u64,
|
||||
}
|
||||
|
||||
impl Sym {
|
||||
|
|
@ -376,6 +387,9 @@ impl Sym {
|
|||
#[cfg(feature = "endian_fd")]
|
||||
/// Parse `count` vector of ELF symbols from `offset`
|
||||
pub fn parse(bytes: &[u8], mut offset: usize, count: usize, ctx: Ctx) -> Result<Vec<Sym>> {
|
||||
if count > bytes.len() / Sym::size_with(&ctx) {
|
||||
return Err(crate::error::Error::BufferTooShort(count, "symbols"));
|
||||
}
|
||||
let mut syms = Vec::with_capacity(count);
|
||||
for _ in 0..count {
|
||||
let sym = bytes.gread_with(&mut offset, ctx)?;
|
||||
|
|
@ -392,8 +406,19 @@ impl fmt::Debug for Sym {
|
|||
let vis = self.st_visibility();
|
||||
f.debug_struct("Sym")
|
||||
.field("st_name", &self.st_name)
|
||||
.field("st_info", &format_args!("0x{:x} {} {}", self.st_info, bind_to_str(bind), type_to_str(typ)))
|
||||
.field("st_other", &format_args!("{} {}", self.st_other, visibility_to_str(vis)))
|
||||
.field(
|
||||
"st_info",
|
||||
&format_args!(
|
||||
"0x{:x} {} {}",
|
||||
self.st_info,
|
||||
bind_to_str(bind),
|
||||
type_to_str(typ)
|
||||
),
|
||||
)
|
||||
.field(
|
||||
"st_other",
|
||||
&format_args!("{} {}", self.st_other, visibility_to_str(vis)),
|
||||
)
|
||||
.field("st_shndx", &self.st_shndx)
|
||||
.field("st_value", &format_args!("0x{:x}", self.st_value))
|
||||
.field("st_size", &self.st_size)
|
||||
|
|
@ -403,19 +428,17 @@ impl fmt::Debug for Sym {
|
|||
|
||||
impl ctx::SizeWith<Ctx> for Sym {
|
||||
#[inline]
|
||||
fn size_with(&Ctx {container, .. }: &Ctx) -> usize {
|
||||
fn size_with(&Ctx { container, .. }: &Ctx) -> usize {
|
||||
match container {
|
||||
Container::Little => {
|
||||
sym32::SIZEOF_SYM
|
||||
},
|
||||
Container::Big => {
|
||||
sym64::SIZEOF_SYM
|
||||
},
|
||||
Container::Little => sym32::SIZEOF_SYM,
|
||||
Container::Big => sym64::SIZEOF_SYM,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if_alloc! {
|
||||
use core::result;
|
||||
|
||||
impl<'a> ctx::TryFromCtx<'a, Ctx> for Sym {
|
||||
type Error = crate::error::Error;
|
||||
#[inline]
|
||||
|
|
@ -477,7 +500,7 @@ if_alloc! {
|
|||
end: usize,
|
||||
}
|
||||
|
||||
impl<'a> Debug for Symtab<'a> {
|
||||
impl<'a> fmt::Debug for Symtab<'a> {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
let len = self.bytes.len();
|
||||
fmt.debug_struct("Symtab")
|
||||
|
|
|
|||
880
third_party/rust/goblin/src/elf/symver.rs
vendored
Normal file
|
|
@ -0,0 +1,880 @@
|
|||
//! Symbol versioning
|
||||
//!
|
||||
//! Implementation of the GNU symbol versioning extension according to
|
||||
//! [LSB Core Specification - Symbol Versioning][lsb-symver].
|
||||
//!
|
||||
//! # Examples
|
||||
//!
|
||||
//! List the dependencies of an ELF file that have [version needed][lsb-verneed] information along
|
||||
//! with the versions needed for each dependency.
|
||||
//! ```rust
|
||||
//! use goblin::error::Error;
|
||||
//!
|
||||
//! pub fn show_verneed(bytes: &[u8]) -> Result<(), Error> {
|
||||
//! let binary = goblin::elf::Elf::parse(&bytes)?;
|
||||
//!
|
||||
//! if let Some(verneed) = binary.verneed {
|
||||
//! for need_file in verneed.iter() {
|
||||
//! println!(
|
||||
//! "Depend on {:?} with version(s):",
|
||||
//! binary.dynstrtab.get_at(need_file.vn_file)
|
||||
//! );
|
||||
//! for need_ver in need_file.iter() {
|
||||
//! println!("{:?}", binary.dynstrtab.get_at(need_ver.vna_name));
|
||||
//! }
|
||||
//! }
|
||||
//! }
|
||||
//!
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! List the [version defined][lsb-verdef] information of an ELF file, effectively listing the version
|
||||
//! defined by this ELF file.
|
||||
//! ```rust
|
||||
//! use goblin::error::Error;
|
||||
//!
|
||||
//! pub fn show_verdef(bytes: &[u8]) -> Result<(), Error> {
|
||||
//! let binary = goblin::elf::Elf::parse(&bytes)?;
|
||||
//!
|
||||
//! if let Some(verdef) = &binary.verdef {
|
||||
//! for def in verdef.iter() {
|
||||
//! for (n, aux) in def.iter().enumerate() {
|
||||
//! let name = binary.dynstrtab.get_at(aux.vda_name);
|
||||
//! match n {
|
||||
//! 0 => print!("Name: {:?}", name),
|
||||
//! 1 => print!(" Parent: {:?}", name),
|
||||
//! _ => print!(", {:?}", name),
|
||||
//! }
|
||||
//! }
|
||||
//! print!("\n");
|
||||
//! }
|
||||
//! }
|
||||
//!
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! [lsb-symver]: https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html
|
||||
//! [lsb-verneed]: https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html#SYMVERRQMTS
|
||||
//! [lsb-verdef]: https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html#SYMVERDEFS
|
||||
|
||||
use crate::container;
|
||||
use crate::elf::section_header::{SectionHeader, SHT_GNU_VERDEF, SHT_GNU_VERNEED, SHT_GNU_VERSYM};
|
||||
use crate::error::Result;
|
||||
use core::iter::FusedIterator;
|
||||
use scroll::Pread;
|
||||
|
||||
/******************
|
||||
* ELF Constants *
|
||||
******************/
|
||||
|
||||
// Versym constants.
|
||||
|
||||
/// Constant describing a local symbol, see [`Versym::is_local`].
|
||||
pub const VER_NDX_LOCAL: u16 = 0;
|
||||
/// Constant describing a global symbol, see [`Versym::is_global`].
|
||||
pub const VER_NDX_GLOBAL: u16 = 1;
|
||||
/// Bitmask to check hidden bit, see [`Versym::is_hidden`].
|
||||
pub const VERSYM_HIDDEN: u16 = 0x8000;
|
||||
/// Bitmask to get version information, see [`Versym::version`].
|
||||
pub const VERSYM_VERSION: u16 = 0x7fff;
|
||||
|
||||
// Verdef constants.
|
||||
|
||||
/// Bitmask to check `base` flag in [`Verdef::vd_flags`].
|
||||
pub const VER_FLG_BASE: u16 = 0x1;
|
||||
/// Bitmask to check `weak` flag in [`Verdef::vd_flags`].
|
||||
pub const VER_FLG_WEAK: u16 = 0x2;
|
||||
/// Bitmask to check `info` flag in [`Verdef::vd_flags`].
|
||||
pub const VER_FLG_INFO: u16 = 0x4;
|
||||
|
||||
/********************
|
||||
* ELF Structures *
|
||||
********************/
|
||||
|
||||
/// An ELF `Symbol Version` entry.
|
||||
///
|
||||
/// https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html#SYMVERTBL
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Pread)]
|
||||
struct ElfVersym {
|
||||
vs_val: u16,
|
||||
}
|
||||
|
||||
/// An ELF `Version Definition` entry Elfxx_Verdef.
|
||||
///
|
||||
/// https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html#VERDEFENTRIES
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Pread)]
|
||||
struct ElfVerdef {
|
||||
vd_version: u16,
|
||||
vd_flags: u16,
|
||||
vd_ndx: u16,
|
||||
vd_cnt: u16,
|
||||
vd_hash: u32,
|
||||
vd_aux: u32,
|
||||
vd_next: u32,
|
||||
}
|
||||
|
||||
/// An ELF `Version Definition Auxiliary` entry Elfxx_Verdaux.
|
||||
///
|
||||
/// https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html#VERDEFEXTS
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Pread)]
|
||||
struct ElfVerdaux {
|
||||
vda_name: u32,
|
||||
vda_next: u32,
|
||||
}
|
||||
|
||||
/// An ELF `Version Need` entry Elfxx_Verneed.
|
||||
///
|
||||
/// https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html#VERNEEDFIG
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Pread)]
|
||||
struct ElfVerneed {
|
||||
vn_version: u16,
|
||||
vn_cnt: u16,
|
||||
vn_file: u32,
|
||||
vn_aux: u32,
|
||||
vn_next: u32,
|
||||
}
|
||||
|
||||
/// An ELF `Version Need Auxiliary` entry Elfxx_Vernaux.
|
||||
///
|
||||
/// https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html#VERNEEDEXTFIG
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Pread)]
|
||||
struct ElfVernaux {
|
||||
vna_hash: u32,
|
||||
vna_flags: u16,
|
||||
vna_other: u16,
|
||||
vna_name: u32,
|
||||
vna_next: u32,
|
||||
}
|
||||
|
||||
/********************
|
||||
* Symbol Version *
|
||||
********************/
|
||||
|
||||
/// Helper struct to iterate over [Symbol Version][Versym] entries.
|
||||
#[derive(Debug)]
|
||||
pub struct VersymSection<'a> {
|
||||
bytes: &'a [u8],
|
||||
ctx: container::Ctx,
|
||||
}
|
||||
|
||||
impl<'a> VersymSection<'a> {
|
||||
pub fn parse(
|
||||
bytes: &'a [u8],
|
||||
shdrs: &[SectionHeader],
|
||||
ctx: container::Ctx,
|
||||
) -> Result<Option<VersymSection<'a>>> {
|
||||
// Get fields needed from optional `symbol version` section.
|
||||
let (offset, size) =
|
||||
if let Some(shdr) = shdrs.iter().find(|shdr| shdr.sh_type == SHT_GNU_VERSYM) {
|
||||
(shdr.sh_offset as usize, shdr.sh_size as usize)
|
||||
} else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
// Get a slice of bytes of the `symbol version` section content.
|
||||
let bytes: &'a [u8] = bytes.pread_with(offset, size)?;
|
||||
|
||||
Ok(Some(VersymSection { bytes, ctx }))
|
||||
}
|
||||
|
||||
/// Get an iterator over the [`Versym`] entries.
|
||||
#[inline]
|
||||
pub fn iter(&'a self) -> VersymIter<'a> {
|
||||
self.into_iter()
|
||||
}
|
||||
|
||||
/// True if there are no [`Versym`] entries.
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.bytes.is_empty()
|
||||
}
|
||||
|
||||
/// Number of [`Versym`] entries.
|
||||
#[inline]
|
||||
pub fn len(&self) -> usize {
|
||||
let entsize = core::mem::size_of::<ElfVersym>();
|
||||
|
||||
self.bytes.len() / entsize
|
||||
}
|
||||
|
||||
/// Get [`Versym`] entry at index.
|
||||
#[inline]
|
||||
pub fn get_at(&self, idx: usize) -> Option<Versym> {
|
||||
let entsize = core::mem::size_of::<ElfVersym>();
|
||||
let offset = idx.checked_mul(entsize)?;
|
||||
|
||||
self.bytes
|
||||
.pread_with::<ElfVersym>(offset, self.ctx.le)
|
||||
.ok()
|
||||
.map(Versym::from)
|
||||
}
|
||||
}
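
// Hedged usage sketch: given raw file bytes, the parsed section headers, and a
// parsing context, this shows how `parse`, `is_empty`, and `get_at` are meant
// to fit together. The helper is hypothetical (not goblin API) and simply
// propagates errors with `?`.
#[allow(dead_code)]
fn first_versym_entry(
    bytes: &[u8],
    shdrs: &[SectionHeader],
    ctx: container::Ctx,
) -> Result<Option<Versym>> {
    // `parse` returns `Ok(None)` when the file has no SHT_GNU_VERSYM section.
    let section = match VersymSection::parse(bytes, shdrs, ctx)? {
        Some(section) => section,
        None => return Ok(None),
    };
    // `get_at` bounds-checks the index, so an empty section simply yields `None`.
    Ok(section.get_at(0))
}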
|
||||
|
||||
impl<'a> IntoIterator for &'_ VersymSection<'a> {
|
||||
type Item = <VersymIter<'a> as Iterator>::Item;
|
||||
type IntoIter = VersymIter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
VersymIter {
|
||||
bytes: self.bytes,
|
||||
offset: 0,
|
||||
ctx: self.ctx,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterator over the [`Versym`] entries from the [`SHT_GNU_VERSYM`] section.
|
||||
pub struct VersymIter<'a> {
|
||||
bytes: &'a [u8],
|
||||
offset: usize,
|
||||
ctx: container::Ctx,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for VersymIter<'a> {
|
||||
type Item = Versym;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.offset >= self.bytes.len() {
|
||||
None
|
||||
} else {
|
||||
self.bytes
|
||||
.gread_with::<ElfVersym>(&mut self.offset, self.ctx.le)
|
||||
.ok()
|
||||
.map(Versym::from)
|
||||
.or_else(|| {
|
||||
// The length of self.bytes is not a multiple of ElfVersym's size.
|
||||
// Adjust the offset so subsequent calls keep yielding None.
|
||||
self.offset = self.bytes.len();
|
||||
None
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
let len = (self.bytes.len() - self.offset) / core::mem::size_of::<Self::Item>();
|
||||
(len, Some(len))
|
||||
}
|
||||
}
|
||||
|
||||
impl ExactSizeIterator for VersymIter<'_> {}
|
||||
|
||||
impl FusedIterator for VersymIter<'_> {}
|
||||
|
||||
/// An ELF [Symbol Version][lsb-versym] entry.
|
||||
///
|
||||
/// [lsb-versym]: https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html#SYMVERTBL
|
||||
#[derive(Debug)]
|
||||
pub struct Versym {
|
||||
pub vs_val: u16,
|
||||
}
|
||||
|
||||
impl Versym {
|
||||
/// Returns true if the symbol is local and not available outside the object according to
|
||||
/// [`VER_NDX_LOCAL`].
|
||||
#[inline]
|
||||
pub fn is_local(&self) -> bool {
|
||||
self.vs_val == VER_NDX_LOCAL
|
||||
}
|
||||
|
||||
/// Returns true if the symbol is defined in this object and globally available according
|
||||
/// to [`VER_NDX_GLOBAL`].
|
||||
#[inline]
|
||||
pub fn is_global(&self) -> bool {
|
||||
self.vs_val == VER_NDX_GLOBAL
|
||||
}
|
||||
|
||||
/// Returns true if the `hidden` bit is set according to the [`VERSYM_HIDDEN`] bitmask.
|
||||
#[inline]
|
||||
pub fn is_hidden(&self) -> bool {
|
||||
(self.vs_val & VERSYM_HIDDEN) == VERSYM_HIDDEN
|
||||
}
|
||||
|
||||
/// Returns the symbol version index according to the [`VERSYM_VERSION`] bitmask.
|
||||
#[inline]
|
||||
pub fn version(&self) -> u16 {
|
||||
self.vs_val & VERSYM_VERSION
|
||||
}
|
||||
}
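
// Hedged sketch (hypothetical helper): the index returned by `version()` is
// what ties the three versioning sections together. It is matched against
// `Verdef::vd_ndx` for versions defined by this object, and against the low
// 15 bits of `Vernaux::vna_other` for versions required from other objects.
#[allow(dead_code)]
fn versym_refers_to_verdef(versym: &Versym, verdef: &Verdef<'_>) -> bool {
    versym.version() == verdef.vd_ndx
}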
|
||||
|
||||
impl From<ElfVersym> for Versym {
|
||||
fn from(ElfVersym { vs_val }: ElfVersym) -> Self {
|
||||
Versym { vs_val }
|
||||
}
|
||||
}
|
||||
|
||||
/************************
|
||||
* Version Definition *
|
||||
************************/
|
||||
|
||||
/// Helper struct to iterate over [Version Definition][Verdef] and [Version Definition
|
||||
/// Auxiliary][Verdaux] entries.
|
||||
#[derive(Debug)]
|
||||
pub struct VerdefSection<'a> {
|
||||
/// Raw bytes of the `version definition` section content.
|
||||
bytes: &'a [u8],
|
||||
count: usize,
|
||||
ctx: container::Ctx,
|
||||
}
|
||||
|
||||
impl<'a> VerdefSection<'a> {
|
||||
pub fn parse(
|
||||
bytes: &'a [u8],
|
||||
shdrs: &[SectionHeader],
|
||||
ctx: container::Ctx,
|
||||
) -> Result<Option<VerdefSection<'a>>> {
|
||||
// Get fields needed from optional `version definition` section.
|
||||
let (offset, size, count) =
|
||||
if let Some(shdr) = shdrs.iter().find(|shdr| shdr.sh_type == SHT_GNU_VERDEF) {
|
||||
(
|
||||
shdr.sh_offset as usize,
|
||||
shdr.sh_size as usize,
|
||||
shdr.sh_info as usize, // Encodes the number of ElfVerdef entries.
|
||||
)
|
||||
} else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
// Get a slice of bytes of the `version definition` section content.
|
||||
let bytes: &'a [u8] = bytes.pread_with(offset, size)?;
|
||||
|
||||
Ok(Some(VerdefSection { bytes, count, ctx }))
|
||||
}
|
||||
|
||||
/// Get an iterator over the [`Verdef`] entries.
|
||||
#[inline]
|
||||
pub fn iter(&'a self) -> VerdefIter<'a> {
|
||||
self.into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'_ VerdefSection<'a> {
|
||||
type Item = <VerdefIter<'a> as Iterator>::Item;
|
||||
type IntoIter = VerdefIter<'a>;
|
||||
|
||||
#[inline]
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
VerdefIter {
|
||||
bytes: self.bytes,
|
||||
count: self.count,
|
||||
index: 0,
|
||||
offset: 0,
|
||||
ctx: self.ctx,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterator over the [`Verdef`] entries from the [`SHT_GNU_VERDEF`] section.
|
||||
pub struct VerdefIter<'a> {
|
||||
bytes: &'a [u8],
|
||||
count: usize,
|
||||
index: usize,
|
||||
offset: usize,
|
||||
ctx: container::Ctx,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for VerdefIter<'a> {
|
||||
type Item = Verdef<'a>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.index >= self.count {
|
||||
None
|
||||
} else {
|
||||
self.index += 1;
|
||||
|
||||
let do_next = |iter: &mut Self| {
|
||||
let ElfVerdef {
|
||||
vd_version,
|
||||
vd_flags,
|
||||
vd_ndx,
|
||||
vd_cnt,
|
||||
vd_hash,
|
||||
vd_aux,
|
||||
vd_next,
|
||||
} = iter.bytes.pread_with(iter.offset, iter.ctx.le).ok()?;
|
||||
|
||||
// Validate offset to first ElfVerdaux entry.
|
||||
let offset = iter.offset.checked_add(vd_aux as usize)?;
|
||||
|
||||
// Validate that the offset is a valid index into the bytes slice.
|
||||
if offset >= iter.bytes.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Get a slice of bytes starting with the first ElfVerdaux entry.
|
||||
let bytes: &'a [u8] = &iter.bytes[offset..];
|
||||
|
||||
// Bump the offset to the next ElfVerdef entry.
|
||||
iter.offset = iter.offset.checked_add(vd_next as usize)?;
|
||||
|
||||
// Start yielding None on the next call if there is no next offset.
|
||||
if vd_next == 0 {
|
||||
iter.index = iter.count;
|
||||
}
|
||||
|
||||
Some(Verdef {
|
||||
vd_version,
|
||||
vd_flags,
|
||||
vd_ndx,
|
||||
vd_cnt,
|
||||
vd_hash,
|
||||
vd_aux,
|
||||
vd_next,
|
||||
bytes,
|
||||
ctx: iter.ctx,
|
||||
})
|
||||
};
|
||||
|
||||
do_next(self).or_else(|| {
|
||||
// Adjust current index to count in case of an error.
|
||||
self.index = self.count;
|
||||
None
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
let len = self.count - self.index;
|
||||
(0, Some(len))
|
||||
}
|
||||
}
|
||||
|
||||
impl ExactSizeIterator for VerdefIter<'_> {}
|
||||
|
||||
impl FusedIterator for VerdefIter<'_> {}
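
// Hedged sketch of the traversal scheme `VerdefIter` implements: every record
// carries `vd_next`, the byte delta from the current record to the next one,
// and a delta of 0 ends the chain even if `sh_info` promised more entries.
// `walk_delta_chain` is a hypothetical helper that performs the same walk over
// plain integers, returning how many records were visited and the offset at
// which the walk stopped.
#[allow(dead_code)]
fn walk_delta_chain(next_deltas: &[u32], count: usize) -> (usize, usize) {
    let mut offset = 0usize;
    let mut visited = 0usize;
    for &delta in next_deltas.iter().take(count) {
        visited += 1;
        if delta == 0 {
            break; // mirrors the `vd_next == 0` early exit in `next()` above
        }
        offset += delta as usize;
    }
    (visited, offset)
}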
|
||||
|
||||
/// An ELF [Version Definition][lsb-verdef] entry.
|
||||
///
|
||||
/// [lsb-verdef]: https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html#VERDEFENTRIES
|
||||
#[derive(Debug)]
|
||||
pub struct Verdef<'a> {
|
||||
/// Version revision. This field shall be set to 1.
|
||||
pub vd_version: u16,
|
||||
/// Version information flag bitmask.
|
||||
pub vd_flags: u16,
|
||||
/// Version index numeric value referencing the SHT_GNU_versym section.
|
||||
pub vd_ndx: u16,
|
||||
/// Number of associated verdaux array entries.
|
||||
pub vd_cnt: u16,
|
||||
/// Version name hash value (ELF hash function).
|
||||
pub vd_hash: u32,
|
||||
/// Offset in bytes to a corresponding entry in an array of Elfxx_Verdaux structures.
|
||||
pub vd_aux: u32,
|
||||
/// Offset to the next verdef entry, in bytes.
|
||||
pub vd_next: u32,
|
||||
|
||||
bytes: &'a [u8],
|
||||
ctx: container::Ctx,
|
||||
}
|
||||
|
||||
impl<'a> Verdef<'a> {
|
||||
/// Get an iterator over the [`Verdaux`] entries of this [`Verdef`] entry.
|
||||
#[inline]
|
||||
pub fn iter(&'a self) -> VerdauxIter<'a> {
|
||||
self.into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'_ Verdef<'a> {
|
||||
type Item = <VerdauxIter<'a> as Iterator>::Item;
|
||||
type IntoIter = VerdauxIter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
VerdauxIter {
|
||||
bytes: self.bytes,
|
||||
count: self.vd_cnt,
|
||||
index: 0,
|
||||
offset: 0,
|
||||
ctx: self.ctx,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterator over the [`Verdaux`] entries for a specific [`Verdef`] entry.
|
||||
pub struct VerdauxIter<'a> {
|
||||
bytes: &'a [u8],
|
||||
count: u16,
|
||||
index: u16,
|
||||
offset: usize,
|
||||
ctx: container::Ctx,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for VerdauxIter<'a> {
|
||||
type Item = Verdaux;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.index >= self.count {
|
||||
None
|
||||
} else {
|
||||
self.index += 1;
|
||||
|
||||
let do_next = |iter: &mut Self| {
|
||||
let ElfVerdaux { vda_name, vda_next } =
|
||||
iter.bytes.pread_with(iter.offset, iter.ctx.le).ok()?;
|
||||
|
||||
// Bump the offset to the next ElfVerdaux entry.
|
||||
iter.offset = iter.offset.checked_add(vda_next as usize)?;
|
||||
|
||||
// Start yielding None on the next call if there is no next offset.
|
||||
if vda_next == 0 {
|
||||
iter.index = iter.count;
|
||||
}
|
||||
|
||||
Some(Verdaux {
|
||||
vda_name: vda_name as usize,
|
||||
vda_next,
|
||||
})
|
||||
};
|
||||
|
||||
do_next(self).or_else(|| {
|
||||
// Adjust current index to count in case of an error.
|
||||
self.index = self.count;
|
||||
None
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
let len = usize::from(self.count - self.index);
|
||||
(0, Some(len))
|
||||
}
|
||||
}
|
||||
|
||||
impl ExactSizeIterator for VerdauxIter<'_> {}
|
||||
|
||||
impl FusedIterator for VerdauxIter<'_> {}
|
||||
|
||||
/// An ELF [Version Definition Auxiliary][lsb-verdaux] entry.
|
||||
///
|
||||
/// [lsb-verdaux]: https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html#VERDEFEXTS
|
||||
#[derive(Debug)]
|
||||
pub struct Verdaux {
|
||||
/// Offset to the version or dependency name string in the section header, in bytes.
|
||||
pub vda_name: usize,
|
||||
/// Offset to the next verdaux entry, in bytes.
|
||||
pub vda_next: u32,
|
||||
}
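
// Hedged note: `vda_name` is a byte offset into the dynamic string table, so
// resolving the human-readable version name follows the same pattern as the
// module-level `show_verdef` example, e.g. (assuming an already parsed `Elf`
// named `binary` and a `Verdaux` named `aux`):
//
//     let name = binary.dynstrtab.get_at(aux.vda_name);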
|
||||
|
||||
/**************************
|
||||
* Version Requirements *
|
||||
**************************/
|
||||
|
||||
/// Helper struct to iterate over [Version Needed][Verneed] and [Version Needed
|
||||
/// Auxiliary][Vernaux] entries.
|
||||
#[derive(Debug)]
|
||||
pub struct VerneedSection<'a> {
|
||||
bytes: &'a [u8],
|
||||
count: usize,
|
||||
ctx: container::Ctx,
|
||||
}
|
||||
|
||||
impl<'a> VerneedSection<'a> {
|
||||
/// Try to parse the optional [`SHT_GNU_VERNEED`] section.
|
||||
pub fn parse(
|
||||
bytes: &'a [u8],
|
||||
shdrs: &[SectionHeader],
|
||||
ctx: container::Ctx,
|
||||
) -> Result<Option<VerneedSection<'a>>> {
|
||||
// Get fields needed from optional `version needed` section.
|
||||
let (offset, size, count) =
|
||||
if let Some(shdr) = shdrs.iter().find(|shdr| shdr.sh_type == SHT_GNU_VERNEED) {
|
||||
(
|
||||
shdr.sh_offset as usize,
|
||||
shdr.sh_size as usize,
|
||||
shdr.sh_info as usize, // Encodes the number of ElfVerneed entries.
|
||||
)
|
||||
} else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
// Get a slice of bytes of the `version needed` section content.
|
||||
let bytes: &'a [u8] = bytes.pread_with(offset, size)?;
|
||||
|
||||
Ok(Some(VerneedSection { bytes, count, ctx }))
|
||||
}
|
||||
|
||||
/// Get an iterator over the [`Verneed`] entries.
|
||||
#[inline]
|
||||
pub fn iter(&'a self) -> VerneedIter<'a> {
|
||||
self.into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'_ VerneedSection<'a> {
|
||||
type Item = <VerneedIter<'a> as Iterator>::Item;
|
||||
type IntoIter = VerneedIter<'a>;
|
||||
|
||||
#[inline]
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
VerneedIter {
|
||||
bytes: self.bytes,
|
||||
count: self.count,
|
||||
index: 0,
|
||||
offset: 0,
|
||||
ctx: self.ctx,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterator over the [`Verneed`] entries from the [`SHT_GNU_VERNEED`] section.
|
||||
pub struct VerneedIter<'a> {
|
||||
bytes: &'a [u8],
|
||||
count: usize,
|
||||
index: usize,
|
||||
offset: usize,
|
||||
ctx: container::Ctx,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for VerneedIter<'a> {
|
||||
type Item = Verneed<'a>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.index >= self.count {
|
||||
None
|
||||
} else {
|
||||
self.index += 1;
|
||||
|
||||
let do_next = |iter: &mut Self| {
|
||||
let ElfVerneed {
|
||||
vn_version,
|
||||
vn_cnt,
|
||||
vn_file,
|
||||
vn_aux,
|
||||
vn_next,
|
||||
} = iter.bytes.pread_with(iter.offset, iter.ctx.le).ok()?;
|
||||
|
||||
// Validate offset to first ElfVernaux entry.
|
||||
let offset = iter.offset.checked_add(vn_aux as usize)?;
|
||||
|
||||
// Validate that the offset is a valid index into the bytes slice.
|
||||
if offset >= iter.bytes.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Get a slice of bytes starting with the first ElfVernaux entry.
|
||||
let bytes: &'a [u8] = &iter.bytes[offset..];
|
||||
|
||||
// Bump the offset to the next ElfVerneed entry.
|
||||
iter.offset = iter.offset.checked_add(vn_next as usize)?;
|
||||
|
||||
// Start yielding None on the next call if there is no next offset.
|
||||
if vn_next == 0 {
|
||||
iter.index = iter.count;
|
||||
}
|
||||
|
||||
Some(Verneed {
|
||||
vn_version,
|
||||
vn_cnt,
|
||||
vn_file: vn_file as usize,
|
||||
vn_aux,
|
||||
vn_next,
|
||||
bytes,
|
||||
ctx: iter.ctx,
|
||||
})
|
||||
};
|
||||
|
||||
do_next(self).or_else(|| {
|
||||
// Adjust current index to count in case of an error.
|
||||
self.index = self.count;
|
||||
None
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
let len = self.count - self.index;
|
||||
(0, Some(len))
|
||||
}
|
||||
}
|
||||
|
||||
impl ExactSizeIterator for VerneedIter<'_> {}
|
||||
|
||||
impl FusedIterator for VerneedIter<'_> {}
|
||||
|
||||
/// An ELF [Version Need][lsb-verneed] entry.
|
||||
///
|
||||
/// [lsb-verneed]: https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html#VERNEEDFIG
|
||||
#[derive(Debug)]
|
||||
pub struct Verneed<'a> {
|
||||
/// Version of structure. This value is currently set to 1, and will be reset if the versioning
|
||||
/// implementation is incompatibly altered.
|
||||
pub vn_version: u16,
|
||||
/// Number of associated verneed array entries.
|
||||
pub vn_cnt: u16,
|
||||
/// Offset to the file name string in the section header, in bytes.
|
||||
pub vn_file: usize,
|
||||
/// Offset to a corresponding entry in the vernaux array, in bytes.
|
||||
pub vn_aux: u32,
|
||||
/// Offset to the next verneed entry, in bytes.
|
||||
pub vn_next: u32,
|
||||
|
||||
bytes: &'a [u8],
|
||||
ctx: container::Ctx,
|
||||
}
|
||||
|
||||
impl<'a> Verneed<'a> {
|
||||
/// Get an iterator over the [`Vernaux`] entries of this [`Verneed`] entry.
|
||||
#[inline]
|
||||
pub fn iter(&'a self) -> VernauxIter<'a> {
|
||||
self.into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'_ Verneed<'a> {
|
||||
type Item = <VernauxIter<'a> as Iterator>::Item;
|
||||
type IntoIter = VernauxIter<'a>;
|
||||
|
||||
#[inline]
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
VernauxIter {
|
||||
bytes: self.bytes,
|
||||
count: self.vn_cnt,
|
||||
index: 0,
|
||||
offset: 0,
|
||||
ctx: self.ctx,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterator over the [`Vernaux`] entries for a specific [`Verneed`] entry.
|
||||
pub struct VernauxIter<'a> {
|
||||
bytes: &'a [u8],
|
||||
count: u16,
|
||||
index: u16,
|
||||
offset: usize,
|
||||
ctx: container::Ctx,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for VernauxIter<'a> {
|
||||
type Item = Vernaux;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.index >= self.count {
|
||||
None
|
||||
} else {
|
||||
self.index += 1;
|
||||
|
||||
let do_next = |iter: &mut Self| {
|
||||
let ElfVernaux {
|
||||
vna_hash,
|
||||
vna_flags,
|
||||
vna_other,
|
||||
vna_name,
|
||||
vna_next,
|
||||
} = iter.bytes.pread_with(iter.offset, iter.ctx.le).ok()?;
|
||||
|
||||
// Bump the offset to the next ElfVernaux entry.
|
||||
iter.offset = iter.offset.checked_add(vna_next as usize)?;
|
||||
|
||||
// Start yielding None on the next call if there is no next offset.
|
||||
if vna_next == 0 {
|
||||
iter.index = iter.count;
|
||||
}
|
||||
|
||||
Some(Vernaux {
|
||||
vna_hash,
|
||||
vna_flags,
|
||||
vna_other,
|
||||
vna_name: vna_name as usize,
|
||||
vna_next,
|
||||
})
|
||||
};
|
||||
|
||||
do_next(self).or_else(|| {
|
||||
// Adjust current index to count in case of an error.
|
||||
self.index = self.count;
|
||||
None
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
let len = usize::from(self.count - self.index);
|
||||
(0, Some(len))
|
||||
}
|
||||
}
|
||||
|
||||
impl ExactSizeIterator for VernauxIter<'_> {}
|
||||
|
||||
impl FusedIterator for VernauxIter<'_> {}
|
||||
|
||||
/// An ELF [Version Need Auxiliary][lsb-vernaux] entry.
|
||||
///
|
||||
/// [lsb-vernaux]: https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/symversion.html#VERNEEDEXTFIG
|
||||
#[derive(Debug)]
|
||||
pub struct Vernaux {
|
||||
/// Dependency name hash value (ELF hash function).
|
||||
pub vna_hash: u32,
|
||||
/// Dependency information flag bitmask.
|
||||
pub vna_flags: u16,
|
||||
/// Object file version identifier used in the .gnu.version symbol version array. Bit number 15
|
||||
/// controls whether or not the object is hidden; if this bit is set, the object cannot be used
|
||||
/// and the static linker will ignore the symbol's presence in the object.
|
||||
pub vna_other: u16,
|
||||
/// Offset to the dependency name string in the section header, in bytes.
|
||||
pub vna_name: usize,
|
||||
/// Offset to the next vernaux entry, in bytes.
|
||||
pub vna_next: u32,
|
||||
}
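
// Hedged sketch: `Verdef::vd_hash` and `Vernaux::vna_hash` are documented
// above as "ELF hash function" values of the version or dependency name. The
// classic SysV ELF hash is reproduced here purely as an illustration (it is
// not goblin API), so the stored hashes can be cross-checked against a
// resolved name string.
#[allow(dead_code)]
fn sysv_elf_hash(name: &str) -> u32 {
    let mut h: u32 = 0;
    for &byte in name.as_bytes() {
        h = (h << 4).wrapping_add(u32::from(byte));
        let g = h & 0xf000_0000;
        if g != 0 {
            h ^= g >> 24;
        }
        h &= !g;
    }
    h
}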
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::{ElfVerdaux, ElfVerdef, ElfVernaux, ElfVerneed, ElfVersym};
|
||||
use super::{Versym, VERSYM_HIDDEN, VER_NDX_GLOBAL, VER_NDX_LOCAL};
|
||||
use core::mem::size_of;
|
||||
|
||||
#[test]
|
||||
fn check_size() {
|
||||
assert_eq!(2, size_of::<ElfVersym>());
|
||||
assert_eq!(20, size_of::<ElfVerdef>());
|
||||
assert_eq!(8, size_of::<ElfVerdaux>());
|
||||
assert_eq!(16, size_of::<ElfVerneed>());
|
||||
assert_eq!(16, size_of::<ElfVernaux>());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_versym() {
|
||||
let local = Versym {
|
||||
vs_val: VER_NDX_LOCAL,
|
||||
};
|
||||
assert_eq!(true, local.is_local());
|
||||
assert_eq!(false, local.is_global());
|
||||
assert_eq!(false, local.is_hidden());
|
||||
assert_eq!(VER_NDX_LOCAL, local.version());
|
||||
|
||||
let global = Versym {
|
||||
vs_val: VER_NDX_GLOBAL,
|
||||
};
|
||||
assert_eq!(false, global.is_local());
|
||||
assert_eq!(true, global.is_global());
|
||||
assert_eq!(false, global.is_hidden());
|
||||
assert_eq!(VER_NDX_GLOBAL, global.version());
|
||||
|
||||
let hidden = Versym {
|
||||
vs_val: VERSYM_HIDDEN,
|
||||
};
|
||||
assert_eq!(false, hidden.is_local());
|
||||
assert_eq!(false, hidden.is_global());
|
||||
assert_eq!(true, hidden.is_hidden());
|
||||
assert_eq!(0, hidden.version());
|
||||
|
||||
let hidden = Versym {
|
||||
vs_val: VERSYM_HIDDEN | 0x123,
|
||||
};
|
||||
assert_eq!(false, hidden.is_local());
|
||||
assert_eq!(false, hidden.is_global());
|
||||
assert_eq!(true, hidden.is_hidden());
|
||||
assert_eq!(0x123, hidden.version());
|
||||
}
|
||||
}
|
||||
third_party/rust/goblin/src/error.rs (vendored)
|
|
@ -1,13 +1,13 @@
|
|||
//! A custom Goblin error
|
||||
//!
|
||||
|
||||
use scroll;
|
||||
use core::result;
|
||||
use core::fmt;
|
||||
use alloc::string::String;
|
||||
use core::fmt;
|
||||
use core::result;
|
||||
#[cfg(feature = "std")]
|
||||
use std::{error, io};
|
||||
|
||||
#[non_exhaustive]
|
||||
#[derive(Debug)]
|
||||
/// A custom Goblin error
|
||||
pub enum Error {
|
||||
|
|
@ -20,6 +20,8 @@ pub enum Error {
|
|||
/// An IO based error
|
||||
#[cfg(feature = "std")]
|
||||
IO(io::Error),
|
||||
/// Buffer is too short to hold N items
|
||||
BufferTooShort(usize, &'static str),
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
|
|
@ -28,8 +30,7 @@ impl error::Error for Error {
|
|||
match *self {
|
||||
Error::IO(ref io) => Some(io),
|
||||
Error::Scroll(ref scroll) => Some(scroll),
|
||||
Error::BadMagic(_) => None,
|
||||
Error::Malformed(_) => None,
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -55,6 +56,7 @@ impl fmt::Display for Error {
|
|||
Error::Scroll(ref err) => write!(fmt, "{}", err),
|
||||
Error::BadMagic(magic) => write!(fmt, "Invalid magic number: 0x{:x}", magic),
|
||||
Error::Malformed(ref msg) => write!(fmt, "Malformed entity: {}", msg),
|
||||
Error::BufferTooShort(n, item) => write!(fmt, "Buffer is too short for {} {}", n, item),
|
||||
}
|
||||
}
|
||||
}
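
// Hedged usage sketch (caller-side, outside the crate): the enum is marked
// `#[non_exhaustive]`, and the new `BufferTooShort` variant carries a count
// and a static item description, so a caller matching on
// `goblin::error::Error` would handle it roughly like this:
//
//     match err {
//         goblin::error::Error::BufferTooShort(n, item) => {
//             eprintln!("buffer too short for {} {}", n, item)
//         }
//         other => eprintln!("{}", other),
//     }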
|
||||
|
|
|
|||
third_party/rust/goblin/src/lib.rs (vendored)
|
|
@ -28,16 +28,13 @@
|
|||
//! use goblin::{error, Object};
|
||||
//! use std::path::Path;
|
||||
//! use std::env;
|
||||
//! use std::fs::File;
|
||||
//! use std::io::Read;
|
||||
//! use std::fs;
|
||||
//!
|
||||
//! fn run () -> error::Result<()> {
|
||||
//! for (i, arg) in env::args().enumerate() {
|
||||
//! if i == 1 {
|
||||
//! let path = Path::new(arg.as_str());
|
||||
//! let mut fd = File::open(path)?;
|
||||
//! let mut buffer = Vec::new();
|
||||
//! fd.read_to_end(&mut buffer)?;
|
||||
//! let buffer = fs::read(path)?;
|
||||
//! match Object::parse(&buffer)? {
|
||||
//! Object::Elf(elf) => {
|
||||
//! println!("elf: {:#?}", &elf);
|
||||
|
|
@ -114,7 +111,6 @@ pub mod strtab;
|
|||
|
||||
/// Binary container size information and byte-order context
|
||||
pub mod container {
|
||||
use scroll;
|
||||
pub use scroll::Endian;
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq)]
|
||||
|
|
@ -133,11 +129,11 @@ pub mod container {
|
|||
|
||||
#[cfg(not(target_pointer_width = "64"))]
|
||||
/// The default binary container size - either `Big` or `Little`, depending on whether the host machine's pointer size is 64 or not
|
||||
pub const CONTAINER: Container = Container::Little;
|
||||
pub const CONTAINER: Container = Container::Little;
|
||||
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
/// The default binary container size - either `Big` or `Little`, depending on whether the host machine's pointer size is 64 or not
|
||||
pub const CONTAINER: Container = Container::Big;
|
||||
pub const CONTAINER: Container = Container::Big;
|
||||
|
||||
impl Default for Container {
|
||||
#[inline]
|
||||
|
|
@ -163,7 +159,7 @@ pub mod container {
|
|||
self.le.is_little()
|
||||
}
|
||||
/// Create a new binary container context
|
||||
pub fn new (container: Container, le: scroll::Endian) -> Self {
|
||||
pub fn new(container: Container, le: scroll::Endian) -> Self {
|
||||
Ctx { container, le }
|
||||
}
|
||||
/// Return a dubious pointer/address byte size for the container
|
||||
|
|
@ -171,27 +167,36 @@ pub mod container {
|
|||
match self.container {
|
||||
// TODO: require pointer size initialization/setting or default to container size with these values, e.g., avr pointer width will be smaller iirc
|
||||
Container::Little => 4,
|
||||
Container::Big => 8,
|
||||
Container::Big => 8,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Container> for Ctx {
|
||||
fn from(container: Container) -> Self {
|
||||
Ctx { container, le: scroll::Endian::default() }
|
||||
Ctx {
|
||||
container,
|
||||
le: scroll::Endian::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<scroll::Endian> for Ctx {
|
||||
fn from(le: scroll::Endian) -> Self {
|
||||
Ctx { container: CONTAINER, le }
|
||||
Ctx {
|
||||
container: CONTAINER,
|
||||
le,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Ctx {
|
||||
#[inline]
|
||||
fn default() -> Self {
|
||||
Ctx { container: Container::default(), le: scroll::Endian::default() }
|
||||
Ctx {
|
||||
container: Container::default(),
|
||||
le: scroll::Endian::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -274,6 +279,16 @@ if_everything! {
|
|||
peek_bytes(&bytes)
|
||||
}
|
||||
|
||||
/// Takes a reference to the first 16 bytes of the total bytes slice and convert it to an array for `peek_bytes` to use.
|
||||
/// Returns None if bytes's length is less than 16.
|
||||
fn take_hint_bytes(bytes: &[u8]) -> Option<&[u8; 16]> {
|
||||
use core::convert::TryInto;
|
||||
bytes.get(0..16)
|
||||
.and_then(|hint_bytes_slice| {
|
||||
hint_bytes_slice.try_into().ok()
|
||||
})
|
||||
}
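
// Hedged note: with this helper in place, `Object::parse` (below) rejects
// inputs shorter than 16 bytes instead of attempting to peek past the end,
// e.g. (illustrative only, not a doctest):
//
//     assert!(goblin::Object::parse(&[0u8; 8]).is_err());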
|
||||
|
||||
#[derive(Debug)]
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
/// A parseable object that goblin understands
|
||||
|
|
@ -290,18 +305,19 @@ if_everything! {
|
|||
Unknown(u64),
|
||||
}
|
||||
|
||||
// TODO: this could avoid std using peek_bytes
|
||||
#[cfg(feature = "std")]
|
||||
impl<'a> Object<'a> {
|
||||
/// Tries to parse an `Object` from `bytes`
|
||||
pub fn parse(bytes: &[u8]) -> error::Result<Object> {
|
||||
use std::io::Cursor;
|
||||
match peek(&mut Cursor::new(&bytes))? {
|
||||
Hint::Elf(_) => Ok(Object::Elf(elf::Elf::parse(bytes)?)),
|
||||
Hint::Mach(_) | Hint::MachFat(_) => Ok(Object::Mach(mach::Mach::parse(bytes)?)),
|
||||
Hint::Archive => Ok(Object::Archive(archive::Archive::parse(bytes)?)),
|
||||
Hint::PE => Ok(Object::PE(pe::PE::parse(bytes)?)),
|
||||
Hint::Unknown(magic) => Ok(Object::Unknown(magic))
|
||||
if let Some(hint_bytes) = take_hint_bytes(bytes) {
|
||||
match peek_bytes(hint_bytes)? {
|
||||
Hint::Elf(_) => Ok(Object::Elf(elf::Elf::parse(bytes)?)),
|
||||
Hint::Mach(_) | Hint::MachFat(_) => Ok(Object::Mach(mach::Mach::parse(bytes)?)),
|
||||
Hint::Archive => Ok(Object::Archive(archive::Archive::parse(bytes)?)),
|
||||
Hint::PE => Ok(Object::PE(pe::PE::parse(bytes)?)),
|
||||
Hint::Unknown(magic) => Ok(Object::Unknown(magic))
|
||||
}
|
||||
} else {
|
||||
Err(error::Error::Malformed(format!("Object is too small.")))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -318,13 +334,13 @@ pub mod elf;
|
|||
#[cfg(feature = "elf32")]
|
||||
/// The ELF 32-bit struct definitions and associated values, re-exported for easy "type-punning"
|
||||
pub mod elf32 {
|
||||
pub use crate::elf::header::header32 as header;
|
||||
pub use crate::elf::program_header::program_header32 as program_header;
|
||||
pub use crate::elf::section_header::section_header32 as section_header;
|
||||
pub use crate::elf::dynamic::dyn32 as dynamic;
|
||||
pub use crate::elf::sym::sym32 as sym;
|
||||
pub use crate::elf::reloc::reloc32 as reloc;
|
||||
pub use crate::elf::header::header32 as header;
|
||||
pub use crate::elf::note::Nhdr32 as Note;
|
||||
pub use crate::elf::program_header::program_header32 as program_header;
|
||||
pub use crate::elf::reloc::reloc32 as reloc;
|
||||
pub use crate::elf::section_header::section_header32 as section_header;
|
||||
pub use crate::elf::sym::sym32 as sym;
|
||||
|
||||
pub mod gnu_hash {
|
||||
pub use crate::elf::gnu_hash::hash;
|
||||
|
|
@ -335,13 +351,13 @@ pub mod elf32 {
|
|||
#[cfg(feature = "elf64")]
|
||||
/// The ELF 64-bit struct definitions and associated values, re-exported for easy "type-punning"
|
||||
pub mod elf64 {
|
||||
pub use crate::elf::header::header64 as header;
|
||||
pub use crate::elf::program_header::program_header64 as program_header;
|
||||
pub use crate::elf::section_header::section_header64 as section_header;
|
||||
pub use crate::elf::dynamic::dyn64 as dynamic;
|
||||
pub use crate::elf::sym::sym64 as sym;
|
||||
pub use crate::elf::reloc::reloc64 as reloc;
|
||||
pub use crate::elf::header::header64 as header;
|
||||
pub use crate::elf::note::Nhdr64 as Note;
|
||||
pub use crate::elf::program_header::program_header64 as program_header;
|
||||
pub use crate::elf::reloc::reloc64 as reloc;
|
||||
pub use crate::elf::section_header::section_header64 as section_header;
|
||||
pub use crate::elf::sym::sym64 as sym;
|
||||
|
||||
pub mod gnu_hash {
|
||||
pub use crate::elf::gnu_hash::hash;
|
||||
|
|
@ -357,3 +373,23 @@ pub mod pe;
|
|||
|
||||
#[cfg(feature = "archive")]
|
||||
pub mod archive;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
if_everything! {
|
||||
#[test]
|
||||
fn take_hint_bytes_long_enough() {
|
||||
let bytes_array = [1; 32];
|
||||
let bytes = &bytes_array[..];
|
||||
assert!(take_hint_bytes(bytes).is_some())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn take_hint_bytes_not_long_enough() {
|
||||
let bytes_array = [1; 8];
|
||||
let bytes = &bytes_array[..];
|
||||
assert!(take_hint_bytes(bytes).is_none())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
third_party/rust/goblin/src/mach/bind_opcodes.rs (vendored)
|
|
@ -14,29 +14,29 @@
|
|||
pub type Opcode = u8;
|
||||
|
||||
// The following are used to encode binding information
|
||||
pub const BIND_TYPE_POINTER : u8 = 1;
|
||||
pub const BIND_TYPE_TEXT_ABSOLUTE32 : u8 = 2;
|
||||
pub const BIND_TYPE_TEXT_PCREL32 : u8 = 3;
|
||||
pub const BIND_SPECIAL_DYLIB_SELF : u8 = 0;
|
||||
pub const BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE : u8 = 0xf; // -1
|
||||
pub const BIND_SPECIAL_DYLIB_FLAT_LOOKUP : u8 = 0xe; // -2
|
||||
pub const BIND_SYMBOL_FLAGS_WEAK_IMPORT : u8 = 0x1;
|
||||
pub const BIND_SYMBOL_FLAGS_NON_WEAK_DEFINITION : u8 = 0x8;
|
||||
pub const BIND_OPCODE_MASK : u8 = 0xF0;
|
||||
pub const BIND_IMMEDIATE_MASK : u8 = 0x0F;
|
||||
pub const BIND_OPCODE_DONE : Opcode = 0x00;
|
||||
pub const BIND_OPCODE_SET_DYLIB_ORDINAL_IMM : Opcode = 0x10;
|
||||
pub const BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB : Opcode = 0x20;
|
||||
pub const BIND_OPCODE_SET_DYLIB_SPECIAL_IMM : Opcode = 0x30;
|
||||
pub const BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM : Opcode = 0x40;
|
||||
pub const BIND_OPCODE_SET_TYPE_IMM : Opcode = 0x50;
|
||||
pub const BIND_OPCODE_SET_ADDEND_SLEB : Opcode = 0x60;
|
||||
pub const BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB : Opcode = 0x70;
|
||||
pub const BIND_OPCODE_ADD_ADDR_ULEB : Opcode = 0x80;
|
||||
pub const BIND_OPCODE_DO_BIND : Opcode = 0x90;
|
||||
pub const BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB : Opcode = 0xA0;
|
||||
pub const BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED : Opcode = 0xB0;
|
||||
pub const BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB : Opcode = 0xC0;
|
||||
pub const BIND_TYPE_POINTER: u8 = 1;
|
||||
pub const BIND_TYPE_TEXT_ABSOLUTE32: u8 = 2;
|
||||
pub const BIND_TYPE_TEXT_PCREL32: u8 = 3;
|
||||
pub const BIND_SPECIAL_DYLIB_SELF: u8 = 0;
|
||||
pub const BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE: u8 = 0xf; // -1
|
||||
pub const BIND_SPECIAL_DYLIB_FLAT_LOOKUP: u8 = 0xe; // -2
|
||||
pub const BIND_SYMBOL_FLAGS_WEAK_IMPORT: u8 = 0x1;
|
||||
pub const BIND_SYMBOL_FLAGS_NON_WEAK_DEFINITION: u8 = 0x8;
|
||||
pub const BIND_OPCODE_MASK: u8 = 0xF0;
|
||||
pub const BIND_IMMEDIATE_MASK: u8 = 0x0F;
|
||||
pub const BIND_OPCODE_DONE: Opcode = 0x00;
|
||||
pub const BIND_OPCODE_SET_DYLIB_ORDINAL_IMM: Opcode = 0x10;
|
||||
pub const BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB: Opcode = 0x20;
|
||||
pub const BIND_OPCODE_SET_DYLIB_SPECIAL_IMM: Opcode = 0x30;
|
||||
pub const BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM: Opcode = 0x40;
|
||||
pub const BIND_OPCODE_SET_TYPE_IMM: Opcode = 0x50;
|
||||
pub const BIND_OPCODE_SET_ADDEND_SLEB: Opcode = 0x60;
|
||||
pub const BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB: Opcode = 0x70;
|
||||
pub const BIND_OPCODE_ADD_ADDR_ULEB: Opcode = 0x80;
|
||||
pub const BIND_OPCODE_DO_BIND: Opcode = 0x90;
|
||||
pub const BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB: Opcode = 0xA0;
|
||||
pub const BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED: Opcode = 0xB0;
|
||||
pub const BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB: Opcode = 0xC0;
|
||||
|
||||
pub fn opcode_to_str(opcode: Opcode) -> &'static str {
|
||||
match opcode {
|
||||
|
|
@ -52,7 +52,9 @@ pub fn opcode_to_str(opcode: Opcode) -> &'static str {
|
|||
BIND_OPCODE_DO_BIND => "BIND_OPCODE_DO_BIND",
|
||||
BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB => "BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB",
|
||||
BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED => "BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED",
|
||||
BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB => "BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB",
|
||||
_ => "UNKNOWN OPCODE"
|
||||
BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB => {
|
||||
"BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB"
|
||||
}
|
||||
_ => "UNKNOWN OPCODE",
|
||||
}
|
||||
}
|
||||
|
|
|
|||
third_party/rust/goblin/src/mach/constants.rs (vendored)
|
|
@ -219,19 +219,19 @@ pub mod cputype {
|
|||
pub const CPU_TYPE_MC680X0: CpuType = 6;
|
||||
pub const CPU_TYPE_X86: CpuType = 7;
|
||||
pub const CPU_TYPE_I386: CpuType = CPU_TYPE_X86;
|
||||
pub const CPU_TYPE_X86_64: CpuType = (CPU_TYPE_X86 | CPU_ARCH_ABI64);
|
||||
pub const CPU_TYPE_X86_64: CpuType = CPU_TYPE_X86 | CPU_ARCH_ABI64;
|
||||
pub const CPU_TYPE_MIPS: CpuType = 8;
|
||||
pub const CPU_TYPE_MC98000: CpuType = 10;
|
||||
pub const CPU_TYPE_HPPA: CpuType = 11;
|
||||
pub const CPU_TYPE_ARM: CpuType = 12;
|
||||
pub const CPU_TYPE_ARM64: CpuType = (CPU_TYPE_ARM | CPU_ARCH_ABI64);
|
||||
pub const CPU_TYPE_ARM64_32: CpuType = (CPU_TYPE_ARM | CPU_ARCH_ABI64_32);
|
||||
pub const CPU_TYPE_ARM64: CpuType = CPU_TYPE_ARM | CPU_ARCH_ABI64;
|
||||
pub const CPU_TYPE_ARM64_32: CpuType = CPU_TYPE_ARM | CPU_ARCH_ABI64_32;
|
||||
pub const CPU_TYPE_MC88000: CpuType = 13;
|
||||
pub const CPU_TYPE_SPARC: CpuType = 14;
|
||||
pub const CPU_TYPE_I860: CpuType = 15;
|
||||
pub const CPU_TYPE_ALPHA: CpuType = 16;
|
||||
pub const CPU_TYPE_POWERPC: CpuType = 18;
|
||||
pub const CPU_TYPE_POWERPC64: CpuType = (CPU_TYPE_POWERPC | CPU_ARCH_ABI64);
|
||||
pub const CPU_TYPE_POWERPC64: CpuType = CPU_TYPE_POWERPC | CPU_ARCH_ABI64;
|
||||
|
||||
// CPU Subtypes
|
||||
pub const CPU_SUBTYPE_MULTIPLE: CpuSubType = !0;
|
||||
|
|
@ -256,9 +256,9 @@ pub mod cputype {
|
|||
pub const CPU_SUBTYPE_MC68030_ONLY: CpuSubType = 3;
|
||||
|
||||
macro_rules! CPU_SUBTYPE_INTEL {
|
||||
($f:expr, $m:expr) => ({
|
||||
($f:expr, $m:expr) => {{
|
||||
($f) + (($m) << 4)
|
||||
})
|
||||
}};
|
||||
}
|
||||
|
||||
pub const CPU_SUBTYPE_I386_ALL: CpuSubType = CPU_SUBTYPE_INTEL!(3, 0);
|
||||
|
|
@ -351,6 +351,29 @@ pub mod cputype {
|
|||
}
|
||||
|
||||
/// Get the architecture name from cputype and cpusubtype
|
||||
///
|
||||
/// When using this method to determine the architecture
|
||||
/// name of an instance of
|
||||
/// [`goblin::mach::header::Header`](/goblin/mach/header/struct.Header.html),
|
||||
/// use the provided method
|
||||
/// [`cputype()`](/goblin/mach/header/struct.Header.html#method.cputype) and
|
||||
/// [`cpusubtype()`](/goblin/mach/header/struct.Header.html#method.cpusubtype)
|
||||
/// instead of corresponding field `cputype` and `cpusubtype`.
|
||||
///
|
||||
/// For example:
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::fs::read;
|
||||
/// use goblin::mach::constants::cputype::get_arch_name_from_types;
|
||||
/// use goblin::mach::Mach;
|
||||
///
|
||||
/// read("path/to/macho").and_then(|buf| {
|
||||
/// if let Ok(Mach::Binary(a)) = Mach::parse(&buf) {
|
||||
/// println!("arch name: {}", get_arch_name_from_types(a.header.cputype(), a.header.cpusubtype()).unwrap());
|
||||
/// }
|
||||
/// Ok(())
|
||||
/// });
|
||||
/// ```
|
||||
pub fn get_arch_name_from_types(cputype: CpuType, cpusubtype: CpuSubType)
|
||||
-> Option<&'static str> {
|
||||
match (cputype, cpusubtype) {
|
||||
|
|
@ -440,9 +463,21 @@ mod tests {
|
|||
fn test_basic_mapping() {
|
||||
use super::cputype::*;
|
||||
|
||||
assert_eq!(get_arch_from_flag("armv7"), Some((CPU_TYPE_ARM, CPU_SUBTYPE_ARM_V7)));
|
||||
assert_eq!(get_arch_name_from_types(CPU_TYPE_ARM, CPU_SUBTYPE_ARM_V7), Some("armv7"));
|
||||
assert_eq!(get_arch_from_flag("i386"), Some((CPU_TYPE_I386, CPU_SUBTYPE_I386_ALL)));
|
||||
assert_eq!(get_arch_from_flag("x86"), Some((CPU_TYPE_I386, CPU_SUBTYPE_I386_ALL)));
|
||||
assert_eq!(
|
||||
get_arch_from_flag("armv7"),
|
||||
Some((CPU_TYPE_ARM, CPU_SUBTYPE_ARM_V7))
|
||||
);
|
||||
assert_eq!(
|
||||
get_arch_name_from_types(CPU_TYPE_ARM, CPU_SUBTYPE_ARM_V7),
|
||||
Some("armv7")
|
||||
);
|
||||
assert_eq!(
|
||||
get_arch_from_flag("i386"),
|
||||
Some((CPU_TYPE_I386, CPU_SUBTYPE_I386_ALL))
|
||||
);
|
||||
assert_eq!(
|
||||
get_arch_from_flag("x86"),
|
||||
Some((CPU_TYPE_I386, CPU_SUBTYPE_I386_ALL))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
third_party/rust/goblin/src/mach/exports.rs (vendored)
|
|
@ -6,25 +6,25 @@
|
|||
// (1) Weak of regular_symbol_info type probably needs to be added ?
|
||||
// (3) /usr/lib/libstdc++.6.0.9.dylib has flag 0xc at many offsets... they're weak
|
||||
|
||||
use crate::error;
|
||||
use crate::mach::load_command;
|
||||
use alloc::string::String;
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::{self, Debug};
|
||||
use core::ops::Range;
|
||||
use scroll::{Pread, Uleb128};
|
||||
use crate::error;
|
||||
use core::fmt::{self, Debug};
|
||||
use crate::mach::load_command;
|
||||
use alloc::vec::Vec;
|
||||
use alloc::string::String;
|
||||
|
||||
type Flag = u64;
|
||||
|
||||
// "The following are used on the flags byte of a terminal node
|
||||
// in the export information."
|
||||
pub const EXPORT_SYMBOL_FLAGS_KIND_MASK : Flag = 0x03;
|
||||
pub const EXPORT_SYMBOL_FLAGS_KIND_REGULAR : Flag = 0x00;
|
||||
pub const EXPORT_SYMBOL_FLAGS_KIND_ABSOLUTE : Flag = 0x02; // this is a symbol not present in the loader.h but only in the dyld compressed image loader source code, and only available with a #def macro for export flags but libobjc. def has this
|
||||
pub const EXPORT_SYMBOL_FLAGS_KIND_THREAD_LOCAL : Flag = 0x01;
|
||||
pub const EXPORT_SYMBOL_FLAGS_WEAK_DEFINITION : Flag = 0x04;
|
||||
pub const EXPORT_SYMBOL_FLAGS_REEXPORT : Flag = 0x08;
|
||||
pub const EXPORT_SYMBOL_FLAGS_STUB_AND_RESOLVER : Flag = 0x10;
|
||||
// "The following are used on the flags byte of a terminal node
|
||||
// in the export information."
|
||||
pub const EXPORT_SYMBOL_FLAGS_KIND_MASK: Flag = 0x03;
|
||||
pub const EXPORT_SYMBOL_FLAGS_KIND_REGULAR: Flag = 0x00;
|
||||
pub const EXPORT_SYMBOL_FLAGS_KIND_ABSOLUTE: Flag = 0x02; // this is a symbol not present in the loader.h but only in the dyld compressed image loader source code, and only available with a #def macro for export flags but libobjc. def has this
|
||||
pub const EXPORT_SYMBOL_FLAGS_KIND_THREAD_LOCAL: Flag = 0x01;
|
||||
pub const EXPORT_SYMBOL_FLAGS_WEAK_DEFINITION: Flag = 0x04;
|
||||
pub const EXPORT_SYMBOL_FLAGS_REEXPORT: Flag = 0x08;
|
||||
pub const EXPORT_SYMBOL_FLAGS_STUB_AND_RESOLVER: Flag = 0x10;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SymbolKind {
|
||||
|
|
@ -40,7 +40,7 @@ impl SymbolKind {
|
|||
0x00 => SymbolKind::Regular,
|
||||
0x01 => SymbolKind::ThreadLocal,
|
||||
0x02 => SymbolKind::Absolute,
|
||||
_ => SymbolKind::UnknownSymbolKind(kind),
|
||||
_ => SymbolKind::UnknownSymbolKind(kind),
|
||||
}
|
||||
}
|
||||
pub fn to_str(&self) -> &'static str {
|
||||
|
|
@ -57,10 +57,7 @@ impl SymbolKind {
|
|||
/// An export can be a regular export, a re-export, or a stub
|
||||
pub enum ExportInfo<'a> {
|
||||
/// A regular exported symbol, which is an address where it is found, and the flags associated with it
|
||||
Regular {
|
||||
address: u64,
|
||||
flags: Flag,
|
||||
},
|
||||
Regular { address: u64, flags: Flag },
|
||||
/// if lib_symbol_name None then same symbol name, otherwise reexport of lib_symbol_name with name in the trie
|
||||
/// "If the string is zero length, then the symbol is re-export from the specified dylib with the same name"
|
||||
Reexport {
|
||||
|
|
@ -78,13 +75,18 @@ pub enum ExportInfo<'a> {
|
|||
|
||||
impl<'a> ExportInfo<'a> {
|
||||
/// Parse out the export info from `bytes`, at `offset`
|
||||
pub fn parse(bytes: &'a [u8], libs: &[&'a str], flags: Flag, mut offset: usize) -> error::Result<ExportInfo<'a>> {
|
||||
pub fn parse(
|
||||
bytes: &'a [u8],
|
||||
libs: &[&'a str],
|
||||
flags: Flag,
|
||||
mut offset: usize,
|
||||
) -> error::Result<ExportInfo<'a>> {
|
||||
use self::ExportInfo::*;
|
||||
let regular = |offset| -> error::Result<ExportInfo> {
|
||||
let address = bytes.pread::<Uleb128>(offset)?;
|
||||
Ok(Regular {
|
||||
address: address.into(),
|
||||
flags
|
||||
flags,
|
||||
})
|
||||
};
|
||||
let reexport = |mut offset| -> error::Result<ExportInfo<'a>> {
|
||||
|
|
@ -95,38 +97,43 @@ impl<'a> ExportInfo<'a> {
|
|||
};
|
||||
let lib_symbol_name = bytes.pread::<&str>(offset)?;
|
||||
let lib = libs[lib_ordinal as usize];
|
||||
let lib_symbol_name = if lib_symbol_name == "" { None } else { Some (lib_symbol_name)};
|
||||
let lib_symbol_name = if lib_symbol_name == "" {
|
||||
None
|
||||
} else {
|
||||
Some(lib_symbol_name)
|
||||
};
|
||||
Ok(Reexport {
|
||||
lib,
|
||||
lib_symbol_name,
|
||||
flags
|
||||
flags,
|
||||
})
|
||||
};
|
||||
match SymbolKind::new(flags) {
|
||||
SymbolKind::Regular => {
|
||||
if flags & EXPORT_SYMBOL_FLAGS_REEXPORT != 0 {
|
||||
reexport(offset)
|
||||
} else if flags & EXPORT_SYMBOL_FLAGS_STUB_AND_RESOLVER != 0 { // 0x10
|
||||
} else if flags & EXPORT_SYMBOL_FLAGS_STUB_AND_RESOLVER != 0 {
|
||||
// 0x10
|
||||
let stub_offset = bytes.pread::<Uleb128>(offset)?;
|
||||
offset += stub_offset.size();
|
||||
let resolver_offset = bytes.pread::<Uleb128>(offset)?;
|
||||
Ok(Stub {
|
||||
stub_offset,
|
||||
resolver_offset,
|
||||
flags
|
||||
flags,
|
||||
})
|
||||
// else if (flags = kEXPORT_SYMBOL_FLAGS_WEAK_DEFINITION) then (*0x40 unused*)
|
||||
// else if (flags = kEXPORT_SYMBOL_FLAGS_WEAK_DEFINITION) then (*0x40 unused*)
|
||||
} else {
|
||||
regular(offset)
|
||||
}
|
||||
},
|
||||
}
|
||||
SymbolKind::ThreadLocal | SymbolKind::Absolute => {
|
||||
if flags & EXPORT_SYMBOL_FLAGS_REEXPORT != 0 {
|
||||
reexport(offset)
|
||||
} else {
|
||||
regular(offset)
|
||||
}
|
||||
},
|
||||
}
|
||||
SymbolKind::UnknownSymbolKind(_kind) => {
|
||||
// 0x5f causes errors, but parsing as regular symbol resolves...
|
||||
//Err(error::Error::Malformed(format!("Unknown kind {:#x} from flags {:#x} in get_symbol_type at offset {}", kind, flags, offset)))
|
||||
|
|
@ -156,7 +163,12 @@ impl<'a> Export<'a> {
|
|||
ExportInfo::Regular { address, .. } => address,
|
||||
_ => 0x0,
|
||||
};
|
||||
Export { name, info, size: 0, offset }
|
||||
Export {
|
||||
name,
|
||||
info,
|
||||
size: 0,
|
||||
offset,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -167,9 +179,13 @@ pub struct ExportTrie<'a> {
|
|||
}
|
||||
|
||||
impl<'a> ExportTrie<'a> {
|
||||
|
||||
#[inline]
|
||||
fn walk_nodes(&self, libs: &[&'a str], branches: Vec<(String, usize)>, acc: &mut Vec<Export<'a>>) -> error::Result<()> {
|
||||
fn walk_nodes(
|
||||
&self,
|
||||
libs: &[&'a str],
|
||||
branches: Vec<(String, usize)>,
|
||||
acc: &mut Vec<Export<'a>>,
|
||||
) -> error::Result<()> {
|
||||
for (symbol, next_node) in branches {
|
||||
self.walk_trie(libs, symbol, next_node, acc)?;
|
||||
}
|
||||
|
|
@ -177,7 +193,15 @@ impl<'a> ExportTrie<'a> {
|
|||
}
|
||||
|
||||
// current_symbol can be a str iiuc
|
||||
fn walk_branches(&self, nbranches: usize, current_symbol: String, mut offset: usize) -> error::Result<Vec<(String, usize)>> {
|
||||
fn walk_branches(
|
||||
&self,
|
||||
nbranches: usize,
|
||||
current_symbol: String,
|
||||
mut offset: usize,
|
||||
) -> error::Result<Vec<(String, usize)>> {
|
||||
if nbranches > self.data.len() {
|
||||
return Err(error::Error::BufferTooShort(nbranches, "branches"));
|
||||
}
|
||||
let mut branches = Vec::with_capacity(nbranches);
|
||||
//println!("\t@{:#x}", *offset);
|
||||
for _i in 0..nbranches {
|
||||
|
|
@ -197,7 +221,13 @@ impl<'a> ExportTrie<'a> {
|
|||
Ok(branches)
|
||||
}
|
||||
|
||||
fn walk_trie(&self, libs: &[&'a str], current_symbol: String, start: usize, exports: &mut Vec<Export<'a>>) -> error::Result<()> {
|
||||
fn walk_trie(
|
||||
&self,
|
||||
libs: &[&'a str],
|
||||
current_symbol: String,
|
||||
start: usize,
|
||||
exports: &mut Vec<Export<'a>>,
|
||||
) -> error::Result<()> {
|
||||
if start < self.location.end {
|
||||
let mut offset = start;
|
||||
let terminal_size = Uleb128::read(&self.data, &mut offset)?;
|
||||
|
|
@ -209,7 +239,8 @@ impl<'a> ExportTrie<'a> {
|
|||
//println!("\t@ {:#x} BRAN {}", *offset, nbranches);
|
||||
let branches = self.walk_branches(nbranches, current_symbol, offset)?;
|
||||
self.walk_nodes(libs, branches, exports)
|
||||
} else { // terminal node, but the tricky part is that they can have children...
|
||||
} else {
|
||||
// terminal node, but the tricky part is that they can have children...
|
||||
let pos = offset;
|
||||
let children_start = &mut (pos + terminal_size as usize);
|
||||
let nchildren = Uleb128::read(&self.data, children_start)? as usize;
|
||||
|
|
@ -224,11 +255,33 @@ impl<'a> ExportTrie<'a> {
|
|||
Ok(())
|
||||
} else {
|
||||
// more branches to walk
|
||||
let branches = self.walk_branches(nchildren, current_symbol, *children_start)?;
|
||||
let branches =
|
||||
self.walk_branches(nchildren, current_symbol, *children_start)?;
|
||||
self.walk_nodes(libs, branches, exports)
|
||||
}
|
||||
}
|
||||
} else { Ok(()) }
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn new_impl(bytes: &'a [u8], start: usize, size: usize) -> Self {
|
||||
// FIXME: Ideally, this should validate `command`, but the best we can
|
||||
// do for now is return an empty `Range`.
|
||||
let location = match start
|
||||
.checked_add(size)
|
||||
.and_then(|end| bytes.get(start..end).map(|_| start..end))
|
||||
{
|
||||
Some(location) => location,
|
||||
None => {
|
||||
log::warn!("Invalid `DyldInfo` `command`.");
|
||||
0..0
|
||||
}
|
||||
};
|
||||
ExportTrie {
|
||||
data: bytes,
|
||||
location,
|
||||
}
|
||||
}
|
||||
|
||||
/// Walk the export trie for symbols exported by this binary, using the provided `libs` to resolve re-exports
|
||||
|
|
@ -242,12 +295,19 @@ impl<'a> ExportTrie<'a> {
|
|||
|
||||
/// Create a new, lazy, zero-copy export trie from the `DyldInfo` `command`
|
||||
pub fn new(bytes: &'a [u8], command: &load_command::DyldInfoCommand) -> Self {
|
||||
let start = command.export_off as usize;
|
||||
let end = (command.export_size + command.export_off) as usize;
|
||||
ExportTrie {
|
||||
data: bytes,
|
||||
location: start..end,
|
||||
}
|
||||
Self::new_impl(
|
||||
bytes,
|
||||
command.export_off as usize,
|
||||
command.export_size as usize,
|
||||
)
|
||||
}
|
||||
|
||||
/// Create a new, lazy, zero-copy export trie from the `LinkeditDataCommand` `command`
|
||||
pub fn new_from_linkedit_data_command(
|
||||
bytes: &'a [u8],
|
||||
command: &load_command::LinkeditDataCommand,
|
||||
) -> Self {
|
||||
Self::new_impl(bytes, command.dataoff as usize, command.datasize as usize)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -255,7 +315,10 @@ impl<'a> Debug for ExportTrie<'a> {
|
|||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt.debug_struct("ExportTrie")
|
||||
.field("data", &"<... redacted ...>")
|
||||
.field("location", &format_args!("{:#x}..{:#x}", self.location.start, self.location.end))
|
||||
.field(
|
||||
"location",
|
||||
&format_args!("{:#x}..{:#x}", self.location.start, self.location.end),
|
||||
)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
|
@ -264,8 +327,14 @@ impl<'a> Debug for ExportTrie<'a> {
|
|||
mod tests {
|
||||
use super::*;
|
||||
#[test]
|
||||
fn export_trie () {
|
||||
const EXPORTS: [u8; 64] = [0x00,0x01,0x5f,0x00,0x05,0x00,0x02,0x5f,0x6d,0x68,0x5f,0x65,0x78,0x65,0x63,0x75,0x74,0x65,0x5f,0x68,0x65,0x61,0x64,0x65,0x72,0x00,0x1f,0x6d,0x61,0x00,0x23,0x02,0x00,0x00,0x00,0x00,0x02,0x78,0x69,0x6d,0x75,0x6d,0x00,0x30,0x69,0x6e,0x00,0x35,0x03,0x00,0xc0,0x1e,0x00,0x03,0x00,0xd0,0x1e,0x00,0x00,0x00,0x00,0x00,0x00,0x00];
|
||||
fn export_trie() {
|
||||
const EXPORTS: [u8; 64] = [
|
||||
0x00, 0x01, 0x5f, 0x00, 0x05, 0x00, 0x02, 0x5f, 0x6d, 0x68, 0x5f, 0x65, 0x78, 0x65,
|
||||
0x63, 0x75, 0x74, 0x65, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x00, 0x1f, 0x6d,
|
||||
0x61, 0x00, 0x23, 0x02, 0x00, 0x00, 0x00, 0x00, 0x02, 0x78, 0x69, 0x6d, 0x75, 0x6d,
|
||||
0x00, 0x30, 0x69, 0x6e, 0x00, 0x35, 0x03, 0x00, 0xc0, 0x1e, 0x00, 0x03, 0x00, 0xd0,
|
||||
0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
];
|
||||
let exports = &EXPORTS[..];
|
||||
let libs = vec!["/usr/lib/libderp.so", "/usr/lib/libthuglife.so"];
|
||||
let mut command = load_command::DyldInfoCommand::default();
|
||||
|
|
@ -276,4 +345,37 @@ mod tests {
|
|||
println!("len: {} exports: {:#?}", exports.len(), &exports);
|
||||
assert_eq!(exports.len() as usize, 3usize)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn export_trie_linkedit_data() {
|
||||
const EXPORTS: [u8; 64] = [
|
||||
0x00, 0x01, 0x5f, 0x00, 0x05, 0x00, 0x02, 0x5f, 0x6d, 0x68, 0x5f, 0x65, 0x78, 0x65,
|
||||
0x63, 0x75, 0x74, 0x65, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x00, 0x1f, 0x6d,
|
||||
0x61, 0x00, 0x23, 0x02, 0x00, 0x00, 0x00, 0x00, 0x02, 0x78, 0x69, 0x6d, 0x75, 0x6d,
|
||||
0x00, 0x30, 0x69, 0x6e, 0x00, 0x35, 0x03, 0x00, 0xc0, 0x1e, 0x00, 0x03, 0x00, 0xd0,
|
||||
0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
];
|
||||
let exports = &EXPORTS[..];
|
||||
let libs = vec!["/usr/lib/libderp.so", "/usr/lib/libthuglife.so"];
|
||||
let command = load_command::LinkeditDataCommand {
|
||||
datasize: exports.len() as u32,
|
||||
..Default::default()
|
||||
};
|
||||
let trie = ExportTrie::new_from_linkedit_data_command(exports, &command);
|
||||
println!("trie: {:#?}", &trie);
|
||||
let exports = trie.exports(&libs).unwrap();
|
||||
println!("len: {} exports: {:#?}", exports.len(), &exports);
|
||||
assert_eq!(exports.len() as usize, 3usize);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_range() {
|
||||
let mut command = load_command::DyldInfoCommand::default();
|
||||
command.export_off = 0xffff_ff00;
|
||||
command.export_size = 0x00ff_ff00;
|
||||
let trie = ExportTrie::new(&[], &command);
|
||||
// FIXME: it would have been nice if this were an `Err`.
|
||||
let exports = trie.exports(&[]).unwrap();
|
||||
assert_eq!(exports.len(), 0);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
third_party/rust/goblin/src/mach/fat.rs (vendored)
|
|
@ -7,9 +7,9 @@ if_std! {
|
|||
use std::io::{self, Read};
|
||||
}
|
||||
|
||||
use scroll::{Pread, Pwrite, SizeWith};
|
||||
use crate::mach::constants::cputype::{CpuType, CpuSubType, CPU_SUBTYPE_MASK, CPU_ARCH_ABI64};
|
||||
use crate::error;
|
||||
use crate::mach::constants::cputype::{CpuSubType, CpuType, CPU_ARCH_ABI64, CPU_SUBTYPE_MASK};
|
||||
use scroll::{Pread, Pwrite, SizeWith};
|
||||
|
||||
pub const FAT_MAGIC: u32 = 0xcafe_babe;
|
||||
pub const FAT_CIGAM: u32 = 0xbeba_feca;
|
||||
|
|
@ -41,10 +41,7 @@ impl FatHeader {
|
|||
let mut offset = 0;
|
||||
let magic = bytes.gread_with(&mut offset, scroll::BE).unwrap();
|
||||
let nfat_arch = bytes.gread_with(&mut offset, scroll::BE).unwrap();
|
||||
FatHeader {
|
||||
magic,
|
||||
nfat_arch,
|
||||
}
|
||||
FatHeader { magic, nfat_arch }
|
||||
}
|
||||
|
||||
/// Reads a `FatHeader` from a `File` on disk
|
||||
|
|
@ -59,7 +56,6 @@ impl FatHeader {
|
|||
pub fn parse(bytes: &[u8]) -> error::Result<FatHeader> {
|
||||
Ok(bytes.pread_with::<FatHeader>(0, scroll::BE)?)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
|
|
@ -83,9 +79,9 @@ impl fmt::Debug for FatArch {
|
|||
fmt.debug_struct("FatArch")
|
||||
.field("cputype", &self.cputype())
|
||||
.field("cmdsize", &self.cpusubtype())
|
||||
.field("offset", &format_args!("{:#x}", &self.offset))
|
||||
.field("size", &self.size)
|
||||
.field("align", &self.align)
|
||||
.field("offset", &format_args!("{:#x}", &self.offset))
|
||||
.field("size", &self.size)
|
||||
.field("align", &self.align)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
|
@ -93,9 +89,19 @@ impl fmt::Debug for FatArch {
|
|||
impl FatArch {
|
||||
/// Get the slice of bytes this header describes from `bytes`
|
||||
pub fn slice<'a>(&self, bytes: &'a [u8]) -> &'a [u8] {
|
||||
// FIXME: This function should ideally validate the inputs and return a `Result`.
|
||||
// Best we can do for now without `panic`ing is return an empty slice.
|
||||
let start = self.offset as usize;
|
||||
let end = (self.offset + self.size) as usize;
|
||||
&bytes[start..end]
|
||||
match start
|
||||
.checked_add(self.size as usize)
|
||||
.and_then(|end| bytes.get(start..end))
|
||||
{
|
||||
Some(slice) => slice,
|
||||
None => {
|
||||
log::warn!("invalid `FatArch` offset");
|
||||
&[]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the cpu type
|
||||
|
|
|
|||
third_party/rust/goblin/src/mach/header.rs (vendored)
|
|
@ -1,14 +1,14 @@
|
|||
//! A header contains minimal architecture information, the binary kind, the number of load commands, as well as an endianness hint
|
||||
|
||||
use core::fmt;
|
||||
use scroll::ctx;
|
||||
use scroll::{Pread, Pwrite, SizeWith};
|
||||
use scroll::ctx::SizeWith;
|
||||
use plain::Plain;
|
||||
use scroll::ctx;
|
||||
use scroll::ctx::SizeWith;
|
||||
use scroll::{Pread, Pwrite, SizeWith};
|
||||
|
||||
use crate::mach::constants::cputype::{CpuType, CpuSubType, CPU_SUBTYPE_MASK};
|
||||
use crate::error;
|
||||
use crate::container::{self, Container};
|
||||
use crate::error;
|
||||
use crate::mach::constants::cputype::{CpuSubType, CpuType, CPU_SUBTYPE_MASK};
|
||||
|
||||
// Constants for the flags field of the mach_header
|
||||
/// the object file has no undefined references
|
||||
|
|
@ -158,8 +158,7 @@ pub fn filetype_to_str(filetype: u32) -> &'static str {
|
|||
}
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Clone, Copy, Default, Debug)]
|
||||
#[derive(Pread, Pwrite, SizeWith)]
|
||||
#[derive(Clone, Copy, Default, Debug, Pread, Pwrite, SizeWith)]
|
||||
/// A 32-bit Mach-o header
|
||||
pub struct Header32 {
|
||||
/// mach magic number identifier
|
||||
|
|
@ -193,8 +192,7 @@ impl Header32 {
|
|||
}
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Clone, Copy, Default, Debug)]
|
||||
#[derive(Pread, Pwrite, SizeWith)]
|
||||
#[derive(Clone, Copy, Default, Debug, Pread, Pwrite, SizeWith)]
|
||||
/// A 64-bit Mach-o header
|
||||
pub struct Header64 {
|
||||
/// mach magic number identifier
|
||||
|
|
@ -262,60 +260,60 @@ impl fmt::Debug for Header {
|
|||
}
|
||||
|
||||
impl From<Header32> for Header {
|
||||
fn from (header: Header32) -> Self {
|
||||
fn from(header: Header32) -> Self {
|
||||
Header {
|
||||
magic: header.magic,
|
||||
cputype: header.cputype,
|
||||
magic: header.magic,
|
||||
cputype: header.cputype,
|
||||
cpusubtype: header.cpusubtype,
|
||||
filetype: header.filetype,
|
||||
ncmds: header.ncmds as usize,
|
||||
filetype: header.filetype,
|
||||
ncmds: header.ncmds as usize,
|
||||
sizeofcmds: header.sizeofcmds,
|
||||
flags: header.flags,
|
||||
reserved: 0,
|
||||
flags: header.flags,
|
||||
reserved: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Header> for Header32 {
|
||||
fn from (header: Header) -> Self {
|
||||
fn from(header: Header) -> Self {
|
||||
Header32 {
|
||||
magic: header.magic,
|
||||
cputype: header.cputype,
|
||||
magic: header.magic,
|
||||
cputype: header.cputype,
|
||||
cpusubtype: header.cpusubtype,
|
||||
filetype: header.filetype,
|
||||
ncmds: header.ncmds as u32,
|
||||
filetype: header.filetype,
|
||||
ncmds: header.ncmds as u32,
|
||||
sizeofcmds: header.sizeofcmds,
|
||||
flags: header.flags,
|
||||
flags: header.flags,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Header64> for Header {
|
||||
fn from (header: Header64) -> Self {
|
||||
fn from(header: Header64) -> Self {
|
||||
Header {
|
||||
magic: header.magic,
|
||||
cputype: header.cputype,
|
||||
magic: header.magic,
|
||||
cputype: header.cputype,
|
||||
cpusubtype: header.cpusubtype,
|
||||
filetype: header.filetype,
|
||||
ncmds: header.ncmds as usize,
|
||||
filetype: header.filetype,
|
||||
ncmds: header.ncmds as usize,
|
||||
sizeofcmds: header.sizeofcmds,
|
||||
flags: header.flags,
|
||||
reserved: header.reserved,
|
||||
flags: header.flags,
|
||||
reserved: header.reserved,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Header> for Header64 {
|
||||
fn from (header: Header) -> Self {
|
||||
fn from(header: Header) -> Self {
|
||||
Header64 {
|
||||
magic: header.magic,
|
||||
cputype: header.cputype,
|
||||
magic: header.magic,
|
||||
cputype: header.cputype,
|
||||
cpusubtype: header.cpusubtype,
|
||||
filetype: header.filetype,
|
||||
ncmds: header.ncmds as u32,
|
||||
filetype: header.filetype,
|
||||
ncmds: header.ncmds as u32,
|
||||
sizeofcmds: header.sizeofcmds,
|
||||
flags: header.flags,
|
||||
reserved: header.reserved,
|
||||
flags: header.flags,
|
||||
reserved: header.reserved,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -323,7 +321,7 @@ impl From<Header> for Header64 {
|
|||
impl Header {
|
||||
pub fn new(ctx: container::Ctx) -> Self {
|
||||
let mut header = Header::default();
|
||||
header.magic = if ctx.is_big () { MH_MAGIC_64 } else { MH_MAGIC };
|
||||
header.magic = if ctx.is_big() { MH_MAGIC_64 } else { MH_MAGIC };
|
||||
header
|
||||
}
|
||||
/// Returns the cpu type
|
||||
|
|
@ -343,12 +341,8 @@ impl Header {
|
|||
impl ctx::SizeWith<container::Ctx> for Header {
|
||||
fn size_with(container: &container::Ctx) -> usize {
|
||||
match container.container {
|
||||
Container::Little => {
|
||||
SIZEOF_HEADER_32
|
||||
},
|
||||
Container::Big => {
|
||||
SIZEOF_HEADER_64
|
||||
},
|
||||
Container::Little => SIZEOF_HEADER_32,
|
||||
Container::Big => SIZEOF_HEADER_64,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -356,33 +350,33 @@ impl ctx::SizeWith<container::Ctx> for Header {
|
|||
impl ctx::SizeWith<Container> for Header {
|
||||
fn size_with(container: &Container) -> usize {
|
||||
match container {
|
||||
Container::Little => {
|
||||
SIZEOF_HEADER_32
|
||||
},
|
||||
Container::Big => {
|
||||
SIZEOF_HEADER_64
|
||||
},
|
||||
Container::Little => SIZEOF_HEADER_32,
|
||||
Container::Big => SIZEOF_HEADER_64,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ctx::TryFromCtx<'a, container::Ctx> for Header {
|
||||
type Error = crate::error::Error;
|
||||
fn try_from_ctx(bytes: &'a [u8], container::Ctx { le, container }: container::Ctx) -> error::Result<(Self, usize)> {
|
||||
fn try_from_ctx(
|
||||
bytes: &'a [u8],
|
||||
container::Ctx { le, container }: container::Ctx,
|
||||
) -> error::Result<(Self, usize)> {
|
||||
let size = bytes.len();
|
||||
if size < SIZEOF_HEADER_32 || size < SIZEOF_HEADER_64 {
|
||||
let error = error::Error::Malformed("bytes size is smaller than a Mach-o header".into());
|
||||
let error =
|
||||
error::Error::Malformed("bytes size is smaller than a Mach-o header".into());
|
||||
Err(error)
|
||||
} else {
|
||||
match container {
|
||||
Container::Little => {
|
||||
let header = bytes.pread_with::<Header32>(0, le)?;
|
||||
Ok((Header::from(header), SIZEOF_HEADER_32))
|
||||
},
|
||||
}
|
||||
Container::Big => {
|
||||
let header = bytes.pread_with::<Header64>(0, le)?;
|
||||
Ok((Header::from(header), SIZEOF_HEADER_64))
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -394,7 +388,7 @@ impl ctx::TryIntoCtx<container::Ctx> for Header {
|
|||
match ctx.container {
|
||||
Container::Little => {
|
||||
bytes.pwrite_with(Header32::from(self), 0, ctx.le)?;
|
||||
},
|
||||
}
|
||||
Container::Big => {
|
||||
bytes.pwrite_with(Header64::from(self), 0, ctx.le)?;
|
||||
}
|
||||
|
|
@ -411,18 +405,20 @@ impl ctx::IntoCtx<container::Ctx> for Header {
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::mem::size_of;
|
||||
use super::*;
|
||||
use std::mem::size_of;
|
||||
|
||||
#[test]
|
||||
fn test_parse_armv7_header() {
|
||||
use crate::mach::constants::cputype::CPU_TYPE_ARM;
|
||||
const CPU_SUBTYPE_ARM_V7: u32 = 9;
|
||||
use super::Header;
|
||||
use crate::container::{Ctx, Container, Endian};
|
||||
use scroll::{Pread};
|
||||
use crate::container::{Container, Ctx, Endian};
|
||||
use scroll::Pread;
|
||||
let bytes = b"\xce\xfa\xed\xfe\x0c\x00\x00\x00\t\x00\x00\x00\n\x00\x00\x00\x06\x00\x00\x00\x8c\r\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x18\x00\x00\x00\xe0\xf7B\xbb\x1c\xf50w\xa6\xf7u\xa3\xba(";
|
||||
let header: Header = bytes.pread_with(0, Ctx::new(Container::Little, Endian::Little)).unwrap();
|
||||
let header: Header = bytes
|
||||
.pread_with(0, Ctx::new(Container::Little, Endian::Little))
|
||||
.unwrap();
|
||||
assert_eq!(header.cputype, CPU_TYPE_ARM);
|
||||
assert_eq!(header.cpusubtype, CPU_SUBTYPE_ARM_V7);
|
||||
}
|
||||
|
|
|
|||
205  third_party/rust/goblin/src/mach/imports.rs  (vendored)
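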
@ -4,33 +4,33 @@
|
|||
// <seg-index, seg-offset, type, symbol-library-ordinal, symbol-name, addend>
|
||||
// symbol flags are undocumented
|
||||
|
||||
use core::ops::Range;
|
||||
use core::fmt::{self, Debug};
|
||||
use scroll::{Sleb128, Uleb128, Pread};
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::{self, Debug};
|
||||
use core::ops::Range;
|
||||
use scroll::{Pread, Sleb128, Uleb128};
|
||||
|
||||
use crate::container;
|
||||
use crate::error;
|
||||
use crate::mach::load_command;
|
||||
use crate::mach::bind_opcodes;
|
||||
use crate::mach::load_command;
|
||||
use crate::mach::segment;
|
||||
|
||||
#[derive(Debug)]
|
||||
/// Import binding information generated by running the Finite State Automaton programmed via `bind_opcodes`
|
||||
struct BindInformation<'a> {
|
||||
seg_index: u8,
|
||||
seg_offset: u64,
|
||||
bind_type: u8,
|
||||
symbol_library_ordinal: u8,
|
||||
symbol_name: &'a str,
|
||||
symbol_flags: u8,
|
||||
addend: i64,
|
||||
special_dylib: u8, // seeing self = 0 assuming this means the symbol is imported from itself, because its... libSystem.B.dylib?
|
||||
is_lazy: bool,
|
||||
seg_index: u8,
|
||||
seg_offset: u64,
|
||||
bind_type: u8,
|
||||
symbol_library_ordinal: u8,
|
||||
symbol_name: &'a str,
|
||||
symbol_flags: u8,
|
||||
addend: i64,
|
||||
special_dylib: u8, // seeing self = 0 assuming this means the symbol is imported from itself, because its... libSystem.B.dylib?
|
||||
is_lazy: bool,
|
||||
}
|
||||
|
||||
impl<'a> BindInformation<'a> {
|
||||
pub fn new (is_lazy: bool) -> Self {
|
||||
pub fn new(is_lazy: bool) -> Self {
|
||||
let mut bind_info = BindInformation::default();
|
||||
if is_lazy {
|
||||
bind_info.is_lazy = true;
|
||||
|
|
@ -46,15 +46,15 @@ impl<'a> BindInformation<'a> {
|
|||
impl<'a> Default for BindInformation<'a> {
|
||||
fn default() -> Self {
|
||||
BindInformation {
|
||||
seg_index: 0,
|
||||
seg_offset: 0x0,
|
||||
bind_type: 0x0,
|
||||
seg_index: 0,
|
||||
seg_offset: 0x0,
|
||||
bind_type: 0x0,
|
||||
special_dylib: 1,
|
||||
symbol_library_ordinal: 0,
|
||||
symbol_name: "",
|
||||
symbol_flags: 0,
|
||||
addend: 0,
|
||||
is_lazy: false
|
||||
is_lazy: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -63,33 +63,38 @@ impl<'a> Default for BindInformation<'a> {
|
|||
/// An dynamically linked symbolic import
|
||||
pub struct Import<'a> {
|
||||
/// The symbol name dyld uses to resolve this import
|
||||
pub name: &'a str,
|
||||
pub name: &'a str,
|
||||
/// The library this symbol belongs to (thanks to two-level namespaces)
|
||||
pub dylib: &'a str,
|
||||
pub dylib: &'a str,
|
||||
/// Whether the symbol is lazily resolved or not
|
||||
pub is_lazy: bool,
|
||||
/// The offset in the binary this import is found
|
||||
pub offset: u64,
|
||||
pub offset: u64,
|
||||
/// The size of this import
|
||||
pub size: usize,
|
||||
pub size: usize,
|
||||
/// The virtual memory address at which this import is found
|
||||
pub address: u64,
|
||||
/// The addend of this import
|
||||
pub addend: i64,
|
||||
pub addend: i64,
|
||||
/// Whether this import is weak
|
||||
pub is_weak: bool,
|
||||
/// The offset in the stream of bind opcodes that caused this import
|
||||
pub start_of_sequence_offset: u64
|
||||
pub start_of_sequence_offset: u64,
|
||||
}
|
||||
|
||||
impl<'a> Import<'a> {
|
||||
/// Create a new import from the import binding information in `bi`
|
||||
fn new(bi: &BindInformation<'a>, libs: &[&'a str], segments: &[segment::Segment], start_of_sequence_offset: usize) -> Import<'a> {
|
||||
fn new(
|
||||
bi: &BindInformation<'a>,
|
||||
libs: &[&'a str],
|
||||
segments: &[segment::Segment],
|
||||
start_of_sequence_offset: usize,
|
||||
) -> Import<'a> {
|
||||
let (offset, address) = {
|
||||
let segment = &segments[bi.seg_index as usize];
|
||||
(
|
||||
segment.fileoff + bi.seg_offset,
|
||||
segment.vmaddr + bi.seg_offset
|
||||
segment.vmaddr + bi.seg_offset,
|
||||
)
|
||||
};
|
||||
let size = if bi.is_lazy { 8 } else { 0 };
|
||||
|
|
@ -102,7 +107,7 @@ impl<'a> Import<'a> {
|
|||
address,
|
||||
addend: bi.addend,
|
||||
is_weak: bi.is_weak(),
|
||||
start_of_sequence_offset: start_of_sequence_offset as u64
|
||||
start_of_sequence_offset: start_of_sequence_offset as u64,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -120,18 +125,27 @@ impl<'a> Debug for BindInterpreter<'a> {
|
|||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt.debug_struct("BindInterpreter")
|
||||
.field("data", &"<... redacted ...>")
|
||||
.field("location", &format_args!("{:#x}..{:#x}", self.location.start, self.location.end))
|
||||
.field("lazy_location", &format_args!("{:#x}..{:#x}", self.lazy_location.start, self.lazy_location.end))
|
||||
.field(
|
||||
"location",
|
||||
&format_args!("{:#x}..{:#x}", self.location.start, self.location.end),
|
||||
)
|
||||
.field(
|
||||
"lazy_location",
|
||||
&format_args!(
|
||||
"{:#x}..{:#x}",
|
||||
self.lazy_location.start, self.lazy_location.end
|
||||
),
|
||||
)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
impl<'a> BindInterpreter<'a> {
|
||||
/// Construct a new import binding interpreter from `bytes` and the load `command`
|
||||
pub fn new(bytes: &'a [u8], command: &load_command::DyldInfoCommand) -> Self {
|
||||
let get_pos = |off: u32, size: u32| -> Range<usize> {
|
||||
off as usize..(off + size) as usize
|
||||
let start = off as usize;
|
||||
start..start.saturating_add(size as usize)
|
||||
};
|
||||
let location = get_pos(command.bind_off, command.bind_size);
|
||||
let lazy_location = get_pos(command.lazy_bind_off, command.lazy_bind_size);
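The `get_pos` closure above now builds the bind-opcode ranges with `saturating_add`, so an oversized `bind_size` can no longer wrap the end offset back past the start. The same idea as a free function (hypothetical `range_for`, illustration only):

fn range_for(off: u32, size: u32) -> core::ops::Range<usize> {
    let start = off as usize;
    // saturating_add keeps `end >= start` even for hostile size values
    start..start.saturating_add(size as usize)
}

fn main() {
    assert_eq!(range_for(16, 8), 16usize..24);
}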
|
||||
|
|
@ -142,13 +156,25 @@ impl<'a> BindInterpreter<'a> {
|
|||
}
|
||||
}
|
||||
/// Return the imports in this binary
|
||||
pub fn imports(&self, libs: &[&'a str], segments: &[segment::Segment], ctx: container::Ctx) -> error::Result<Vec<Import<'a>>>{
|
||||
pub fn imports(
|
||||
&self,
|
||||
libs: &[&'a str],
|
||||
segments: &[segment::Segment],
|
||||
ctx: container::Ctx,
|
||||
) -> error::Result<Vec<Import<'a>>> {
|
||||
let mut imports = Vec::new();
|
||||
self.run(false, libs, segments, ctx, &mut imports)?;
|
||||
self.run( true, libs, segments, ctx, &mut imports)?;
|
||||
self.run(true, libs, segments, ctx, &mut imports)?;
|
||||
Ok(imports)
|
||||
}
|
||||
fn run(&self, is_lazy: bool, libs: &[&'a str], segments: &[segment::Segment], ctx: container::Ctx, imports: &mut Vec<Import<'a>>) -> error::Result<()>{
|
||||
fn run(
|
||||
&self,
|
||||
is_lazy: bool,
|
||||
libs: &[&'a str],
|
||||
segments: &[segment::Segment],
|
||||
ctx: container::Ctx,
|
||||
imports: &mut Vec<Import<'a>>,
|
||||
) -> error::Result<()> {
|
||||
use crate::mach::bind_opcodes::*;
|
||||
let location = if is_lazy {
|
||||
&self.lazy_location
|
||||
|
|
@ -168,108 +194,113 @@ impl<'a> BindInterpreter<'a> {
|
|||
BIND_OPCODE_DONE => {
|
||||
bind_info = BindInformation::new(is_lazy);
|
||||
start_of_sequence = offset - location.start;
|
||||
},
|
||||
}
|
||||
BIND_OPCODE_SET_DYLIB_ORDINAL_IMM => {
|
||||
let symbol_library_ordinal = opcode & BIND_IMMEDIATE_MASK;
|
||||
bind_info.symbol_library_ordinal = symbol_library_ordinal;
|
||||
},
|
||||
let symbol_library_ordinal = opcode & BIND_IMMEDIATE_MASK;
|
||||
bind_info.symbol_library_ordinal = symbol_library_ordinal;
|
||||
}
|
||||
BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB => {
|
||||
let symbol_library_ordinal = Uleb128::read(&self.data, &mut offset)?;
|
||||
bind_info.symbol_library_ordinal = symbol_library_ordinal as u8;
|
||||
},
|
||||
let symbol_library_ordinal = Uleb128::read(&self.data, &mut offset)?;
|
||||
bind_info.symbol_library_ordinal = symbol_library_ordinal as u8;
|
||||
}
|
||||
BIND_OPCODE_SET_DYLIB_SPECIAL_IMM => {
|
||||
// dyld puts the immediate into the symbol_library_ordinal field...
|
||||
let special_dylib = opcode & BIND_IMMEDIATE_MASK;
|
||||
// Printf.printf "special_dylib: 0x%x\n" special_dylib
|
||||
bind_info.special_dylib = special_dylib;
|
||||
},
|
||||
}
|
||||
BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM => {
|
||||
let symbol_flags = opcode & BIND_IMMEDIATE_MASK;
|
||||
let symbol_name = self.data.pread::<&str>(offset)?;
|
||||
let symbol_flags = opcode & BIND_IMMEDIATE_MASK;
|
||||
let symbol_name = self.data.pread::<&str>(offset)?;
|
||||
offset += symbol_name.len() + 1; // second time this \0 caused debug woes
|
||||
bind_info.symbol_name = symbol_name;
|
||||
bind_info.symbol_name = symbol_name;
|
||||
bind_info.symbol_flags = symbol_flags;
|
||||
},
|
||||
}
|
||||
BIND_OPCODE_SET_TYPE_IMM => {
|
||||
let bind_type = opcode & BIND_IMMEDIATE_MASK;
|
||||
bind_info.bind_type = bind_type;
|
||||
},
|
||||
let bind_type = opcode & BIND_IMMEDIATE_MASK;
|
||||
bind_info.bind_type = bind_type;
|
||||
}
|
||||
BIND_OPCODE_SET_ADDEND_SLEB => {
|
||||
let addend = Sleb128::read(&self.data, &mut offset)?;
|
||||
bind_info.addend = addend;
|
||||
},
|
||||
}
|
||||
BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB => {
|
||||
let seg_index = opcode & BIND_IMMEDIATE_MASK;
|
||||
let seg_index = opcode & BIND_IMMEDIATE_MASK;
|
||||
// dyld sets the address to the segActualLoadAddress(segIndex) + uleb128
|
||||
// address = segActualLoadAddress(segmentIndex) + read_uleb128(p, end);
|
||||
let seg_offset = Uleb128::read(&self.data, &mut offset)?;
|
||||
bind_info.seg_index = seg_index;
|
||||
let seg_offset = Uleb128::read(&self.data, &mut offset)?;
|
||||
bind_info.seg_index = seg_index;
|
||||
bind_info.seg_offset = seg_offset;
|
||||
},
|
||||
}
|
||||
BIND_OPCODE_ADD_ADDR_ULEB => {
|
||||
let addr = Uleb128::read(&self.data, &mut offset)?;
|
||||
let seg_offset = bind_info.seg_offset.wrapping_add(addr);
|
||||
bind_info.seg_offset = seg_offset;
|
||||
},
|
||||
let addr = Uleb128::read(&self.data, &mut offset)?;
|
||||
let seg_offset = bind_info.seg_offset.wrapping_add(addr);
|
||||
bind_info.seg_offset = seg_offset;
|
||||
}
|
||||
// record the record by placing its value into our list
|
||||
BIND_OPCODE_DO_BIND => {
|
||||
// from dyld:
|
||||
// if ( address >= segmentEndAddress )
|
||||
// throwBadBindingAddress(address, segmentEndAddress, segmentIndex, start, end, p);
|
||||
// (this->*handler)(context, address, type, symbolName, symboFlags, addend, libraryOrdinal, "", &last);
|
||||
// address += sizeof(intptr_t);
|
||||
// throwBadBindingAddress(address, segmentEndAddress, segmentIndex, start, end, p);
|
||||
// (this->*handler)(context, address, type, symbolName, symboFlags, addend, libraryOrdinal, "", &last);
|
||||
// address += sizeof(intptr_t);
|
||||
imports.push(Import::new(&bind_info, libs, segments, start_of_sequence));
|
||||
let seg_offset = bind_info.seg_offset.wrapping_add(ctx.size() as u64);
|
||||
bind_info.seg_offset = seg_offset;
|
||||
},
|
||||
}
|
||||
BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB => {
|
||||
// dyld:
|
||||
// if ( address >= segmentEndAddress )
|
||||
// throwBadBindingAddress(address, segmentEndAddress, segmentIndex, start, end, p);
|
||||
// (this->*handler)(context, address, type, symbolName, symboFlags, addend, libraryOrdinal, "", &last);
|
||||
// address += read_uleb128(p, end) + sizeof(intptr_t);
|
||||
// if ( address >= segmentEndAddress )
|
||||
// throwBadBindingAddress(address, segmentEndAddress, segmentIndex, start, end, p);
|
||||
// (this->*handler)(context, address, type, symbolName, symboFlags, addend, libraryOrdinal, "", &last);
|
||||
// address += read_uleb128(p, end) + sizeof(intptr_t);
|
||||
// we bind the old record, then increment bind info address for the next guy, plus the ptr offset *)
|
||||
imports.push(Import::new(&bind_info, libs, segments, start_of_sequence));
|
||||
let addr = Uleb128::read(&self.data, &mut offset)?;
|
||||
let seg_offset = bind_info.seg_offset.wrapping_add(addr).wrapping_add(ctx.size() as u64);
|
||||
let seg_offset = bind_info
|
||||
.seg_offset
|
||||
.wrapping_add(addr)
|
||||
.wrapping_add(ctx.size() as u64);
|
||||
bind_info.seg_offset = seg_offset;
|
||||
},
|
||||
}
|
||||
BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED => {
|
||||
// dyld:
|
||||
// if ( address >= segmentEndAddress )
|
||||
// throwBadBindingAddress(address, segmentEndAddress, segmentIndex, start, end, p);
|
||||
// (this->*handler)(context, address, type, symbolName, symboFlags, addend, libraryOrdinal, "", &last);
|
||||
// address += immediate*sizeof(intptr_t) + sizeof(intptr_t);
|
||||
// break;
|
||||
// throwBadBindingAddress(address, segmentEndAddress, segmentIndex, start, end, p);
|
||||
// (this->*handler)(context, address, type, symbolName, symboFlags, addend, libraryOrdinal, "", &last);
|
||||
// address += immediate*sizeof(intptr_t) + sizeof(intptr_t);
|
||||
// break;
|
||||
// similarly, we bind the old record, then perform address manipulation for the next record
|
||||
imports.push(Import::new(&bind_info, libs, segments, start_of_sequence));
|
||||
let scale = opcode & BIND_IMMEDIATE_MASK;
|
||||
let scale = opcode & BIND_IMMEDIATE_MASK;
|
||||
let size = ctx.size() as u64;
|
||||
let seg_offset = bind_info.seg_offset.wrapping_add(u64::from(scale) * size).wrapping_add(size);
|
||||
let seg_offset = bind_info
|
||||
.seg_offset
|
||||
.wrapping_add(u64::from(scale) * size)
|
||||
.wrapping_add(size);
|
||||
bind_info.seg_offset = seg_offset;
|
||||
},
|
||||
}
|
||||
BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB => {
|
||||
// dyld:
|
||||
// count = read_uleb128(p, end);
|
||||
// skip = read_uleb128(p, end);
|
||||
// for (uint32_t i=0; i < count; ++i) {
|
||||
// if ( address >= segmentEndAddress )
|
||||
// throwBadBindingAddress(address, segmentEndAddress, segmentIndex, start, end, p);
|
||||
// (this->*handler)(context, address, type, symbolName, symboFlags, addend, libraryOrdinal, "", &last);
|
||||
// address += skip + sizeof(intptr_t);
|
||||
// }
|
||||
// break;
|
||||
// skip = read_uleb128(p, end);
|
||||
// for (uint32_t i=0; i < count; ++i) {
|
||||
// if ( address >= segmentEndAddress )
|
||||
// throwBadBindingAddress(address, segmentEndAddress, segmentIndex, start, end, p);
|
||||
// (this->*handler)(context, address, type, symbolName, symboFlags, addend, libraryOrdinal, "", &last);
|
||||
// address += skip + sizeof(intptr_t);
|
||||
// }
|
||||
// break;
|
||||
let count = Uleb128::read(&self.data, &mut offset)?;
|
||||
let skip = Uleb128::read(&self.data, &mut offset)?;
|
||||
let skip = Uleb128::read(&self.data, &mut offset)?;
|
||||
let skip_plus_size = skip + ctx.size() as u64;
|
||||
for _i in 0..count {
|
||||
for _i in 0..count {
|
||||
imports.push(Import::new(&bind_info, libs, segments, start_of_sequence));
|
||||
let seg_offset = bind_info.seg_offset.wrapping_add(skip_plus_size);
|
||||
bind_info.seg_offset = seg_offset;
|
||||
}
|
||||
},
|
||||
_ => {
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Ok(())
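The opcode loop above leans on scroll's `Uleb128::read`/`Sleb128::read` for every variable-length operand. ULEB128 is the standard little-endian base-128 varint; a minimal decoder for reference (an assumption about what the scroll call does internally, not scroll's code):

fn read_uleb128(bytes: &[u8], offset: &mut usize) -> Option<u64> {
    let mut result: u64 = 0;
    let mut shift: u32 = 0;
    loop {
        let byte = *bytes.get(*offset)?;
        *offset += 1;
        result |= u64::from(byte & 0x7f) << shift; // low 7 bits are payload
        if byte & 0x80 == 0 {
            return Some(result); // high bit clear: last byte
        }
        shift += 7;
        if shift >= 64 {
            return None; // malformed: too many continuation bytes
        }
    }
}

fn main() {
    let mut off = 0;
    // 0xE5 0x8E 0x26 is the classic encoding of 624485.
    assert_eq!(read_uleb128(&[0xe5, 0x8e, 0x26], &mut off), Some(624_485));
    assert_eq!(off, 3);
}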
|
||||
920  third_party/rust/goblin/src/mach/load_command.rs  (vendored)
File diff suppressed because it is too large.

173  third_party/rust/goblin/src/mach/mod.rs  (vendored)
@ -1,27 +1,27 @@
|
|||
//! The Mach-o, mostly zero-copy, binary format parser and raw struct definitions
|
||||
use core::fmt;
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt;
|
||||
|
||||
use log::debug;
|
||||
|
||||
use scroll::{Pread, BE};
|
||||
use scroll::ctx::SizeWith;
|
||||
use scroll::{Pread, BE};
|
||||
|
||||
use crate::error;
|
||||
use crate::container;
|
||||
use crate::error;
|
||||
|
||||
pub mod header;
|
||||
pub mod constants;
|
||||
pub mod fat;
|
||||
pub mod load_command;
|
||||
pub mod symbols;
|
||||
pub mod exports;
|
||||
pub mod imports;
|
||||
pub mod bind_opcodes;
|
||||
pub mod constants;
|
||||
pub mod exports;
|
||||
pub mod fat;
|
||||
pub mod header;
|
||||
pub mod imports;
|
||||
pub mod load_command;
|
||||
pub mod relocation;
|
||||
pub mod segment;
|
||||
pub mod symbols;
|
||||
|
||||
pub use self::constants::cputype as cputype;
|
||||
pub use self::constants::cputype;
|
||||
|
||||
/// Returns a big endian magical number
|
||||
pub fn peek(bytes: &[u8], offset: usize) -> error::Result<u32> {
|
||||
|
|
@ -29,17 +29,24 @@ pub fn peek(bytes: &[u8], offset: usize) -> error::Result<u32> {
|
|||
}
|
||||
|
||||
/// Parses a magic number, and an accompanying mach-o binary parsing context, according to the magic number.
|
||||
pub fn parse_magic_and_ctx(bytes: &[u8], offset: usize) -> error::Result<(u32, Option<container::Ctx>)> {
|
||||
use crate::mach::header::*;
|
||||
pub fn parse_magic_and_ctx(
|
||||
bytes: &[u8],
|
||||
offset: usize,
|
||||
) -> error::Result<(u32, Option<container::Ctx>)> {
|
||||
use crate::container::Container;
|
||||
use crate::mach::header::*;
|
||||
let magic = bytes.pread_with::<u32>(offset, BE)?;
|
||||
let ctx = match magic {
|
||||
MH_CIGAM_64 | MH_CIGAM | MH_MAGIC_64 | MH_MAGIC => {
|
||||
let is_lsb = magic == MH_CIGAM || magic == MH_CIGAM_64;
|
||||
let le = scroll::Endian::from(is_lsb);
|
||||
let container = if magic == MH_MAGIC_64 || magic == MH_CIGAM_64 { Container::Big } else { Container::Little };
|
||||
let container = if magic == MH_MAGIC_64 || magic == MH_CIGAM_64 {
|
||||
Container::Big
|
||||
} else {
|
||||
Container::Little
|
||||
};
|
||||
Some(container::Ctx::new(container, le))
|
||||
},
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
Ok((magic, ctx))
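`parse_magic_and_ctx` above reads the magic big-endian and derives both the pointer width and the endianness from which of the four Mach-O magics it sees. A plain-std sketch of that dispatch (the `Width`/`Order` enums are illustrative stand-ins for goblin's `Container`/`Endian`):

const MH_MAGIC: u32 = 0xfeed_face;     // 32-bit, big-endian on disk
const MH_CIGAM: u32 = 0xcefa_edfe;     // 32-bit, little-endian on disk
const MH_MAGIC_64: u32 = 0xfeed_facf;  // 64-bit, big-endian on disk
const MH_CIGAM_64: u32 = 0xcffa_edfe;  // 64-bit, little-endian on disk

#[derive(Debug, PartialEq)]
enum Width { Bits32, Bits64 }
#[derive(Debug, PartialEq)]
enum Order { Little, Big }

fn classify(bytes: &[u8]) -> Option<(Width, Order)> {
    if bytes.len() < 4 {
        return None;
    }
    // Read the magic as big-endian, the same way `mach::peek` does.
    let magic = u32::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]);
    match magic {
        MH_MAGIC => Some((Width::Bits32, Order::Big)),
        MH_CIGAM => Some((Width::Bits32, Order::Little)),
        MH_MAGIC_64 => Some((Width::Bits64, Order::Big)),
        MH_CIGAM_64 => Some((Width::Bits64, Order::Little)),
        _ => None,
    }
}

fn main() {
    // A little-endian 64-bit Mach-O starts with CF FA ED FE.
    assert_eq!(classify(&[0xcf, 0xfa, 0xed, 0xfe]), Some((Width::Bits64, Order::Little)));
}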
|
||||
|
|
@ -57,6 +64,8 @@ pub struct MachO<'a> {
|
|||
pub symbols: Option<symbols::Symbols<'a>>,
|
||||
/// The dylibs this library depends on
|
||||
pub libs: Vec<&'a str>,
|
||||
/// The runtime search paths for dylibs this library depends on
|
||||
pub rpaths: Vec<&'a str>,
|
||||
/// The entry point (as a virtual memory address), 0 if none
|
||||
pub entry: u64,
|
||||
/// Whether `entry` refers to an older `LC_UNIXTHREAD` instead of the newer `LC_MAIN` entrypoint
|
||||
|
|
@ -76,18 +85,18 @@ pub struct MachO<'a> {
|
|||
impl<'a> fmt::Debug for MachO<'a> {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt.debug_struct("MachO")
|
||||
.field("header", &self.header)
|
||||
.field("load_commands", &self.load_commands)
|
||||
.field("segments", &self.segments)
|
||||
.field("entry", &self.entry)
|
||||
.field("header", &self.header)
|
||||
.field("load_commands", &self.load_commands)
|
||||
.field("segments", &self.segments)
|
||||
.field("entry", &self.entry)
|
||||
.field("old_style_entry", &self.old_style_entry)
|
||||
.field("libs", &self.libs)
|
||||
.field("name", &self.name)
|
||||
.field("little_endian", &self.little_endian)
|
||||
.field("is_64", &self.is_64)
|
||||
.field("symbols()", &self.symbols().collect::<Vec<_>>())
|
||||
.field("exports()", &self.exports())
|
||||
.field("imports()", &self.imports())
|
||||
.field("libs", &self.libs)
|
||||
.field("name", &self.name)
|
||||
.field("little_endian", &self.little_endian)
|
||||
.field("is_64", &self.is_64)
|
||||
.field("symbols()", &self.symbols().collect::<Vec<_>>())
|
||||
.field("exports()", &self.exports())
|
||||
.field("imports()", &self.imports())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
|
@ -106,7 +115,9 @@ impl<'a> MachO<'a> {
|
|||
}
|
||||
}
|
||||
/// Return a vector of the relocations in this binary
|
||||
pub fn relocations(&self) -> error::Result<Vec<(usize, segment::RelocationIterator, segment::Section)>> {
|
||||
pub fn relocations(
|
||||
&self,
|
||||
) -> error::Result<Vec<(usize, segment::RelocationIterator, segment::Section)>> {
|
||||
debug!("Iterating relocations");
|
||||
let mut relocs = Vec::new();
|
||||
for (_i, segment) in (&self.segments).into_iter().enumerate() {
|
||||
|
|
@ -138,7 +149,11 @@ impl<'a> MachO<'a> {
|
|||
/// Parses the Mach-o binary from `bytes` at `offset`
|
||||
pub fn parse(bytes: &'a [u8], mut offset: usize) -> error::Result<MachO<'a>> {
|
||||
let (magic, maybe_ctx) = parse_magic_and_ctx(bytes, offset)?;
|
||||
let ctx = if let Some(ctx) = maybe_ctx { ctx } else { return Err(error::Error::BadMagic(u64::from(magic))) };
|
||||
let ctx = if let Some(ctx) = maybe_ctx {
|
||||
ctx
|
||||
} else {
|
||||
return Err(error::Error::BadMagic(u64::from(magic)));
|
||||
};
|
||||
debug!("Ctx: {:?}", ctx);
|
||||
let offset = &mut offset;
|
||||
let header: header::Header = bytes.pread_with(*offset, ctx)?;
|
||||
|
|
@ -147,9 +162,17 @@ impl<'a> MachO<'a> {
|
|||
let is_64 = ctx.container.is_big();
|
||||
*offset += header::Header::size_with(&ctx.container);
|
||||
let ncmds = header.ncmds;
|
||||
|
||||
let sizeofcmds = header.sizeofcmds as usize;
|
||||
// a load cmd is at least 2 * 4 bytes, (type, sizeof)
|
||||
if ncmds > sizeofcmds / 8 || sizeofcmds > bytes.len() {
|
||||
return Err(error::Error::BufferTooShort(ncmds, "load commands"));
|
||||
}
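The new guard above rejects implausible header values before allocating: a load command is at least eight bytes (`cmd` plus `cmdsize`), so `ncmds` can never legitimately exceed `sizeofcmds / 8`, and `sizeofcmds` can never exceed the input itself. The check in isolation (hypothetical `validate_load_commands`, illustration only):

fn validate_load_commands(ncmds: usize, sizeofcmds: usize, file_len: usize) -> Result<(), String> {
    // Each load command carries at least a 4-byte type and a 4-byte size.
    if ncmds > sizeofcmds / 8 || sizeofcmds > file_len {
        return Err(format!(
            "implausible load commands: ncmds={} sizeofcmds={} file_len={}",
            ncmds, sizeofcmds, file_len
        ));
    }
    Ok(())
}

fn main() {
    assert!(validate_load_commands(4, 64, 4096).is_ok());
    assert!(validate_load_commands(1_000_000, 64, 4096).is_err()); // ncmds cannot fit in sizeofcmds
}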
|
||||
|
||||
let mut cmds: Vec<load_command::LoadCommand> = Vec::with_capacity(ncmds);
|
||||
let mut symbols = None;
|
||||
let mut libs = vec!["self"];
|
||||
let mut rpaths = vec![];
|
||||
let mut export_trie = None;
|
||||
let mut bind_interpreter = None;
|
||||
let mut unixthread_entry_address = None;
|
||||
|
|
@ -163,44 +186,54 @@ impl<'a> MachO<'a> {
|
|||
load_command::CommandVariant::Segment32(command) => {
|
||||
// FIXME: we may want to be less strict about failure here, and just return an empty segment to allow parsing to continue?
|
||||
segments.push(segment::Segment::from_32(bytes, &command, cmd.offset, ctx)?)
|
||||
},
|
||||
}
|
||||
load_command::CommandVariant::Segment64(command) => {
|
||||
segments.push(segment::Segment::from_64(bytes, &command, cmd.offset, ctx)?)
|
||||
},
|
||||
}
|
||||
load_command::CommandVariant::Symtab(command) => {
|
||||
symbols = Some(symbols::Symbols::parse(bytes, &command, ctx)?);
|
||||
},
|
||||
load_command::CommandVariant::LoadDylib (command)
|
||||
}
|
||||
load_command::CommandVariant::LoadDylib(command)
|
||||
| load_command::CommandVariant::LoadUpwardDylib(command)
|
||||
| load_command::CommandVariant::ReexportDylib (command)
|
||||
| load_command::CommandVariant::LoadWeakDylib (command)
|
||||
| load_command::CommandVariant::LazyLoadDylib (command) => {
|
||||
| load_command::CommandVariant::ReexportDylib(command)
|
||||
| load_command::CommandVariant::LoadWeakDylib(command)
|
||||
| load_command::CommandVariant::LazyLoadDylib(command) => {
|
||||
let lib = bytes.pread::<&str>(cmd.offset + command.dylib.name as usize)?;
|
||||
libs.push(lib);
|
||||
},
|
||||
load_command::CommandVariant::DyldInfo (command)
|
||||
}
|
||||
load_command::CommandVariant::Rpath(command) => {
|
||||
let rpath = bytes.pread::<&str>(cmd.offset + command.path as usize)?;
|
||||
rpaths.push(rpath);
|
||||
}
|
||||
load_command::CommandVariant::DyldInfo(command)
|
||||
| load_command::CommandVariant::DyldInfoOnly(command) => {
|
||||
export_trie = Some(exports::ExportTrie::new(bytes, &command));
|
||||
bind_interpreter = Some(imports::BindInterpreter::new(bytes, &command));
|
||||
},
|
||||
}
|
||||
load_command::CommandVariant::DyldExportsTrie(command) => {
|
||||
export_trie = Some(exports::ExportTrie::new_from_linkedit_data_command(
|
||||
bytes, &command,
|
||||
));
|
||||
}
|
||||
load_command::CommandVariant::Unixthread(command) => {
|
||||
// dyld cares only about the first LC_UNIXTHREAD
|
||||
if unixthread_entry_address.is_none() {
|
||||
unixthread_entry_address = Some(command.instruction_pointer(header.cputype)?);
|
||||
unixthread_entry_address =
|
||||
Some(command.instruction_pointer(header.cputype)?);
|
||||
}
|
||||
},
|
||||
}
|
||||
load_command::CommandVariant::Main(command) => {
|
||||
// dyld cares only about the first LC_MAIN
|
||||
if main_entry_offset.is_none() {
|
||||
main_entry_offset = Some(command.entryoff);
|
||||
}
|
||||
},
|
||||
}
|
||||
load_command::CommandVariant::IdDylib(command) => {
|
||||
let id = bytes.pread::<&str>(cmd.offset + command.dylib.name as usize)?;
|
||||
libs[0] = id;
|
||||
name = Some(id);
|
||||
},
|
||||
_ => ()
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
cmds.push(cmd)
|
||||
}
|
||||
|
|
@ -209,13 +242,17 @@ impl<'a> MachO<'a> {
|
|||
// choose the same way here
|
||||
let (entry, old_style_entry) = if let Some(offset) = main_entry_offset {
|
||||
// map the entrypoint offset to a virtual memory address
|
||||
let base_address = segments.iter()
|
||||
let base_address = segments
|
||||
.iter()
|
||||
.filter(|s| &s.segname[0..7] == b"__TEXT\0")
|
||||
.map(|s| s.vmaddr - s.fileoff)
|
||||
.next()
|
||||
.ok_or_else(||
|
||||
error::Error::Malformed(format!("image specifies LC_MAIN offset {} but has no __TEXT segment", offset))
|
||||
)?;
|
||||
.ok_or_else(|| {
|
||||
error::Error::Malformed(format!(
|
||||
"image specifies LC_MAIN offset {} but has no __TEXT segment",
|
||||
offset
|
||||
))
|
||||
})?;
|
||||
|
||||
(base_address + offset, false)
|
||||
} else if let Some(address) = unixthread_entry_address {
|
||||
|
|
@ -230,6 +267,7 @@ impl<'a> MachO<'a> {
|
|||
segments,
|
||||
symbols,
|
||||
libs,
|
||||
rpaths,
|
||||
export_trie,
|
||||
bind_interpreter,
|
||||
entry,
|
||||
|
|
@ -253,7 +291,7 @@ pub struct MultiArch<'a> {
|
|||
/// Iterator over the fat architecture headers in a `MultiArch` container
|
||||
pub struct FatArchIterator<'a> {
|
||||
index: usize,
|
||||
data: &'a[u8],
|
||||
data: &'a [u8],
|
||||
narches: usize,
|
||||
start: usize,
|
||||
}
|
||||
|
|
@ -265,7 +303,10 @@ impl<'a> Iterator for FatArchIterator<'a> {
|
|||
None
|
||||
} else {
|
||||
let offset = (self.index * fat::SIZEOF_FAT_ARCH) + self.start;
|
||||
let arch = self.data.pread_with::<fat::FatArch>(offset, scroll::BE).map_err(core::convert::Into::into);
|
||||
let arch = self
|
||||
.data
|
||||
.pread_with::<fat::FatArch>(offset, scroll::BE)
|
||||
.map_err(core::convert::Into::into);
|
||||
self.index += 1;
|
||||
Some(arch)
|
||||
}
|
||||
|
|
@ -275,7 +316,7 @@ impl<'a> Iterator for FatArchIterator<'a> {
|
|||
/// Iterator over every `MachO` binary contained in this `MultiArch` container
|
||||
pub struct MachOIterator<'a> {
|
||||
index: usize,
|
||||
data: &'a[u8],
|
||||
data: &'a [u8],
|
||||
narches: usize,
|
||||
start: usize,
|
||||
}
|
||||
|
|
@ -294,8 +335,8 @@ impl<'a> Iterator for MachOIterator<'a> {
|
|||
let bytes = arch.slice(self.data);
|
||||
let binary = MachO::parse(bytes, 0);
|
||||
Some(binary)
|
||||
},
|
||||
Err(e) => Some(Err(e.into()))
|
||||
}
|
||||
Err(e) => Some(Err(e.into())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -321,7 +362,7 @@ impl<'a> MultiArch<'a> {
|
|||
Ok(MultiArch {
|
||||
data: bytes,
|
||||
start: fat::SIZEOF_FAT_HEADER,
|
||||
narches: header.nfat_arch as usize
|
||||
narches: header.nfat_arch as usize,
|
||||
})
|
||||
}
|
||||
/// Iterate every fat arch header
|
||||
|
|
@ -335,6 +376,10 @@ impl<'a> MultiArch<'a> {
|
|||
}
|
||||
/// Return all the architectures in this binary
|
||||
pub fn arches(&self) -> error::Result<Vec<fat::FatArch>> {
|
||||
if self.narches > self.data.len() / fat::SIZEOF_FAT_ARCH {
|
||||
return Err(error::Error::BufferTooShort(self.narches, "arches"));
|
||||
}
|
||||
|
||||
let mut arches = Vec::with_capacity(self.narches);
|
||||
for arch in self.iter_arches() {
|
||||
arches.push(arch?);
|
||||
|
|
@ -344,7 +389,10 @@ impl<'a> MultiArch<'a> {
|
|||
/// Try to get the Mach-o binary at `index`
|
||||
pub fn get(&self, index: usize) -> error::Result<MachO<'a>> {
|
||||
if index >= self.narches {
|
||||
return Err(error::Error::Malformed(format!("Requested the {}-th binary, but there are only {} architectures in this container", index, self.narches)))
|
||||
return Err(error::Error::Malformed(format!(
|
||||
"Requested the {}-th binary, but there are only {} architectures in this container",
|
||||
index, self.narches
|
||||
)));
|
||||
}
|
||||
let offset = (index * fat::SIZEOF_FAT_ARCH) + self.start;
|
||||
let arch = self.data.pread_with::<fat::FatArch>(offset, scroll::BE)?;
|
||||
|
|
@ -352,7 +400,10 @@ impl<'a> MultiArch<'a> {
|
|||
Ok(MachO::parse(bytes, 0)?)
|
||||
}
|
||||
|
||||
pub fn find<F: Fn(error::Result<fat::FatArch>) -> bool>(&'a self, f: F) -> Option<error::Result<MachO<'a>>> {
|
||||
pub fn find<F: Fn(error::Result<fat::FatArch>) -> bool>(
|
||||
&'a self,
|
||||
f: F,
|
||||
) -> Option<error::Result<MachO<'a>>> {
|
||||
for (i, arch) in self.iter_arches().enumerate() {
|
||||
if f(arch) {
|
||||
return Some(self.get(i));
|
||||
|
|
@ -364,7 +415,9 @@ impl<'a> MultiArch<'a> {
|
|||
pub fn find_cputype(&self, cputype: u32) -> error::Result<Option<fat::FatArch>> {
|
||||
for arch in self.iter_arches() {
|
||||
let arch = arch?;
|
||||
if arch.cputype == cputype { return Ok(Some(arch)) }
|
||||
if arch.cputype == cputype {
|
||||
return Ok(Some(arch));
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
|
@ -373,8 +426,8 @@ impl<'a> MultiArch<'a> {
|
|||
impl<'a> fmt::Debug for MultiArch<'a> {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt.debug_struct("MultiArch")
|
||||
.field("arches", &self.arches().unwrap())
|
||||
.field("data", &self.data.len())
|
||||
.field("arches", &self.arches().unwrap_or_default())
|
||||
.field("data", &self.data.len())
|
||||
.finish()
|
||||
}
|
||||
}
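Swapping `unwrap()` for `unwrap_or_default()` above keeps `Debug` for `MultiArch` from panicking on a truncated fat binary. The same panic-free pattern on a toy type (`Thing` is illustrative, not goblin's):

use std::fmt;

struct Thing {
    values: Result<Vec<u32>, String>,
}

impl fmt::Debug for Thing {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Thing")
            // Fall back to an empty Vec instead of unwrapping inside fmt().
            .field("values", &self.values.clone().unwrap_or_default())
            .finish()
    }
}

fn main() {
    let broken = Thing { values: Err("parse error".into()) };
    println!("{:?}", broken); // prints `Thing { values: [] }` rather than panicking
}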
|
||||
|
|
@ -386,7 +439,7 @@ pub enum Mach<'a> {
|
|||
/// A "fat" multi-architecture binary container
|
||||
Fat(MultiArch<'a>),
|
||||
/// A regular Mach-o binary
|
||||
Binary(MachO<'a>)
|
||||
Binary(MachO<'a>),
|
||||
}
|
||||
|
||||
impl<'a> Mach<'a> {
|
||||
|
|
@ -402,7 +455,7 @@ impl<'a> Mach<'a> {
|
|||
fat::FAT_MAGIC => {
|
||||
let multi = MultiArch::new(bytes)?;
|
||||
Ok(Mach::Fat(multi))
|
||||
},
|
||||
}
|
||||
// we might be a regular binary
|
||||
_ => {
|
||||
let binary = MachO::parse(bytes, 0)?;
|
||||
|
|
|
|||
108  third_party/rust/goblin/src/mach/relocation.rs  (vendored)
@ -22,9 +22,9 @@
|
|||
// sections. And further could have still different ordinals when combined
|
||||
// by the link-editor. The value R_ABS is used for relocation entries for
|
||||
// absolute symbols which need no further relocation.
|
||||
use core::fmt;
|
||||
use crate::mach;
|
||||
use scroll::{Pread, Pwrite, IOwrite, SizeWith, IOread};
|
||||
use core::fmt;
|
||||
use scroll::{IOread, IOwrite, Pread, Pwrite, SizeWith};
|
||||
|
||||
// TODO: armv7 relocations are scattered, must and r_address with 0x8000_0000 to check if its scattered or not
|
||||
#[derive(Copy, Clone, Pread, Pwrite, IOwrite, SizeWith, IOread)]
|
||||
|
|
@ -125,8 +125,9 @@ pub const X86_64_RELOC_TLV: RelocType = 9;
|
|||
pub const GENERIC_RELOC_VANILLA: RelocType = 0;
|
||||
pub const GENERIC_RELOC_PAIR: RelocType = 1;
|
||||
pub const GENERIC_RELOC_SECTDIFF: RelocType = 2;
|
||||
pub const GENERIC_RELOC_LOCAL_SECTDIFF: RelocType = 3;
|
||||
pub const GENERIC_RELOC_PB_LA_P: RelocType = 4;
|
||||
pub const GENERIC_RELOC_PB_LA_PTR: RelocType = 3;
|
||||
pub const GENERIC_RELOC_LOCAL_SECTDIFF: RelocType = 4;
|
||||
pub const GENERIC_RELOC_TLV: RelocType = 5;
|
||||
|
||||
// arm relocations
|
||||
pub const ARM_RELOC_VANILLA: RelocType = GENERIC_RELOC_VANILLA;
|
||||
|
|
@ -167,62 +168,55 @@ pub const ARM64_RELOC_ADDEND: RelocType = 10;
|
|||
pub fn reloc_to_str(reloc: RelocType, cputype: mach::cputype::CpuType) -> &'static str {
|
||||
use crate::mach::constants::cputype::*;
|
||||
match cputype {
|
||||
CPU_TYPE_ARM64 | CPU_TYPE_ARM64_32 => {
|
||||
match reloc {
|
||||
ARM64_RELOC_UNSIGNED => "ARM64_RELOC_UNSIGNED",
|
||||
ARM64_RELOC_SUBTRACTOR => "ARM64_RELOC_SUBTRACTOR",
|
||||
ARM64_RELOC_BRANCH26 => "ARM64_RELOC_BRANCH26",
|
||||
ARM64_RELOC_PAGE21 => "ARM64_RELOC_PAGE21",
|
||||
ARM64_RELOC_PAGEOFF12 => "ARM64_RELOC_PAGEOFF12",
|
||||
ARM64_RELOC_GOT_LOAD_PAGE21 => "ARM64_RELOC_GOT_LOAD_PAGE21",
|
||||
ARM64_RELOC_GOT_LOAD_PAGEOFF12 => "ARM64_RELOC_GOT_LOAD_PAGEOFF12",
|
||||
ARM64_RELOC_POINTER_TO_GOT => "ARM64_RELOC_POINTER_TO_GOT",
|
||||
ARM64_RELOC_TLVP_LOAD_PAGE21 => "ARM64_RELOC_TLVP_LOAD_PAGE21",
|
||||
ARM64_RELOC_TLVP_LOAD_PAGEOFF12 => "ARM64_RELOC_TLVP_LOAD_PAGEOFF12",
|
||||
ARM64_RELOC_ADDEND => "ARM64_RELOC_ADDEND",
|
||||
_ => "UNKNOWN",
|
||||
}
|
||||
CPU_TYPE_ARM64 | CPU_TYPE_ARM64_32 => match reloc {
|
||||
ARM64_RELOC_UNSIGNED => "ARM64_RELOC_UNSIGNED",
|
||||
ARM64_RELOC_SUBTRACTOR => "ARM64_RELOC_SUBTRACTOR",
|
||||
ARM64_RELOC_BRANCH26 => "ARM64_RELOC_BRANCH26",
|
||||
ARM64_RELOC_PAGE21 => "ARM64_RELOC_PAGE21",
|
||||
ARM64_RELOC_PAGEOFF12 => "ARM64_RELOC_PAGEOFF12",
|
||||
ARM64_RELOC_GOT_LOAD_PAGE21 => "ARM64_RELOC_GOT_LOAD_PAGE21",
|
||||
ARM64_RELOC_GOT_LOAD_PAGEOFF12 => "ARM64_RELOC_GOT_LOAD_PAGEOFF12",
|
||||
ARM64_RELOC_POINTER_TO_GOT => "ARM64_RELOC_POINTER_TO_GOT",
|
||||
ARM64_RELOC_TLVP_LOAD_PAGE21 => "ARM64_RELOC_TLVP_LOAD_PAGE21",
|
||||
ARM64_RELOC_TLVP_LOAD_PAGEOFF12 => "ARM64_RELOC_TLVP_LOAD_PAGEOFF12",
|
||||
ARM64_RELOC_ADDEND => "ARM64_RELOC_ADDEND",
|
||||
_ => "UNKNOWN",
|
||||
},
|
||||
CPU_TYPE_X86_64 => {
|
||||
match reloc {
|
||||
X86_64_RELOC_UNSIGNED => "X86_64_RELOC_UNSIGNED",
|
||||
X86_64_RELOC_SIGNED => "X86_64_RELOC_SIGNED",
|
||||
X86_64_RELOC_BRANCH => "X86_64_RELOC_BRANCH",
|
||||
X86_64_RELOC_GOT_LOAD => "X86_64_RELOC_GOT_LOAD",
|
||||
X86_64_RELOC_GOT => "X86_64_RELOC_GOT",
|
||||
X86_64_RELOC_SUBTRACTOR => "X86_64_RELOC_SUBTRACTOR",
|
||||
X86_64_RELOC_SIGNED_1 => "X86_64_RELOC_SIGNED_1",
|
||||
X86_64_RELOC_SIGNED_2 => "X86_64_RELOC_SIGNED_2",
|
||||
X86_64_RELOC_SIGNED_4 => "X86_64_RELOC_SIGNED_4",
|
||||
X86_64_RELOC_TLV => "X86_64_RELOC_TLV",
|
||||
_ => "UNKNOWN",
|
||||
}
|
||||
CPU_TYPE_X86_64 => match reloc {
|
||||
X86_64_RELOC_UNSIGNED => "X86_64_RELOC_UNSIGNED",
|
||||
X86_64_RELOC_SIGNED => "X86_64_RELOC_SIGNED",
|
||||
X86_64_RELOC_BRANCH => "X86_64_RELOC_BRANCH",
|
||||
X86_64_RELOC_GOT_LOAD => "X86_64_RELOC_GOT_LOAD",
|
||||
X86_64_RELOC_GOT => "X86_64_RELOC_GOT",
|
||||
X86_64_RELOC_SUBTRACTOR => "X86_64_RELOC_SUBTRACTOR",
|
||||
X86_64_RELOC_SIGNED_1 => "X86_64_RELOC_SIGNED_1",
|
||||
X86_64_RELOC_SIGNED_2 => "X86_64_RELOC_SIGNED_2",
|
||||
X86_64_RELOC_SIGNED_4 => "X86_64_RELOC_SIGNED_4",
|
||||
X86_64_RELOC_TLV => "X86_64_RELOC_TLV",
|
||||
_ => "UNKNOWN",
|
||||
},
|
||||
CPU_TYPE_ARM => {
|
||||
match reloc {
|
||||
ARM_RELOC_VANILLA => "ARM_RELOC_VANILLA",
|
||||
ARM_RELOC_PAIR => "ARM_RELOC_PAIR",
|
||||
ARM_RELOC_SECTDIFF => "ARM_RELOC_SECTDIFF",
|
||||
ARM_RELOC_LOCAL_SECTDIFF => "ARM_RELOC_LOCAL_SECTDIFF",
|
||||
ARM_RELOC_PB_LA_PTR => "ARM_RELOC_PB_LA_PTR",
|
||||
ARM_RELOC_BR24 => "ARM_RELOC_BR24",
|
||||
ARM_THUMB_RELOC_BR22 => "ARM_THUMB_RELOC_BR22",
|
||||
ARM_THUMB_32BIT_BRANCH => "ARM_THUMB_32BIT_BRANCH",
|
||||
ARM_RELOC_HALF => "ARM_RELOC_HALF",
|
||||
ARM_RELOC_HALF_SECTDIFF => "ARM_RELOC_HALF_SECTDIFF",
|
||||
_ => "UNKNOWN",
|
||||
}
|
||||
CPU_TYPE_ARM => match reloc {
|
||||
ARM_RELOC_VANILLA => "ARM_RELOC_VANILLA",
|
||||
ARM_RELOC_PAIR => "ARM_RELOC_PAIR",
|
||||
ARM_RELOC_SECTDIFF => "ARM_RELOC_SECTDIFF",
|
||||
ARM_RELOC_LOCAL_SECTDIFF => "ARM_RELOC_LOCAL_SECTDIFF",
|
||||
ARM_RELOC_PB_LA_PTR => "ARM_RELOC_PB_LA_PTR",
|
||||
ARM_RELOC_BR24 => "ARM_RELOC_BR24",
|
||||
ARM_THUMB_RELOC_BR22 => "ARM_THUMB_RELOC_BR22",
|
||||
ARM_THUMB_32BIT_BRANCH => "ARM_THUMB_32BIT_BRANCH",
|
||||
ARM_RELOC_HALF => "ARM_RELOC_HALF",
|
||||
ARM_RELOC_HALF_SECTDIFF => "ARM_RELOC_HALF_SECTDIFF",
|
||||
_ => "UNKNOWN",
|
||||
},
|
||||
CPU_TYPE_X86 => {
|
||||
match reloc {
|
||||
GENERIC_RELOC_VANILLA => "GENERIC_RELOC_VANILLA",
|
||||
GENERIC_RELOC_PAIR => "GENERIC_RELOC_PAIR",
|
||||
GENERIC_RELOC_SECTDIFF => "GENERIC_RELOC_SECTDIFF",
|
||||
GENERIC_RELOC_LOCAL_SECTDIFF => "GENERIC_RELOC_LOCAL_SECTDIFF",
|
||||
GENERIC_RELOC_PB_LA_P => "GENERIC_RELOC_PB_LA_P",
|
||||
_ => "UNKNOWN",
|
||||
}
|
||||
CPU_TYPE_X86 => match reloc {
|
||||
GENERIC_RELOC_VANILLA => "GENERIC_RELOC_VANILLA",
|
||||
GENERIC_RELOC_PAIR => "GENERIC_RELOC_PAIR",
|
||||
GENERIC_RELOC_SECTDIFF => "GENERIC_RELOC_SECTDIFF",
|
||||
GENERIC_RELOC_PB_LA_PTR => "GENERIC_RELOC_PB_LA_PTR",
|
||||
GENERIC_RELOC_LOCAL_SECTDIFF => "GENERIC_RELOC_LOCAL_SECTDIFF",
|
||||
GENERIC_RELOC_TLV => "GENERIC_RELOC_TLV",
|
||||
_ => "UNKNOWN",
|
||||
},
|
||||
_ => "BAD_CPUTYPE"
|
||||
_ => "BAD_CPUTYPE",
|
||||
}
|
||||
}
|
||||
|
|
|
|||
322  third_party/rust/goblin/src/mach/segment.rs  (vendored)
@ -1,19 +1,22 @@
|
|||
use scroll::{Pread, Pwrite};
|
||||
use scroll::ctx::{self, SizeWith};
|
||||
use scroll::{Pread, Pwrite};
|
||||
|
||||
use log::{debug, warn};
|
||||
|
||||
use core::fmt;
|
||||
use core::ops::{Deref, DerefMut};
|
||||
use alloc::boxed::Box;
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt;
|
||||
use core::ops::{Deref, DerefMut};
|
||||
|
||||
use crate::container;
|
||||
use crate::error;
|
||||
|
||||
use crate::mach::constants::{SECTION_TYPE, S_GB_ZEROFILL, S_THREAD_LOCAL_ZEROFILL, S_ZEROFILL};
|
||||
use crate::mach::load_command::{
|
||||
Section32, Section64, SegmentCommand32, SegmentCommand64, LC_SEGMENT, LC_SEGMENT_64,
|
||||
SIZEOF_SECTION_32, SIZEOF_SECTION_64, SIZEOF_SEGMENT_COMMAND_32, SIZEOF_SEGMENT_COMMAND_64,
|
||||
};
|
||||
use crate::mach::relocation::RelocationInfo;
|
||||
use crate::mach::load_command::{Section32, Section64, SegmentCommand32, SegmentCommand64, SIZEOF_SECTION_32, SIZEOF_SECTION_64, SIZEOF_SEGMENT_COMMAND_32, SIZEOF_SEGMENT_COMMAND_64, LC_SEGMENT, LC_SEGMENT_64};
|
||||
use crate::mach::constants::{SECTION_TYPE, S_ZEROFILL};
|
||||
|
||||
pub struct RelocationIterator<'a> {
|
||||
data: &'a [u8],
|
||||
|
|
@ -32,7 +35,7 @@ impl<'a> Iterator for RelocationIterator<'a> {
|
|||
self.count += 1;
|
||||
match self.data.gread_with(&mut self.offset, self.ctx) {
|
||||
Ok(res) => Some(Ok(res)),
|
||||
Err(e) => Some(Err(e.into()))
|
||||
Err(e) => Some(Err(e.into())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -42,23 +45,23 @@ impl<'a> Iterator for RelocationIterator<'a> {
|
|||
#[derive(Default)]
|
||||
pub struct Section {
|
||||
/// name of this section
|
||||
pub sectname: [u8; 16],
|
||||
pub sectname: [u8; 16],
|
||||
/// segment this section goes in
|
||||
pub segname: [u8; 16],
|
||||
pub segname: [u8; 16],
|
||||
/// memory address of this section
|
||||
pub addr: u64,
|
||||
pub addr: u64,
|
||||
/// size in bytes of this section
|
||||
pub size: u64,
|
||||
pub size: u64,
|
||||
/// file offset of this section
|
||||
pub offset: u32,
|
||||
pub offset: u32,
|
||||
/// section alignment (power of 2)
|
||||
pub align: u32,
|
||||
pub align: u32,
|
||||
/// file offset of relocation entries
|
||||
pub reloff: u32,
|
||||
pub reloff: u32,
|
||||
/// number of relocation entries
|
||||
pub nreloc: u32,
|
||||
pub nreloc: u32,
|
||||
/// flags (section type and attributes
|
||||
pub flags: u32,
|
||||
pub flags: u32,
|
||||
}
|
||||
|
||||
impl Section {
|
||||
|
|
@ -71,9 +74,17 @@ impl Section {
|
|||
Ok(self.segname.pread::<&str>(0)?)
|
||||
}
|
||||
/// Iterate this sections relocations given `data`; `data` must be the original binary
|
||||
pub fn iter_relocations<'b>(&self, data: &'b [u8], ctx: container::Ctx) -> RelocationIterator<'b> {
|
||||
pub fn iter_relocations<'b>(
|
||||
&self,
|
||||
data: &'b [u8],
|
||||
ctx: container::Ctx,
|
||||
) -> RelocationIterator<'b> {
|
||||
let offset = self.reloff as usize;
|
||||
debug!("Relocations for {} starting at offset: {:#x}", self.name().unwrap_or("BAD_SECTION_NAME"), offset);
|
||||
debug!(
|
||||
"Relocations for {} starting at offset: {:#x}",
|
||||
self.name().unwrap_or("BAD_SECTION_NAME"),
|
||||
offset
|
||||
);
|
||||
RelocationIterator {
|
||||
offset,
|
||||
nrelocs: self.nreloc as usize,
|
||||
|
|
@ -88,14 +99,14 @@ impl From<Section> for Section64 {
|
|||
fn from(section: Section) -> Self {
|
||||
Section64 {
|
||||
sectname: section.sectname,
|
||||
segname: section.segname,
|
||||
addr: section.addr as u64,
|
||||
size: section.size as u64,
|
||||
offset: section.offset,
|
||||
align: section.align,
|
||||
reloff: section.reloff,
|
||||
nreloc: section.nreloc,
|
||||
flags: section.flags,
|
||||
segname: section.segname,
|
||||
addr: section.addr as u64,
|
||||
size: section.size as u64,
|
||||
offset: section.offset,
|
||||
align: section.align,
|
||||
reloff: section.reloff,
|
||||
nreloc: section.nreloc,
|
||||
flags: section.flags,
|
||||
reserved1: 0,
|
||||
reserved2: 0,
|
||||
reserved3: 0,
|
||||
|
|
@ -107,14 +118,14 @@ impl From<Section> for Section32 {
|
|||
fn from(section: Section) -> Self {
|
||||
Section32 {
|
||||
sectname: section.sectname,
|
||||
segname: section.segname,
|
||||
addr: section.addr as u32,
|
||||
size: section.size as u32,
|
||||
offset: section.offset,
|
||||
align: section.align,
|
||||
reloff: section.reloff,
|
||||
nreloc: section.nreloc,
|
||||
flags: section.flags,
|
||||
segname: section.segname,
|
||||
addr: section.addr as u32,
|
||||
size: section.size as u32,
|
||||
offset: section.offset,
|
||||
align: section.align,
|
||||
reloff: section.reloff,
|
||||
nreloc: section.nreloc,
|
||||
flags: section.flags,
|
||||
reserved1: 0,
|
||||
reserved2: 0,
|
||||
}
|
||||
|
|
@ -125,14 +136,14 @@ impl fmt::Debug for Section {
|
|||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt.debug_struct("Section")
|
||||
.field("sectname", &self.name().unwrap())
|
||||
.field("segname", &self.segname().unwrap())
|
||||
.field("addr", &self.addr)
|
||||
.field("size", &self.size)
|
||||
.field("offset", &self.offset)
|
||||
.field("align", &self.align)
|
||||
.field("reloff", &self.reloff)
|
||||
.field("nreloc", &self.nreloc)
|
||||
.field("flags", &self.flags)
|
||||
.field("segname", &self.segname().unwrap())
|
||||
.field("addr", &self.addr)
|
||||
.field("size", &self.size)
|
||||
.field("offset", &self.offset)
|
||||
.field("align", &self.align)
|
||||
.field("reloff", &self.reloff)
|
||||
.field("nreloc", &self.nreloc)
|
||||
.field("flags", &self.flags)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
|
@ -141,14 +152,14 @@ impl From<Section32> for Section {
|
|||
fn from(section: Section32) -> Self {
|
||||
Section {
|
||||
sectname: section.sectname,
|
||||
segname: section.segname,
|
||||
addr: u64::from(section.addr),
|
||||
size: u64::from(section.size),
|
||||
offset: section.offset,
|
||||
align: section.align,
|
||||
reloff: section.reloff,
|
||||
nreloc: section.nreloc,
|
||||
flags: section.flags,
|
||||
segname: section.segname,
|
||||
addr: u64::from(section.addr),
|
||||
size: u64::from(section.size),
|
||||
offset: section.offset,
|
||||
align: section.align,
|
||||
reloff: section.reloff,
|
||||
nreloc: section.nreloc,
|
||||
flags: section.flags,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -157,14 +168,14 @@ impl From<Section64> for Section {
|
|||
fn from(section: Section64) -> Self {
|
||||
Section {
|
||||
sectname: section.sectname,
|
||||
segname: section.segname,
|
||||
addr: section.addr,
|
||||
size: section.size,
|
||||
offset: section.offset,
|
||||
align: section.align,
|
||||
reloff: section.reloff,
|
||||
nreloc: section.nreloc,
|
||||
flags: section.flags,
|
||||
segname: section.segname,
|
||||
addr: section.addr,
|
||||
size: section.size,
|
||||
offset: section.offset,
|
||||
align: section.align,
|
||||
reloff: section.reloff,
|
||||
nreloc: section.nreloc,
|
||||
flags: section.flags,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -176,11 +187,11 @@ impl<'a> ctx::TryFromCtx<'a, container::Ctx> for Section {
|
|||
container::Container::Little => {
|
||||
let section = Section::from(bytes.pread_with::<Section32>(0, ctx.le)?);
|
||||
Ok((section, SIZEOF_SECTION_32))
|
||||
},
|
||||
container::Container::Big => {
|
||||
}
|
||||
container::Container::Big => {
|
||||
let section = Section::from(bytes.pread_with::<Section64>(0, ctx.le)?);
|
||||
Ok((section, SIZEOF_SECTION_64))
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -189,7 +200,7 @@ impl ctx::SizeWith<container::Ctx> for Section {
|
|||
fn size_with(ctx: &container::Ctx) -> usize {
|
||||
match ctx.container {
|
||||
container::Container::Little => SIZEOF_SECTION_32,
|
||||
container::Container::Big => SIZEOF_SECTION_64,
|
||||
container::Container::Big => SIZEOF_SECTION_64,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -197,7 +208,7 @@ impl ctx::SizeWith<container::Ctx> for Section {
|
|||
impl ctx::TryIntoCtx<container::Ctx> for Section {
|
||||
type Error = crate::error::Error;
|
||||
fn try_into_ctx(self, bytes: &mut [u8], ctx: container::Ctx) -> Result<usize, Self::Error> {
|
||||
if ctx.is_big () {
|
||||
if ctx.is_big() {
|
||||
bytes.pwrite_with::<Section64>(self.into(), 0, ctx.le)?;
|
||||
} else {
|
||||
bytes.pwrite_with::<Section32>(self.into(), 0, ctx.le)?;
|
||||
|
|
@ -237,7 +248,11 @@ impl<'a> Iterator for SectionIterator<'a> {
|
|||
self.idx += 1;
|
||||
match self.data.gread_with::<Section>(&mut self.offset, self.ctx) {
|
||||
Ok(section) => {
|
||||
let data = if section.flags & SECTION_TYPE == S_ZEROFILL {
|
||||
let section_type = section.flags & SECTION_TYPE;
|
||||
let data = if section_type == S_ZEROFILL
|
||||
|| section_type == S_GB_ZEROFILL
|
||||
|| section_type == S_THREAD_LOCAL_ZEROFILL
|
||||
{
|
||||
&[]
|
||||
} else {
|
||||
// it's not uncommon to encounter macho files where files are
|
||||
|
|
@ -248,7 +263,10 @@ impl<'a> Iterator for SectionIterator<'a> {
|
|||
self.data
|
||||
.get(section.offset as usize..)
|
||||
.unwrap_or_else(|| {
|
||||
warn!("section #{} offset {} out of bounds", self.idx, section.offset);
|
||||
warn!(
|
||||
"section #{} offset {} out of bounds",
|
||||
self.idx, section.offset
|
||||
);
|
||||
&[]
|
||||
})
|
||||
.get(..section.size as usize)
|
||||
|
|
@ -258,8 +276,8 @@ impl<'a> Iterator for SectionIterator<'a> {
|
|||
})
|
||||
};
|
||||
Some(Ok((section, data)))
|
||||
},
|
||||
Err(e) => Some(Err(e))
|
||||
}
|
||||
Err(e) => Some(Err(e)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -281,37 +299,37 @@ impl<'a, 'b> IntoIterator for &'b Segment<'a> {
|
|||
|
||||
/// Generalized 32/64 bit Segment Command
|
||||
pub struct Segment<'a> {
|
||||
pub cmd: u32,
|
||||
pub cmdsize: u32,
|
||||
pub segname: [u8; 16],
|
||||
pub vmaddr: u64,
|
||||
pub vmsize: u64,
|
||||
pub fileoff: u64,
|
||||
pub cmd: u32,
|
||||
pub cmdsize: u32,
|
||||
pub segname: [u8; 16],
|
||||
pub vmaddr: u64,
|
||||
pub vmsize: u64,
|
||||
pub fileoff: u64,
|
||||
pub filesize: u64,
|
||||
pub maxprot: u32,
|
||||
pub maxprot: u32,
|
||||
pub initprot: u32,
|
||||
pub nsects: u32,
|
||||
pub flags: u32,
|
||||
pub data: &'a [u8],
|
||||
offset: usize,
|
||||
raw_data: &'a [u8],
|
||||
ctx: container::Ctx,
|
||||
pub nsects: u32,
|
||||
pub flags: u32,
|
||||
pub data: &'a [u8],
|
||||
offset: usize,
|
||||
raw_data: &'a [u8],
|
||||
ctx: container::Ctx,
|
||||
}
|
||||
|
||||
impl<'a> From<Segment<'a>> for SegmentCommand64 {
|
||||
fn from(segment: Segment<'a>) -> Self {
|
||||
SegmentCommand64 {
|
||||
cmd: segment.cmd,
|
||||
cmdsize: segment.cmdsize,
|
||||
segname: segment.segname,
|
||||
vmaddr: segment.vmaddr as u64,
|
||||
vmsize: segment.vmsize as u64,
|
||||
fileoff: segment.fileoff as u64,
|
||||
cmd: segment.cmd,
|
||||
cmdsize: segment.cmdsize,
|
||||
segname: segment.segname,
|
||||
vmaddr: segment.vmaddr as u64,
|
||||
vmsize: segment.vmsize as u64,
|
||||
fileoff: segment.fileoff as u64,
|
||||
filesize: segment.filesize as u64,
|
||||
maxprot: segment.maxprot,
|
||||
maxprot: segment.maxprot,
|
||||
initprot: segment.initprot,
|
||||
nsects: segment.nsects,
|
||||
flags: segment.flags,
|
||||
nsects: segment.nsects,
|
||||
flags: segment.flags,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -319,17 +337,17 @@ impl<'a> From<Segment<'a>> for SegmentCommand64 {
|
|||
impl<'a> From<Segment<'a>> for SegmentCommand32 {
|
||||
fn from(segment: Segment<'a>) -> Self {
|
||||
SegmentCommand32 {
|
||||
cmd: segment.cmd,
|
||||
cmdsize: segment.cmdsize,
|
||||
segname: segment.segname,
|
||||
vmaddr: segment.vmaddr as u32,
|
||||
vmsize: segment.vmsize as u32,
|
||||
fileoff: segment.fileoff as u32,
|
||||
cmd: segment.cmd,
|
||||
cmdsize: segment.cmdsize,
|
||||
segname: segment.segname,
|
||||
vmaddr: segment.vmaddr as u32,
|
||||
vmsize: segment.vmsize as u32,
|
||||
fileoff: segment.fileoff as u32,
|
||||
filesize: segment.filesize as u32,
|
||||
maxprot: segment.maxprot,
|
||||
maxprot: segment.maxprot,
|
||||
initprot: segment.initprot,
|
||||
nsects: segment.nsects,
|
||||
flags: segment.flags,
|
||||
nsects: segment.nsects,
|
||||
flags: segment.flags,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -340,8 +358,8 @@ impl<'a> fmt::Debug for Segment<'a> {
|
|||
.field("cmd", &self.cmd)
|
||||
.field("cmdsize", &self.cmdsize)
|
||||
.field("segname", &self.segname.pread::<&str>(0).unwrap())
|
||||
.field("vmaddr", &self.vmaddr)
|
||||
.field("vmsize", &self.vmsize)
|
||||
.field("vmaddr", &self.vmaddr)
|
||||
.field("vmsize", &self.vmsize)
|
||||
.field("fileoff", &self.fileoff)
|
||||
.field("filesize", &self.filesize)
|
||||
.field("maxprot", &self.maxprot)
|
||||
|
|
@ -349,8 +367,14 @@ impl<'a> fmt::Debug for Segment<'a> {
|
|||
.field("nsects", &self.nsects)
|
||||
.field("flags", &self.flags)
|
||||
.field("data", &self.data.len())
|
||||
.field("sections()", &self.sections().map(|sections|
|
||||
sections.into_iter().map(|(section,_)| section).collect::<Vec<_>>())
|
||||
.field(
|
||||
"sections()",
|
||||
&self.sections().map(|sections| {
|
||||
sections
|
||||
.into_iter()
|
||||
.map(|(section, _)| section)
|
||||
.collect::<Vec<_>>()
|
||||
}),
|
||||
)
|
||||
.finish()
|
||||
}
|
||||
|
|
@ -360,7 +384,7 @@ impl<'a> ctx::SizeWith<container::Ctx> for Segment<'a> {
|
|||
fn size_with(ctx: &container::Ctx) -> usize {
|
||||
match ctx.container {
|
||||
container::Container::Little => SIZEOF_SEGMENT_COMMAND_32,
|
||||
container::Container::Big => SIZEOF_SEGMENT_COMMAND_64,
|
||||
container::Container::Big => SIZEOF_SEGMENT_COMMAND_64,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -372,7 +396,7 @@ impl<'a> ctx::TryIntoCtx<container::Ctx> for Segment<'a> {
         // should be able to write the section data inline after this, but not working at the moment
         //let section_size = bytes.pwrite(data, segment_size)?;
         //debug!("Segment size: {} raw section data size: {}", segment_size, data.len());
-        if ctx.is_big () {
+        if ctx.is_big() {
             bytes.pwrite_with::<SegmentCommand64>(self.into(), 0, ctx.le)?;
         } else {
             bytes.pwrite_with::<SegmentCommand32>(self.into(), 0, ctx.le)?;
@@ -389,8 +413,8 @@ impl<'a> ctx::IntoCtx<container::Ctx> for Segment<'a> {
 }
 
 /// Read data that belongs to a segment if the offset is within the boundaries of bytes.
-fn segment_data(bytes: &[u8], fileoff :u64, filesize :u64) -> Result<&[u8], error::Error> {
-    let data :&[u8] = if filesize != 0 {
+fn segment_data(bytes: &[u8], fileoff: u64, filesize: u64) -> Result<&[u8], error::Error> {
+    let data: &[u8] = if filesize != 0 {
         bytes.pread_with(fileoff as usize, filesize as usize)?
     } else {
         &[]
@@ -403,19 +427,23 @@ impl<'a> Segment<'a> {
     /// **NB** You are responsible for providing a correctly marshalled byte array as the sections. You should not use this for anything other than writing.
     pub fn new(ctx: container::Ctx, sections: &'a [u8]) -> Self {
         Segment {
-            cmd: if ctx.is_big() { LC_SEGMENT_64 } else { LC_SEGMENT },
+            cmd: if ctx.is_big() {
+                LC_SEGMENT_64
+            } else {
+                LC_SEGMENT
+            },
             cmdsize: (Self::size_with(&ctx) + sections.len()) as u32,
             segname: [0; 16],
             vmaddr: 0,
             vmsize: 0,
             fileoff: 0,
             filesize: 0,
             maxprot: 0,
             initprot: 0,
             nsects: 0,
             flags: 0,
             data: sections,
             offset: 0,
             raw_data: &[],
             ctx,
         }
@@ -433,39 +461,53 @@ impl<'a> Segment<'a> {
         Ok(sections)
     }
     /// Convert the raw C 32-bit segment command to a generalized version
-    pub fn from_32(bytes: &'a[u8], segment: &SegmentCommand32, offset: usize, ctx: container::Ctx) -> Result<Self, error::Error> {
+    pub fn from_32(
+        bytes: &'a [u8],
+        segment: &SegmentCommand32,
+        offset: usize,
+        ctx: container::Ctx,
+    ) -> Result<Self, error::Error> {
         Ok(Segment {
             cmd: segment.cmd,
             cmdsize: segment.cmdsize,
             segname: segment.segname,
             vmaddr: u64::from(segment.vmaddr),
             vmsize: u64::from(segment.vmsize),
             fileoff: u64::from(segment.fileoff),
             filesize: u64::from(segment.filesize),
             maxprot: segment.maxprot,
             initprot: segment.initprot,
             nsects: segment.nsects,
             flags: segment.flags,
-            data: segment_data(bytes, u64::from(segment.fileoff), u64::from(segment.filesize))?,
+            data: segment_data(
+                bytes,
+                u64::from(segment.fileoff),
+                u64::from(segment.filesize),
+            )?,
             offset,
             raw_data: bytes,
             ctx,
         })
     }
     /// Convert the raw C 64-bit segment command to a generalized version
-    pub fn from_64(bytes: &'a [u8], segment: &SegmentCommand64, offset: usize, ctx: container::Ctx) -> Result<Self, error::Error> {
+    pub fn from_64(
+        bytes: &'a [u8],
+        segment: &SegmentCommand64,
+        offset: usize,
+        ctx: container::Ctx,
+    ) -> Result<Self, error::Error> {
         Ok(Segment {
             cmd: segment.cmd,
             cmdsize: segment.cmdsize,
             segname: segment.segname,
             vmaddr: segment.vmaddr,
             vmsize: segment.vmsize,
             fileoff: segment.fileoff,
             filesize: segment.filesize,
             maxprot: segment.maxprot,
             initprot: segment.initprot,
             nsects: segment.nsects,
             flags: segment.flags,
             data: segment_data(bytes, segment.fileoff, segment.filesize)?,
             offset,
             raw_data: bytes,
@@ -478,7 +520,6 @@ impl<'a> Segment<'a> {
 /// An opaque 32/64-bit container for Mach-o segments
 pub struct Segments<'a> {
     segments: Vec<Segment<'a>>,
-    ctx: container::Ctx,
 }
 
 impl<'a> Deref for Segments<'a> {
@@ -504,15 +545,14 @@ impl<'a, 'b> IntoIterator for &'b Segments<'a> {
 
 impl<'a> Segments<'a> {
     /// Construct a new generalized segment container from this `ctx`
-    pub fn new(ctx: container::Ctx) -> Self {
+    pub fn new(_ctx: container::Ctx) -> Self {
         Segments {
             segments: Vec::new(),
-            ctx,
         }
     }
     /// Get every section from every segment
     // thanks to SpaceManic for figuring out the 'b lifetimes here :)
-    pub fn sections<'b>(&'b self) -> Box<dyn Iterator<Item=SectionIterator<'a>> + 'b> {
+    pub fn sections<'b>(&'b self) -> Box<dyn Iterator<Item = SectionIterator<'a>> + 'b> {
         Box::new(self.segments.iter().map(|segment| segment.into_iter()))
     }
 }
207  third_party/rust/goblin/src/mach/symbols.rs  (vendored)
@@ -2,13 +2,13 @@
 //!
 //! Symbols are essentially a type, offset, and the symbol name
 
-use scroll::ctx;
-use scroll::ctx::SizeWith;
-use scroll::{Pread, Pwrite, SizeWith, IOread, IOwrite};
-use crate::error;
 use crate::container::{self, Container};
+use crate::error;
 use crate::mach::load_command;
 use core::fmt::{self, Debug};
+use scroll::ctx;
+use scroll::ctx::SizeWith;
+use scroll::{IOread, IOwrite, Pread, Pwrite, SizeWith};
 
 // The n_type field really contains four fields which are used via the following masks.
 /// if any of these bits set, a symbolic debugging entry
@@ -38,7 +38,7 @@ pub const MAX_SECT: u8 = 255;
 /// undefined, n_sect == NO_SECT
 pub const N_UNDF: u8 = 0x0;
 /// absolute, n_sect == NO_SECT
 pub const N_ABS: u8 = 0x2;
 /// defined in section number n_sect
 pub const N_SECT: u8 = 0xe;
 /// prebound undefined (defined in a dylib)
@@ -47,37 +47,37 @@ pub const N_PBUD: u8 = 0xc;
 pub const N_INDR: u8 = 0xa;
 
 // n_types when N_STAB
 pub const N_GSYM: u8 = 0x20;
 pub const N_FNAME: u8 = 0x22;
 pub const N_FUN: u8 = 0x24;
 pub const N_STSYM: u8 = 0x26;
 pub const N_LCSYM: u8 = 0x28;
 pub const N_BNSYM: u8 = 0x2e;
 pub const N_PC: u8 = 0x30;
 pub const N_AST: u8 = 0x32;
 pub const N_OPT: u8 = 0x3c;
 pub const N_RSYM: u8 = 0x40;
 pub const N_SLINE: u8 = 0x44;
 pub const N_ENSYM: u8 = 0x4e;
 pub const N_SSYM: u8 = 0x60;
 pub const N_SO: u8 = 0x64;
 pub const N_OSO: u8 = 0x66;
 pub const N_LSYM: u8 = 0x80;
 pub const N_BINCL: u8 = 0x82;
 pub const N_SOL: u8 = 0x84;
 pub const N_PARAMS: u8 = 0x86;
 pub const N_VERSION: u8 = 0x88;
 pub const N_OLEVEL: u8 = 0x8a;
 pub const N_PSYM: u8 = 0xa0;
 pub const N_EINCL: u8 = 0xa2;
 pub const N_ENTRY: u8 = 0xa4;
 pub const N_LBRAC: u8 = 0xc0;
 pub const N_EXCL: u8 = 0xc2;
 pub const N_RBRAC: u8 = 0xe0;
 pub const N_BCOMM: u8 = 0xe2;
 pub const N_ECOMM: u8 = 0xe4;
 pub const N_ECOML: u8 = 0xe8;
 pub const N_LENG: u8 = 0xfe;
 
 pub const NLIST_TYPE_MASK: u8 = 0xe;
 pub const NLIST_TYPE_GLOBAL: u8 = 0x1;
@@ -132,7 +132,7 @@ pub fn n_type_to_str(n_type: u8) -> &'static str {
         N_SECT => "N_SECT",
         N_PBUD => "N_PBUD",
         N_INDR => "N_INDR",
-        _ => "UNKNOWN_N_TYPE"
+        _ => "UNKNOWN_N_TYPE",
     }
 }
 
@@ -156,12 +156,12 @@ pub const SIZEOF_NLIST_32: usize = 12;
 impl Debug for Nlist32 {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
         fmt.debug_struct("Nlist32")
             .field("n_strx", &format_args!("{:04}", self.n_strx))
             .field("n_type", &format_args!("{:#02x}", self.n_type))
             .field("n_sect", &format_args!("{:#x}", self.n_sect))
             .field("n_desc", &format_args!("{:#03x}", self.n_desc))
             .field("n_value", &format_args!("{:#x}", self.n_value))
             .finish()
     }
 }
 
@@ -185,16 +185,16 @@ pub const SIZEOF_NLIST_64: usize = 16;
 impl Debug for Nlist64 {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
         fmt.debug_struct("Nlist64")
             .field("n_strx", &format_args!("{:04}", self.n_strx))
             .field("n_type", &format_args!("{:#02x}", self.n_type))
             .field("n_sect", &format_args!("{:#x}", self.n_sect))
             .field("n_desc", &format_args!("{:#03x}", self.n_desc))
             .field("n_value", &format_args!("{:#x}", self.n_value))
             .finish()
     }
 }
 
-#[derive(Debug, Clone,)]
+#[derive(Debug, Clone)]
 pub struct Nlist {
     /// index into the string table
     pub n_strx: usize,
@@ -238,12 +238,8 @@ impl Nlist {
 impl ctx::SizeWith<container::Ctx> for Nlist {
     fn size_with(ctx: &container::Ctx) -> usize {
         match ctx.container {
-            Container::Little => {
-                SIZEOF_NLIST_32
-            },
-            Container::Big => {
-                SIZEOF_NLIST_64
-            },
+            Container::Little => SIZEOF_NLIST_32,
+            Container::Big => SIZEOF_NLIST_64,
         }
     }
 }
@@ -298,14 +294,13 @@ impl From<Nlist> for Nlist64 {
 
 impl<'a> ctx::TryFromCtx<'a, container::Ctx> for Nlist {
     type Error = crate::error::Error;
-    fn try_from_ctx(bytes: &'a [u8], container::Ctx { container, le }: container::Ctx) -> crate::error::Result<(Self, usize)> {
+    fn try_from_ctx(
+        bytes: &'a [u8],
+        container::Ctx { container, le }: container::Ctx,
+    ) -> crate::error::Result<(Self, usize)> {
         let nlist = match container {
-            Container::Little => {
-                (bytes.pread_with::<Nlist32>(0, le)?.into(), SIZEOF_NLIST_32)
-            },
-            Container::Big => {
-                (bytes.pread_with::<Nlist64>(0, le)?.into(), SIZEOF_NLIST_64)
-            },
+            Container::Little => (bytes.pread_with::<Nlist32>(0, le)?.into(), SIZEOF_NLIST_32),
+            Container::Big => (bytes.pread_with::<Nlist64>(0, le)?.into(), SIZEOF_NLIST_64),
         };
         Ok(nlist)
     }
@@ -313,14 +308,14 @@ impl<'a> ctx::TryFromCtx<'a, container::Ctx> for Nlist {
 
 impl ctx::TryIntoCtx<container::Ctx> for Nlist {
     type Error = crate::error::Error;
-    fn try_into_ctx(self, bytes: &mut [u8], container::Ctx { container, le }: container::Ctx) -> Result<usize, Self::Error> {
+    fn try_into_ctx(
+        self,
+        bytes: &mut [u8],
+        container::Ctx { container, le }: container::Ctx,
+    ) -> Result<usize, Self::Error> {
         let size = match container {
-            Container::Little => {
-                (bytes.pwrite_with::<Nlist32>(self.into(), 0, le)?)
-            },
-            Container::Big => {
-                (bytes.pwrite_with::<Nlist64>(self.into(), 0, le)?)
-            },
+            Container::Little => (bytes.pwrite_with::<Nlist32>(self.into(), 0, le)?),
+            Container::Big => (bytes.pwrite_with::<Nlist64>(self.into(), 0, le)?),
         };
         Ok(size)
     }
@@ -339,19 +334,26 @@ pub struct SymbolsCtx {
     pub ctx: container::Ctx,
 }
 
-impl<'a, T: ?Sized> ctx::TryFromCtx<'a, SymbolsCtx, T> for Symbols<'a> where T: AsRef<[u8]> {
+impl<'a, T: ?Sized> ctx::TryFromCtx<'a, SymbolsCtx, T> for Symbols<'a>
+where
+    T: AsRef<[u8]>,
+{
     type Error = crate::error::Error;
-    fn try_from_ctx(bytes: &'a T, SymbolsCtx {
-        nsyms, strtab, ctx
-    }: SymbolsCtx) -> crate::error::Result<(Self, usize)> {
+    fn try_from_ctx(
+        bytes: &'a T,
+        SymbolsCtx { nsyms, strtab, ctx }: SymbolsCtx,
+    ) -> crate::error::Result<(Self, usize)> {
         let data = bytes.as_ref();
-        Ok ((Symbols {
-            data,
-            start: 0,
-            nsyms,
-            strtab,
-            ctx,
-        }, data.len()))
+        Ok((
+            Symbols {
+                data,
+                start: 0,
+                nsyms,
+                strtab,
+                ctx,
+            },
+            data.len(),
+        ))
     }
 }
 
@@ -373,15 +375,11 @@ impl<'a> Iterator for SymbolIterator<'a> {
         } else {
             self.count += 1;
             match self.data.gread_with::<Nlist>(&mut self.offset, self.ctx) {
-                Ok(symbol) => {
-                    match self.data.pread(self.strtab + symbol.n_strx) {
-                        Ok(name) => {
-                            Some(Ok((name, symbol)))
-                        },
-                        Err(e) => Some(Err(e.into()))
-                    }
+                Ok(symbol) => match self.data.pread(self.strtab + symbol.n_strx) {
+                    Ok(name) => Some(Ok((name, symbol))),
+                    Err(e) => Some(Err(e.into())),
                 },
-                Err(e) => Some(Err(e))
+                Err(e) => Some(Err(e)),
             }
         }
     }
@@ -409,9 +407,14 @@ impl<'a> Symbols<'a> {
     /// Creates a new symbol table with `count` elements, from the `start` offset, using the string table at `strtab`, with a _default_ ctx.
     ////
     /// **Beware**, this will provide incorrect results if you construct this on a 32-bit mach binary, using a 64-bit machine; use `parse` instead if you want 32/64 bit support
-    pub fn new(bytes: &'a [u8], start: usize, count: usize, strtab: usize) -> error::Result<Symbols<'a>> {
+    pub fn new(
+        bytes: &'a [u8],
+        start: usize,
+        count: usize,
+        strtab: usize,
+    ) -> error::Result<Symbols<'a>> {
         let nsyms = count;
-        Ok (Symbols {
+        Ok(Symbols {
             data: bytes,
             start,
             nsyms,
@@ -419,10 +422,24 @@ impl<'a> Symbols<'a> {
             ctx: container::Ctx::default(),
         })
     }
-    pub fn parse(bytes: &'a [u8], symtab: &load_command::SymtabCommand, ctx: container::Ctx) -> error::Result<Symbols<'a>> {
+    pub fn parse(
+        bytes: &'a [u8],
+        symtab: &load_command::SymtabCommand,
+        ctx: container::Ctx,
+    ) -> error::Result<Symbols<'a>> {
         // we need to normalize the strtab offset before we receive the truncated bytes in pread_with
-        let strtab = symtab.stroff - symtab.symoff;
-        Ok(bytes.pread_with(symtab.symoff as usize, SymbolsCtx { nsyms: symtab.nsyms as usize, strtab: strtab as usize, ctx })?)
+        let strtab = symtab
+            .stroff
+            .checked_sub(symtab.symoff)
+            .ok_or_else(|| error::Error::Malformed("invalid symbol table offset".into()))?;
+        bytes.pread_with(
+            symtab.symoff as usize,
+            SymbolsCtx {
+                nsyms: symtab.nsyms as usize,
+                strtab: strtab as usize,
+                ctx,
+            },
+        )
     }
 
     pub fn iter(&self) -> SymbolIterator<'a> {
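Note on the hunk above: the subtraction `stroff - symoff` is now a `checked_sub`, so a malformed Mach-O symtab command where the string table precedes the symbol table produces a `Malformed` error instead of an integer underflow. A minimal sketch of the same pattern, using a stand-in error type rather than goblin's:

    #[derive(Debug)]
    struct Malformed(String); // stand-in for goblin's error::Error::Malformed

    /// String-table offset relative to the symbol-table offset; rejects
    /// inputs where `stroff < symoff` instead of wrapping around.
    fn strtab_delta(stroff: u32, symoff: u32) -> Result<u32, Malformed> {
        stroff
            .checked_sub(symoff)
            .ok_or_else(|| Malformed("invalid symbol table offset".into()))
    }

    fn main() {
        assert_eq!(strtab_delta(0x200, 0x100).unwrap(), 0x100);
        assert!(strtab_delta(0x100, 0x200).is_err()); // would previously have underflowed
    }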
@@ -438,7 +455,9 @@ impl<'a> Symbols<'a> {
 
     /// Parses a single Nlist symbol from the binary, with its accompanying name
     pub fn get(&self, index: usize) -> crate::error::Result<(&'a str, Nlist)> {
-        let sym: Nlist = self.data.pread_with(self.start + (index * Nlist::size_with(&self.ctx)), self.ctx)?;
+        let sym: Nlist = self
+            .data
+            .pread_with(self.start + (index * Nlist::size_with(&self.ctx)), self.ctx)?;
         let name = self.data.pread(self.strtab + sym.n_strx)?;
         Ok((name, sym))
     }
@@ -91,9 +91,9 @@ pub const IMAGE_FILE_UP_SYSTEM_ONLY: u16 = 0x4000;
 pub const IMAGE_FILE_BYTES_REVERSED_HI: u16 = 0x8000;
 
 pub fn is_dll(characteristics: u16) -> bool {
     characteristics & IMAGE_FILE_DLL == IMAGE_FILE_DLL
 }
 
 pub fn is_exe(characteristics: u16) -> bool {
     characteristics & IMAGE_FILE_EXECUTABLE_IMAGE == IMAGE_FILE_EXECUTABLE_IMAGE
 }
@@ -2,8 +2,7 @@ use crate::error;
 use scroll::{Pread, Pwrite, SizeWith};
 
 #[repr(C)]
-#[derive(Debug, PartialEq, Copy, Clone, Default)]
-#[derive(Pread, Pwrite, SizeWith)]
+#[derive(Debug, PartialEq, Copy, Clone, Default, Pread, Pwrite, SizeWith)]
 pub struct DataDirectory {
     pub virtual_address: u32,
     pub size: u32,
@@ -15,7 +14,7 @@ const NUM_DATA_DIRECTORIES: usize = 16;
 impl DataDirectory {
     pub fn parse(bytes: &[u8], offset: &mut usize) -> error::Result<Self> {
         let dd = bytes.gread_with(offset, scroll::LE)?;
-        Ok (dd)
+        Ok(dd)
     }
 }
 
@@ -27,72 +26,81 @@ pub struct DataDirectories {
 impl DataDirectories {
     pub fn parse(bytes: &[u8], count: usize, offset: &mut usize) -> error::Result<Self> {
         let mut data_directories = [None; NUM_DATA_DIRECTORIES];
-        if count > NUM_DATA_DIRECTORIES { return Err (error::Error::Malformed(format!("data directory count ({}) is greater than maximum number of data directories ({})", count, NUM_DATA_DIRECTORIES))) }
+        if count > NUM_DATA_DIRECTORIES {
+            return Err(error::Error::Malformed(format!(
+                "data directory count ({}) is greater than maximum number of data directories ({})",
+                count, NUM_DATA_DIRECTORIES
+            )));
+        }
         for dir in data_directories.iter_mut().take(count) {
             let dd = DataDirectory::parse(bytes, offset)?;
-            let dd = if dd.virtual_address == 0 && dd.size == 0 { None } else { Some (dd) };
+            let dd = if dd.virtual_address == 0 && dd.size == 0 {
+                None
+            } else {
+                Some(dd)
+            };
             *dir = dd;
         }
-        Ok (DataDirectories { data_directories })
+        Ok(DataDirectories { data_directories })
     }
     pub fn get_export_table(&self) -> &Option<DataDirectory> {
         let idx = 0;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_import_table(&self) -> &Option<DataDirectory> {
         let idx = 1;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_resource_table(&self) -> &Option<DataDirectory> {
         let idx = 2;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
    }
     pub fn get_exception_table(&self) -> &Option<DataDirectory> {
         let idx = 3;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_certificate_table(&self) -> &Option<DataDirectory> {
         let idx = 4;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_base_relocation_table(&self) -> &Option<DataDirectory> {
         let idx = 5;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_debug_table(&self) -> &Option<DataDirectory> {
         let idx = 6;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_architecture(&self) -> &Option<DataDirectory> {
         let idx = 7;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_global_ptr(&self) -> &Option<DataDirectory> {
         let idx = 8;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_tls_table(&self) -> &Option<DataDirectory> {
         let idx = 9;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_load_config_table(&self) -> &Option<DataDirectory> {
         let idx = 10;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_bound_import_table(&self) -> &Option<DataDirectory> {
         let idx = 11;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_import_address_table(&self) -> &Option<DataDirectory> {
         let idx = 12;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_delay_import_descriptor(&self) -> &Option<DataDirectory> {
         let idx = 13;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
     pub fn get_clr_runtime_header(&self) -> &Option<DataDirectory> {
         let idx = 14;
-        unsafe { self.data_directories.get_unchecked(idx) }
+        &self.data_directories[idx]
     }
 }
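Note on the hunk above: replacing `get_unchecked` with ordinary indexing is sound because every getter uses a constant index below `NUM_DATA_DIRECTORIES`, so the bounds check can never fail and the `unsafe` block bought nothing. A small illustrative sketch of the same idea (names here are stand-ins, not goblin's):

    const NUM_DATA_DIRECTORIES: usize = 16;

    struct Directories {
        entries: [Option<(u32, u32)>; NUM_DATA_DIRECTORIES],
    }

    impl Directories {
        // Safe indexing: the index is a constant < 16, so the bounds check is
        // trivially satisfied (and typically optimized away).
        fn export_table(&self) -> &Option<(u32, u32)> {
            &self.entries[0]
        }
    }

    fn main() {
        let dirs = Directories { entries: [None; NUM_DATA_DIRECTORIES] };
        assert!(dirs.export_table().is_none());
    }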
101  third_party/rust/goblin/src/pe/debug.rs  (vendored)
@@ -1,9 +1,10 @@
-use scroll::{Pread, Pwrite, SizeWith};
 use crate::error;
+use scroll::{Pread, Pwrite, SizeWith};
 
+use crate::pe::data_directories;
+use crate::pe::options;
 use crate::pe::section_table;
 use crate::pe::utils;
-use crate::pe::data_directories;
 
 #[derive(Debug, PartialEq, Copy, Clone, Default)]
 pub struct DebugData<'a> {
@@ -12,27 +13,48 @@ pub struct DebugData<'a> {
 }
 
 impl<'a> DebugData<'a> {
-    pub fn parse(bytes: &'a [u8], dd: data_directories::DataDirectory, sections: &[section_table::SectionTable], file_alignment: u32) -> error::Result<Self> {
-        let image_debug_directory = ImageDebugDirectory::parse(bytes, dd, sections, file_alignment)?;
-        let codeview_pdb70_debug_info = CodeviewPDB70DebugInfo::parse(bytes, &image_debug_directory)?;
+    pub fn parse(
+        bytes: &'a [u8],
+        dd: data_directories::DataDirectory,
+        sections: &[section_table::SectionTable],
+        file_alignment: u32,
+    ) -> error::Result<Self> {
+        Self::parse_with_opts(
+            bytes,
+            dd,
+            sections,
+            file_alignment,
+            &options::ParseOptions::default(),
+        )
+    }
 
-        Ok(DebugData{
+    pub fn parse_with_opts(
+        bytes: &'a [u8],
+        dd: data_directories::DataDirectory,
+        sections: &[section_table::SectionTable],
+        file_alignment: u32,
+        opts: &options::ParseOptions,
+    ) -> error::Result<Self> {
+        let image_debug_directory =
+            ImageDebugDirectory::parse_with_opts(bytes, dd, sections, file_alignment, opts)?;
+        let codeview_pdb70_debug_info =
+            CodeviewPDB70DebugInfo::parse_with_opts(bytes, &image_debug_directory, opts)?;
+
+        Ok(DebugData {
             image_debug_directory,
-            codeview_pdb70_debug_info
+            codeview_pdb70_debug_info,
         })
     }
 
     /// Return this executable's debugging GUID, suitable for matching against a PDB file.
     pub fn guid(&self) -> Option<[u8; 16]> {
-        self.codeview_pdb70_debug_info
-            .map(|pdb70| pdb70.signature)
+        self.codeview_pdb70_debug_info.map(|pdb70| pdb70.signature)
     }
 }
 
 // https://msdn.microsoft.com/en-us/library/windows/desktop/ms680307(v=vs.85).aspx
 #[repr(C)]
-#[derive(Debug, PartialEq, Copy, Clone, Default)]
-#[derive(Pread, Pwrite, SizeWith)]
+#[derive(Debug, PartialEq, Copy, Clone, Default, Pread, Pwrite, SizeWith)]
 pub struct ImageDebugDirectory {
     pub characteristics: u32,
     pub time_date_stamp: u32,
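Note on the hunk above: the recurring shape in this goblin update is a thin `parse` wrapper that keeps its old signature and forwards to a new `parse_with_opts` taking a `ParseOptions`, so existing callers keep compiling. A minimal sketch of that pattern (the types and field below are illustrative, not goblin's exact API):

    #[derive(Default, Clone, Copy)]
    struct ParseOptions {
        resolve_rva: bool, // mirrors the knob threaded through the PE parser
    }

    #[derive(Debug)]
    struct DebugInfo {
        used_file_offsets: bool,
    }

    impl DebugInfo {
        /// Old entry point: unchanged signature, now just a shim.
        fn parse(bytes: &[u8]) -> DebugInfo {
            Self::parse_with_opts(bytes, &ParseOptions::default())
        }

        /// New entry point that threads the options through.
        fn parse_with_opts(_bytes: &[u8], opts: &ParseOptions) -> DebugInfo {
            DebugInfo { used_file_offsets: opts.resolve_rva }
        }
    }

    fn main() {
        assert!(!DebugInfo::parse(&[]).used_file_offsets);
    }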
@@ -54,11 +76,38 @@ pub const IMAGE_DEBUG_TYPE_FIXUP: u32 = 6;
 pub const IMAGE_DEBUG_TYPE_BORLAND: u32 = 9;
 
 impl ImageDebugDirectory {
-    fn parse(bytes: &[u8], dd: data_directories::DataDirectory, sections: &[section_table::SectionTable], file_alignment: u32) -> error::Result<Self> {
+    #[allow(unused)]
+    fn parse(
+        bytes: &[u8],
+        dd: data_directories::DataDirectory,
+        sections: &[section_table::SectionTable],
+        file_alignment: u32,
+    ) -> error::Result<Self> {
+        Self::parse_with_opts(
+            bytes,
+            dd,
+            sections,
+            file_alignment,
+            &options::ParseOptions::default(),
+        )
+    }
+
+    fn parse_with_opts(
+        bytes: &[u8],
+        dd: data_directories::DataDirectory,
+        sections: &[section_table::SectionTable],
+        file_alignment: u32,
+        opts: &options::ParseOptions,
+    ) -> error::Result<Self> {
         let rva = dd.virtual_address as usize;
-        let offset = utils::find_offset(rva, sections, file_alignment).ok_or_else(|| error::Error::Malformed(format!("Cannot map ImageDebugDirectory rva {:#x} into offset", rva)))?;
+        let offset = utils::find_offset(rva, sections, file_alignment, opts).ok_or_else(|| {
+            error::Error::Malformed(format!(
+                "Cannot map ImageDebugDirectory rva {:#x} into offset",
+                rva
+            ))
+        })?;
         let idd: Self = bytes.pread_with(offset, scroll::LE)?;
-        Ok (idd)
+        Ok(idd)
     }
 }
 
@@ -79,6 +128,14 @@ pub struct CodeviewPDB70DebugInfo<'a> {
 
 impl<'a> CodeviewPDB70DebugInfo<'a> {
     pub fn parse(bytes: &'a [u8], idd: &ImageDebugDirectory) -> error::Result<Option<Self>> {
+        Self::parse_with_opts(bytes, idd, &options::ParseOptions::default())
+    }
+
+    pub fn parse_with_opts(
+        bytes: &'a [u8],
+        idd: &ImageDebugDirectory,
+        opts: &options::ParseOptions,
+    ) -> error::Result<Option<Self>> {
         if idd.data_type != IMAGE_DEBUG_TYPE_CODEVIEW {
             // not a codeview debug directory
             // that's not an error, but it's not a CodeviewPDB70DebugInfo either
@@ -86,13 +143,19 @@ impl<'a> CodeviewPDB70DebugInfo<'a> {
         }
 
         // ImageDebugDirectory.pointer_to_raw_data stores a raw offset -- not a virtual offset -- which we can use directly
-        let mut offset: usize = idd.pointer_to_raw_data as usize;
+        let mut offset: usize = match opts.resolve_rva {
+            true => idd.pointer_to_raw_data as usize,
+            false => idd.address_of_raw_data as usize,
+        };
 
         // calculate how long the eventual filename will be, which doubles as a check of the record size
         let filename_length = idd.size_of_data as isize - 24;
-        if filename_length < 0 || filename_length > 1024 {
-            // the record is too short or too long to be plausible
-            return Err(error::Error::Malformed(format!("ImageDebugDirectory size of data seems wrong: {:?}", idd.size_of_data)));
+        if filename_length < 0 {
+            // the record is too short to be plausible
+            return Err(error::Error::Malformed(format!(
+                "ImageDebugDirectory size of data seems wrong: {:?}",
+                idd.size_of_data
+            )));
         }
         let filename_length = filename_length as usize;
 
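Note on the hunk above: `resolve_rva` decides whether the CodeView record is located through `pointer_to_raw_data` (a file offset, correct for a PE image read from disk) or `address_of_raw_data` (correct when the "file" is actually a module mapped in memory, as when walking minidump module ranges). A hedged sketch of just that selection logic, with a stand-in struct:

    struct ImageDebugDirectory {
        address_of_raw_data: u32,
        pointer_to_raw_data: u32,
    }

    /// Pick the offset of the CodeView record depending on whether RVAs still
    /// need resolving (on-disk PE) or not (memory-mapped image).
    fn codeview_offset(idd: &ImageDebugDirectory, resolve_rva: bool) -> usize {
        if resolve_rva {
            idd.pointer_to_raw_data as usize
        } else {
            idd.address_of_raw_data as usize
        }
    }

    fn main() {
        let idd = ImageDebugDirectory { address_of_raw_data: 0x2000, pointer_to_raw_data: 0x800 };
        assert_eq!(codeview_offset(&idd, true), 0x800);
        assert_eq!(codeview_offset(&idd, false), 0x2000);
    }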
@@ -108,7 +171,7 @@ impl<'a> CodeviewPDB70DebugInfo<'a> {
         let age: u32 = bytes.gread_with(&mut offset, scroll::LE)?;
         let filename = &bytes[offset..offset + filename_length];
 
-        Ok(Some(CodeviewPDB70DebugInfo{
+        Ok(Some(CodeviewPDB70DebugInfo {
             codeview_signature,
             signature,
             age,
70  third_party/rust/goblin/src/pe/exception.rs  (vendored)
@@ -49,6 +49,7 @@ use scroll::{self, Pread, Pwrite};
 use crate::error;
 
 use crate::pe::data_directories;
+use crate::pe::options;
 use crate::pe::section_table;
 use crate::pe::utils;
 
@@ -346,10 +347,7 @@ pub struct UnwindCode {
 impl<'a> TryFromCtx<'a, UnwindOpContext> for UnwindCode {
     type Error = error::Error;
     #[inline]
-    fn try_from_ctx(
-        bytes: &'a [u8],
-        ctx: UnwindOpContext,
-    ) -> Result<(Self, usize), Self::Error> {
+    fn try_from_ctx(bytes: &'a [u8], ctx: UnwindOpContext) -> Result<(Self, usize), Self::Error> {
         let mut read = 0;
         let code_offset = bytes.gread_with::<u8>(&mut read, scroll::LE)?;
         let operation = bytes.gread_with::<u8>(&mut read, scroll::LE)?;
@@ -590,13 +588,13 @@ impl<'a> UnwindInfo<'a> {
         // whenever flags UNW_FLAG_EHANDLER or UNW_FLAG_UHANDLER are set. The language-specific
         // handler is called as part of the search for an exception handler or as part of an unwind.
         } else if flags & (UNW_FLAG_EHANDLER | UNW_FLAG_UHANDLER) != 0 {
-            let offset = bytes.gread_with::<u32>(&mut offset, scroll::LE)? as usize;
+            let address = bytes.gread_with::<u32>(&mut offset, scroll::LE)?;
             let data = &bytes[offset..];
 
             handler = Some(if flags & UNW_FLAG_EHANDLER != 0 {
-                UnwindHandler::ExceptionHandler(offset as u32, data)
+                UnwindHandler::ExceptionHandler(address, data)
             } else {
-                UnwindHandler::TerminationHandler(offset as u32, data)
+                UnwindHandler::TerminationHandler(address, data)
             });
         }
 
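Note on the hunk above: the old code shadowed the read cursor `offset` with the u32 it had just read, so the handler data slice started at the wrong position; the fix keeps the handler RVA in its own variable and leaves the cursor pointing just past the field. A small hedged sketch of the corrected shape using scroll's `gread_with` (real scroll API, simplified data):

    use scroll::Pread;

    fn read_handler(bytes: &[u8]) -> Result<(u32, &[u8]), scroll::Error> {
        let mut offset = 0usize;
        // Read the handler RVA without clobbering the cursor variable.
        let address = bytes.gread_with::<u32>(&mut offset, scroll::LE)?;
        // `offset` now points just past the u32; the rest is handler data,
        // while `address` is kept separately.
        Ok((address, &bytes[offset..]))
    }

    fn main() {
        let bytes = [0x10, 0x20, 0x00, 0x00, 0xaa, 0xbb];
        let (address, data) = read_handler(&bytes).unwrap();
        assert_eq!(address, 0x2010);
        assert_eq!(data, &[0xaa, 0xbb]);
    }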
@@ -669,6 +667,23 @@ impl<'a> ExceptionData<'a> {
         directory: data_directories::DataDirectory,
         sections: &[section_table::SectionTable],
         file_alignment: u32,
     ) -> error::Result<Self> {
+        Self::parse_with_opts(
+            bytes,
+            directory,
+            sections,
+            file_alignment,
+            &options::ParseOptions::default(),
+        )
+    }
+
+    /// Parses exception data from the image at the given offset.
+    pub fn parse_with_opts(
+        bytes: &'a [u8],
+        directory: data_directories::DataDirectory,
+        sections: &[section_table::SectionTable],
+        file_alignment: u32,
+        opts: &options::ParseOptions,
+    ) -> error::Result<Self> {
         let size = directory.size as usize;
 
@@ -680,7 +695,7 @@ impl<'a> ExceptionData<'a> {
         }
 
         let rva = directory.virtual_address as usize;
-        let offset = utils::find_offset(rva, sections, file_alignment).ok_or_else(|| {
+        let offset = utils::find_offset(rva, sections, file_alignment, opts).ok_or_else(|| {
             error::Error::Malformed(format!("cannot map exception_rva ({:#x}) into offset", rva))
         })?;
 
@@ -764,31 +779,56 @@ impl<'a> ExceptionData<'a> {
 
     /// Resolves unwind information for the given function entry.
     pub fn get_unwind_info(
         &self,
         function: RuntimeFunction,
         sections: &[section_table::SectionTable],
     ) -> error::Result<UnwindInfo<'a>> {
+        self.get_unwind_info_with_opts(function, sections, &options::ParseOptions::default())
+    }
+
+    /// Resolves unwind information for the given function entry.
+    pub fn get_unwind_info_with_opts(
+        &self,
+        mut function: RuntimeFunction,
+        sections: &[section_table::SectionTable],
+        opts: &options::ParseOptions,
+    ) -> error::Result<UnwindInfo<'a>> {
         while function.unwind_info_address % 2 != 0 {
             let rva = (function.unwind_info_address & !1) as usize;
-            function = self.get_function_by_rva(rva, sections)?;
+            function = self.get_function_by_rva_with_opts(rva, sections, opts)?;
         }
 
         let rva = function.unwind_info_address as usize;
-        let offset = utils::find_offset(rva, sections, self.file_alignment).ok_or_else(|| {
-            error::Error::Malformed(format!("cannot map unwind rva ({:#x}) into offset", rva))
-        })?;
+        let offset =
+            utils::find_offset(rva, sections, self.file_alignment, opts).ok_or_else(|| {
+                error::Error::Malformed(format!("cannot map unwind rva ({:#x}) into offset", rva))
+            })?;
 
         UnwindInfo::parse(self.bytes, offset)
     }
 
+    #[allow(dead_code)]
     fn get_function_by_rva(
         &self,
         rva: usize,
         sections: &[section_table::SectionTable],
     ) -> error::Result<RuntimeFunction> {
-        let offset = utils::find_offset(rva, sections, self.file_alignment).ok_or_else(|| {
-            error::Error::Malformed(format!("cannot map exception rva ({:#x}) into offset", rva))
-        })?;
+        self.get_function_by_rva_with_opts(rva, sections, &options::ParseOptions::default())
+    }
+
+    fn get_function_by_rva_with_opts(
+        &self,
+        rva: usize,
+        sections: &[section_table::SectionTable],
+        opts: &options::ParseOptions,
+    ) -> error::Result<RuntimeFunction> {
+        let offset =
+            utils::find_offset(rva, sections, self.file_alignment, opts).ok_or_else(|| {
+                error::Error::Malformed(format!(
+                    "cannot map exception rva ({:#x}) into offset",
+                    rva
+                ))
+            })?;
 
         self.get_function_by_offset(offset)
     }
378  third_party/rust/goblin/src/pe/export.rs  (vendored)
@@ -1,17 +1,17 @@
-use scroll::{Pread, Pwrite};
 use alloc::vec::Vec;
+use scroll::{Pread, Pwrite};
 
 use log::debug;
 
 use crate::error;
 
-use crate::pe::utils;
-use crate::pe::section_table;
 use crate::pe::data_directories;
+use crate::pe::options;
+use crate::pe::section_table;
+use crate::pe::utils;
 
 #[repr(C)]
-#[derive(Debug, PartialEq, Copy, Clone, Default)]
-#[derive(Pread, Pwrite)]
+#[derive(Debug, PartialEq, Copy, Clone, Default, Pread, Pwrite)]
 pub struct ExportDirectoryTable {
     pub export_flags: u32,
     pub time_date_stamp: u32,
@@ -37,8 +37,8 @@ impl ExportDirectoryTable {
 
 #[derive(Debug)]
 pub enum ExportAddressTableEntry {
     ExportRVA(u32),
     ForwarderRVA(u32),
 }
 
 pub const SIZEOF_EXPORT_ADDRESS_TABLE_ENTRY: usize = 4;
@@ -66,17 +66,68 @@ pub struct ExportData<'a> {
 }
 
 impl<'a> ExportData<'a> {
-    pub fn parse(bytes: &'a [u8], dd: data_directories::DataDirectory, sections: &[section_table::SectionTable], file_alignment: u32) -> error::Result<ExportData<'a>> {
+    pub fn parse(
+        bytes: &'a [u8],
+        dd: data_directories::DataDirectory,
+        sections: &[section_table::SectionTable],
+        file_alignment: u32,
+    ) -> error::Result<ExportData<'a>> {
+        Self::parse_with_opts(
+            bytes,
+            dd,
+            sections,
+            file_alignment,
+            &options::ParseOptions::default(),
+        )
+    }
+
+    pub fn parse_with_opts(
+        bytes: &'a [u8],
+        dd: data_directories::DataDirectory,
+        sections: &[section_table::SectionTable],
+        file_alignment: u32,
+        opts: &options::ParseOptions,
+    ) -> error::Result<ExportData<'a>> {
         let export_rva = dd.virtual_address as usize;
         let size = dd.size as usize;
         debug!("export_rva {:#x} size {:#}", export_rva, size);
-        let export_offset = utils::find_offset_or(export_rva, sections, file_alignment, &format!("cannot map export_rva ({:#x}) into offset", export_rva))?;
-        let export_directory_table = ExportDirectoryTable::parse(bytes, export_offset)
-            .map_err(|_| error::Error::Malformed(format!("cannot parse export_directory_table (offset {:#x})", export_offset)))?;
+        let export_offset = utils::find_offset_or(
+            export_rva,
+            sections,
+            file_alignment,
+            opts,
+            &format!("cannot map export_rva ({:#x}) into offset", export_rva),
+        )?;
+        let export_directory_table =
+            ExportDirectoryTable::parse(bytes, export_offset).map_err(|_| {
+                error::Error::Malformed(format!(
+                    "cannot parse export_directory_table (offset {:#x})",
+                    export_offset
+                ))
+            })?;
         let number_of_name_pointers = export_directory_table.number_of_name_pointers as usize;
         let address_table_entries = export_directory_table.address_table_entries as usize;
 
-        let export_name_pointer_table = utils::find_offset(export_directory_table.name_pointer_rva as usize, sections, file_alignment).map_or(vec![], |table_offset| {
+        if number_of_name_pointers > bytes.len() {
+            return Err(error::Error::BufferTooShort(
+                number_of_name_pointers,
+                "name pointers",
+            ));
+        }
+        if address_table_entries > bytes.len() {
+            return Err(error::Error::BufferTooShort(
+                address_table_entries,
+                "address table entries",
+            ));
+        }
+
+        let export_name_pointer_table = utils::find_offset(
+            export_directory_table.name_pointer_rva as usize,
+            sections,
+            file_alignment,
+            opts,
+        )
+        .map_or(vec![], |table_offset| {
             let mut offset = table_offset;
             let mut table: ExportNamePointerTable = Vec::with_capacity(number_of_name_pointers);
 
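Note on the hunk above: the new `BufferTooShort` checks reject table counts that are implausibly large for the input before they reach `Vec::with_capacity`, so a crafted `number_of_name_pointers` or `address_table_entries` can no longer force a huge up-front allocation. Roughly this shape, with a hypothetical error type standing in for goblin's:

    #[derive(Debug)]
    enum ParseError {
        BufferTooShort(usize, &'static str),
    }

    /// Refuse to reserve more entries than the byte buffer could possibly describe.
    fn checked_capacity(count: usize, bytes: &[u8], what: &'static str) -> Result<Vec<u32>, ParseError> {
        if count > bytes.len() {
            return Err(ParseError::BufferTooShort(count, what));
        }
        Ok(Vec::with_capacity(count))
    }

    fn main() {
        let bytes = [0u8; 16];
        assert!(checked_capacity(4, &bytes, "name pointers").is_ok());
        assert!(checked_capacity(1 << 30, &bytes, "name pointers").is_err());
    }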
@@ -91,7 +142,13 @@ impl<'a> ExportData<'a> {
             table
         });
 
-        let export_ordinal_table = utils::find_offset(export_directory_table.ordinal_table_rva as usize, sections, file_alignment).map_or(vec![], |table_offset| {
+        let export_ordinal_table = utils::find_offset(
+            export_directory_table.ordinal_table_rva as usize,
+            sections,
+            file_alignment,
+            opts,
+        )
+        .map_or(vec![], |table_offset| {
             let mut offset = table_offset;
             let mut table: ExportOrdinalTable = Vec::with_capacity(number_of_name_pointers);
 
@@ -106,7 +163,13 @@ impl<'a> ExportData<'a> {
             table
         });
 
-        let export_address_table = utils::find_offset(export_directory_table.export_address_table_rva as usize, sections, file_alignment).map_or(vec![], |table_offset| {
+        let export_address_table = utils::find_offset(
+            export_directory_table.export_address_table_rva as usize,
+            sections,
+            file_alignment,
+            opts,
+        )
+        .map_or(vec![], |table_offset| {
             let mut offset = table_offset;
             let mut table: ExportAddressTable = Vec::with_capacity(address_table_entries);
             let export_end = export_rva + size;
@@ -126,7 +189,13 @@ impl<'a> ExportData<'a> {
             table
         });
 
-        let name = utils::find_offset(export_directory_table.name_rva as usize, sections, file_alignment).and_then(|offset| bytes.pread(offset).ok());
+        let name = utils::find_offset(
+            export_directory_table.name_rva as usize,
+            sections,
+            file_alignment,
+            opts,
+        )
+        .and_then(|offset| bytes.pread(offset).ok());
 
         Ok(ExportData {
             name,
@@ -141,8 +210,8 @@ impl<'a> ExportData<'a> {
 #[derive(Debug)]
 /// PE binaries have two kinds of reexports, either specifying the dll's name, or the ordinal value of the dll
 pub enum Reexport<'a> {
     DLLName { export: &'a str, lib: &'a str },
-    DLLOrdinal { ordinal: usize, lib: &'a str }
+    DLLOrdinal { ordinal: usize, lib: &'a str },
 }
 
 impl<'a> scroll::ctx::TryFromCtx<'a, scroll::Endian> for Reexport<'a> {
@@ -165,16 +234,31 @@ impl<'a> scroll::ctx::TryFromCtx<'a, scroll::Endian> for Reexport<'a> {
                 let rest: &'a [u8] = bytes.pread_with(o + 1, len)?;
                 debug!("rest: {:?}", &rest);
                 if rest[0] == b'#' {
-                    let ordinal = rest.pread_with::<&str>(1, scroll::ctx::StrCtx::Length(len - 1))?;
-                    let ordinal = ordinal.parse::<u32>().map_err(|_e| error::Error::Malformed(format!("Cannot parse reexport ordinal from {} bytes", bytes.len())))?;
-                    return Ok((Reexport::DLLOrdinal { ordinal: ordinal as usize, lib: dll }, reexport_len + 1))
+                    let ordinal =
+                        rest.pread_with::<&str>(1, scroll::ctx::StrCtx::Length(len - 1))?;
+                    let ordinal = ordinal.parse::<u32>().map_err(|_e| {
+                        error::Error::Malformed(format!(
+                            "Cannot parse reexport ordinal from {} bytes",
+                            bytes.len()
+                        ))
+                    })?;
+                    return Ok((
+                        Reexport::DLLOrdinal {
+                            ordinal: ordinal as usize,
+                            lib: dll,
+                        },
+                        reexport_len + 1,
+                    ));
                 } else {
                     let export = rest.pread_with::<&str>(0, scroll::ctx::StrCtx::Length(len))?;
-                    return Ok((Reexport::DLLName { export, lib: dll }, reexport_len + 1))
+                    return Ok((Reexport::DLLName { export, lib: dll }, reexport_len + 1));
                 }
             }
         }
-        Err(error::Error::Malformed(format!("Reexport {:#} is malformed", reexport)))
+        Err(error::Error::Malformed(format!(
+            "Reexport {:#} is malformed",
+            reexport
+        )))
     }
 }
 
@@ -188,7 +272,7 @@ impl<'a> Reexport<'a> {
 /// An exported symbol in this binary, contains synthetic data (name offset, etc., are computed)
 pub struct Export<'a> {
     pub name: Option<&'a str>,
-    pub offset: usize,
+    pub offset: Option<usize>,
     pub rva: usize,
     pub size: usize,
     pub reexport: Option<Reexport<'a>>,
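Note on the hunk above: `Export::offset` is now `Option<usize>`, so an export whose RVA cannot be mapped to a file offset no longer aborts parsing of the whole table; downstream code has to match on it instead of using it directly. A hedged sketch with a stand-in struct, not goblin's:

    struct Export {
        name: Option<&'static str>,
        rva: usize,
        offset: Option<usize>, // None when the RVA maps to no file offset
    }

    fn describe(e: &Export) -> String {
        match e.offset {
            Some(off) => format!("{} rva={:#x} file_offset={:#x}", e.name.unwrap_or("?"), e.rva, off),
            None => format!("{} rva={:#x} (no file offset)", e.name.unwrap_or("?"), e.rva),
        }
    }

    fn main() {
        let e = Export { name: Some("printf"), rva: 0x1040, offset: None };
        println!("{}", describe(&e));
    }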
@@ -202,138 +286,226 @@ struct ExportCtx<'a> {
     pub file_alignment: u32,
     pub addresses: &'a ExportAddressTable,
     pub ordinals: &'a ExportOrdinalTable,
+    pub opts: options::ParseOptions,
 }
 
 impl<'a, 'b> scroll::ctx::TryFromCtx<'a, ExportCtx<'b>> for Export<'a> {
     type Error = error::Error;
     #[inline]
-    fn try_from_ctx(bytes: &'a [u8], ExportCtx { ptr, idx, sections, file_alignment, addresses, ordinals }: ExportCtx<'b>) -> Result<(Self, usize), Self::Error> {
+    fn try_from_ctx(
+        bytes: &'a [u8],
+        ExportCtx {
+            ptr,
+            idx,
+            sections,
+            file_alignment,
+            addresses,
+            ordinals,
+            opts,
+        }: ExportCtx<'b>,
+    ) -> Result<(Self, usize), Self::Error> {
         use self::ExportAddressTableEntry::*;
 
-        let name = utils::find_offset(ptr as usize, sections, file_alignment).and_then(|offset| bytes.pread::<&str>(offset).ok());
+        let name = utils::find_offset(ptr as usize, sections, file_alignment, &opts)
+            .and_then(|offset| bytes.pread::<&str>(offset).ok());
 
         if let Some(ordinal) = ordinals.get(idx) {
             if let Some(rva) = addresses.get(*ordinal as usize) {
                 match *rva {
                     ExportRVA(rva) => {
                         let rva = rva as usize;
-                        let offset = utils::find_offset_or(rva, sections, file_alignment, &format!("cannot map RVA ({:#x}) of export ordinal {} into offset", rva, ordinal))?;
-                        Ok((Export { name, offset, rva, reexport: None, size: 0 }, 0))
-                    },
+                        let offset = utils::find_offset(rva, sections, file_alignment, &opts);
+                        Ok((
+                            Export {
+                                name,
+                                offset,
+                                rva,
+                                reexport: None,
+                                size: 0,
+                            },
+                            0,
+                        ))
+                    }
 
                     ForwarderRVA(rva) => {
                         let rva = rva as usize;
-                        let offset = utils::find_offset_or(rva, sections, file_alignment, &format!("cannot map RVA ({:#x}) of export ordinal {} into offset", rva, ordinal))?;
+                        let offset = utils::find_offset_or(
+                            rva,
+                            sections,
+                            file_alignment,
+                            &opts,
+                            &format!(
+                                "cannot map RVA ({:#x}) of export ordinal {} into offset",
+                                rva, ordinal
+                            ),
+                        )?;
                         let reexport = Reexport::parse(bytes, offset)?;
-                        Ok((Export { name, offset, rva, reexport: Some(reexport), size: 0 }, 0))
+                        Ok((
+                            Export {
+                                name,
+                                offset: Some(offset),
+                                rva,
+                                reexport: Some(reexport),
+                                size: 0,
+                            },
+                            0,
+                        ))
                     }
                 }
             } else {
-                Err(error::Error::Malformed(format!("cannot get RVA of export ordinal {}", ordinal)))
+                Err(error::Error::Malformed(format!(
+                    "cannot get RVA of export ordinal {}",
+                    ordinal
+                )))
             }
         } else {
-            Err(error::Error::Malformed(format!("cannot get ordinal of export name entry {}", idx)))
+            Err(error::Error::Malformed(format!(
+                "cannot get ordinal of export name entry {}",
+                idx
+            )))
         }
     }
 }
 
 impl<'a> Export<'a> {
-    pub fn parse(bytes: &'a [u8], export_data: &ExportData, sections: &[section_table::SectionTable], file_alignment: u32) -> error::Result<Vec<Export<'a>>> {
+    pub fn parse(
+        bytes: &'a [u8],
+        export_data: &ExportData,
+        sections: &[section_table::SectionTable],
+        file_alignment: u32,
+    ) -> error::Result<Vec<Export<'a>>> {
+        Self::parse_with_opts(
+            bytes,
+            export_data,
+            sections,
+            file_alignment,
+            &options::ParseOptions::default(),
+        )
+    }
+
+    pub fn parse_with_opts(
+        bytes: &'a [u8],
+        export_data: &ExportData,
+        sections: &[section_table::SectionTable],
+        file_alignment: u32,
+        opts: &options::ParseOptions,
+    ) -> error::Result<Vec<Export<'a>>> {
         let pointers = &export_data.export_name_pointer_table;
         let addresses = &export_data.export_address_table;
         let ordinals = &export_data.export_ordinal_table;
 
         let mut exports = Vec::with_capacity(pointers.len());
         for (idx, &ptr) in pointers.iter().enumerate() {
-            if let Ok(export) = bytes.pread_with(0, ExportCtx { ptr, idx, sections, file_alignment, addresses, ordinals }) {
+            if let Ok(export) = bytes.pread_with(
+                0,
+                ExportCtx {
+                    ptr,
+                    idx,
+                    sections,
+                    file_alignment,
+                    addresses,
+                    ordinals,
+                    opts: *opts,
+                },
+            ) {
                 exports.push(export);
             }
        }
 
         // TODO: sort + compute size
-        Ok (exports)
+        Ok(exports)
     }
 }
 
 #[cfg(test)]
 mod tests {
-    use super::*;
     use self::data_directories::*;
+    use super::*;
 
static CORKAMI_POCS_PE_EXPORTSDATA_EXE: [u8; 0x400] =
|
||||
[ 0x4d, 0x5a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00,
|
||||
0x50, 0x45, 0x00, 0x00, 0x4c, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0xe0, 0x00, 0x02, 0x01, 0x0b, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x10, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x20, 0x00, 0x00, 0x60, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xb0, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x48, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa0,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x80, 0x01, 0x00, 0x00, 0x80, 0x02, 0x00, 0x00, 0x80, 0x03, 0x00, 0x00, 0x80,
|
||||
0x00, 0x00, 0x00, 0x00, 0x20, 0x2a, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x73, 0x74, 0x6f, 0x72,
|
||||
0x65, 0x64, 0x20, 0x61, 0x73, 0x20, 0x66, 0x61, 0x6b, 0x65, 0x20, 0x65, 0x78, 0x70, 0x6f, 0x72,
|
||||
0x74, 0x20, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x8c, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x84, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x95, 0x10, 0x00, 0x00, 0x40, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa0, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x8c, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x70, 0x72,
|
||||
0x69, 0x6e, 0x74, 0x66, 0x00, 0x6d, 0x73, 0x76, 0x63, 0x72, 0x74, 0x2e, 0x64, 0x6c, 0x6c, 0x00,
|
||||
0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x73, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x65, 0x78, 0x65, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe0, 0x10, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x68, 0x14, 0x10, 0xf0, 0x10, 0xff, 0x15, 0x30, 0x10, 0x00, 0x10, 0x73, 0xc4, 0x04, 0xc3, 0xbc,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x01, 0x00, 0x02, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 ];
|
||||
static CORKAMI_POCS_PE_EXPORTSDATA_EXE: [u8; 0x400] = [
|
||||
0x4d, 0x5a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x40, 0x00, 0x00, 0x00, 0x50, 0x45, 0x00, 0x00, 0x4c, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe0, 0x00, 0x02, 0x01, 0x0b, 0x01,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10,
|
||||
0x00, 0x10, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x60, 0x01,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x10, 0x00, 0x00, 0x00, 0xb0, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x10, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x02,
|
||||
0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x01, 0x00, 0x00, 0x80, 0x02, 0x00, 0x00, 0x80, 0x03,
|
||||
0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x20, 0x2a, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20,
|
||||
0x73, 0x74, 0x6f, 0x72, 0x65, 0x64, 0x20, 0x61, 0x73, 0x20, 0x66, 0x61, 0x6b, 0x65, 0x20,
|
||||
0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x0a, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8c, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x84,
|
||||
0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x95, 0x10, 0x00, 0x00,
|
||||
0x40, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0xa0, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8c,
|
||||
0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x70, 0x72, 0x69, 0x6e, 0x74, 0x66,
|
||||
0x00, 0x6d, 0x73, 0x76, 0x63, 0x72, 0x74, 0x2e, 0x64, 0x6c, 0x6c, 0x00, 0x65, 0x78, 0x70,
|
||||
0x6f, 0x72, 0x74, 0x73, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x65, 0x78, 0x65, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe0, 0x10, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x68, 0x14, 0x10, 0xf0, 0x10, 0xff, 0x15, 0x30, 0x10, 0x00, 0x10, 0x73, 0xc4, 0x04,
|
||||
0xc3, 0xbc, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x02, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00,
|
||||
];

#[test]
fn size_export_directory_table() {
assert_eq!(::std::mem::size_of::<ExportDirectoryTable>(), SIZEOF_EXPORT_DIRECTORY_TABLE);
assert_eq!(
::std::mem::size_of::<ExportDirectoryTable>(),
SIZEOF_EXPORT_DIRECTORY_TABLE
);
}

#[test]
fn parse_export_table() {
let data_dirs = DataDirectories::parse(&CORKAMI_POCS_PE_EXPORTSDATA_EXE[..], 16, &mut 0xb8).unwrap();
let data_dirs =
DataDirectories::parse(&CORKAMI_POCS_PE_EXPORTSDATA_EXE[..], 16, &mut 0xb8).unwrap();
let export_table = data_dirs.get_export_table().unwrap();

assert_eq!(export_table.virtual_address, 0x10b0);

third_party/rust/goblin/src/pe/header.rs (vendored, 244 changes)
@@ -1,9 +1,9 @@
|
|||
use alloc::vec::Vec;
use crate::error;
use crate::pe::{optional_header, section_table, symbol};
use crate::strtab;
use alloc::vec::Vec;
use log::debug;
use scroll::{Pread, Pwrite, IOread, IOwrite, SizeWith};
use scroll::{IOread, IOwrite, Pread, Pwrite, SizeWith};

/// DOS header present in all PE binaries
#[repr(C)]

@@ -20,11 +20,42 @@ pub const PE_POINTER_OFFSET: u32 = 0x3c;
|
|||
|
||||
impl DosHeader {
|
||||
pub fn parse(bytes: &[u8]) -> error::Result<Self> {
|
||||
let signature = bytes.pread_with(0, scroll::LE)
|
||||
.map_err(|_| error::Error::Malformed(format!("cannot parse DOS signature (offset {:#x})", 0)))?;
|
||||
let pe_pointer = bytes.pread_with(PE_POINTER_OFFSET as usize, scroll::LE)
|
||||
.map_err(|_| error::Error::Malformed(format!("cannot parse PE header pointer (offset {:#x})", PE_POINTER_OFFSET)))?;
|
||||
Ok (DosHeader { signature, pe_pointer })
|
||||
let signature = bytes.pread_with(0, scroll::LE).map_err(|_| {
|
||||
error::Error::Malformed(format!("cannot parse DOS signature (offset {:#x})", 0))
|
||||
})?;
|
||||
if signature != DOS_MAGIC {
|
||||
return Err(error::Error::Malformed(format!(
|
||||
"DOS header is malformed (signature {:#x})",
|
||||
signature
|
||||
)));
|
||||
}
|
||||
let pe_pointer = bytes
|
||||
.pread_with(PE_POINTER_OFFSET as usize, scroll::LE)
|
||||
.map_err(|_| {
|
||||
error::Error::Malformed(format!(
|
||||
"cannot parse PE header pointer (offset {:#x})",
|
||||
PE_POINTER_OFFSET
|
||||
))
|
||||
})?;
|
||||
let pe_signature: u32 =
|
||||
bytes
|
||||
.pread_with(pe_pointer as usize, scroll::LE)
|
||||
.map_err(|_| {
|
||||
error::Error::Malformed(format!(
|
||||
"cannot parse PE header signature (offset {:#x})",
|
||||
pe_pointer
|
||||
))
|
||||
})?;
|
||||
if pe_signature != PE_MAGIC {
|
||||
return Err(error::Error::Malformed(format!(
|
||||
"PE header is malformed (signature {:#x})",
|
||||
pe_signature
|
||||
)));
|
||||
}
|
||||
Ok(DosHeader {
|
||||
signature,
|
||||
pe_pointer,
|
||||
})
|
||||
}
|
||||
}
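
DosHeader::parse above now fails fast on a bad MZ signature and on a bad PE signature instead of leaving the check to later stages. Below is a small caller-side sketch, not part of the vendored diff; it only touches DosHeader, DOS_MAGIC and the error type shown above, and assumes the crate is consumed externally under the name goblin.

// Sketch: probing a candidate image with the stricter DosHeader::parse.
use goblin::pe::header::{DosHeader, DOS_MAGIC};

fn probe_dos_header(bytes: &[u8]) {
    match DosHeader::parse(bytes) {
        Ok(header) => {
            assert_eq!(header.signature, DOS_MAGIC);
            println!("PE header starts at {:#x}", header.pe_pointer);
        }
        Err(err) => println!("rejected as malformed: {}", err),
    }
}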
|
||||
|
||||
|
|
@@ -46,8 +77,56 @@ pub const SIZEOF_COFF_HEADER: usize = 20;
|
|||
/// PE\0\0, little endian
|
||||
pub const PE_MAGIC: u32 = 0x0000_4550;
|
||||
pub const SIZEOF_PE_MAGIC: usize = 4;
|
||||
pub const COFF_MACHINE_X86: u16 = 0x14c;
|
||||
/// The contents of this field are assumed to be applicable to any machine type
|
||||
pub const COFF_MACHINE_UNKNOWN: u16 = 0x0;
|
||||
/// Matsushita AM33
|
||||
pub const COFF_MACHINE_AM33: u16 = 0x1d3;
|
||||
/// x64
|
||||
pub const COFF_MACHINE_X86_64: u16 = 0x8664;
|
||||
/// ARM little endian
|
||||
pub const COFF_MACHINE_ARM: u16 = 0x1c0;
|
||||
/// ARM64 little endian
|
||||
pub const COFF_MACHINE_ARM64: u16 = 0xaa64;
|
||||
/// ARM Thumb-2 little endian
|
||||
pub const COFF_MACHINE_ARMNT: u16 = 0x1c4;
|
||||
/// EFI byte code
|
||||
pub const COFF_MACHINE_EBC: u16 = 0xebc;
|
||||
/// Intel 386 or later processors and compatible processors
|
||||
pub const COFF_MACHINE_X86: u16 = 0x14c;
|
||||
/// Intel Itanium processor family
|
||||
pub const COFF_MACHINE_IA64: u16 = 0x200;
|
||||
/// Mitsubishi M32R little endian
|
||||
pub const COFF_MACHINE_M32R: u16 = 0x9041;
|
||||
/// MIPS16
|
||||
pub const COFF_MACHINE_MIPS16: u16 = 0x266;
|
||||
/// MIPS with FPU
|
||||
pub const COFF_MACHINE_MIPSFPU: u16 = 0x366;
|
||||
/// MIPS16 with FPU
|
||||
pub const COFF_MACHINE_MIPSFPU16: u16 = 0x466;
|
||||
/// Power PC little endian
|
||||
pub const COFF_MACHINE_POWERPC: u16 = 0x1f0;
|
||||
/// Power PC with floating point support
|
||||
pub const COFF_MACHINE_POWERPCFP: u16 = 0x1f1;
|
||||
/// MIPS little endian
|
||||
pub const COFF_MACHINE_R4000: u16 = 0x166;
|
||||
/// RISC-V 32-bit address space
|
||||
pub const COFF_MACHINE_RISCV32: u16 = 0x5032;
|
||||
/// RISC-V 64-bit address space
|
||||
pub const COFF_MACHINE_RISCV64: u16 = 0x5064;
|
||||
/// RISC-V 128-bit address space
|
||||
pub const COFF_MACHINE_RISCV128: u16 = 0x5128;
|
||||
/// Hitachi SH3
|
||||
pub const COFF_MACHINE_SH3: u16 = 0x1a2;
|
||||
/// Hitachi SH3 DSP
|
||||
pub const COFF_MACHINE_SH3DSP: u16 = 0x1a3;
|
||||
/// Hitachi SH4
|
||||
pub const COFF_MACHINE_SH4: u16 = 0x1a6;
|
||||
/// Hitachi SH5
|
||||
pub const COFF_MACHINE_SH5: u16 = 0x1a8;
|
||||
/// Thumb
|
||||
pub const COFF_MACHINE_THUMB: u16 = 0x1c2;
|
||||
/// MIPS little-endian WCE v2
|
||||
pub const COFF_MACHINE_WCEMIPSV2: u16 = 0x169;
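
The block above adds one constant per COFF machine ID. A tiny lookup sketch (mine, not from the diff; it uses only constants defined above and assumes the crate is consumed as goblin):

// Sketch: naming a few of the machine IDs listed above.
use goblin::pe::header::{COFF_MACHINE_ARM64, COFF_MACHINE_X86, COFF_MACHINE_X86_64};

fn machine_name(machine: u16) -> &'static str {
    match machine {
        COFF_MACHINE_X86 => "x86",
        COFF_MACHINE_X86_64 => "x86-64",
        COFF_MACHINE_ARM64 => "aarch64",
        _ => "other",
    }
}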
|
||||
|
||||
impl CoffHeader {
|
||||
pub fn parse(bytes: &[u8], offset: &mut usize) -> error::Result<Self> {
|
||||
|
|
@@ -64,12 +143,19 @@ impl CoffHeader {
|
|||
offset: &mut usize,
|
||||
) -> error::Result<Vec<section_table::SectionTable>> {
|
||||
let nsections = self.number_of_sections as usize;
|
||||
|
||||
// a section table is at least 40 bytes
|
||||
if nsections > bytes.len() / 40 {
|
||||
return Err(error::Error::BufferTooShort(nsections, "sections"));
|
||||
}
|
||||
|
||||
let mut sections = Vec::with_capacity(nsections);
|
||||
// Note that if we are handling a BigCoff, the size of the symbol will be different!
|
||||
let string_table_offset = self.pointer_to_symbol_table as usize
|
||||
+ symbol::SymbolTable::size(self.number_of_symbol_table as usize);
|
||||
for i in 0..nsections {
|
||||
let section = section_table::SectionTable::parse(bytes, offset, string_table_offset as usize)?;
|
||||
let section =
|
||||
section_table::SectionTable::parse(bytes, offset, string_table_offset as usize)?;
|
||||
debug!("({}) {:#?}", i, section);
|
||||
sections.push(section);
|
||||
}
|
||||
|
|
@@ -77,24 +163,24 @@ impl CoffHeader {
|
|||
}
|
||||
|
||||
/// Return the COFF symbol table.
|
||||
pub fn symbols<'a>(
|
||||
&self,
|
||||
bytes: &'a [u8],
|
||||
) -> error::Result<symbol::SymbolTable<'a>> {
|
||||
pub fn symbols<'a>(&self, bytes: &'a [u8]) -> error::Result<symbol::SymbolTable<'a>> {
|
||||
let offset = self.pointer_to_symbol_table as usize;
|
||||
let number = self.number_of_symbol_table as usize;
|
||||
symbol::SymbolTable::parse(bytes, offset, number)
|
||||
}
|
||||
|
||||
/// Return the COFF string table.
|
||||
pub fn strings<'a>(
|
||||
&self,
|
||||
bytes: &'a [u8],
|
||||
) -> error::Result<strtab::Strtab<'a>> {
|
||||
let offset = self.pointer_to_symbol_table as usize
|
||||
pub fn strings<'a>(&self, bytes: &'a [u8]) -> error::Result<strtab::Strtab<'a>> {
|
||||
let mut offset = self.pointer_to_symbol_table as usize
|
||||
+ symbol::SymbolTable::size(self.number_of_symbol_table as usize);
|
||||
let length = bytes.pread_with::<u32>(offset, scroll::LE)? as usize;
|
||||
Ok(strtab::Strtab::parse(bytes, offset, length, 0).unwrap())
|
||||
|
||||
let length_field_size = core::mem::size_of::<u32>();
|
||||
let length = bytes.pread_with::<u32>(offset, scroll::LE)? as usize - length_field_size;
|
||||
|
||||
// The offset needs to be advanced in order to read the strings.
|
||||
offset += length_field_size;
|
||||
|
||||
Ok(strtab::Strtab::parse(bytes, offset, length, 0)?)
|
||||
}
|
||||
}
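
The strings() change above reflects the COFF string-table layout: the table opens with a u32 byte count that includes the 4-byte length field itself, so the usable data starts 4 bytes in and ends at the stored count. A standalone sketch of that rule (plain std, no goblin APIs):

// Sketch: (start, end) byte range of the string data inside a raw string table,
// mirroring the offset/length adjustment now made in CoffHeader::strings().
fn string_table_bounds(table: &[u8]) -> Option<(usize, usize)> {
    if table.len() < 4 {
        return None;
    }
    let total = u32::from_le_bytes([table[0], table[1], table[2], table[3]]) as usize;
    Some((4, total))
}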
|
||||
|
||||
|
|
@@ -111,69 +197,79 @@ impl Header {
|
|||
pub fn parse(bytes: &[u8]) -> error::Result<Self> {
|
||||
let dos_header = DosHeader::parse(&bytes)?;
|
||||
let mut offset = dos_header.pe_pointer as usize;
|
||||
let signature = bytes.gread_with(&mut offset, scroll::LE)
|
||||
.map_err(|_| error::Error::Malformed(format!("cannot parse PE signature (offset {:#x})", offset)))?;
|
||||
let signature = bytes.gread_with(&mut offset, scroll::LE).map_err(|_| {
|
||||
error::Error::Malformed(format!("cannot parse PE signature (offset {:#x})", offset))
|
||||
})?;
|
||||
let coff_header = CoffHeader::parse(&bytes, &mut offset)?;
|
||||
let optional_header =
|
||||
if coff_header.size_of_optional_header > 0 {
|
||||
Some (bytes.pread::<optional_header::OptionalHeader>(offset)?)
|
||||
}
|
||||
else { None };
|
||||
Ok( Header { dos_header, signature, coff_header, optional_header })
|
||||
let optional_header = if coff_header.size_of_optional_header > 0 {
|
||||
Some(bytes.pread::<optional_header::OptionalHeader>(offset)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Ok(Header {
|
||||
dos_header,
|
||||
signature,
|
||||
coff_header,
|
||||
optional_header,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{DOS_MAGIC, PE_MAGIC, COFF_MACHINE_X86, Header};
|
||||
use super::{Header, COFF_MACHINE_X86, DOS_MAGIC, PE_MAGIC};
|
||||
|
||||
const CRSS_HEADER: [u8; 688] =
|
||||
[0x4d, 0x5a, 0x90, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00,
|
||||
0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xd0, 0x00, 0x00, 0x00,
|
||||
0x0e, 0x1f, 0xba, 0x0e, 0x00, 0xb4, 0x09, 0xcd, 0x21, 0xb8, 0x01, 0x4c, 0xcd, 0x21, 0x54, 0x68,
|
||||
0x69, 0x73, 0x20, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f,
|
||||
0x74, 0x20, 0x62, 0x65, 0x20, 0x72, 0x75, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x44, 0x4f, 0x53, 0x20,
|
||||
0x6d, 0x6f, 0x64, 0x65, 0x2e, 0x0d, 0x0d, 0x0a, 0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0xaa, 0x4a, 0xc3, 0xeb, 0xee, 0x2b, 0xad, 0xb8, 0xee, 0x2b, 0xad, 0xb8, 0xee, 0x2b, 0xad, 0xb8,
|
||||
0xee, 0x2b, 0xac, 0xb8, 0xfe, 0x2b, 0xad, 0xb8, 0x33, 0xd4, 0x66, 0xb8, 0xeb, 0x2b, 0xad, 0xb8,
|
||||
0x33, 0xd4, 0x63, 0xb8, 0xea, 0x2b, 0xad, 0xb8, 0x33, 0xd4, 0x7a, 0xb8, 0xed, 0x2b, 0xad, 0xb8,
|
||||
0x33, 0xd4, 0x64, 0xb8, 0xef, 0x2b, 0xad, 0xb8, 0x33, 0xd4, 0x61, 0xb8, 0xef, 0x2b, 0xad, 0xb8,
|
||||
0x52, 0x69, 0x63, 0x68, 0xee, 0x2b, 0xad, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x50, 0x45, 0x00, 0x00, 0x4c, 0x01, 0x05, 0x00, 0xd9, 0x8f, 0x15, 0x52, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0xe0, 0x00, 0x02, 0x01, 0x0b, 0x01, 0x0b, 0x00, 0x00, 0x08, 0x00, 0x00,
|
||||
0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x11, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00,
|
||||
0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00,
|
||||
0x06, 0x00, 0x03, 0x00, 0x06, 0x00, 0x03, 0x00, 0x06, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x60, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0xe4, 0xab, 0x00, 0x00, 0x01, 0x00, 0x40, 0x05,
|
||||
0x00, 0x00, 0x04, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x10, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x3c, 0x30, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1a, 0x00, 0x00, 0xb8, 0x22, 0x00, 0x00,
|
||||
0x00, 0x50, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x10, 0x10, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x68, 0x10, 0x00, 0x00, 0x5c, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x00, 0x00, 0x00,
|
||||
0x24, 0x06, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x60,
|
||||
0x2e, 0x64, 0x61, 0x74, 0x61, 0x00, 0x00, 0x00, 0x3c, 0x03, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00,
|
||||
0x00, 0x02, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0xc0, 0x2e, 0x69, 0x64, 0x61, 0x74, 0x61, 0x00, 0x00,
|
||||
0xf8, 0x01, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x40,
|
||||
0x2e, 0x72, 0x73, 0x72, 0x63, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00,
|
||||
0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x42, 0x2e, 0x72, 0x65, 0x6c, 0x6f, 0x63, 0x00, 0x00,
|
||||
0x86, 0x01, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x42,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00];
|
||||
const CRSS_HEADER: [u8; 688] = [
|
||||
0x4d, 0x5a, 0x90, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00,
|
||||
0x00, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0xd0, 0x00, 0x00, 0x00, 0x0e, 0x1f, 0xba, 0x0e, 0x00, 0xb4, 0x09, 0xcd, 0x21, 0xb8, 0x01,
|
||||
0x4c, 0xcd, 0x21, 0x54, 0x68, 0x69, 0x73, 0x20, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x61, 0x6d,
|
||||
0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x20, 0x62, 0x65, 0x20, 0x72, 0x75, 0x6e, 0x20,
|
||||
0x69, 0x6e, 0x20, 0x44, 0x4f, 0x53, 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x2e, 0x0d, 0x0d, 0x0a,
|
||||
0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xaa, 0x4a, 0xc3, 0xeb, 0xee, 0x2b, 0xad,
|
||||
0xb8, 0xee, 0x2b, 0xad, 0xb8, 0xee, 0x2b, 0xad, 0xb8, 0xee, 0x2b, 0xac, 0xb8, 0xfe, 0x2b,
|
||||
0xad, 0xb8, 0x33, 0xd4, 0x66, 0xb8, 0xeb, 0x2b, 0xad, 0xb8, 0x33, 0xd4, 0x63, 0xb8, 0xea,
|
||||
0x2b, 0xad, 0xb8, 0x33, 0xd4, 0x7a, 0xb8, 0xed, 0x2b, 0xad, 0xb8, 0x33, 0xd4, 0x64, 0xb8,
|
||||
0xef, 0x2b, 0xad, 0xb8, 0x33, 0xd4, 0x61, 0xb8, 0xef, 0x2b, 0xad, 0xb8, 0x52, 0x69, 0x63,
|
||||
0x68, 0xee, 0x2b, 0xad, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50, 0x45,
|
||||
0x00, 0x00, 0x4c, 0x01, 0x05, 0x00, 0xd9, 0x8f, 0x15, 0x52, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0xe0, 0x00, 0x02, 0x01, 0x0b, 0x01, 0x0b, 0x00, 0x00, 0x08, 0x00, 0x00,
|
||||
0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x11, 0x00, 0x00, 0x00, 0x10, 0x00,
|
||||
0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x02,
|
||||
0x00, 0x00, 0x06, 0x00, 0x03, 0x00, 0x06, 0x00, 0x03, 0x00, 0x06, 0x00, 0x03, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x60, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0xe4, 0xab, 0x00, 0x00,
|
||||
0x01, 0x00, 0x40, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x00, 0x10,
|
||||
0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3c, 0x30, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00, 0x00,
|
||||
0x40, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x1a, 0x00, 0x00, 0xb8, 0x22, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x38, 0x00, 0x00,
|
||||
0x00, 0x10, 0x10, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x68, 0x10, 0x00, 0x00, 0x5c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x3c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x00, 0x00, 0x00, 0x24,
|
||||
0x06, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00,
|
||||
0x60, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x00, 0x00, 0x00, 0x3c, 0x03, 0x00, 0x00, 0x00, 0x20,
|
||||
0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0xc0, 0x2e, 0x69, 0x64, 0x61,
|
||||
0x74, 0x61, 0x00, 0x00, 0xf8, 0x01, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x02, 0x00,
|
||||
0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x40, 0x00, 0x00, 0x40, 0x2e, 0x72, 0x73, 0x72, 0x63, 0x00, 0x00, 0x00, 0x00,
|
||||
0x08, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00,
|
||||
0x42, 0x2e, 0x72, 0x65, 0x6c, 0x6f, 0x63, 0x00, 0x00, 0x86, 0x01, 0x00, 0x00, 0x00, 0x50,
|
||||
0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x42, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
];
|
||||
|
||||
#[test]
fn crss_header () {
fn crss_header() {
let header = Header::parse(&&CRSS_HEADER[..]).unwrap();
assert!(header.dos_header.signature == DOS_MAGIC);
assert!(header.signature == PE_MAGIC);

third_party/rust/goblin/src/pe/import.rs (vendored, 293 changes)
@@ -1,14 +1,15 @@
|
|||
use alloc::borrow::Cow;
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::{LowerHex, Debug};
|
||||
use core::fmt::{Debug, LowerHex};
|
||||
|
||||
use scroll::{Pread, Pwrite, SizeWith};
|
||||
use scroll::ctx::TryFromCtx;
|
||||
use crate::error;
|
||||
use scroll::ctx::TryFromCtx;
|
||||
use scroll::{Pread, Pwrite, SizeWith};
|
||||
|
||||
use crate::pe::data_directories;
|
||||
use crate::pe::options;
|
||||
use crate::pe::section_table;
|
||||
use crate::pe::utils;
|
||||
use crate::pe::data_directories;
|
||||
|
||||
use log::{debug, warn};
|
||||
|
||||
|
|
@@ -17,7 +18,14 @@ pub const IMPORT_BY_ORDINAL_64: u64 = 0x8000_0000_0000_0000;
|
|||
pub const IMPORT_RVA_MASK_32: u32 = 0x7fff_ffff;
|
||||
pub const IMPORT_RVA_MASK_64: u64 = 0x0000_0000_7fff_ffff;
|
||||
|
||||
pub trait Bitfield<'a>: Into<u64> + PartialEq + Eq + LowerHex + Debug + TryFromCtx<'a, scroll::Endian, Error=scroll::Error> {
|
||||
pub trait Bitfield<'a>:
|
||||
Into<u64>
|
||||
+ PartialEq
|
||||
+ Eq
|
||||
+ LowerHex
|
||||
+ Debug
|
||||
+ TryFromCtx<'a, scroll::Endian, Error = scroll::Error>
|
||||
{
|
||||
fn is_ordinal(&self) -> bool;
|
||||
fn to_ordinal(&self) -> u16;
|
||||
fn to_rva(&self) -> u32;
|
||||
|
|
@@ -26,19 +34,39 @@ pub trait Bitfield<'a>: Into<u64> + PartialEq + Eq + LowerHex + Debug + TryFromC
|
|||
}
|
||||
|
||||
impl<'a> Bitfield<'a> for u64 {
|
||||
fn is_ordinal(&self) -> bool { self & IMPORT_BY_ORDINAL_64 == IMPORT_BY_ORDINAL_64 }
|
||||
fn to_ordinal(&self) -> u16 { (0xffff & self) as u16 }
|
||||
fn to_rva(&self) -> u32 { (self & IMPORT_RVA_MASK_64) as u32 }
|
||||
fn size_of() -> usize { 8 }
|
||||
fn is_zero(&self) -> bool { *self == 0 }
|
||||
fn is_ordinal(&self) -> bool {
|
||||
self & IMPORT_BY_ORDINAL_64 == IMPORT_BY_ORDINAL_64
|
||||
}
|
||||
fn to_ordinal(&self) -> u16 {
|
||||
(0xffff & self) as u16
|
||||
}
|
||||
fn to_rva(&self) -> u32 {
|
||||
(self & IMPORT_RVA_MASK_64) as u32
|
||||
}
|
||||
fn size_of() -> usize {
|
||||
8
|
||||
}
|
||||
fn is_zero(&self) -> bool {
|
||||
*self == 0
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Bitfield<'a> for u32 {
|
||||
fn is_ordinal(&self) -> bool { self & IMPORT_BY_ORDINAL_32 == IMPORT_BY_ORDINAL_32 }
|
||||
fn to_ordinal(&self) -> u16 { (0xffff & self) as u16 }
|
||||
fn to_rva(&self) -> u32 { (self & IMPORT_RVA_MASK_32) as u32 }
|
||||
fn size_of() -> usize { 4 }
|
||||
fn is_zero(&self) -> bool { *self == 0 }
|
||||
fn is_ordinal(&self) -> bool {
|
||||
self & IMPORT_BY_ORDINAL_32 == IMPORT_BY_ORDINAL_32
|
||||
}
|
||||
fn to_ordinal(&self) -> u16 {
|
||||
(0xffff & self) as u16
|
||||
}
|
||||
fn to_rva(&self) -> u32 {
|
||||
(self & IMPORT_RVA_MASK_32) as u32
|
||||
}
|
||||
fn size_of() -> usize {
|
||||
4
|
||||
}
|
||||
fn is_zero(&self) -> bool {
|
||||
*self == 0
|
||||
}
|
||||
}
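
The two impls above give u32 and u64 lookup-table entries one view: the top bit flags import-by-ordinal, the low 16 bits hold the ordinal, and otherwise the masked bits hold the hint/name table RVA. A usage sketch (not from the diff; only the Bitfield trait defined above is used, with the crate consumed as goblin):

// Sketch: classifying a 32-bit import lookup table entry.
use goblin::pe::import::Bitfield;

fn describe_entry(entry: u32) -> String {
    if entry.is_zero() {
        "end of table".to_string()
    } else if entry.is_ordinal() {
        format!("import by ordinal #{}", entry.to_ordinal())
    } else {
        format!("import by name, hint/name RVA {:#x}", entry.to_rva())
    }
}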
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
|
@@ -59,13 +87,34 @@ impl<'a> HintNameTableEntry<'a> {
|
|||
#[derive(Debug, Clone)]
|
||||
pub enum SyntheticImportLookupTableEntry<'a> {
|
||||
OrdinalNumber(u16),
|
||||
HintNameTableRVA ((u32, HintNameTableEntry<'a>)), // [u8; 31] bitfield :/
|
||||
HintNameTableRVA((u32, HintNameTableEntry<'a>)), // [u8; 31] bitfield :/
|
||||
}
|
||||
|
||||
pub type ImportLookupTable<'a> = Vec<SyntheticImportLookupTableEntry<'a>>;
|
||||
|
||||
impl<'a> SyntheticImportLookupTableEntry<'a> {
|
||||
pub fn parse<T: Bitfield<'a>>(bytes: &'a [u8], mut offset: usize, sections: &[section_table::SectionTable], file_alignment: u32) -> error::Result<ImportLookupTable<'a>> {
|
||||
pub fn parse<T: Bitfield<'a>>(
|
||||
bytes: &'a [u8],
|
||||
offset: usize,
|
||||
sections: &[section_table::SectionTable],
|
||||
file_alignment: u32,
|
||||
) -> error::Result<ImportLookupTable<'a>> {
|
||||
Self::parse_with_opts::<T>(
|
||||
bytes,
|
||||
offset,
|
||||
sections,
|
||||
file_alignment,
|
||||
&options::ParseOptions::default(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn parse_with_opts<T: Bitfield<'a>>(
|
||||
bytes: &'a [u8],
|
||||
mut offset: usize,
|
||||
sections: &[section_table::SectionTable],
|
||||
file_alignment: u32,
|
||||
opts: &options::ParseOptions,
|
||||
) -> error::Result<ImportLookupTable<'a>> {
|
||||
let le = scroll::LE;
|
||||
let offset = &mut offset;
|
||||
let mut table = Vec::new();
|
||||
|
|
@@ -86,15 +135,17 @@ impl<'a> SyntheticImportLookupTableEntry<'a> {
|
|||
let rva = bitfield.to_rva();
|
||||
let hentry = {
|
||||
debug!("searching for RVA {:#x}", rva);
|
||||
if let Some(offset) = utils::find_offset(rva as usize, sections, file_alignment) {
|
||||
if let Some(offset) =
|
||||
utils::find_offset(rva as usize, sections, file_alignment, opts)
|
||||
{
|
||||
debug!("offset {:#x}", offset);
|
||||
HintNameTableEntry::parse(bytes, offset)?
|
||||
} else {
|
||||
warn!("Entry {} has bad RVA: {:#x}", table.len(), rva);
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
};
|
||||
HintNameTableRVA ((rva, hentry))
|
||||
HintNameTableRVA((rva, hentry))
|
||||
}
|
||||
};
|
||||
table.push(entry);
|
||||
|
|
@@ -108,8 +159,7 @@ impl<'a> SyntheticImportLookupTableEntry<'a> {
|
|||
pub type ImportAddressTable = Vec<u64>;
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Debug)]
|
||||
#[derive(Pread, Pwrite, SizeWith)]
|
||||
#[derive(Debug, Pread, Pwrite, SizeWith)]
|
||||
pub struct ImportDirectoryEntry {
|
||||
pub import_lookup_table_rva: u32,
|
||||
pub time_date_stamp: u32,
|
||||
|
|
@@ -121,12 +171,12 @@ pub struct ImportDirectoryEntry {
|
|||
pub const SIZEOF_IMPORT_DIRECTORY_ENTRY: usize = 20;
|
||||
|
||||
impl ImportDirectoryEntry {
|
||||
pub fn is_null (&self) -> bool {
|
||||
(self.import_lookup_table_rva == 0) &&
|
||||
(self.time_date_stamp == 0) &&
|
||||
(self.forwarder_chain == 0) &&
|
||||
(self.name_rva == 0) &&
|
||||
(self.import_address_table_rva == 0)
|
||||
pub fn is_null(&self) -> bool {
|
||||
(self.import_lookup_table_rva == 0)
|
||||
&& (self.time_date_stamp == 0)
|
||||
&& (self.forwarder_chain == 0)
|
||||
&& (self.name_rva == 0)
|
||||
&& (self.import_address_table_rva == 0)
|
||||
}
|
||||
}
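
is_null() above recognizes the all-zero entry that terminates the import directory table, which is why the parser loop further down stops at the first null record. A one-line sketch of the same idea (not part of the diff; ImportDirectoryEntry is the public type defined above, crate consumed as goblin):

// Sketch: count the live import directory entries before the null terminator.
use goblin::pe::import::ImportDirectoryEntry;

fn live_entries(entries: &[ImportDirectoryEntry]) -> usize {
    entries.iter().take_while(|e| !e.is_null()).count()
}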
|
||||
|
||||
|
|
@@ -142,39 +192,105 @@ pub struct SyntheticImportDirectoryEntry<'a> {
|
|||
}
|
||||
|
||||
impl<'a> SyntheticImportDirectoryEntry<'a> {
|
||||
pub fn parse<T: Bitfield<'a>>(bytes: &'a [u8], import_directory_entry: ImportDirectoryEntry, sections: &[section_table::SectionTable], file_alignment: u32) -> error::Result<SyntheticImportDirectoryEntry<'a>> {
|
||||
pub fn parse<T: Bitfield<'a>>(
|
||||
bytes: &'a [u8],
|
||||
import_directory_entry: ImportDirectoryEntry,
|
||||
sections: &[section_table::SectionTable],
|
||||
file_alignment: u32,
|
||||
) -> error::Result<SyntheticImportDirectoryEntry<'a>> {
|
||||
Self::parse_with_opts::<T>(
|
||||
bytes,
|
||||
import_directory_entry,
|
||||
sections,
|
||||
file_alignment,
|
||||
&options::ParseOptions::default(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn parse_with_opts<T: Bitfield<'a>>(
|
||||
bytes: &'a [u8],
|
||||
import_directory_entry: ImportDirectoryEntry,
|
||||
sections: &[section_table::SectionTable],
|
||||
file_alignment: u32,
|
||||
opts: &options::ParseOptions,
|
||||
) -> error::Result<SyntheticImportDirectoryEntry<'a>> {
|
||||
const LE: scroll::Endian = scroll::LE;
|
||||
let name_rva = import_directory_entry.name_rva;
|
||||
let name = utils::try_name(bytes, name_rva as usize, sections, file_alignment)?;
|
||||
let name = utils::try_name(bytes, name_rva as usize, sections, file_alignment, opts)?;
|
||||
let import_lookup_table = {
|
||||
let import_lookup_table_rva = import_directory_entry.import_lookup_table_rva;
|
||||
let import_address_table_rva = import_directory_entry.import_address_table_rva;
|
||||
if let Some(import_lookup_table_offset) = utils::find_offset(import_lookup_table_rva as usize, sections, file_alignment) {
|
||||
if let Some(import_lookup_table_offset) = utils::find_offset(
|
||||
import_lookup_table_rva as usize,
|
||||
sections,
|
||||
file_alignment,
|
||||
opts,
|
||||
) {
|
||||
debug!("Synthesizing lookup table imports for {} lib, with import lookup table rva: {:#x}", name, import_lookup_table_rva);
|
||||
let import_lookup_table = SyntheticImportLookupTableEntry::parse::<T>(bytes, import_lookup_table_offset, sections, file_alignment)?;
|
||||
debug!("Successfully synthesized import lookup table entry from lookup table: {:#?}", import_lookup_table);
|
||||
let import_lookup_table = SyntheticImportLookupTableEntry::parse_with_opts::<T>(
|
||||
bytes,
|
||||
import_lookup_table_offset,
|
||||
sections,
|
||||
file_alignment,
|
||||
opts,
|
||||
)?;
|
||||
debug!(
|
||||
"Successfully synthesized import lookup table entry from lookup table: {:#?}",
|
||||
import_lookup_table
|
||||
);
|
||||
Some(import_lookup_table)
|
||||
} else if let Some(import_address_table_offset) = utils::find_offset(import_address_table_rva as usize, sections, file_alignment) {
|
||||
} else if let Some(import_address_table_offset) = utils::find_offset(
|
||||
import_address_table_rva as usize,
|
||||
sections,
|
||||
file_alignment,
|
||||
opts,
|
||||
) {
|
||||
debug!("Synthesizing lookup table imports for {} lib, with import address table rva: {:#x}", name, import_lookup_table_rva);
|
||||
let import_address_table = SyntheticImportLookupTableEntry::parse::<T>(bytes, import_address_table_offset, sections, file_alignment)?;
|
||||
debug!("Successfully synthesized import lookup table entry from IAT: {:#?}", import_address_table);
|
||||
let import_address_table = SyntheticImportLookupTableEntry::parse_with_opts::<T>(
|
||||
bytes,
|
||||
import_address_table_offset,
|
||||
sections,
|
||||
file_alignment,
|
||||
opts,
|
||||
)?;
|
||||
debug!(
|
||||
"Successfully synthesized import lookup table entry from IAT: {:#?}",
|
||||
import_address_table
|
||||
);
|
||||
Some(import_address_table)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
let import_address_table_offset = &mut utils::find_offset(import_directory_entry.import_address_table_rva as usize, sections, file_alignment).ok_or_else(|| error::Error::Malformed(format!("Cannot map import_address_table_rva {:#x} into offset for {}", import_directory_entry.import_address_table_rva, name)))?;
|
||||
let import_address_table_offset = &mut utils::find_offset(
|
||||
import_directory_entry.import_address_table_rva as usize,
|
||||
sections,
|
||||
file_alignment,
|
||||
opts,
|
||||
)
|
||||
.ok_or_else(|| {
|
||||
error::Error::Malformed(format!(
|
||||
"Cannot map import_address_table_rva {:#x} into offset for {}",
|
||||
import_directory_entry.import_address_table_rva, name
|
||||
))
|
||||
})?;
|
||||
let mut import_address_table = Vec::new();
|
||||
loop {
|
||||
let import_address = bytes.gread_with::<T>(import_address_table_offset, LE)?.into();
|
||||
if import_address == 0 { break } else { import_address_table.push(import_address); }
|
||||
let import_address = bytes
|
||||
.gread_with::<T>(import_address_table_offset, LE)?
|
||||
.into();
|
||||
if import_address == 0 {
|
||||
break;
|
||||
} else {
|
||||
import_address_table.push(import_address);
|
||||
}
|
||||
}
|
||||
Ok(SyntheticImportDirectoryEntry {
|
||||
import_directory_entry,
|
||||
name,
|
||||
import_lookup_table,
|
||||
import_address_table
|
||||
import_address_table,
|
||||
})
|
||||
}
|
||||
}
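
Throughout this file the RVA-to-file-offset translation now goes through utils::find_offset with an explicit ParseOptions argument. A caller-side sketch (mine, not from the diff; it assumes only the find_offset signature visible in the calls above and the crate consumed as goblin):

// Sketch: resolve an RVA to a file offset the way the patched call sites do.
use goblin::pe::{options::ParseOptions, section_table::SectionTable, utils};

fn rva_to_offset(rva: usize, sections: &[SectionTable], file_alignment: u32) -> Option<usize> {
    utils::find_offset(rva, sections, file_alignment, &ParseOptions::default())
}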
|
||||
|
|
@@ -186,25 +302,63 @@ pub struct ImportData<'a> {
|
|||
}
|
||||
|
||||
impl<'a> ImportData<'a> {
|
||||
pub fn parse<T: Bitfield<'a>>(bytes: &'a[u8], dd: data_directories::DataDirectory, sections: &[section_table::SectionTable], file_alignment: u32) -> error::Result<ImportData<'a>> {
|
||||
pub fn parse<T: Bitfield<'a>>(
|
||||
bytes: &'a [u8],
|
||||
dd: data_directories::DataDirectory,
|
||||
sections: &[section_table::SectionTable],
|
||||
file_alignment: u32,
|
||||
) -> error::Result<ImportData<'a>> {
|
||||
Self::parse_with_opts::<T>(
|
||||
bytes,
|
||||
dd,
|
||||
sections,
|
||||
file_alignment,
|
||||
&options::ParseOptions::default(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn parse_with_opts<T: Bitfield<'a>>(
|
||||
bytes: &'a [u8],
|
||||
dd: data_directories::DataDirectory,
|
||||
sections: &[section_table::SectionTable],
|
||||
file_alignment: u32,
|
||||
opts: &options::ParseOptions,
|
||||
) -> error::Result<ImportData<'a>> {
|
||||
let import_directory_table_rva = dd.virtual_address as usize;
|
||||
debug!("import_directory_table_rva {:#x}", import_directory_table_rva);
|
||||
let offset = &mut utils::find_offset(import_directory_table_rva, sections, file_alignment).ok_or_else(|| error::Error::Malformed(format!("Cannot create ImportData; cannot map import_directory_table_rva {:#x} into offset", import_directory_table_rva)))?;
|
||||
debug!(
|
||||
"import_directory_table_rva {:#x}",
|
||||
import_directory_table_rva
|
||||
);
|
||||
let offset =
|
||||
&mut utils::find_offset(import_directory_table_rva, sections, file_alignment, opts)
|
||||
.ok_or_else(|| {
|
||||
error::Error::Malformed(format!(
|
||||
"Cannot create ImportData; cannot map import_directory_table_rva {:#x} into offset",
|
||||
import_directory_table_rva
|
||||
))
|
||||
})?;
|
||||
debug!("import data offset {:#x}", offset);
|
||||
let mut import_data = Vec::new();
|
||||
loop {
|
||||
let import_directory_entry: ImportDirectoryEntry = bytes.gread_with(offset, scroll::LE)?;
|
||||
let import_directory_entry: ImportDirectoryEntry =
|
||||
bytes.gread_with(offset, scroll::LE)?;
|
||||
debug!("{:#?}", import_directory_entry);
|
||||
if import_directory_entry.is_null() {
|
||||
break;
|
||||
} else {
|
||||
let entry = SyntheticImportDirectoryEntry::parse::<T>(bytes, import_directory_entry, sections, file_alignment)?;
|
||||
let entry = SyntheticImportDirectoryEntry::parse_with_opts::<T>(
|
||||
bytes,
|
||||
import_directory_entry,
|
||||
sections,
|
||||
file_alignment,
|
||||
opts,
|
||||
)?;
|
||||
debug!("entry {:#?}", entry);
|
||||
import_data.push(entry);
|
||||
}
|
||||
}
|
||||
debug!("finished ImportData");
|
||||
Ok(ImportData { import_data})
|
||||
Ok(ImportData { import_data })
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@@ -220,7 +374,11 @@ pub struct Import<'a> {
|
|||
}
|
||||
|
||||
impl<'a> Import<'a> {
|
||||
pub fn parse<T: Bitfield<'a>>(_bytes: &'a [u8], import_data: &ImportData<'a>, _sections: &[section_table::SectionTable]) -> error::Result<Vec<Import<'a>>> {
|
||||
pub fn parse<T: Bitfield<'a>>(
|
||||
_bytes: &'a [u8],
|
||||
import_data: &ImportData<'a>,
|
||||
_sections: &[section_table::SectionTable],
|
||||
) -> error::Result<Vec<Import<'a>>> {
|
||||
let mut imports = Vec::new();
|
||||
for data in &import_data.import_data {
|
||||
if let Some(ref import_lookup_table) = data.import_lookup_table {
|
||||
|
|
@@ -230,29 +388,30 @@ impl<'a> Import<'a> {
|
|||
for (i, entry) in import_lookup_table.iter().enumerate() {
|
||||
let offset = import_base + (i * T::size_of());
|
||||
use self::SyntheticImportLookupTableEntry::*;
|
||||
let (rva, name, ordinal) =
|
||||
match *entry {
|
||||
HintNameTableRVA ((rva, ref hint_entry)) => {
|
||||
// if hint_entry.name = "" && hint_entry.hint = 0 {
|
||||
// println!("<PE.Import> warning hint/name table rva from {} without hint {:#x}", dll, rva);
|
||||
// }
|
||||
(rva, Cow::Borrowed(hint_entry.name), hint_entry.hint)
|
||||
},
|
||||
OrdinalNumber(ordinal) => {
|
||||
let name = format!("ORDINAL {}", ordinal);
|
||||
(0x0, Cow::Owned(name), ordinal)
|
||||
},
|
||||
};
|
||||
let import =
|
||||
Import {
|
||||
name,
|
||||
ordinal, dll,
|
||||
size: T::size_of(), offset, rva: rva as usize
|
||||
};
|
||||
let (rva, name, ordinal) = match *entry {
|
||||
HintNameTableRVA((rva, ref hint_entry)) => {
|
||||
// if hint_entry.name = "" && hint_entry.hint = 0 {
|
||||
// println!("<PE.Import> warning hint/name table rva from {} without hint {:#x}", dll, rva);
|
||||
// }
|
||||
(rva, Cow::Borrowed(hint_entry.name), hint_entry.hint)
|
||||
}
|
||||
OrdinalNumber(ordinal) => {
|
||||
let name = format!("ORDINAL {}", ordinal);
|
||||
(0x0, Cow::Owned(name), ordinal)
|
||||
}
|
||||
};
|
||||
let import = Import {
|
||||
name,
|
||||
ordinal,
|
||||
dll,
|
||||
size: T::size_of(),
|
||||
offset,
|
||||
rva: rva as usize,
|
||||
};
|
||||
imports.push(import);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok (imports)
|
||||
Ok(imports)
|
||||
}
|
||||
}

third_party/rust/goblin/src/pe/mod.rs (vendored, 244 changes)
@@ -5,21 +5,22 @@
|
|||
|
||||
use alloc::vec::Vec;
|
||||
|
||||
pub mod header;
|
||||
pub mod optional_header;
|
||||
pub mod characteristic;
|
||||
pub mod section_table;
|
||||
pub mod data_directories;
|
||||
pub mod export;
|
||||
pub mod import;
|
||||
pub mod debug;
|
||||
pub mod exception;
|
||||
pub mod symbol;
|
||||
pub mod export;
|
||||
pub mod header;
|
||||
pub mod import;
|
||||
pub mod optional_header;
|
||||
pub mod options;
|
||||
pub mod relocation;
|
||||
pub mod section_table;
|
||||
pub mod symbol;
|
||||
pub mod utils;
|
||||
|
||||
use crate::error;
|
||||
use crate::container;
|
||||
use crate::error;
|
||||
use crate::strtab;
|
||||
|
||||
use log::debug;
|
||||
|
|
@@ -62,9 +63,17 @@ pub struct PE<'a> {
|
|||
impl<'a> PE<'a> {
|
||||
/// Reads a PE binary from the underlying `bytes`
|
||||
pub fn parse(bytes: &'a [u8]) -> error::Result<Self> {
|
||||
Self::parse_with_opts(bytes, &options::ParseOptions::default())
|
||||
}
|
||||
|
||||
/// Reads a PE binary from the underlying `bytes`
|
||||
pub fn parse_with_opts(bytes: &'a [u8], opts: &options::ParseOptions) -> error::Result<Self> {
|
||||
let header = header::Header::parse(bytes)?;
|
||||
debug!("{:#?}", header);
|
||||
let offset = &mut (header.dos_header.pe_pointer as usize + header::SIZEOF_PE_MAGIC + header::SIZEOF_COFF_HEADER + header.coff_header.size_of_optional_header as usize);
|
||||
let offset = &mut (header.dos_header.pe_pointer as usize
|
||||
+ header::SIZEOF_PE_MAGIC
|
||||
+ header::SIZEOF_COFF_HEADER
|
||||
+ header.coff_header.size_of_optional_header as usize);
|
||||
let sections = header.coff_header.sections(bytes, offset)?;
|
||||
let is_lib = characteristic::is_dll(header.coff_header.characteristics);
|
||||
let mut entry = 0;
|
||||
|
|
@@ -82,12 +91,27 @@ impl<'a> PE<'a> {
|
|||
entry = optional_header.standard_fields.address_of_entry_point as usize;
|
||||
image_base = optional_header.windows_fields.image_base as usize;
|
||||
is_64 = optional_header.container()? == container::Container::Big;
|
||||
debug!("entry {:#x} image_base {:#x} is_64: {}", entry, image_base, is_64);
|
||||
debug!(
|
||||
"entry {:#x} image_base {:#x} is_64: {}",
|
||||
entry, image_base, is_64
|
||||
);
|
||||
let file_alignment = optional_header.windows_fields.file_alignment;
|
||||
if let Some(export_table) = *optional_header.data_directories.get_export_table() {
|
||||
if let Ok(ed) = export::ExportData::parse(bytes, export_table, §ions, file_alignment) {
|
||||
if let Ok(ed) = export::ExportData::parse_with_opts(
|
||||
bytes,
|
||||
export_table,
|
||||
§ions,
|
||||
file_alignment,
|
||||
opts,
|
||||
) {
|
||||
debug!("export data {:#?}", ed);
|
||||
exports = export::Export::parse(bytes, &ed, §ions, file_alignment)?;
|
||||
exports = export::Export::parse_with_opts(
|
||||
bytes,
|
||||
&ed,
|
||||
§ions,
|
||||
file_alignment,
|
||||
opts,
|
||||
)?;
|
||||
name = ed.name;
|
||||
debug!("name: {:#?}", name);
|
||||
export_data = Some(ed);
|
||||
|
|
@@ -96,9 +120,21 @@ impl<'a> PE<'a> {
|
|||
debug!("exports: {:#?}", exports);
|
||||
if let Some(import_table) = *optional_header.data_directories.get_import_table() {
|
||||
let id = if is_64 {
|
||||
import::ImportData::parse::<u64>(bytes, import_table, §ions, file_alignment)?
|
||||
import::ImportData::parse_with_opts::<u64>(
|
||||
bytes,
|
||||
import_table,
|
||||
§ions,
|
||||
file_alignment,
|
||||
opts,
|
||||
)?
|
||||
} else {
|
||||
import::ImportData::parse::<u32>(bytes, import_table, §ions, file_alignment)?
|
||||
import::ImportData::parse_with_opts::<u32>(
|
||||
bytes,
|
||||
import_table,
|
||||
§ions,
|
||||
file_alignment,
|
||||
opts,
|
||||
)?
|
||||
};
|
||||
debug!("import data {:#?}", id);
|
||||
if is_64 {
|
||||
|
|
@@ -106,25 +142,43 @@ impl<'a> PE<'a> {
|
|||
} else {
|
||||
imports = import::Import::parse::<u32>(bytes, &id, §ions)?
|
||||
}
|
||||
libraries = id.import_data.iter().map( | data | { data.name }).collect::<Vec<&'a str>>();
|
||||
libraries = id
|
||||
.import_data
|
||||
.iter()
|
||||
.map(|data| data.name)
|
||||
.collect::<Vec<&'a str>>();
|
||||
libraries.sort();
|
||||
libraries.dedup();
|
||||
import_data = Some(id);
|
||||
}
|
||||
debug!("imports: {:#?}", imports);
|
||||
if let Some(debug_table) = *optional_header.data_directories.get_debug_table() {
|
||||
debug_data = Some(debug::DebugData::parse(bytes, debug_table, §ions, file_alignment)?);
|
||||
debug_data = Some(debug::DebugData::parse_with_opts(
|
||||
bytes,
|
||||
debug_table,
|
||||
§ions,
|
||||
file_alignment,
|
||||
opts,
|
||||
)?);
|
||||
}
|
||||
|
||||
if header.coff_header.machine == header::COFF_MACHINE_X86_64 {
|
||||
// currently only x86_64 is supported
|
||||
debug!("exception data: {:#?}", exception_data);
|
||||
if let Some(exception_table) = *optional_header.data_directories.get_exception_table() {
|
||||
exception_data = Some(exception::ExceptionData::parse(bytes, exception_table, §ions, file_alignment)?);
|
||||
if let Some(exception_table) =
|
||||
*optional_header.data_directories.get_exception_table()
|
||||
{
|
||||
exception_data = Some(exception::ExceptionData::parse_with_opts(
|
||||
bytes,
|
||||
exception_table,
|
||||
§ions,
|
||||
file_alignment,
|
||||
opts,
|
||||
)?);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok( PE {
|
||||
Ok(PE {
|
||||
header,
|
||||
sections,
|
||||
size: 0,
|
||||
|
|
@@ -168,6 +222,158 @@ impl<'a> Coff<'a> {
|
|||
let sections = header.sections(bytes, offset)?;
|
||||
let symbols = header.symbols(bytes)?;
|
||||
let strings = header.strings(bytes)?;
|
||||
Ok(Coff { header, sections, symbols, strings })
|
||||
Ok(Coff {
|
||||
header,
|
||||
sections,
|
||||
symbols,
|
||||
strings,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::Coff;
|
||||
use super::PE;
|
||||
|
||||
static INVALID_DOS_SIGNATURE: [u8; 512] = [
|
||||
0x3D, 0x5A, 0x90, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0x00,
|
||||
0x00, 0xB8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x01, 0x00, 0x00, 0x0E, 0x1F, 0xBA, 0x0E, 0x00, 0xB4, 0x09, 0xCD, 0x21, 0xB8, 0x01,
|
||||
0x4C, 0xCD, 0x21, 0x54, 0x68, 0x69, 0x73, 0x20, 0x70, 0x72, 0x6F, 0x67, 0x72, 0x61, 0x6D,
|
||||
0x20, 0x63, 0x61, 0x6E, 0x6E, 0x6F, 0x74, 0x20, 0x62, 0x65, 0x20, 0x72, 0x75, 0x6E, 0x20,
|
||||
0x69, 0x6E, 0x20, 0x44, 0x4F, 0x53, 0x20, 0x6D, 0x6F, 0x64, 0x65, 0x2E, 0x0D, 0x0D, 0x0A,
|
||||
0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x23, 0x31, 0xE2, 0xB1, 0x67, 0x50, 0x8C,
|
||||
0xE2, 0x67, 0x50, 0x8C, 0xE2, 0x67, 0x50, 0x8C, 0xE2, 0x3C, 0x38, 0x88, 0xE3, 0x6D, 0x50,
|
||||
0x8C, 0xE2, 0x3C, 0x38, 0x8F, 0xE3, 0x62, 0x50, 0x8C, 0xE2, 0x3C, 0x38, 0x89, 0xE3, 0xE0,
|
||||
0x50, 0x8C, 0xE2, 0xAC, 0x3F, 0x89, 0xE3, 0x42, 0x50, 0x8C, 0xE2, 0xAC, 0x3F, 0x88, 0xE3,
|
||||
0x77, 0x50, 0x8C, 0xE2, 0xAC, 0x3F, 0x8F, 0xE3, 0x6E, 0x50, 0x8C, 0xE2, 0x3C, 0x38, 0x8D,
|
||||
0xE3, 0x64, 0x50, 0x8C, 0xE2, 0x67, 0x50, 0x8D, 0xE2, 0x3F, 0x50, 0x8C, 0xE2, 0xE1, 0x20,
|
||||
0x85, 0xE3, 0x66, 0x50, 0x8C, 0xE2, 0xE1, 0x20, 0x73, 0xE2, 0x66, 0x50, 0x8C, 0xE2, 0xE1,
|
||||
0x20, 0x8E, 0xE3, 0x66, 0x50, 0x8C, 0xE2, 0x52, 0x69, 0x63, 0x68, 0x67, 0x50, 0x8C, 0xE2,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x50, 0x45, 0x00, 0x00, 0x64, 0x86, 0x07, 0x00, 0x5F, 0x41, 0xFC, 0x5E, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF0, 0x00, 0x22, 0x00, 0x0B, 0x02, 0x0E, 0x1A, 0x00,
|
||||
0xFC, 0x00, 0x00, 0x00, 0xD6, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE4, 0x14, 0x00, 0x00,
|
||||
0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00,
|
||||
0x00, 0x00, 0x02, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x02, 0x00, 0x00, 0x04, 0x00, 0x00, 0xE0,
|
||||
0x68, 0x02, 0x00, 0x03, 0x00, 0x60, 0x81, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0xA3, 0x01, 0x00, 0x28,
|
||||
0x00, 0x00, 0x00, 0x00, 0xF0, 0x01, 0x00, 0xE0, 0x01, 0x00, 0x00, 0x00, 0xD0, 0x01, 0x00,
|
||||
0x60, 0x0F, 0x00, 0x00, 0x00, 0xC4, 0x01, 0x00, 0xF8, 0x46, 0x00, 0x00, 0x00, 0x00, 0x02,
|
||||
0x00, 0x54, 0x06, 0x00, 0x00, 0xF0, 0x91, 0x01, 0x00, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x60, 0x92, 0x01, 0x00, 0x30, 0x01, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x01, 0x00, 0x48, 0x02, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00,
|
||||
];
|
||||
|
||||
static INVALID_PE_SIGNATURE: [u8; 512] = [
|
||||
0x4D, 0x5A, 0x90, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0x00,
|
||||
0x00, 0xB8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x01, 0x00, 0x00, 0x0E, 0x1F, 0xBA, 0x0E, 0x00, 0xB4, 0x09, 0xCD, 0x21, 0xB8, 0x01,
|
||||
0x4C, 0xCD, 0x21, 0x54, 0x68, 0x69, 0x73, 0x20, 0x70, 0x72, 0x6F, 0x67, 0x72, 0x61, 0x6D,
|
||||
0x20, 0x63, 0x61, 0x6E, 0x6E, 0x6F, 0x74, 0x20, 0x62, 0x65, 0x20, 0x72, 0x75, 0x6E, 0x20,
|
||||
0x69, 0x6E, 0x20, 0x44, 0x4F, 0x53, 0x20, 0x6D, 0x6F, 0x64, 0x65, 0x2E, 0x0D, 0x0D, 0x0A,
|
||||
0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x23, 0x31, 0xE2, 0xB1, 0x67, 0x50, 0x8C,
|
||||
0xE2, 0x67, 0x50, 0x8C, 0xE2, 0x67, 0x50, 0x8C, 0xE2, 0x3C, 0x38, 0x88, 0xE3, 0x6D, 0x50,
|
||||
0x8C, 0xE2, 0x3C, 0x38, 0x8F, 0xE3, 0x62, 0x50, 0x8C, 0xE2, 0x3C, 0x38, 0x89, 0xE3, 0xE0,
|
||||
0x50, 0x8C, 0xE2, 0xAC, 0x3F, 0x89, 0xE3, 0x42, 0x50, 0x8C, 0xE2, 0xAC, 0x3F, 0x88, 0xE3,
|
||||
0x77, 0x50, 0x8C, 0xE2, 0xAC, 0x3F, 0x8F, 0xE3, 0x6E, 0x50, 0x8C, 0xE2, 0x3C, 0x38, 0x8D,
|
||||
0xE3, 0x64, 0x50, 0x8C, 0xE2, 0x67, 0x50, 0x8D, 0xE2, 0x3F, 0x50, 0x8C, 0xE2, 0xE1, 0x20,
|
||||
0x85, 0xE3, 0x66, 0x50, 0x8C, 0xE2, 0xE1, 0x20, 0x73, 0xE2, 0x66, 0x50, 0x8C, 0xE2, 0xE1,
|
||||
0x20, 0x8E, 0xE3, 0x66, 0x50, 0x8C, 0xE2, 0x52, 0x69, 0x63, 0x68, 0x67, 0x50, 0x8C, 0xE2,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x50, 0x05, 0x00, 0x00, 0x64, 0x86, 0x07, 0x00, 0x5F, 0x41, 0xFC, 0x5E, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF0, 0x00, 0x22, 0x00, 0x0B, 0x02, 0x0E, 0x1A, 0x00,
|
||||
0xFC, 0x00, 0x00, 0x00, 0xD6, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE4, 0x14, 0x00, 0x00,
|
||||
0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00,
|
||||
0x00, 0x00, 0x02, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x02, 0x00, 0x00, 0x04, 0x00, 0x00, 0xE0,
|
||||
0x68, 0x02, 0x00, 0x03, 0x00, 0x60, 0x81, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0xA3, 0x01, 0x00, 0x28,
|
||||
0x00, 0x00, 0x00, 0x00, 0xF0, 0x01, 0x00, 0xE0, 0x01, 0x00, 0x00, 0x00, 0xD0, 0x01, 0x00,
|
||||
0x60, 0x0F, 0x00, 0x00, 0x00, 0xC4, 0x01, 0x00, 0xF8, 0x46, 0x00, 0x00, 0x00, 0x00, 0x02,
|
||||
0x00, 0x54, 0x06, 0x00, 0x00, 0xF0, 0x91, 0x01, 0x00, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x60, 0x92, 0x01, 0x00, 0x30, 0x01, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x01, 0x00, 0x48, 0x02, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00,
|
||||
];
|
||||
|
||||
// The assembler program used to generate this string is as follows:
|
||||
//
|
||||
// bits 64
|
||||
// default rel
|
||||
// segment .text
|
||||
// global main
|
||||
// extern ExitProcess
|
||||
// main:
|
||||
// xor rax, rax
|
||||
// call ExitProcess
|
||||
//
|
||||
//
|
||||
// The code can be compiled using nasm (https://nasm.us) with the command below:
|
||||
// nasm -f win64 <filename>.asm -o <filename>.obj
|
||||
static COFF_FILE_SINGLE_STRING_IN_STRING_TABLE: [u8; 220] = [
|
||||
0x64, 0x86, 0x1, 0x0, 0xb5, 0x39, 0x91, 0x62, 0x4e, 0x0, 0x0, 0x0, 0x7, 0x0, 0x0, 0x0, 0x0,
|
||||
0x0, 0x0, 0x0, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
|
||||
0x0, 0x0, 0x8, 0x0, 0x0, 0x0, 0x3c, 0x0, 0x0, 0x0, 0x44, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
|
||||
0x1, 0x0, 0x0, 0x0, 0x20, 0x0, 0x50, 0x60, 0x48, 0x31, 0xc0, 0xe8, 0x0, 0x0, 0x0, 0x0, 0x4,
|
||||
0x0, 0x0, 0x0, 0x5, 0x0, 0x0, 0x0, 0x4, 0x0, 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x0, 0x0, 0x0,
|
||||
0x0, 0x0, 0x0, 0x0, 0xfe, 0xff, 0x0, 0x0, 0x67, 0x1, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67,
|
||||
0x73, 0x2e, 0x61, 0x73, 0x6d, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2e, 0x74, 0x65, 0x78,
|
||||
0x74, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x3, 0x1, 0x8, 0x0, 0x0, 0x0,
|
||||
0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2e, 0x61, 0x62,
|
||||
0x73, 0x6f, 0x6c, 0x75, 0x74, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0x0, 0x0, 0x3, 0x0, 0x0, 0x0,
|
||||
0x0, 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x6d, 0x61,
|
||||
0x69, 0x6e, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x2, 0x0, 0x10,
|
||||
0x0, 0x0, 0x0, 0x45, 0x78, 0x69, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x0,
|
||||
];
|
||||
|
||||
#[test]
|
||||
fn string_table_excludes_length() {
|
||||
let coff = Coff::parse(&&COFF_FILE_SINGLE_STRING_IN_STRING_TABLE[..]).unwrap();
|
||||
let string_table = coff.strings.to_vec().unwrap();
|
||||
|
||||
assert!(string_table == vec!["ExitProcess"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn symbol_name_excludes_length() {
|
||||
let coff = Coff::parse(&COFF_FILE_SINGLE_STRING_IN_STRING_TABLE).unwrap();
|
||||
let strings = coff.strings;
|
||||
let symbols = coff
|
||||
.symbols
|
||||
.iter()
|
||||
.filter(|(_, name, _)| name.is_none())
|
||||
.map(|(_, _, sym)| sym.name(&strings).unwrap().to_owned())
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(symbols, vec!["ExitProcess"])
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_dos_header() {
|
||||
if let Ok(_) = PE::parse(&INVALID_DOS_SIGNATURE) {
|
||||
panic!("must not parse PE with invalid DOS header");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_pe_header() {
|
||||
if let Ok(_) = PE::parse(&INVALID_PE_SIGNATURE) {
|
||||
panic!("must not parse PE with invalid PE header");
|
||||
}
|
||||
}
|
||||
}
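
The two tests above check that a wrong DOS or PE signature is now rejected at the top-level entry point; that entry point also gained an options-aware variant. A caller-side sketch (not part of the diff; it uses only PE::parse_with_opts, ParseOptions::default() and public PE fields visible above, with the crate consumed as goblin):

// Sketch: parse a PE image with explicit options and read a few results.
use goblin::pe::{options::ParseOptions, PE};

fn summarize(bytes: &[u8]) -> goblin::error::Result<()> {
    let pe = PE::parse_with_opts(bytes, &ParseOptions::default())?;
    println!("entry {:#x}, {} imports, {} exports", pe.entry, pe.imports.len(), pe.exports.len());
    Ok(())
}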
@@ -8,8 +8,7 @@ use scroll::{Pread, Pwrite, SizeWith};
|
|||
|
||||
/// standard COFF fields
|
||||
#[repr(C)]
|
||||
#[derive(Debug, PartialEq, Copy, Clone, Default)]
|
||||
#[derive(Pread, Pwrite, SizeWith)]
|
||||
#[derive(Debug, PartialEq, Copy, Clone, Default, Pread, Pwrite, SizeWith)]
|
||||
pub struct StandardFields32 {
|
||||
pub magic: u16,
|
||||
pub major_linker_version: u8,
|
||||
|
|
@@ -27,8 +26,7 @@ pub const SIZEOF_STANDARD_FIELDS_32: usize = 28;
|
|||
|
||||
/// standard 64-bit COFF fields
|
||||
#[repr(C)]
|
||||
#[derive(Debug, PartialEq, Copy, Clone, Default)]
|
||||
#[derive(Pread, Pwrite, SizeWith)]
|
||||
#[derive(Debug, PartialEq, Copy, Clone, Default, Pread, Pwrite, SizeWith)]
|
||||
pub struct StandardFields64 {
|
||||
pub magic: u16,
|
||||
pub major_linker_version: u8,
|
||||
|
|
@@ -96,8 +94,7 @@ pub const MAGIC_64: u16 = 0x20b;
|
|||
|
||||
/// Windows specific fields
|
||||
#[repr(C)]
|
||||
#[derive(Debug, PartialEq, Copy, Clone, Default)]
|
||||
#[derive(Pread, Pwrite, SizeWith)]
|
||||
#[derive(Debug, PartialEq, Copy, Clone, Default, Pread, Pwrite, SizeWith)]
|
||||
pub struct WindowsFields32 {
|
||||
pub image_base: u32,
|
||||
pub section_alignment: u32,
|
||||
|
|
@@ -126,8 +123,7 @@ pub const SIZEOF_WINDOWS_FIELDS_32: usize = 68;
|
|||
|
||||
/// 64-bit Windows specific fields
|
||||
#[repr(C)]
|
||||
#[derive(Debug, PartialEq, Copy, Clone, Default)]
|
||||
#[derive(Pread, Pwrite, SizeWith)]
|
||||
#[derive(Debug, PartialEq, Copy, Clone, Default, Pread, Pwrite, SizeWith)]
|
||||
pub struct WindowsFields64 {
|
||||
pub image_base: u64,
|
||||
pub section_alignment: u32,
|
||||
|
|
@@ -145,9 +141,9 @@ pub struct WindowsFields64 {
|
|||
pub subsystem: u16,
|
||||
pub dll_characteristics: u16,
|
||||
pub size_of_stack_reserve: u64,
|
||||
pub size_of_stack_commit: u64,
|
||||
pub size_of_heap_reserve: u64,
|
||||
pub size_of_heap_commit: u64,
|
||||
pub size_of_stack_commit: u64,
|
||||
pub size_of_heap_reserve: u64,
|
||||
pub size_of_heap_commit: u64,
|
||||
pub loader_flags: u32,
|
||||
pub number_of_rva_and_sizes: u32,
|
||||
}
|
||||
|
|
@ -242,21 +238,15 @@ pub type WindowsFields = WindowsFields64;
|
|||
pub struct OptionalHeader {
|
||||
pub standard_fields: StandardFields,
|
||||
pub windows_fields: WindowsFields,
|
||||
pub data_directories: data_directories::DataDirectories
|
||||
pub data_directories: data_directories::DataDirectories,
|
||||
}
|
||||
|
||||
impl OptionalHeader {
|
||||
pub fn container(&self) -> error::Result<container::Container> {
|
||||
match self.standard_fields.magic {
|
||||
MAGIC_32 => {
|
||||
Ok(container::Container::Little)
|
||||
},
|
||||
MAGIC_64 => {
|
||||
Ok(container::Container::Big)
|
||||
},
|
||||
magic => {
|
||||
Err(error::Error::BadMagic(u64::from(magic)))
|
||||
}
|
||||
MAGIC_32 => Ok(container::Container::Little),
|
||||
MAGIC_64 => Ok(container::Container::Big),
|
||||
magic => Err(error::Error::BadMagic(u64::from(magic))),
|
||||
}
|
||||
}
|
||||
}
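As an aside for readers skimming this hunk, the magic-to-container mapping above can be exercised on its own. The sketch below is editor-added, not part of the change: `container_for` is a hypothetical helper, the module paths follow goblin's public layout, and `Container::Little`/`Container::Big` are assumed to denote 32-bit and 64-bit images respectively (endianness is carried separately in goblin's ctx).

```rust
use goblin::container::Container;
use goblin::pe::optional_header::{MAGIC_32, MAGIC_64};

// Hypothetical helper mirroring OptionalHeader::container(), without the error type.
fn container_for(magic: u16) -> Option<Container> {
    match magic {
        MAGIC_32 => Some(Container::Little), // 0x10b: PE32, i.e. 32-bit
        MAGIC_64 => Some(Container::Big),    // 0x20b: PE32+, i.e. 64-bit
        _ => None,
    }
}

fn main() {
    assert!(matches!(container_for(0x20b), Some(Container::Big)));
    assert!(container_for(0xdead).is_none());
}
```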
|
||||
|
|
@ -271,20 +261,27 @@ impl<'a> ctx::TryFromCtx<'a, Endian> for OptionalHeader {
|
|||
let standard_fields = bytes.gread_with::<StandardFields32>(offset, LE)?.into();
|
||||
let windows_fields = bytes.gread_with::<WindowsFields32>(offset, LE)?.into();
|
||||
(standard_fields, windows_fields)
|
||||
},
|
||||
}
|
||||
MAGIC_64 => {
|
||||
let standard_fields = bytes.gread_with::<StandardFields64>(offset, LE)?.into();
|
||||
let windows_fields = bytes.gread_with::<WindowsFields64>(offset, LE)?;
|
||||
(standard_fields, windows_fields)
|
||||
},
|
||||
_ => return Err(error::Error::BadMagic(u64::from(magic)))
|
||||
}
|
||||
_ => return Err(error::Error::BadMagic(u64::from(magic))),
|
||||
};
|
||||
let data_directories = data_directories::DataDirectories::parse(&bytes, windows_fields.number_of_rva_and_sizes as usize, offset)?;
|
||||
Ok ((OptionalHeader {
|
||||
standard_fields,
|
||||
windows_fields,
|
||||
data_directories,
|
||||
}, 0)) // TODO: FIXME
|
||||
let data_directories = data_directories::DataDirectories::parse(
|
||||
&bytes,
|
||||
windows_fields.number_of_rva_and_sizes as usize,
|
||||
offset,
|
||||
)?;
|
||||
Ok((
|
||||
OptionalHeader {
|
||||
standard_fields,
|
||||
windows_fields,
|
||||
data_directories,
|
||||
},
|
||||
0,
|
||||
)) // TODO: FIXME
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -293,18 +290,30 @@ mod tests {
|
|||
use super::*;
|
||||
#[test]
|
||||
fn sizeof_standards32() {
|
||||
assert_eq!(::std::mem::size_of::<StandardFields32>(), SIZEOF_STANDARD_FIELDS_32);
|
||||
assert_eq!(
|
||||
::std::mem::size_of::<StandardFields32>(),
|
||||
SIZEOF_STANDARD_FIELDS_32
|
||||
);
|
||||
}
|
||||
#[test]
|
||||
fn sizeof_windows32() {
|
||||
assert_eq!(::std::mem::size_of::<WindowsFields32>(), SIZEOF_WINDOWS_FIELDS_32);
|
||||
assert_eq!(
|
||||
::std::mem::size_of::<WindowsFields32>(),
|
||||
SIZEOF_WINDOWS_FIELDS_32
|
||||
);
|
||||
}
|
||||
#[test]
|
||||
fn sizeof_standards64() {
|
||||
assert_eq!(::std::mem::size_of::<StandardFields64>(), SIZEOF_STANDARD_FIELDS_64);
|
||||
assert_eq!(
|
||||
::std::mem::size_of::<StandardFields64>(),
|
||||
SIZEOF_STANDARD_FIELDS_64
|
||||
);
|
||||
}
|
||||
#[test]
|
||||
fn sizeof_windows64() {
|
||||
assert_eq!(::std::mem::size_of::<WindowsFields64>(), SIZEOF_WINDOWS_FIELDS_64);
|
||||
assert_eq!(
|
||||
::std::mem::size_of::<WindowsFields64>(),
|
||||
SIZEOF_WINDOWS_FIELDS_64
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
13 third_party/rust/goblin/src/pe/options.rs vendored Normal file

@ -0,0 +1,13 @@
/// Parsing Options structure for the PE parser
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ParseOptions {
|
||||
/// Whether the parser should resolve RVAs or not. Default: true
|
||||
pub resolve_rva: bool,
|
||||
}
|
||||
|
||||
impl ParseOptions {
|
||||
/// Returns a parse options structure with default values
|
||||
pub fn default() -> Self {
|
||||
ParseOptions { resolve_rva: true }
|
||||
}
|
||||
}
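Since options.rs is new in this update, here is a minimal editor-added sketch of constructing the options. Only `ParseOptions` itself is shown in this file; the `PE::parse_with_opts` entry point mentioned in the comment is assumed from the wider goblin 0.5 API and is not part of this diff.

```rust
use goblin::pe::options::ParseOptions;

fn main() {
    // Default behaviour: RVAs are resolved through the section table.
    let resolving = ParseOptions::default();

    // For an image that is already mapped into memory, resolution can be skipped;
    // find_offset() in pe/utils.rs then returns the RVA unchanged.
    let mapped = ParseOptions { resolve_rva: false };

    // Assumed entry point (not shown in this diff):
    // let pe = goblin::pe::PE::parse_with_opts(bytes, &mapped)?;
    println!("{:?} {:?}", resolving, mapped);
}
```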
|
||||
59 third_party/rust/goblin/src/pe/section_table.rs vendored

@ -1,7 +1,7 @@
use alloc::string::{String, ToString};
|
||||
use scroll::{ctx, Pread, Pwrite};
|
||||
use crate::error::{self, Error};
|
||||
use crate::pe::relocation;
|
||||
use alloc::string::{String, ToString};
|
||||
use scroll::{ctx, Pread, Pwrite};
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Debug, PartialEq, Clone, Default)]
|
||||
|
|
@ -29,18 +29,23 @@ fn base64_decode_string_entry(s: &str) -> Result<usize, ()> {
|
|||
|
||||
let mut val = 0;
|
||||
for c in s.bytes() {
|
||||
let v = if b'A' <= c && c <= b'Z' { // 00..=25
|
||||
let v = if b'A' <= c && c <= b'Z' {
|
||||
// 00..=25
|
||||
c - b'A'
|
||||
} else if b'a' <= c && c <= b'z' { // 26..=51
|
||||
} else if b'a' <= c && c <= b'z' {
|
||||
// 26..=51
|
||||
c - b'a' + 26
|
||||
} else if b'0' <= c && c <= b'9' { // 52..=61
|
||||
} else if b'0' <= c && c <= b'9' {
|
||||
// 52..=61
|
||||
c - b'0' + 52
|
||||
} else if c == b'+' { // 62
|
||||
} else if c == b'+' {
|
||||
// 62
|
||||
62
|
||||
} else if c == b'/' { // 63
|
||||
} else if c == b'/' {
|
||||
// 63
|
||||
63
|
||||
} else {
|
||||
return Err(())
|
||||
return Err(());
|
||||
};
|
||||
val = val * 64 + v as usize;
|
||||
}
|
||||
|
|
@ -48,7 +53,11 @@ fn base64_decode_string_entry(s: &str) -> Result<usize, ()> {
|
|||
}
|
||||
|
||||
impl SectionTable {
|
||||
pub fn parse(bytes: &[u8], offset: &mut usize, string_table_offset: usize) -> error::Result<Self> {
|
||||
pub fn parse(
|
||||
bytes: &[u8],
|
||||
offset: &mut usize,
|
||||
string_table_offset: usize,
|
||||
) -> error::Result<Self> {
|
||||
let mut table = SectionTable::default();
|
||||
let mut name = [0u8; 8];
|
||||
name.copy_from_slice(bytes.gread_with(offset, 8)?);
|
||||
|
|
@ -75,12 +84,17 @@ impl SectionTable {
|
|||
if self.name[0] == b'/' {
|
||||
let idx: usize = if self.name[1] == b'/' {
|
||||
let b64idx = self.name.pread::<&str>(2)?;
|
||||
base64_decode_string_entry(b64idx).map_err(|_|
|
||||
Error::Malformed(format!("Invalid indirect section name //{}: base64 decoding failed", b64idx)))?
|
||||
base64_decode_string_entry(b64idx).map_err(|_| {
|
||||
Error::Malformed(format!(
|
||||
"Invalid indirect section name //{}: base64 decoding failed",
|
||||
b64idx
|
||||
))
|
||||
})?
|
||||
} else {
|
||||
let name = self.name.pread::<&str>(1)?;
|
||||
name.parse().map_err(|err|
|
||||
Error::Malformed(format!("Invalid indirect section name /{}: {}", name, err)))?
|
||||
name.parse().map_err(|err| {
|
||||
Error::Malformed(format!("Invalid indirect section name /{}: {}", name, err))
|
||||
})?
|
||||
};
|
||||
Ok(Some(idx))
|
||||
} else {
|
||||
|
|
@ -90,7 +104,8 @@ impl SectionTable {
|
|||
|
||||
#[allow(clippy::useless_let_if_seq)]
|
||||
pub fn set_name_offset(&mut self, mut idx: usize) -> error::Result<()> {
|
||||
if idx <= 9_999_999 { // 10^7 - 1
|
||||
if idx <= 9_999_999 {
|
||||
// 10^7 - 1
|
||||
// write!(&mut self.name[1..], "{}", idx) without using io::Write.
|
||||
// We write into a temporary since we calculate digits starting at the right.
|
||||
let mut name = [0; 7];
|
||||
|
|
@ -110,7 +125,8 @@ impl SectionTable {
|
|||
self.name[0] = b'/';
|
||||
self.name[1..][..len].copy_from_slice(&name[7 - len..]);
|
||||
Ok(())
|
||||
} else if idx as u64 <= 0xfff_fff_fff { // 64^6 - 1
|
||||
} else if idx as u64 <= 0xfff_fff_fff {
|
||||
// 64^6 - 1
|
||||
self.name[0] = b'/';
|
||||
self.name[1] = b'/';
|
||||
for i in 0..6 {
|
||||
|
|
@ -128,18 +144,21 @@ impl SectionTable {
|
|||
}
|
||||
Ok(())
|
||||
} else {
|
||||
Err(Error::Malformed(format!("Invalid section name offset: {}", idx)))
|
||||
Err(Error::Malformed(format!(
|
||||
"Invalid section name offset: {}",
|
||||
idx
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn name(&self) -> error::Result<&str> {
|
||||
match self.real_name.as_ref() {
|
||||
Some(s) => Ok(s),
|
||||
None => Ok(self.name.pread(0)?)
|
||||
None => Ok(self.name.pread(0)?),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn relocations<'a>(&self, bytes: &'a[u8]) -> error::Result<relocation::Relocations<'a>> {
|
||||
pub fn relocations<'a>(&self, bytes: &'a [u8]) -> error::Result<relocation::Relocations<'a>> {
|
||||
let offset = self.pointer_to_relocations as usize;
|
||||
let number = self.number_of_relocations as usize;
|
||||
relocation::Relocations::parse(bytes, offset, number)
|
||||
|
|
@ -247,7 +266,9 @@ mod tests {
|
|||
(10_000_000, b"//AAmJaA"),
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
(0xfff_fff_fff, b"////////"),
|
||||
].iter() {
|
||||
]
|
||||
.iter()
|
||||
{
|
||||
section.set_name_offset(offset).unwrap();
|
||||
assert_eq!(&section.name, name);
|
||||
assert_eq!(section.name_offset().unwrap(), Some(offset));
|
||||
|
|
|
|||
17 third_party/rust/goblin/src/pe/symbol.rs vendored

@ -1,6 +1,6 @@
use alloc::vec::Vec;
|
||||
use crate::error;
|
||||
use crate::strtab;
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt::{self, Debug};
|
||||
use scroll::{ctx, IOread, IOwrite, Pread, Pwrite, SizeWith};
|
||||
|
||||
|
|
@ -218,11 +218,8 @@ impl Symbol {
|
|||
/// a strtab entry.
|
||||
pub fn name<'a>(&'a self, strtab: &'a strtab::Strtab) -> error::Result<&'a str> {
|
||||
if let Some(offset) = self.name_offset() {
|
||||
strtab.get(offset as usize).unwrap_or_else(|| {
|
||||
Err(error::Error::Malformed(format!(
|
||||
"Invalid Symbol name offset {:#x}",
|
||||
offset
|
||||
)))
|
||||
strtab.get_at(offset as usize).ok_or_else(|| {
|
||||
error::Error::Malformed(format!("Invalid Symbol name offset {:#x}", offset))
|
||||
})
|
||||
} else {
|
||||
Ok(self.name.pread(0)?)
|
||||
|
|
@ -233,8 +230,14 @@ impl Symbol {
|
|||
///
|
||||
/// Returns `None` if the name is inline.
|
||||
pub fn name_offset(&self) -> Option<u32> {
|
||||
// Symbol offset starts at the strtable's length, so let's adjust it
|
||||
let length_field_size = core::mem::size_of::<u32>() as u32;
|
||||
|
||||
if self.name[0] == 0 {
|
||||
self.name.pread_with(4, scroll::LE).ok()
|
||||
self.name
|
||||
.pread_with(4, scroll::LE)
|
||||
.ok()
|
||||
.map(|offset: u32| offset - length_field_size)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
|
|
|||
161 third_party/rust/goblin/src/pe/utils.rs vendored

@ -1,15 +1,16 @@
use scroll::Pread;
|
||||
use alloc::string::ToString;
|
||||
use crate::error;
|
||||
use alloc::string::ToString;
|
||||
use scroll::Pread;
|
||||
|
||||
use super::options;
|
||||
use super::section_table;
|
||||
|
||||
use core::cmp;
|
||||
use crate::pe::data_directories::DataDirectory;
|
||||
use core::cmp;
|
||||
|
||||
use log::debug;
|
||||
|
||||
pub fn is_in_range (rva: usize, r1: usize, r2: usize) -> bool {
|
||||
pub fn is_in_range(rva: usize, r1: usize, r2: usize) -> bool {
|
||||
r1 <= rva && rva < r2
|
||||
}
|
||||
|
||||
|
|
@ -27,11 +28,36 @@ fn section_read_size(section: &section_table::SectionTable, file_alignment: u32)
|
|||
(size + PAGE_MASK) & !PAGE_MASK
|
||||
}
|
||||
|
||||
// Paraphrased from https://reverseengineering.stackexchange.com/a/4326 (by Peter Ferrie).
|
||||
//
|
||||
// Handles the corner cases such as mis-aligned pointers (round down) and sizes (round up)
|
||||
// Further rounding corner cases:
|
||||
// - the physical pointer should be rounded down to a multiple of 512, regardless of the value in the header
|
||||
// - the read size is rounded up by using a combination of the file alignment and 4kb
|
||||
// - the virtual size is always rounded up to a multiple of 4kb, regardless of the value in the header.
|
||||
//
|
||||
// Reference C implementation:
|
||||
//
|
||||
// long pointerToRaw = section.get(POINTER_TO_RAW_DATA);
|
||||
// long alignedpointerToRaw = pointerToRaw & ~0x1ff;
|
||||
// long sizeOfRaw = section.get(SIZE_OF_RAW_DATA);
|
||||
// long readsize = ((pointerToRaw + sizeOfRaw) + filealign - 1) & ~(filealign - 1)) - alignedpointerToRaw;
|
||||
// readsize = min(readsize, (sizeOfRaw + 0xfff) & ~0xfff);
|
||||
// long virtsize = section.get(VIRTUAL_SIZE);
|
||||
//
|
||||
// if (virtsize)
|
||||
// {
|
||||
// readsize = min(readsize, (virtsize + 0xfff) & ~0xfff);
|
||||
// }
|
||||
|
||||
let file_alignment = file_alignment as usize;
|
||||
let size_of_raw_data = section.size_of_raw_data as usize;
|
||||
let virtual_size = section.virtual_size as usize;
|
||||
let read_size = {
|
||||
let read_size = (section.pointer_to_raw_data as usize + size_of_raw_data + file_alignment - 1) & !(file_alignment - 1);
|
||||
let read_size =
|
||||
((section.pointer_to_raw_data as usize + size_of_raw_data + file_alignment - 1)
|
||||
& !(file_alignment - 1))
|
||||
- aligned_pointer_to_raw_data(section.pointer_to_raw_data as usize);
|
||||
cmp::min(read_size, round_size(size_of_raw_data))
|
||||
};
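A worked instance of those rounding rules, with hypothetical section values (not drawn from any real binary), added here by the editor as a sanity check on the commented algorithm:

```rust
fn main() {
    // Hypothetical values: pointer_to_raw_data = 0x2fe, size_of_raw_data = 0x450,
    // file_alignment = 0x200.
    let pointer_to_raw_data = 0x2feusize;
    let size_of_raw_data = 0x450usize;
    let file_alignment = 0x200usize;

    // The physical pointer rounds down to a multiple of 512.
    let aligned_pointer = pointer_to_raw_data & !0x1ff; // 0x200

    // The end of the raw data rounds up to the file alignment, then the aligned
    // pointer is subtracted back out.
    let read_size = ((pointer_to_raw_data + size_of_raw_data + file_alignment - 1)
        & !(file_alignment - 1))
        - aligned_pointer; // 0x800 - 0x200 = 0x600

    // Capped by the raw size rounded up to 4 KiB.
    let read_size = read_size.min((size_of_raw_data + 0xfff) & !0xfff); // min(0x600, 0x1000)

    assert_eq!(read_size, 0x600);
}
```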
|
||||
|
||||
|
|
@ -42,47 +68,110 @@ fn section_read_size(section: &section_table::SectionTable, file_alignment: u32)
|
|||
}
|
||||
}
|
||||
|
||||
fn rva2offset (rva: usize, section: &section_table::SectionTable) -> usize {
|
||||
(rva - section.virtual_address as usize) + aligned_pointer_to_raw_data(section.pointer_to_raw_data as usize)
|
||||
fn rva2offset(rva: usize, section: &section_table::SectionTable) -> usize {
|
||||
(rva - section.virtual_address as usize)
|
||||
+ aligned_pointer_to_raw_data(section.pointer_to_raw_data as usize)
|
||||
}
|
||||
|
||||
fn is_in_section (rva: usize, section: &section_table::SectionTable, file_alignment: u32) -> bool {
|
||||
fn is_in_section(rva: usize, section: &section_table::SectionTable, file_alignment: u32) -> bool {
|
||||
let section_rva = section.virtual_address as usize;
|
||||
is_in_range(rva, section_rva, section_rva + section_read_size(section, file_alignment))
|
||||
is_in_range(
|
||||
rva,
|
||||
section_rva,
|
||||
section_rva + section_read_size(section, file_alignment),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn find_offset (rva: usize, sections: &[section_table::SectionTable], file_alignment: u32) -> Option<usize> {
|
||||
for (i, section) in sections.iter().enumerate() {
|
||||
debug!("Checking {} for {:#x} ∈ {:#x}..{:#x}", section.name().unwrap_or(""), rva, section.virtual_address, section.virtual_address + section.virtual_size);
|
||||
if is_in_section(rva, &section, file_alignment) {
|
||||
let offset = rva2offset(rva, &section);
|
||||
debug!("Found in section {}({}), remapped into offset {:#x}", section.name().unwrap_or(""), i, offset);
|
||||
return Some(offset)
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub fn find_offset_or (rva: usize, sections: &[section_table::SectionTable], file_alignment: u32, msg: &str) -> error::Result<usize> {
|
||||
find_offset(rva, sections, file_alignment).ok_or_else(|| error::Error::Malformed(msg.to_string()))
|
||||
}
|
||||
|
||||
pub fn try_name<'a>(bytes: &'a [u8], rva: usize, sections: &[section_table::SectionTable], file_alignment: u32) -> error::Result<&'a str> {
|
||||
match find_offset(rva, sections, file_alignment) {
|
||||
Some(offset) => {
|
||||
Ok(bytes.pread::<&str>(offset)?)
|
||||
},
|
||||
None => {
|
||||
Err(error::Error::Malformed(format!("Cannot find name from rva {:#x} in sections: {:?}", rva, sections)))
|
||||
pub fn find_offset(
|
||||
rva: usize,
|
||||
sections: &[section_table::SectionTable],
|
||||
file_alignment: u32,
|
||||
opts: &options::ParseOptions,
|
||||
) -> Option<usize> {
|
||||
if opts.resolve_rva {
|
||||
for (i, section) in sections.iter().enumerate() {
|
||||
debug!(
|
||||
"Checking {} for {:#x} ∈ {:#x}..{:#x}",
|
||||
section.name().unwrap_or(""),
|
||||
rva,
|
||||
section.virtual_address,
|
||||
section.virtual_address + section.virtual_size
|
||||
);
|
||||
if is_in_section(rva, &section, file_alignment) {
|
||||
let offset = rva2offset(rva, &section);
|
||||
debug!(
|
||||
"Found in section {}({}), remapped into offset {:#x}",
|
||||
section.name().unwrap_or(""),
|
||||
i,
|
||||
offset
|
||||
);
|
||||
return Some(offset);
|
||||
}
|
||||
}
|
||||
None
|
||||
} else {
|
||||
Some(rva)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_data<'a, T>(bytes: &'a [u8], sections: &[section_table::SectionTable], directory: DataDirectory, file_alignment: u32) -> error::Result<T>
|
||||
where T: scroll::ctx::TryFromCtx<'a, scroll::Endian, Error = scroll::Error> {
|
||||
pub fn find_offset_or(
|
||||
rva: usize,
|
||||
sections: &[section_table::SectionTable],
|
||||
file_alignment: u32,
|
||||
opts: &options::ParseOptions,
|
||||
msg: &str,
|
||||
) -> error::Result<usize> {
|
||||
find_offset(rva, sections, file_alignment, opts)
|
||||
.ok_or_else(|| error::Error::Malformed(msg.to_string()))
|
||||
}
|
||||
|
||||
pub fn try_name<'a>(
|
||||
bytes: &'a [u8],
|
||||
rva: usize,
|
||||
sections: &[section_table::SectionTable],
|
||||
file_alignment: u32,
|
||||
opts: &options::ParseOptions,
|
||||
) -> error::Result<&'a str> {
|
||||
match find_offset(rva, sections, file_alignment, opts) {
|
||||
Some(offset) => Ok(bytes.pread::<&str>(offset)?),
|
||||
None => Err(error::Error::Malformed(format!(
|
||||
"Cannot find name from rva {:#x} in sections: {:?}",
|
||||
rva, sections
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_data<'a, T>(
|
||||
bytes: &'a [u8],
|
||||
sections: &[section_table::SectionTable],
|
||||
directory: DataDirectory,
|
||||
file_alignment: u32,
|
||||
) -> error::Result<T>
|
||||
where
|
||||
T: scroll::ctx::TryFromCtx<'a, scroll::Endian, Error = scroll::Error>,
|
||||
{
|
||||
get_data_with_opts(
|
||||
bytes,
|
||||
sections,
|
||||
directory,
|
||||
file_alignment,
|
||||
&options::ParseOptions::default(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn get_data_with_opts<'a, T>(
|
||||
bytes: &'a [u8],
|
||||
sections: &[section_table::SectionTable],
|
||||
directory: DataDirectory,
|
||||
file_alignment: u32,
|
||||
opts: &options::ParseOptions,
|
||||
) -> error::Result<T>
|
||||
where
|
||||
T: scroll::ctx::TryFromCtx<'a, scroll::Endian, Error = scroll::Error>,
|
||||
{
|
||||
let rva = directory.virtual_address as usize;
|
||||
let offset = find_offset(rva, sections, file_alignment)
|
||||
.ok_or_else(||error::Error::Malformed(directory.virtual_address.to_string()))?;
|
||||
let offset = find_offset(rva, sections, file_alignment, opts)
|
||||
.ok_or_else(|| error::Error::Malformed(directory.virtual_address.to_string()))?;
|
||||
let result: T = bytes.pread_with(offset, scroll::LE)?;
|
||||
Ok(result)
|
||||
}
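As a quick editor-added illustration of what the new `opts` parameter changes: with an empty section list there is nothing to remap through, so resolution fails, while the non-resolving mode simply hands the RVA back. This assumes `pe::utils` and `find_offset` remain publicly reachable, as they are in goblin 0.5; the concrete values are arbitrary.

```rust
use goblin::pe::{options::ParseOptions, section_table::SectionTable, utils::find_offset};

fn main() {
    let sections: Vec<SectionTable> = Vec::new(); // nothing to remap through
    let rva = 0x1000;

    // resolve_rva: false short-circuits and returns the RVA unchanged.
    let raw = ParseOptions { resolve_rva: false };
    assert_eq!(find_offset(rva, &sections, 0x200, &raw), Some(rva));

    // With the default options the RVA must fall inside some section; here it cannot.
    assert_eq!(find_offset(rva, &sections, 0x200, &ParseOptions::default()), None);
}
```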
|
||||
|
|
|
|||
226 third_party/rust/goblin/src/strtab.rs vendored

@ -1,10 +1,9 @@
//! A byte-offset based string table.
|
||||
//! Commonly used in ELF binaries, Unix archives, and even PE binaries.
|
||||
|
||||
use core::ops::Index;
|
||||
use core::slice;
|
||||
use core::str;
|
||||
use core::fmt;
|
||||
use core::ops::Index;
|
||||
use core::str;
|
||||
use scroll::{ctx, Pread};
|
||||
if_alloc! {
|
||||
use crate::error;
|
||||
|
|
@ -15,8 +14,10 @@ if_alloc! {
|
|||
/// member index). Constructed using [`parse`](#method.parse)
|
||||
/// with your choice of delimiter. Please be careful.
|
||||
pub struct Strtab<'a> {
|
||||
bytes: &'a[u8],
|
||||
delim: ctx::StrCtx,
|
||||
bytes: &'a [u8],
|
||||
#[cfg(feature = "alloc")]
|
||||
strings: Vec<(usize, &'a str)>,
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
|
|
@ -25,49 +26,29 @@ fn get_str(offset: usize, bytes: &[u8], delim: ctx::StrCtx) -> scroll::Result<&s
|
|||
}
|
||||
|
||||
impl<'a> Strtab<'a> {
|
||||
/// Construct a new strtab with `bytes` as the backing string table, using `delim` as the delimiter between entries
|
||||
pub fn new (bytes: &'a [u8], delim: u8) -> Self {
|
||||
Strtab { delim: ctx::StrCtx::Delimiter(delim), bytes }
|
||||
/// Creates a `Strtab` with `bytes` as the backing string table, using `delim` as the delimiter between entries.
|
||||
///
|
||||
/// NB: this does *not* preparse the string table, which can have non-optimal access patterns.
|
||||
/// See https://github.com/m4b/goblin/pull/275#issue-660364025
|
||||
pub fn new(bytes: &'a [u8], delim: u8) -> Self {
|
||||
Self::from_slice_unparsed(bytes, 0, bytes.len(), delim)
|
||||
}
|
||||
/// Construct a strtab from a `ptr`, and a `size`, using `delim` as the delimiter
|
||||
pub unsafe fn from_raw(ptr: *const u8, size: usize, delim: u8) -> Strtab<'a> {
|
||||
Strtab { delim: ctx::StrCtx::Delimiter(delim), bytes: slice::from_raw_parts(ptr, size) }
|
||||
}
|
||||
#[cfg(feature = "alloc")]
|
||||
/// Parses a strtab from `bytes` at `offset` with `len` size as the backing string table, using `delim` as the delimiter
|
||||
pub fn parse(bytes: &'a [u8], offset: usize, len: usize, delim: u8) -> error::Result<Strtab<'a>> {
|
||||
let (end, overflow) = offset.overflowing_add(len);
|
||||
if overflow || end > bytes.len () {
|
||||
return Err(error::Error::Malformed(format!("Strtable size ({}) + offset ({}) is out of bounds for {} #bytes. Overflowed: {}", len, offset, bytes.len(), overflow)));
|
||||
}
|
||||
Ok(Strtab { bytes: &bytes[offset..end], delim: ctx::StrCtx::Delimiter(delim) })
|
||||
}
|
||||
#[cfg(feature = "alloc")]
|
||||
/// Converts the string table to a vector, with the original `delim` used to separate the strings
|
||||
pub fn to_vec(&self) -> error::Result<Vec<&'a str>> {
|
||||
let len = self.bytes.len();
|
||||
let mut strings = Vec::with_capacity(len);
|
||||
let mut i = 0;
|
||||
while i < len {
|
||||
let string = self.get(i).unwrap()?;
|
||||
i = i + string.len() + 1;
|
||||
strings.push(string);
|
||||
}
|
||||
Ok(strings)
|
||||
}
|
||||
/// Safely parses and gets a str reference from the backing bytes starting at byte `offset`.
|
||||
/// If the index is out of bounds, `None` is returned.
|
||||
/// Requires `feature = "alloc"`
|
||||
#[cfg(feature = "alloc")]
|
||||
pub fn get(&self, offset: usize) -> Option<error::Result<&'a str>> {
|
||||
if offset >= self.bytes.len() {
|
||||
None
|
||||
} else {
|
||||
Some(get_str(offset, self.bytes, self.delim).map_err(core::convert::Into::into))
|
||||
|
||||
/// Creates a `Strtab` directly without bounds check and without parsing it.
|
||||
///
|
||||
/// This is potentially unsafe and should only be used if `feature = "alloc"` is disabled.
|
||||
pub fn from_slice_unparsed(bytes: &'a [u8], offset: usize, len: usize, delim: u8) -> Self {
|
||||
Self {
|
||||
delim: ctx::StrCtx::Delimiter(delim),
|
||||
bytes: &bytes[offset..offset + len],
|
||||
#[cfg(feature = "alloc")]
|
||||
strings: Vec::new(),
|
||||
}
|
||||
}
|
||||
/// Gets a str reference from the backing bytes starting at byte `offset`.
|
||||
///
|
||||
/// If the index is out of bounds, `None` is returned. Panics if bytes are invalid UTF-8.
|
||||
/// Use this method if the `Strtab` was created using `from_slice_unparsed()`.
|
||||
pub fn get_unsafe(&self, offset: usize) -> Option<&'a str> {
|
||||
if offset >= self.bytes.len() {
|
||||
None
|
||||
|
|
@ -75,6 +56,115 @@ impl<'a> Strtab<'a> {
|
|||
Some(get_str(offset, self.bytes, self.delim).unwrap())
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "alloc")]
|
||||
/// Parses a `Strtab` from `bytes` at `offset` with `len` size as the backing string table, using `delim` as the delimiter.
|
||||
///
|
||||
/// Errors if bytes are invalid UTF-8.
|
||||
/// Requires `feature = "alloc"`
|
||||
pub fn parse(bytes: &'a [u8], offset: usize, len: usize, delim: u8) -> error::Result<Self> {
|
||||
let (end, overflow) = offset.overflowing_add(len);
|
||||
if overflow || end > bytes.len() {
|
||||
return Err(error::Error::Malformed(format!(
|
||||
"Strtable size ({}) + offset ({}) is out of bounds for {} #bytes. Overflowed: {}",
|
||||
len,
|
||||
offset,
|
||||
bytes.len(),
|
||||
overflow
|
||||
)));
|
||||
}
|
||||
let mut result = Self::from_slice_unparsed(bytes, offset, len, delim);
|
||||
let mut i = 0;
|
||||
while i < result.bytes.len() {
|
||||
let string = get_str(i, result.bytes, result.delim)?;
|
||||
result.strings.push((i, string));
|
||||
i += string.len() + 1;
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
#[cfg(feature = "alloc")]
|
||||
/// Parses a `Strtab` with `bytes` as the backing string table, using `delim` as the delimiter between entries.
|
||||
///
|
||||
/// Requires `feature = "alloc"`
|
||||
pub fn new_preparsed(bytes: &'a [u8], delim: u8) -> error::Result<Self> {
|
||||
Self::parse(bytes, 0, bytes.len(), delim)
|
||||
}
|
||||
#[cfg(feature = "alloc")]
|
||||
/// Converts the string table to a vector of parsed strings.
|
||||
///
|
||||
/// Note: This method is used to check the parsed contents of `strtab`.
|
||||
/// If you want to get the correct contents of `strtab` as `Vec`, use the following example.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```rust
|
||||
/// use goblin::error::Error;
|
||||
///
|
||||
/// pub fn show_shdr_strtab(bytes: &[u8]) -> Result<(), Error> {
|
||||
/// let elf = goblin::elf::Elf::parse(&bytes)?;
|
||||
///
|
||||
/// for section in elf.section_headers {
|
||||
/// println!("{}", elf.shdr_strtab.get_at(section.sh_name).unwrap_or(""));
|
||||
/// }
|
||||
///
|
||||
/// Ok(())
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Requires `feature = "alloc"`
|
||||
pub fn to_vec(&self) -> error::Result<Vec<&'a str>> {
|
||||
// Fallback in case `Strtab` was created using `from_slice_unparsed()`.
|
||||
if self.strings.is_empty() {
|
||||
let mut result = Vec::new();
|
||||
let mut i = 0;
|
||||
while i < self.bytes.len() {
|
||||
let string = get_str(i, self.bytes, self.delim)?;
|
||||
result.push(string);
|
||||
i += string.len() + 1;
|
||||
}
|
||||
return Ok(result);
|
||||
}
|
||||
Ok(self.strings.iter().map(|&(_key, value)| value).collect())
|
||||
}
|
||||
#[cfg(feature = "alloc")]
|
||||
/// Safely gets a str reference from the parsed table starting at byte `offset`.
|
||||
///
|
||||
/// If the index is out of bounds, `None` is returned.
|
||||
/// Requires `feature = "alloc"`
|
||||
pub fn get_at(&self, offset: usize) -> Option<&'a str> {
|
||||
match self
|
||||
.strings
|
||||
.binary_search_by_key(&offset, |&(key, _value)| key)
|
||||
{
|
||||
Ok(index) => Some(self.strings[index].1),
|
||||
Err(index) => {
|
||||
if index == 0 {
|
||||
return None;
|
||||
}
|
||||
let (string_begin_offset, entire_string) = self.strings[index - 1];
|
||||
entire_string.get(offset - string_begin_offset..)
|
||||
}
|
||||
}
|
||||
}
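The `Err` branch above is what makes an offset that lands inside an entry resolve to that entry's suffix. A small editor-added sketch against a hypothetical table (contents chosen purely for illustration), using only the API shown in this diff:

```rust
use goblin::strtab::Strtab;

fn main() {
    // Entries parse as [(0, ""), (1, "printf"), (8, "memmove")].
    let strtab = Strtab::new_preparsed(b"\0printf\0memmove\0", 0x0).unwrap();

    // Exact key: hits the Ok branch of the binary search.
    assert_eq!(strtab.get_at(1), Some("printf"));

    // Offset inside an entry: the Err branch returns the suffix of the previous entry.
    assert_eq!(strtab.get_at(3), Some("intf"));

    // Far past the data: the suffix slice is out of range, so None.
    assert_eq!(strtab.get_at(100), None);
}
```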
|
||||
#[deprecated(since = "0.4.2", note = "Use from_slice_unparsed() instead")]
|
||||
/// Construct a strtab from a `ptr`, and a `size`, using `delim` as the delimiter
|
||||
///
|
||||
/// # Safety
|
||||
/// This function creates a `Strtab` directly from a raw pointer and size
|
||||
pub unsafe fn from_raw(ptr: *const u8, len: usize, delim: u8) -> Strtab<'a> {
|
||||
Self::from_slice_unparsed(core::slice::from_raw_parts(ptr, len), 0, len, delim)
|
||||
}
|
||||
#[deprecated(since = "0.4.2", note = "Bad performance, use get_at() instead")]
|
||||
#[cfg(feature = "alloc")]
|
||||
/// Parses a str reference from the parsed table starting at byte `offset`.
|
||||
///
|
||||
/// If the index is out of bounds, `None` is returned.
|
||||
/// Requires `feature = "alloc"`
|
||||
pub fn get(&self, offset: usize) -> Option<error::Result<&'a str>> {
|
||||
if offset >= self.bytes.len() {
|
||||
None
|
||||
} else {
|
||||
Some(get_str(offset, self.bytes, self.delim).map_err(core::convert::Into::into))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> fmt::Debug for Strtab<'a> {
|
||||
|
|
@ -87,8 +177,13 @@ impl<'a> fmt::Debug for Strtab<'a> {
|
|||
}
|
||||
|
||||
impl<'a> Default for Strtab<'a> {
|
||||
fn default() -> Strtab<'a> {
|
||||
Strtab { bytes: &[], delim: ctx::StrCtx::default() }
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
delim: ctx::StrCtx::default(),
|
||||
bytes: &[],
|
||||
#[cfg(feature = "alloc")]
|
||||
strings: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -107,8 +202,7 @@ impl<'a> Index<usize> for Strtab<'a> {
|
|||
|
||||
#[test]
|
||||
fn as_vec_no_final_null() {
|
||||
let bytes = b"\0printf\0memmove\0busta";
|
||||
let strtab = unsafe { Strtab::from_raw(bytes.as_ptr(), bytes.len(), 0x0) };
|
||||
let strtab = Strtab::new_preparsed(b"\0printf\0memmove\0busta", 0x0).unwrap();
|
||||
let vec = strtab.to_vec().unwrap();
|
||||
assert_eq!(vec.len(), 4);
|
||||
assert_eq!(vec, vec!["", "printf", "memmove", "busta"]);
|
||||
|
|
@ -116,8 +210,7 @@ fn as_vec_no_final_null() {
|
|||
|
||||
#[test]
|
||||
fn as_vec_no_first_null_no_final_null() {
|
||||
let bytes = b"printf\0memmove\0busta";
|
||||
let strtab = unsafe { Strtab::from_raw(bytes.as_ptr(), bytes.len(), 0x0) };
|
||||
let strtab = Strtab::new_preparsed(b"printf\0memmove\0busta", 0x0).unwrap();
|
||||
let vec = strtab.to_vec().unwrap();
|
||||
assert_eq!(vec.len(), 3);
|
||||
assert_eq!(vec, vec!["printf", "memmove", "busta"]);
|
||||
|
|
@ -125,8 +218,7 @@ fn as_vec_no_first_null_no_final_null() {
|
|||
|
||||
#[test]
|
||||
fn to_vec_final_null() {
|
||||
let bytes = b"\0printf\0memmove\0busta\0";
|
||||
let strtab = unsafe { Strtab::from_raw(bytes.as_ptr(), bytes.len(), 0x0) };
|
||||
let strtab = Strtab::new_preparsed(b"\0printf\0memmove\0busta\0", 0x0).unwrap();
|
||||
let vec = strtab.to_vec().unwrap();
|
||||
assert_eq!(vec.len(), 4);
|
||||
assert_eq!(vec, vec!["", "printf", "memmove", "busta"]);
|
||||
|
|
@ -134,9 +226,39 @@ fn to_vec_final_null() {
|
|||
|
||||
#[test]
|
||||
fn to_vec_newline_delim() {
|
||||
let bytes = b"\nprintf\nmemmove\nbusta\n";
|
||||
let strtab = unsafe { Strtab::from_raw(bytes.as_ptr(), bytes.len(), b'\n') };
|
||||
let strtab = Strtab::new_preparsed(b"\nprintf\nmemmove\nbusta\n", b'\n').unwrap();
|
||||
let vec = strtab.to_vec().unwrap();
|
||||
assert_eq!(vec.len(), 4);
|
||||
assert_eq!(vec, vec!["", "printf", "memmove", "busta"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_utf8() {
|
||||
assert!(match Strtab::new_preparsed(&[0x80, 0x80], b'\n') {
|
||||
Err(error::Error::Scroll(scroll::Error::BadInput {
|
||||
size: 2,
|
||||
msg: "invalid utf8",
|
||||
})) => true,
|
||||
_ => false,
|
||||
});
|
||||
assert!(
|
||||
match Strtab::new_preparsed(&[0xC6, 0x92, 0x6F, 0x6F], b'\n') {
|
||||
Ok(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_at_utf8() {
|
||||
let strtab = Strtab::new_preparsed("\nƒoo\nmemmove\n🅱️usta\n".as_bytes(), b'\n').unwrap();
|
||||
assert_eq!(strtab.get_at(0), Some(""));
|
||||
assert_eq!(strtab.get_at(5), Some(""));
|
||||
assert_eq!(strtab.get_at(6), Some("memmove"));
|
||||
assert_eq!(strtab.get_at(14), Some("\u{1f171}\u{fe0f}usta"));
|
||||
assert_eq!(strtab.get_at(16), None);
|
||||
assert_eq!(strtab.get_at(18), Some("\u{fe0f}usta"));
|
||||
assert_eq!(strtab.get_at(21), Some("usta"));
|
||||
assert_eq!(strtab.get_at(25), Some(""));
|
||||
assert_eq!(strtab.get_at(26), None);
|
||||
}
|
||||
|
|
|
|||
100 third_party/rust/goblin/tests/archive.rs vendored

@ -1,100 +0,0 @@
use goblin::archive::*;
|
||||
use scroll::Pread;
|
||||
use std::path::Path;
|
||||
use std::fs::File;
|
||||
|
||||
#[test]
|
||||
fn parse_file_header() {
|
||||
let file_header: [u8; SIZEOF_HEADER] = [0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
|
||||
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
|
||||
0x20, 0x20, 0x30, 0x20, 0x20, 0x20, 0x20,
|
||||
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
|
||||
0x30, 0x20, 0x20, 0x20, 0x20, 0x20, 0x30,
|
||||
0x20, 0x20, 0x20, 0x20, 0x20, 0x30, 0x20,
|
||||
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x38,
|
||||
0x32, 0x34, 0x34, 0x20, 0x20, 0x20, 0x20,
|
||||
0x20, 0x20, 0x60, 0x0a];
|
||||
let buffer = &file_header[..];
|
||||
match buffer.pread::<MemberHeader>(0) {
|
||||
Err(e) => panic!("could not read the buffer: {:?}", e),
|
||||
Ok(file_header2) => {
|
||||
let file_header = MemberHeader {
|
||||
identifier: [0x2f,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,],
|
||||
timestamp: [48, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32],
|
||||
owner_id: [48, 32, 32, 32, 32, 32],
|
||||
group_id: [48, 32, 32, 32, 32, 32],
|
||||
mode: [48, 32, 32, 32, 32, 32, 32, 32],
|
||||
file_size: [56, 50, 52, 52, 32, 32, 32, 32, 32, 32],
|
||||
terminator: [96, 10] };
|
||||
assert_eq!(file_header, file_header2)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_archive() {
|
||||
let crt1a: Vec<u8> = include!("../etc/crt1a.rs");
|
||||
const START: &str = "_start";
|
||||
match Archive::parse(&crt1a) {
|
||||
Ok(archive) => {
|
||||
assert_eq!(archive.member_of_symbol(START), Some("crt1.o"));
|
||||
if let Some(member) = archive.get("crt1.o") {
|
||||
assert_eq!(member.offset, 194);
|
||||
assert_eq!(member.size(), 1928)
|
||||
} else {
|
||||
panic!("could not get crt1.o");
|
||||
}
|
||||
},
|
||||
Err(err) => panic!("could not parse archive: {:?}", err),
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_self() {
|
||||
use std::fs;
|
||||
use std::io::Read;
|
||||
let mut path = Path::new("target").join("debug").join("libgoblin.rlib");
|
||||
// https://github.com/m4b/goblin/issues/63
|
||||
if fs::metadata(&path).is_err() {
|
||||
path = Path::new("target").join("release").join("libgoblin.rlib");
|
||||
}
|
||||
let buffer = {
|
||||
let mut fd = File::open(path).expect("can open file; did you run cargo build first?");
|
||||
let mut v = Vec::new();
|
||||
fd.read_to_end(&mut v).expect("read file");
|
||||
v
|
||||
};
|
||||
|
||||
let archive = Archive::parse(&buffer).expect("parse rlib");
|
||||
|
||||
// check that the archive has a useful symbol table by counting the total number of symbols
|
||||
let symbol_count: usize = archive.summarize().into_iter()
|
||||
.map(|(_member_name, _member_index, ref symbols)| symbols.len())
|
||||
.sum();
|
||||
assert!(symbol_count > 500);
|
||||
|
||||
let goblin_object_name = archive.members()
|
||||
.into_iter()
|
||||
.find(|member| {
|
||||
println!("member: {:?}", member);
|
||||
member.ends_with("goblin-archive.o") // < 1.18
|
||||
|| (member.starts_with("goblin") && member.ends_with("0.o")) // >= 1.18 && < 1.22
|
||||
|| (member.starts_with("goblin") && member.ends_with("rust-cgu.o")) // = 1.22
|
||||
|| (member.starts_with("goblin") && member.ends_with("rcgu.o")) // >= nightly 1.23
|
||||
})
|
||||
.expect("goblin-<hash>.0.o not found");
|
||||
|
||||
let bytes = archive.extract(goblin_object_name, &buffer).expect("extract goblin object");
|
||||
match goblin::Object::parse(&bytes).expect("parse object") {
|
||||
goblin::Object::Elf(elf) => {
|
||||
assert!(elf.entry == 0);
|
||||
}
|
||||
goblin::Object::Mach(goblin::mach::Mach::Binary(macho)) => {
|
||||
assert_eq!(macho.header.filetype, goblin::mach::header::MH_OBJECT);
|
||||
assert_eq!(macho.entry, 0);
|
||||
}
|
||||
other => {
|
||||
panic!("unexpected Object::parse result: {:?}", other);
|
||||
}
|
||||
}
|
||||
}
|
||||
112 third_party/rust/goblin/tests/bins/elf/gnu_hash/README.md vendored Normal file

@ -0,0 +1,112 @@
# How to generate hello.so file
|
||||
|
||||
With 64-bit gcc:
|
||||
|
||||
```bash
|
||||
% gcc -o hello.so helloworld.c -Wl,--as-needed -shared -fPIC
|
||||
% readelf --dyn-syms hello.so
|
||||
|
||||
Symbol table '.dynsym' contains 13 entries:
|
||||
Num: Value Size Type Bind Vis Ndx Name
|
||||
0: 0000000000000000 0 NOTYPE LOCAL DEFAULT UND
|
||||
1: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_deregisterTMCloneTable
|
||||
2: 0000000000000000 0 FUNC GLOBAL DEFAULT UND printf@GLIBC_2.2.5 (2)
|
||||
3: 0000000000000000 0 NOTYPE WEAK DEFAULT UND __gmon_start__
|
||||
4: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_registerTMCloneTable
|
||||
5: 0000000000000000 0 FUNC WEAK DEFAULT UND __cxa_finalize@GLIBC_2.2.5 (2)
|
||||
6: 0000000000201030 0 NOTYPE GLOBAL DEFAULT 22 _edata
|
||||
7: 000000000000065a 33 FUNC GLOBAL DEFAULT 12 helloWorld
|
||||
8: 0000000000201038 0 NOTYPE GLOBAL DEFAULT 23 _end
|
||||
9: 0000000000201030 0 NOTYPE GLOBAL DEFAULT 23 __bss_start
|
||||
10: 000000000000067b 43 FUNC GLOBAL DEFAULT 12 main
|
||||
11: 0000000000000520 0 FUNC GLOBAL DEFAULT 9 _init
|
||||
12: 00000000000006a8 0 FUNC GLOBAL DEFAULT 13 _fini
|
||||
|
||||
% readelf --section-headers hello.so
|
||||
There are 26 section headers, starting at offset 0x1140:
|
||||
|
||||
Section Headers:
|
||||
[Nr] Name Type Address Offset
|
||||
Size EntSize Flags Link Info Align
|
||||
[ 0] NULL 0000000000000000 00000000
|
||||
0000000000000000 0000000000000000 0 0 0
|
||||
[ 1] .note.gnu.build-i NOTE 00000000000001c8 000001c8
|
||||
0000000000000024 0000000000000000 A 0 0 4
|
||||
[ 2] .gnu.hash GNU_HASH 00000000000001f0 000001f0
|
||||
0000000000000040 0000000000000000 A 3 0 8
|
||||
[ 3] .dynsym DYNSYM 0000000000000230 00000230
|
||||
0000000000000138 0000000000000018 A 4 1 8
|
||||
[ 4] .dynstr STRTAB 0000000000000368 00000368
|
||||
00000000000000a6 0000000000000000 A 0 0 1
|
||||
[ 5] .gnu.version VERSYM 000000000000040e 0000040e
|
||||
000000000000001a 0000000000000002 A 3 0 2
|
||||
[ 6] .gnu.version_r VERNEED 0000000000000428 00000428
|
||||
0000000000000020 0000000000000000 A 4 1 8
|
||||
[ 7] .rela.dyn RELA 0000000000000448 00000448
|
||||
00000000000000a8 0000000000000018 A 3 0 8
|
||||
[ 8] .rela.plt RELA 00000000000004f0 000004f0
|
||||
0000000000000030 0000000000000018 AI 3 21 8
|
||||
[ 9] .init PROGBITS 0000000000000520 00000520
|
||||
0000000000000017 0000000000000000 AX 0 0 4
|
||||
[10] .plt PROGBITS 0000000000000540 00000540
|
||||
0000000000000030 0000000000000010 AX 0 0 16
|
||||
[11] .plt.got PROGBITS 0000000000000570 00000570
|
||||
0000000000000008 0000000000000008 AX 0 0 8
|
||||
[12] .text PROGBITS 0000000000000580 00000580
|
||||
0000000000000126 0000000000000000 AX 0 0 16
|
||||
[13] .fini PROGBITS 00000000000006a8 000006a8
|
||||
0000000000000009 0000000000000000 AX 0 0 4
|
||||
[14] .rodata PROGBITS 00000000000006b1 000006b1
|
||||
0000000000000010 0000000000000000 A 0 0 1
|
||||
[15] .eh_frame_hdr PROGBITS 00000000000006c4 000006c4
|
||||
000000000000002c 0000000000000000 A 0 0 4
|
||||
[16] .eh_frame PROGBITS 00000000000006f0 000006f0
|
||||
000000000000009c 0000000000000000 A 0 0 8
|
||||
[17] .init_array INIT_ARRAY 0000000000200e10 00000e10
|
||||
0000000000000008 0000000000000008 WA 0 0 8
|
||||
[18] .fini_array FINI_ARRAY 0000000000200e18 00000e18
|
||||
0000000000000008 0000000000000008 WA 0 0 8
|
||||
[19] .dynamic DYNAMIC 0000000000200e20 00000e20
|
||||
00000000000001c0 0000000000000010 WA 4 0 8
|
||||
[20] .got PROGBITS 0000000000200fe0 00000fe0
|
||||
0000000000000020 0000000000000008 WA 0 0 8
|
||||
[21] .got.plt PROGBITS 0000000000201000 00001000
|
||||
0000000000000028 0000000000000008 WA 0 0 8
|
||||
[22] .data PROGBITS 0000000000201028 00001028
|
||||
0000000000000008 0000000000000000 WA 0 0 8
|
||||
[23] .bss NOBITS 0000000000201030 00001030
|
||||
0000000000000008 0000000000000000 WA 0 0 1
|
||||
[24] .comment PROGBITS 0000000000000000 00001030
|
||||
000000000000002a 0000000000000001 MS 0 0 1
|
||||
[25] .shstrtab STRTAB 0000000000000000 0000105a
|
||||
00000000000000e1 0000000000000000 0 0 1
|
||||
Key to Flags:
|
||||
W (write), A (alloc), X (execute), M (merge), S (strings), I (info),
|
||||
L (link order), O (extra OS processing required), G (group), T (TLS),
|
||||
C (compressed), x (unknown), o (OS specific), E (exclude),
|
||||
l (large), p (processor specific)
|
||||
```
|
||||
|
||||
Or in 32-bit mode (one might need to install `gcc-multilib` on Ubuntu):
|
||||
|
||||
```bash
|
||||
% gcc -m32 -o hello32.so helloworld.c -Wl,--as-needed -shared -fPIC
|
||||
% readelf --dyn-syms hello32.so
|
||||
|
||||
Symbol table '.dynsym' contains 13 entries:
|
||||
Num: Value Size Type Bind Vis Ndx Name
|
||||
0: 00000000 0 NOTYPE LOCAL DEFAULT UND
|
||||
1: 00000000 0 NOTYPE WEAK DEFAULT UND _ITM_deregisterTMCloneTable
|
||||
2: 00000000 0 FUNC GLOBAL DEFAULT UND printf@GLIBC_2.0 (2)
|
||||
3: 00000000 0 FUNC WEAK DEFAULT UND __cxa_finalize@GLIBC_2.1.3 (3)
|
||||
4: 00000000 0 NOTYPE WEAK DEFAULT UND __gmon_start__
|
||||
5: 00000000 0 NOTYPE WEAK DEFAULT UND _ITM_registerTMCloneTable
|
||||
6: 00002018 0 NOTYPE GLOBAL DEFAULT 22 _edata
|
||||
7: 000004ed 49 FUNC GLOBAL DEFAULT 12 helloWorld
|
||||
8: 0000201c 0 NOTYPE GLOBAL DEFAULT 23 _end
|
||||
9: 00002018 0 NOTYPE GLOBAL DEFAULT 23 __bss_start
|
||||
10: 0000051e 66 FUNC GLOBAL DEFAULT 12 main
|
||||
11: 0000038c 0 FUNC GLOBAL DEFAULT 9 _init
|
||||
12: 00000564 0 FUNC GLOBAL DEFAULT 13 _fini
|
||||
|
||||
```
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
use std::process;
|
||||
|
||||
pub fn compare(args: Vec<&str>) {
|
||||
let apple = process::Command::new("/Library/Developer/CommandLineTools/usr/bin/dyldinfo")
|
||||
.args(&args)
|
||||
.output()
|
||||
.expect("run Apple dyldinfo");
|
||||
|
||||
let goblin = process::Command::new("cargo")
|
||||
.arg("run")
|
||||
.arg("--quiet")
|
||||
.arg("--example")
|
||||
.arg("dyldinfo")
|
||||
.arg("--")
|
||||
.args(&args)
|
||||
.output()
|
||||
.expect("run cargo dyldinfo");
|
||||
|
||||
if apple.stdout.as_slice() != goblin.stdout.as_slice() {
|
||||
println!("dyldinfo calls disagree!");
|
||||
println!("Apple dyldinfo {:?} output:\n{}", &args, String::from_utf8_lossy(&apple.stdout));
|
||||
println!("---");
|
||||
println!("cargo dyldinfo {:?} output:\n{}", &args, String::from_utf8_lossy(&goblin.stdout));
|
||||
panic!("Apple dyldinfo and cargo dyldinfo differed (args: {:?})", args);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os="macos")]
|
||||
#[test]
|
||||
fn compare_binds() {
|
||||
compare(vec!["-bind", "/Library/Developer/CommandLineTools/usr/bin/dyldinfo"]);
|
||||
compare(vec!["-bind", "/Library/Developer/CommandLineTools/usr/bin/clang"]);
|
||||
compare(vec!["-bind", "/usr/bin/tmutil"]);
|
||||
}
|
||||
|
||||
#[cfg(target_os="macos")]
|
||||
#[test]
|
||||
fn compare_lazy_binds() {
|
||||
compare(vec!["-lazy_bind", "/Library/Developer/CommandLineTools/usr/bin/dyldinfo"]);
|
||||
compare(vec!["-lazy_bind", "/Library/Developer/CommandLineTools/usr/bin/clang"]);
|
||||
compare(vec!["-lazy_bind", "/usr/bin/tmutil"]);
|
||||
}
|
||||
|
||||
#[cfg(target_os="macos")]
|
||||
#[test]
|
||||
fn compare_combined_options() {
|
||||
compare(vec!["-lazy_bind", "-bind", "/Library/Developer/CommandLineTools/usr/bin/dyldinfo"]);
|
||||
}
|
||||
|
||||
#[cfg(not(target_os="macos"))]
|
||||
#[test]
|
||||
fn skipped_on_this_platform() {
|
||||
// this test does nothing on other platforms
|
||||
}
|
||||
119 third_party/rust/goblin/tests/macho.rs vendored
File diff suppressed because one or more lines are too long

2 third_party/rust/scroll/.cargo-checksum.json vendored

@ -1 +1 @@
{"files":{"CHANGELOG.md":"de2bbf4669561405d402322f4cc2604218d4986b73b75b41708b9505aebcb02c","Cargo.lock":"00b6b526c23ba767d9e13c48d7cdafd1a8ed85177dab02fa155930a6db3e00f9","Cargo.toml":"cbf8802cfc9885445729ff60cc0dfc965ee2f51a5658617c6d90868be714e047","LICENSE":"6e24b7455f0b9afefdf4f3efd59a56ce76a3020c2dc4371937e281fc5e587fd7","README.md":"e4fe9aabcd87d85a5ec93241eeefc0d69aa0d98fbd67da2fe1849e4cbddac3ce","benches/bench.rs":"9ccbec001bf80b5c4ade12b041193d30406a1bd602fb895f31018001ede87c83","examples/data_ctx.rs":"0f33e092623fd4ef08f63c7f0d75af4fe0274dc7789b9840f2c138098fb08ede","src/ctx.rs":"5049720a6814d1ae1cdc3e8c16663571c940d817954579a472ed8f8076f67cae","src/endian.rs":"b552f4de3b5daf507810098eeb07821132821d9f8c6449ffee4f73366afa6387","src/error.rs":"6c5a913a60d5f8e5042622e5c41835a08d18ff3745f6f651c9e74d45cf10ee5b","src/greater.rs":"a87f9324b2536437f727d57924f6d0f86783ebb4215b0383656a6ac5aa790425","src/leb128.rs":"405f6f2629c77524fd61a1fb11724ba234445cabdc38bd2c60b06300565fdd5b","src/lesser.rs":"e9c59e713b4b4926cb80ef318a8342a832de3eb76683836e6f0f39c96bcc11eb","src/lib.rs":"9a8cf2fe904c29bcbf2862b2175028929229330e2ec61123570fe55c664b9e5c","src/pread.rs":"ae78a0a0206da219db455bb1da5c4e99778b0a75bd21d53c89ae07178b4b5ccf","src/pwrite.rs":"1721f49646747bf08103b3fb87e66a9121f8710d73d877aea5cd18fbff4b7ccb","tests/api.rs":"938771c7f1605ff038b993687c0717fcfce4f22912aa2fcf8767f140dcf4bada"},"package":"fda28d4b4830b807a8b43f7b0e6b5df875311b3e7621d84577188c175b6ec1ec"}
|
||||
{"files":{"CHANGELOG.md":"de2bbf4669561405d402322f4cc2604218d4986b73b75b41708b9505aebcb02c","Cargo.lock":"d6a215b7466d37e08551c56949e77be4ee488f989bdef3e507713c729bbda0e6","Cargo.toml":"c240c5768d23ea9611ef57308f08b8ee4372ede6c04f0783dc9fd1710e664c19","LICENSE":"6e24b7455f0b9afefdf4f3efd59a56ce76a3020c2dc4371937e281fc5e587fd7","README.md":"e4fe9aabcd87d85a5ec93241eeefc0d69aa0d98fbd67da2fe1849e4cbddac3ce","benches/bench.rs":"12ae02c383c91f1b0e11e9201eb8a9d44dadfb2b5987e7e71b0ef7c6589af1ca","examples/data_ctx.rs":"79684fc44d499d0b13a173184793837fbaba70d2f74f075e796eb37a1803ce3d","src/ctx.rs":"8f58672c5f3bc09b8f09c76f1d423431cbff786af75f5b39a0cef23b820d48c6","src/endian.rs":"5b717eb5ed0dc2b536779316b020df4e6489c05b13b4fd9b5f5e683aca1b2c28","src/error.rs":"a6a0ec9a6237d23febd608637c0e3926d147511e7983195366bc5a11f12d9093","src/greater.rs":"29d9736f9d35a0f92ca054c7a36878ade0a77b4e8ee27441c34cd81c6bdb68e6","src/leb128.rs":"e343f4e104ca6d8660a3dded30934b83bad4c04d8888ce2cbebfa562f5ac115d","src/lesser.rs":"d3028781977e60d67003512e45666935deab9a03c76a3ba9316a5dbdddf432eb","src/lib.rs":"49d02fa761bb2a771d1857ffd150aa4b6f55b4f03aee1a7a23d8181c76a55fd6","src/pread.rs":"64afdcf2c2785f1f23d065ec5e565d78569086dfd9ece0a3d2553b05aee5df9b","src/pwrite.rs":"05e3129ec666790a61f5b5f894ad863103e213eb798243cfe5f2cbb54d042ba1","tests/api.rs":"1bef345e020a6a4e590350ea4f6069c5836941656379e252bfbdaee6edbbc0de"},"package":"04c565b551bafbef4157586fa379538366e4385d42082f255bfd96e4fe8519da"}
|
||||
155 third_party/rust/scroll/Cargo.lock generated vendored

@ -1,74 +1,91 @@
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "arrayvec"
|
||||
version = "0.4.12"
|
||||
name = "autocfg"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9"
|
||||
dependencies = [
|
||||
"nodrop",
|
||||
]
|
||||
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
|
||||
|
||||
[[package]]
|
||||
name = "byteorder"
|
||||
version = "1.3.2"
|
||||
version = "1.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5"
|
||||
checksum = "ae44d1a3d5a19df61dd0c8beb138458ac2a53a7ac09eba97d55592540004306b"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "0.1.10"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
|
||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
|
||||
[[package]]
|
||||
name = "const_fn"
|
||||
version = "0.4.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "28b9d6de7f49e22cf97ad17fc4036ece69300032f45f78f30b4a4482cdc3f4a6"
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-channel"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-deque"
|
||||
version = "0.7.1"
|
||||
version = "0.8.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b18cd2e169ad86297e6bc0ad9aa679aee9daa4f19e8163860faf7c164e4f5a71"
|
||||
checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"crossbeam-epoch",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-epoch"
|
||||
version = "0.7.2"
|
||||
version = "0.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fedcd6772e37f3da2a9af9bf12ebe046c0dfe657992377b4df982a2b54cd37a9"
|
||||
checksum = "a1aaa739f95311c2c7887a76863f500026092fb1dce0161dab577e559ef3569d"
|
||||
dependencies = [
|
||||
"arrayvec",
|
||||
"cfg-if",
|
||||
"const_fn",
|
||||
"crossbeam-utils",
|
||||
"lazy_static",
|
||||
"memoffset",
|
||||
"scopeguard",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-queue"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c979cd6cfe72335896575c6b5688da489e420d36a27a0b9eb0c73db574b4a4b"
|
||||
dependencies = [
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-utils"
|
||||
version = "0.6.6"
|
||||
version = "0.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6"
|
||||
checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"cfg-if",
|
||||
"lazy_static",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "either"
|
||||
version = "1.5.3"
|
||||
version = "1.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3"
|
||||
checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.1.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "322f4de77956e22ed0e5032c359a0f1273f1f7f0d79bfa3b8ffbc730d7fbcc5c"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
|
|
@ -78,58 +95,54 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
|||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.65"
|
||||
version = "0.2.82"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1a31a0627fdf1f6a39ec0dd577e101440b7db22672c0901fe00a9a6fbb5c24e8"
|
||||
checksum = "89203f3fba0a3795506acaad8ebce3c80c0af93f994d5a1d7a0b1eeb23271929"
|
||||
|
||||
[[package]]
|
||||
name = "memoffset"
|
||||
version = "0.5.1"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ce6075db033bbbb7ee5a0bbd3a3186bbae616f57fb001c485c7ff77955f8177f"
|
||||
checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87"
|
||||
dependencies = [
|
||||
"rustc_version",
|
||||
"autocfg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nodrop"
|
||||
version = "0.1.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
|
||||
|
||||
[[package]]
|
||||
name = "num_cpus"
|
||||
version = "1.10.1"
|
||||
version = "1.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bcef43580c035376c0705c42792c294b66974abbfd2789b511784023f71f3273"
|
||||
checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
|
||||
dependencies = [
|
||||
"hermit-abi",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.6"
|
||||
version = "1.0.24"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c9e470a8dc4aeae2dee2f335e8f533e2d4b347e1434e5671afc49b054592f27"
|
||||
checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
|
||||
dependencies = [
|
||||
"unicode-xid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.2"
|
||||
version = "1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
|
||||
checksum = "991431c3519a3f36861882da93630ce66b52918dcf1b8e2fd66b397fc96f28df"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rayon"
|
||||
version = "1.2.0"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "83a27732a533a1be0a0035a111fe76db89ad312f6f0347004c220c57f209a123"
|
||||
checksum = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"crossbeam-deque",
|
||||
"either",
|
||||
"rayon-core",
|
||||
|
|
@ -137,35 +150,26 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "rayon-core"
|
||||
version = "1.6.0"
|
||||
version = "1.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "98dcf634205083b17d0861252431eb2acbfb698ab7478a2d20de07954f47ec7b"
|
||||
checksum = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a"
|
||||
dependencies = [
|
||||
"crossbeam-channel",
|
||||
"crossbeam-deque",
|
||||
"crossbeam-queue",
|
||||
"crossbeam-utils",
|
||||
"lazy_static",
|
||||
"num_cpus",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc_version"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
|
||||
dependencies = [
|
||||
"semver",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "scopeguard"
|
||||
version = "1.0.0"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d"
|
||||
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
|
||||
|
||||
[[package]]
|
||||
name = "scroll"
|
||||
version = "0.10.2"
|
||||
version = "0.11.0"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"rayon",
|
||||
|
|
@ -174,35 +178,20 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "scroll_derive"
|
||||
version = "0.10.3"
|
||||
version = "0.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6dfde5d1531034db129e95c76ac857e2baecea3443579d493d02224950b0fb6d"
|
||||
checksum = "bdbda6ac5cd1321e724fa9cee216f3a61885889b896f073b8f82322789c5250e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "semver"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
|
||||
dependencies = [
|
||||
"semver-parser",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "semver-parser"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.5"
|
||||
version = "1.0.60"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
|
||||
checksum = "c700597eca8a5a762beb35753ef6b94df201c81cca676604f547495a0d7f0081"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
|
@ -211,6 +200,6 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "unicode-xid"
|
||||
version = "0.2.0"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
|
||||
checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
|
||||
|
|
|
|||
16 third_party/rust/scroll/Cargo.toml vendored

@ -3,17 +3,16 @@
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies
|
||||
# to registry (e.g., crates.io) dependencies.
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
# editing this file be aware that the upstream Cargo.toml
|
||||
# will likely look very different (and much more reasonable)
|
||||
# If you are reading this file be aware that the original Cargo.toml
|
||||
# will likely look very different (and much more reasonable).
|
||||
# See Cargo.toml.orig for the original contents.
|
||||
|
||||
[package]
|
||||
edition = "2018"
|
||||
edition = "2021"
|
||||
name = "scroll"
|
||||
version = "0.10.2"
|
||||
version = "0.11.0"
|
||||
authors = ["m4b <m4b.github.io@gmail.com>", "Ted Mielczarek <ted@mielczarek.org>"]
|
||||
description = "A suite of powerful, extensible, generic, endian-aware Read/Write traits for byte buffers"
|
||||
documentation = "https://docs.rs/scroll"
|
||||
|
|
@ -21,8 +20,9 @@ readme = "README.md"
|
|||
keywords = ["bytes", "endian", "immutable", "pread", "pwrite"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/m4b/scroll"
|
||||
resolver = "2"
|
||||
[dependencies.scroll_derive]
|
||||
version = "0.10"
|
||||
version = "0.11"
|
||||
optional = true
|
||||
[dev-dependencies.byteorder]
|
||||
version = "1"
|
||||
|
|
|
|||
4
third_party/rust/scroll/benches/bench.rs
vendored
@ -11,7 +11,7 @@ fn bench_parallel_cread_with(b: &mut test::Bencher) {
let nums = vec![0usize; 500_000];
b.iter(|| {
let data = black_box(&vec[..]);
nums.par_iter().for_each(| offset | {
nums.par_iter().for_each(|offset| {
let _: u16 = black_box(data.cread_with(*offset, LE));
});
});
@ -123,7 +123,7 @@ fn bench_parallel_pread_with(b: &mut test::Bencher) {
let nums = vec![0usize; 500_000];
b.iter(|| {
let data = black_box(&vec[..]);
nums.par_iter().for_each(| offset | {
nums.par_iter().for_each(|offset| {
let _: Result<u16, _> = black_box(data.pread_with(*offset, LE));
});
});
7
third_party/rust/scroll/examples/data_ctx.rs
vendored
@ -8,11 +8,10 @@ struct Data<'a> {

impl<'a> ctx::TryFromCtx<'a, Endian> for Data<'a> {
type Error = scroll::Error;
fn try_from_ctx (src: &'a [u8], endian: Endian)
-> Result<(Self, usize), Self::Error> {
fn try_from_ctx(src: &'a [u8], endian: Endian) -> Result<(Self, usize), Self::Error> {
let name = src.pread::<&'a str>(0)?;
let id = src.pread_with(name.len()+1, endian)?;
Ok((Data { name: name, id: id }, name.len()+4))
let id = src.pread_with(name.len() + 1, endian)?;
Ok((Data { name: name, id: id }, name.len() + 4))
}
}
271
third_party/rust/scroll/src/ctx.rs
vendored
@ -11,7 +11,7 @@
//! [Cread::cread](../trait.Cread.html#method.cread) and
//! [IOread::ioread](../trait.IOread.html#method.ioread) to read that data type from a data source one
//! of the `*read` traits has been implemented for.
//!
//!
//! Implementations of `TryFromCtx` specify a source (called `This`) and an `Error` type for failed
//! reads. The source defines the kind of container the type can be read from, and defaults to
//! `[u8]` for any type that implements `AsRef<[u8]>`.
@ -94,7 +94,7 @@
//! // its source buffer without having to resort to copying.
//! fn try_from_ctx (src: &'a [u8], ctx: Context)
//! // the `usize` returned here is the amount of bytes read.
//! -> Result<(Self, usize), Self::Error>
//! -> Result<(Self, usize), Self::Error>
//! {
//! // The offset counter; gread and gread_with increment a given counter automatically so we
//! // don't have to manually care.
@ -180,17 +180,17 @@
//! }
//! ```

use core::ptr::copy_nonoverlapping;
use core::mem::transmute;
use core::mem::size_of;
use core::str;
use core::mem::transmute;
use core::ptr::copy_nonoverlapping;
use core::result;
use core::str;

#[cfg(feature = "std")]
use std::ffi::{CStr, CString};

use crate::error;
use crate::endian::Endian;
use crate::error;

/// A trait for measuring how large something is; for a byte sequence, it will be its length.
pub trait MeasureWith<Ctx> {
@ -241,14 +241,17 @@ impl Default for StrCtx {
impl StrCtx {
pub fn len(&self) -> usize {
match *self {
StrCtx::Delimiter(_) |
StrCtx::DelimiterUntil(_, _) => 1,
StrCtx::Delimiter(_) | StrCtx::DelimiterUntil(_, _) => 1,
StrCtx::Length(_) => 0,
}
}

pub fn is_empty(&self) -> bool {
if let StrCtx::Length(_) = *self { true } else { false }
if let StrCtx::Length(_) = *self {
true
} else {
false
}
}
}
@ -304,7 +307,7 @@ pub trait FromCtx<Ctx: Copy = (), This: ?Sized = [u8]> {
/// fn description(&self) -> &str {
/// "ExternalError"
/// }
/// fn cause(&self) -> Option<&error::Error> { None}
/// fn cause(&self) -> Option<&dyn error::Error> { None}
/// }
///
/// impl From<scroll::Error> for ExternalError {
@ -330,7 +333,10 @@ pub trait FromCtx<Ctx: Copy = (), This: ?Sized = [u8]> {
/// let bytes: [u8; 4] = [0xde, 0xad, 0, 0];
/// let foo: Result<Foo, ExternalError> = bytes.pread(0);
/// ```
pub trait TryFromCtx<'a, Ctx: Copy = (), This: ?Sized = [u8]> where Self: 'a + Sized {
pub trait TryFromCtx<'a, Ctx: Copy = (), This: ?Sized = [u8]>
where
Self: 'a + Sized,
{
type Error;
fn try_from_ctx(from: &'a This, ctx: Ctx) -> Result<(Self, usize), Self::Error>;
}
@ -379,6 +385,7 @@ pub trait SizeWith<Ctx = ()> {
fn size_with(ctx: &Ctx) -> usize;
}

#[rustfmt::skip]
macro_rules! signed_to_unsigned {
(i8) => {u8 };
(u8) => {u8 };
@ -395,13 +402,17 @@ macro_rules! signed_to_unsigned {
}

macro_rules! write_into {
($typ:ty, $size:expr, $n:expr, $dst:expr, $endian:expr) => ({
($typ:ty, $size:expr, $n:expr, $dst:expr, $endian:expr) => {{
unsafe {
assert!($dst.len() >= $size);
let bytes = transmute::<$typ, [u8; $size]>(if $endian.is_little() { $n.to_le() } else { $n.to_be() });
let bytes = transmute::<$typ, [u8; $size]>(if $endian.is_little() {
$n.to_le()
} else {
$n.to_be()
});
copy_nonoverlapping((&bytes).as_ptr(), $dst.as_mut_ptr(), $size);
}
});
}};
}

macro_rules! into_ctx_impl {
@ -419,12 +430,18 @@ macro_rules! into_ctx_impl {
(*self).into_ctx(dst, le)
}
}
impl TryIntoCtx<Endian> for $typ where $typ: IntoCtx<Endian> {
impl TryIntoCtx<Endian> for $typ
where
$typ: IntoCtx<Endian>,
{
type Error = error::Error;
#[inline]
fn try_into_ctx(self, dst: &mut [u8], le: Endian) -> error::Result<usize> {
if $size > dst.len () {
Err(error::Error::TooBig{size: $size, len: dst.len()})
if $size > dst.len() {
Err(error::Error::TooBig {
size: $size,
len: dst.len(),
})
} else {
<$typ as IntoCtx<Endian>>::into_ctx(self, dst, le);
Ok($size)
@ -438,7 +455,7 @@ macro_rules! into_ctx_impl {
(*self).try_into_ctx(dst, le)
}
}
}
};
}

macro_rules! from_ctx_impl {
@ -452,25 +469,42 @@ macro_rules! from_ctx_impl {
copy_nonoverlapping(
src.as_ptr(),
&mut data as *mut signed_to_unsigned!($typ) as *mut u8,
$size);
$size,
);
}
(if le.is_little() { data.to_le() } else { data.to_be() }) as $typ
(if le.is_little() {
data.to_le()
} else {
data.to_be()
}) as $typ
}
}

impl<'a> TryFromCtx<'a, Endian> for $typ where $typ: FromCtx<Endian> {
impl<'a> TryFromCtx<'a, Endian> for $typ
where
$typ: FromCtx<Endian>,
{
type Error = error::Error;
#[inline]
fn try_from_ctx(src: &'a [u8], le: Endian) -> result::Result<(Self, usize), Self::Error> {
if $size > src.len () {
Err(error::Error::TooBig{size: $size, len: src.len()})
fn try_from_ctx(
src: &'a [u8],
le: Endian,
) -> result::Result<(Self, usize), Self::Error> {
if $size > src.len() {
Err(error::Error::TooBig {
size: $size,
len: src.len(),
})
} else {
Ok((FromCtx::from_ctx(&src, le), $size))
}
}
}
// as ref
impl<'a, T> FromCtx<Endian, T> for $typ where T: AsRef<[u8]> {
impl<'a, T> FromCtx<Endian, T> for $typ
where
T: AsRef<[u8]>,
{
#[inline]
fn from_ctx(src: &T, le: Endian) -> Self {
let src = src.as_ref();
@ -480,13 +514,22 @@ macro_rules! from_ctx_impl {
copy_nonoverlapping(
src.as_ptr(),
&mut data as *mut signed_to_unsigned!($typ) as *mut u8,
$size);
$size,
);
}
(if le.is_little() { data.to_le() } else { data.to_be() }) as $typ
(if le.is_little() {
data.to_le()
} else {
data.to_be()
}) as $typ
}
}

impl<'a, T> TryFromCtx<'a, Endian, T> for $typ where $typ: FromCtx<Endian, T>, T: AsRef<[u8]> {
impl<'a, T> TryFromCtx<'a, Endian, T> for $typ
where
$typ: FromCtx<Endian, T>,
T: AsRef<[u8]>,
{
type Error = error::Error;
#[inline]
fn try_from_ctx(src: &'a T, le: Endian) -> result::Result<(Self, usize), Self::Error> {
@ -500,11 +543,11 @@ macro_rules! from_ctx_impl {
macro_rules! ctx_impl {
($typ:tt, $size:expr) => {
from_ctx_impl!($typ, $size);
};
};
}

ctx_impl!(u8, 1);
ctx_impl!(i8, 1);
ctx_impl!(u8, 1);
ctx_impl!(i8, 1);
ctx_impl!(u16, 2);
ctx_impl!(i16, 2);
ctx_impl!(u32, 4);
@ -525,30 +568,44 @@ macro_rules! from_ctx_float_impl {
copy_nonoverlapping(
src.as_ptr(),
&mut data as *mut signed_to_unsigned!($typ) as *mut u8,
$size);
transmute(if le.is_little() { data.to_le() } else { data.to_be() })
$size,
);
transmute(if le.is_little() {
data.to_le()
} else {
data.to_be()
})
}
}
}
impl<'a> TryFromCtx<'a, Endian> for $typ where $typ: FromCtx<Endian> {
impl<'a> TryFromCtx<'a, Endian> for $typ
where
$typ: FromCtx<Endian>,
{
type Error = error::Error;
#[inline]
fn try_from_ctx(src: &'a [u8], le: Endian) -> result::Result<(Self, usize), Self::Error> {
if $size > src.len () {
Err(error::Error::TooBig{size: $size, len: src.len()})
fn try_from_ctx(
src: &'a [u8],
le: Endian,
) -> result::Result<(Self, usize), Self::Error> {
if $size > src.len() {
Err(error::Error::TooBig {
size: $size,
len: src.len(),
})
} else {
Ok((FromCtx::from_ctx(src, le), $size))
}
}
}
}
};
}

from_ctx_float_impl!(f32, 4);
from_ctx_float_impl!(f64, 8);

into_ctx_impl!(u8, 1);
into_ctx_impl!(i8, 1);
into_ctx_impl!(u8, 1);
into_ctx_impl!(i8, 1);
into_ctx_impl!(u16, 2);
into_ctx_impl!(i16, 2);
into_ctx_impl!(u32, 4);
@ -564,7 +621,13 @@ macro_rules! into_ctx_float_impl {
#[inline]
fn into_ctx(self, dst: &mut [u8], le: Endian) {
assert!(dst.len() >= $size);
write_into!(signed_to_unsigned!($typ), $size, transmute::<$typ, signed_to_unsigned!($typ)>(self), dst, le);
write_into!(
signed_to_unsigned!($typ),
$size,
transmute::<$typ, signed_to_unsigned!($typ)>(self),
dst,
le
);
}
}
impl<'a> IntoCtx<Endian> for &'a $typ {
@ -573,12 +636,18 @@ macro_rules! into_ctx_float_impl {
(*self).into_ctx(dst, le)
}
}
impl TryIntoCtx<Endian> for $typ where $typ: IntoCtx<Endian> {
impl TryIntoCtx<Endian> for $typ
where
$typ: IntoCtx<Endian>,
{
type Error = error::Error;
#[inline]
fn try_into_ctx(self, dst: &mut [u8], le: Endian) -> error::Result<usize> {
if $size > dst.len () {
Err(error::Error::TooBig{size: $size, len: dst.len()})
if $size > dst.len() {
Err(error::Error::TooBig {
size: $size,
len: dst.len(),
})
} else {
<$typ as IntoCtx<Endian>>::into_ctx(self, dst, le);
Ok($size)
@ -592,7 +661,7 @@ macro_rules! into_ctx_float_impl {
(*self).try_into_ctx(dst, le)
}
}
}
};
}

into_ctx_float_impl!(f32, 4);
@ -608,10 +677,12 @@ impl<'a> TryFromCtx<'a, StrCtx> for &'a str {
StrCtx::Delimiter(delimiter) => src.iter().take_while(|c| **c != delimiter).count(),
StrCtx::DelimiterUntil(delimiter, len) => {
if len > src.len() {
return Err(error::Error::TooBig{size: len, len: src.len()});
return Err(error::Error::TooBig {
size: len,
len: src.len(),
});
};
src
.iter()
src.iter()
.take_while(|c| **c != delimiter)
.take(len)
.count()
@ -619,17 +690,26 @@ impl<'a> TryFromCtx<'a, StrCtx> for &'a str {
};

if len > src.len() {
return Err(error::Error::TooBig{size: len, len: src.len()});
return Err(error::Error::TooBig {
size: len,
len: src.len(),
});
};

match str::from_utf8(&src[..len]) {
Ok(res) => Ok((res, len + ctx.len())),
Err(_) => Err(error::Error::BadInput{size: src.len(), msg: "invalid utf8"})
Err(_) => Err(error::Error::BadInput {
size: src.len(),
msg: "invalid utf8",
}),
}
}
}

impl<'a, T> TryFromCtx<'a, StrCtx, T> for &'a str where T: AsRef<[u8]> {
impl<'a, T> TryFromCtx<'a, StrCtx, T> for &'a str
where
T: AsRef<[u8]>,
{
type Error = error::Error;
#[inline]
fn try_from_ctx(src: &'a T, ctx: StrCtx) -> result::Result<(Self, usize), Self::Error> {
@ -648,7 +728,10 @@ impl<'a> TryIntoCtx for &'a [u8] {
// return Err(error::Error::BadOffset(format!("requested operation has negative casts: src len: {} dst len: {} offset: {}", src_len, dst_len, offset)).into())
// }
if src_len > dst_len {
Err(error::Error::TooBig{ size: self.len(), len: dst.len()})
Err(error::Error::TooBig {
size: self.len(),
len: dst.len(),
})
} else {
unsafe { copy_nonoverlapping(self.as_ptr(), dst.as_mut_ptr(), src_len as usize) };
Ok(self.len())
@ -675,7 +758,7 @@ macro_rules! sizeof_impl {
size_of::<$ty>()
}
}
}
};
}

sizeof_impl!(u8);
@ -690,77 +773,22 @@ sizeof_impl!(u128);
sizeof_impl!(i128);
sizeof_impl!(f32);
sizeof_impl!(f64);
sizeof_impl!(usize);
sizeof_impl!(isize);

impl FromCtx<Endian> for usize {
#[inline]
fn from_ctx(src: &[u8], le: Endian) -> Self {
let size = ::core::mem::size_of::<Self>();
assert!(src.len() >= size);
let mut data: usize = 0;
unsafe {
copy_nonoverlapping(
src.as_ptr(),
&mut data as *mut usize as *mut u8,
size);
if le.is_little() { data.to_le() } else { data.to_be() }
}
}
}

impl<'a> TryFromCtx<'a, Endian> for usize where usize: FromCtx<Endian> {
type Error = error::Error;
#[inline]
fn try_from_ctx(src: &'a [u8], le: Endian) -> result::Result<(Self, usize), Self::Error> {
let size = ::core::mem::size_of::<usize>();
if size > src.len () {
Err(error::Error::TooBig{size, len: src.len()})
} else {
Ok((FromCtx::from_ctx(src, le), size))
}
}
}

impl<'a> TryFromCtx<'a, usize> for &'a[u8] {
impl<'a> TryFromCtx<'a, usize> for &'a [u8] {
type Error = error::Error;
#[inline]
fn try_from_ctx(src: &'a [u8], size: usize) -> result::Result<(Self, usize), Self::Error> {
if size > src.len () {
Err(error::Error::TooBig{size, len: src.len()})
if size > src.len() {
Err(error::Error::TooBig {
size,
len: src.len(),
})
} else {
Ok((&src[..size], size))
}
}
}

impl IntoCtx<Endian> for usize {
#[inline]
fn into_ctx(self, dst: &mut [u8], le: Endian) {
let size = ::core::mem::size_of::<Self>();
assert!(dst.len() >= size);
let mut data = if le.is_little() { self.to_le() } else { self.to_be() };
let data = &mut data as *mut usize as *mut u8;
unsafe {
copy_nonoverlapping(data, dst.as_mut_ptr(), size);
}
}
}

impl TryIntoCtx<Endian> for usize where usize: IntoCtx<Endian> {
type Error = error::Error;
#[inline]
fn try_into_ctx(self, dst: &mut [u8], le: Endian) -> error::Result<usize> {
let size = ::core::mem::size_of::<usize>();
if size > dst.len() {
Err(error::Error::TooBig{size, len: dst.len()})
} else {
<usize as IntoCtx<Endian>>::into_ctx(self, dst, le);
Ok(size)
}
}
}

#[cfg(feature = "std")]
impl<'a> TryFromCtx<'a> for &'a CStr {
type Error = error::Error;
@ -768,14 +796,16 @@ impl<'a> TryFromCtx<'a> for &'a CStr {
fn try_from_ctx(src: &'a [u8], _ctx: ()) -> result::Result<(Self, usize), Self::Error> {
let null_byte = match src.iter().position(|b| *b == 0) {
Some(ix) => ix,
None => return Err(error::Error::BadInput {
size: 0,
msg: "The input doesn't contain a null byte",
})
None => {
return Err(error::Error::BadInput {
size: 0,
msg: "The input doesn't contain a null byte",
})
}
};

let cstr = unsafe { CStr::from_bytes_with_nul_unchecked(&src[..=null_byte]) };
Ok((cstr, null_byte+1))
Ok((cstr, null_byte + 1))
}
}
@ -820,7 +850,6 @@ impl TryIntoCtx for CString {
}
}

// example of marshalling to bytes, let's wait until const is an option
// impl FromCtx for [u8; 10] {
// fn from_ctx(bytes: &[u8], _ctx: Endian) -> Self {
@ -866,5 +895,3 @@ mod tests {
assert_eq!(got, src);
}
}
6
third_party/rust/scroll/src/endian.rs
vendored
@ -28,7 +28,11 @@ impl Default for Endian {
impl From<bool> for Endian {
#[inline]
fn from(little_endian: bool) -> Self {
if little_endian { LE } else { BE }
if little_endian {
LE
} else {
BE
}
}
}
54
third_party/rust/scroll/src/error.rs
vendored
@ -1,19 +1,25 @@
use core::fmt::{self, Display};
use core::result;

#[cfg(feature = "std")]
use std::io;
#[cfg(feature = "std")]
use std::error;
#[cfg(feature = "std")]
use std::io;

#[derive(Debug)]
/// A custom Scroll error
pub enum Error {
/// The type you tried to read was too big
TooBig { size: usize, len: usize },
TooBig {
size: usize,
len: usize,
},
/// The requested offset to read/write at is invalid
BadOffset(usize),
BadInput{ size: usize, msg: &'static str },
BadInput {
size: usize,
msg: &'static str,
},
#[cfg(feature = "std")]
/// A custom Scroll error for reporting messages to clients
Custom(String),
@ -26,20 +32,20 @@ pub enum Error {
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::TooBig{ .. } => { "TooBig" }
Error::BadOffset(_) => { "BadOffset" }
Error::BadInput{ .. } => { "BadInput" }
Error::Custom(_) => { "Custom" }
Error::IO(_) => { "IO" }
Error::TooBig { .. } => "TooBig",
Error::BadOffset(_) => "BadOffset",
Error::BadInput { .. } => "BadInput",
Error::Custom(_) => "Custom",
Error::IO(_) => "IO",
}
}
fn cause(&self) -> Option<&dyn error::Error> {
match *self {
Error::TooBig{ .. } => { None }
Error::BadOffset(_) => { None }
Error::BadInput{ .. } => { None }
Error::Custom(_) => { None }
Error::IO(ref io) => { io.source() }
Error::TooBig { .. } => None,
Error::BadOffset(_) => None,
Error::BadInput { .. } => None,
Error::Custom(_) => None,
Error::IO(ref io) => io.source(),
}
}
}
@ -54,13 +60,23 @@ impl From<io::Error> for Error {
impl Display for Error {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::TooBig{ ref size, ref len } => { write! (fmt, "type is too big ({}) for {}", size, len) },
Error::BadOffset(ref offset) => { write! (fmt, "bad offset {}", offset) },
Error::BadInput{ ref msg, ref size } => { write! (fmt, "bad input {} ({})", msg, size) },
Error::TooBig { ref size, ref len } => {
write!(fmt, "type is too big ({}) for {}", size, len)
}
Error::BadOffset(ref offset) => {
write!(fmt, "bad offset {}", offset)
}
Error::BadInput { ref msg, ref size } => {
write!(fmt, "bad input {} ({})", msg, size)
}
#[cfg(feature = "std")]
Error::Custom(ref msg) => { write! (fmt, "{}", msg) },
Error::Custom(ref msg) => {
write!(fmt, "{}", msg)
}
#[cfg(feature = "std")]
Error::IO(ref err) => { write!(fmt, "{}", err) },
Error::IO(ref err) => {
write!(fmt, "{}", err)
}
}
}
}
27
third_party/rust/scroll/src/greater.rs
vendored
@ -33,8 +33,8 @@ use crate::ctx::{FromCtx, IntoCtx};
/// assert_eq!({bar.foo}, -1);
/// assert_eq!({bar.bar}, 0xdeadbeef);
/// ```
pub trait Cread<Ctx, I = usize> : Index<I> + Index<RangeFrom<I>>
where
pub trait Cread<Ctx, I = usize>: Index<I> + Index<RangeFrom<I>>
where
Ctx: Copy,
{
/// Reads a value from `Self` at `offset` with `ctx`. Cannot fail.
@ -53,7 +53,11 @@ pub trait Cread<Ctx, I = usize> : Index<I> + Index<RangeFrom<I>>
/// assert_eq!(bar, 0xdeadbeef);
/// ```
#[inline]
fn cread_with<N: FromCtx<Ctx, <Self as Index<RangeFrom<I>>>::Output>>(&self, offset: I, ctx: Ctx) -> N {
fn cread_with<N: FromCtx<Ctx, <Self as Index<RangeFrom<I>>>::Output>>(
&self,
offset: I,
ctx: Ctx,
) -> N {
N::from_ctx(&self[offset..], ctx)
}
/// Reads a value implementing `FromCtx` from `Self` at `offset`,
@ -79,7 +83,10 @@ pub trait Cread<Ctx, I = usize> : Index<I> + Index<RangeFrom<I>>
/// assert_eq!(bar, 0xefbe0000);
/// ```
#[inline]
fn cread<N: FromCtx<Ctx, <Self as Index<RangeFrom<I>>>::Output>>(&self, offset: I) -> N where Ctx: Default {
fn cread<N: FromCtx<Ctx, <Self as Index<RangeFrom<I>>>::Output>>(&self, offset: I) -> N
where
Ctx: Default,
{
let ctx = Ctx::default();
N::from_ctx(&self[offset..], ctx)
}
@ -130,7 +137,10 @@ pub trait Cwrite<Ctx: Copy, I = usize>: Index<I> + IndexMut<RangeFrom<I>> {
/// assert_eq!(bytes.cread::<i64>(0), 42);
/// assert_eq!(bytes.cread::<u32>(8), 0xdeadbeef);
#[inline]
fn cwrite<N: IntoCtx<Ctx, <Self as Index<RangeFrom<I>>>::Output>>(&mut self, n: N, offset: I) where Ctx: Default {
fn cwrite<N: IntoCtx<Ctx, <Self as Index<RangeFrom<I>>>::Output>>(&mut self, n: N, offset: I)
where
Ctx: Default,
{
let ctx = Ctx::default();
n.into_ctx(self.index_mut(offset..), ctx)
}
@ -146,7 +156,12 @@ pub trait Cwrite<Ctx: Copy, I = usize>: Index<I> + IndexMut<RangeFrom<I>> {
/// assert_eq!(bytes.cread_with::<i64>(0, LE), 42);
/// assert_eq!(bytes.cread_with::<u32>(8, LE), 0xefbeadde);
#[inline]
fn cwrite_with<N: IntoCtx<Ctx, <Self as Index<RangeFrom<I>>>::Output>>(&mut self, n: N, offset: I, ctx: Ctx) {
fn cwrite_with<N: IntoCtx<Ctx, <Self as Index<RangeFrom<I>>>::Output>>(
&mut self,
n: N,
offset: I,
ctx: Ctx,
) {
n.into_ctx(self.index_mut(offset..), ctx)
}
}
72
third_party/rust/scroll/src/leb128.rs
vendored
@ -1,9 +1,9 @@
use core::u8;
use core::convert::{From, AsRef};
use core::result;
use crate::Pread;
use crate::ctx::TryFromCtx;
use crate::error;
use crate::Pread;
use core::convert::{AsRef, From};
use core::result;
use core::u8;

#[derive(Debug, PartialEq, Copy, Clone)]
/// An unsigned leb128 integer
@ -101,7 +101,10 @@ impl<'a> TryFromCtx<'a> for Uleb128 {
let byte: u8 = src.pread(count)?;

if shift == 63 && byte != 0x00 && byte != 0x01 {
return Err(error::Error::BadInput{ size: src.len(), msg: "failed to parse"})
return Err(error::Error::BadInput {
size: src.len(),
msg: "failed to parse",
});
}

let low_bits = u64::from(mask_continuation(byte));
@ -111,7 +114,13 @@ impl<'a> TryFromCtx<'a> for Uleb128 {
shift += 7;

if byte & CONTINUATION_BIT == 0 {
return Ok((Uleb128 { value: result, count }, count));
return Ok((
Uleb128 {
value: result,
count,
},
count,
));
}
}
}
@ -131,7 +140,10 @@ impl<'a> TryFromCtx<'a> for Sleb128 {
byte = src.gread(offset)?;

if shift == 63 && byte != 0x00 && byte != 0x7f {
return Err(error::Error::BadInput{size: src.len(), msg: "failed to parse"})
return Err(error::Error::BadInput {
size: src.len(),
msg: "failed to parse",
});
}

let low_bits = i64::from(mask_continuation(byte));
@ -148,14 +160,20 @@ impl<'a> TryFromCtx<'a> for Sleb128 {
result |= !0 << shift;
}
let count = *offset - o;
Ok((Sleb128{ value: result, count }, count))
Ok((
Sleb128 {
value: result,
count,
},
count,
))
}
}

#[cfg(test)]
mod tests {
use super::{Uleb128, Sleb128};
use super::super::LE;
use super::{Sleb128, Uleb128};

const CONTINUATION_BIT: u8 = 1 << 7;
//const SIGN_BIT: u8 = 1 << 6;
@ -170,7 +188,7 @@ mod tests {
assert_eq!(130u64, num.into());
assert_eq!(num.size(), 2);

let buf = [0x00,0x01];
let buf = [0x00, 0x01];
let bytes = &buf[..];
let num = bytes.pread::<Uleb128>(0).unwrap();
println!("num: {:?}", &num);
@ -192,23 +210,28 @@ mod tests {
let bytes = &buf[..];
let num = bytes.pread::<Uleb128>(0).expect("Should read Uleb128");
assert_eq!(130u64, num.into());
assert_eq!(386, bytes.pread_with::<u16>(0, LE).expect("Should read number"));
assert_eq!(
386,
bytes.pread_with::<u16>(0, LE).expect("Should read number")
);
}

#[test]
fn uleb128_overflow() {
use super::super::Pread;
let buf = [2u8 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
1];
let buf = [
2u8 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
2 | CONTINUATION_BIT,
1,
];
let bytes = &buf[..];
assert!(bytes.pread::<Uleb128>(0).is_err());
}
@ -217,7 +240,10 @@ mod tests {
fn sleb128() {
use super::super::Pread;
let bytes = [0x7fu8 | CONTINUATION_BIT, 0x7e];
let num: i64 = bytes.pread::<Sleb128>(0).expect("Should read Sleb128").into();
let num: i64 = bytes
.pread::<Sleb128>(0)
.expect("Should read Sleb128")
.into();
assert_eq!(-129, num);
}
}
18
third_party/rust/scroll/src/lesser.rs
vendored
@ -1,5 +1,5 @@
use std::io::{Result, Read, Write};
use crate::ctx::{FromCtx, IntoCtx, SizeWith};
use std::io::{Read, Result, Write};

/// An extension trait to `std::io::Read` streams; mainly targeted at reading primitive types with
/// a known size.
@ -54,8 +54,7 @@ use crate::ctx::{FromCtx, IntoCtx, SizeWith};
/// assert_eq!({foo_.bar}, bar);
/// ```
///
pub trait IOread<Ctx: Copy> : Read
{
pub trait IOread<Ctx: Copy>: Read {
/// Reads the type `N` from `Self`, with a default parsing context.
/// For the primitive numeric types, this will be at the host machine's endianness.
///
@ -73,7 +72,10 @@ pub trait IOread<Ctx: Copy> : Read
/// assert_eq!(0xefbe, beef);
/// ```
#[inline]
fn ioread<N: FromCtx<Ctx> + SizeWith<Ctx>>(&mut self) -> Result<N> where Ctx: Default {
fn ioread<N: FromCtx<Ctx> + SizeWith<Ctx>>(&mut self) -> Result<N>
where
Ctx: Default,
{
let ctx = Ctx::default();
self.ioread_with(ctx)
}
@ -115,8 +117,7 @@ impl<Ctx: Copy, R: Read + ?Sized> IOread<Ctx> for R {}
/// An extension trait to `std::io::Write` streams; this only serializes simple types, like `u8`, `i32`, `f32`, `usize`, etc.
///
/// To write custom types with a single `iowrite::<YourType>` call, implement [`IntoCtx`](ctx/trait.IntoCtx.html) and [`SizeWith`](ctx/trait.SizeWith.html) for `YourType`.
pub trait IOwrite<Ctx: Copy>: Write
{
pub trait IOwrite<Ctx: Copy>: Write {
/// Writes the type `N` into `Self`, with the parsing context `ctx`.
/// **NB**: this will panic if the type you're writing has a size greater than 256. Plans are to have this allocate in larger cases.
///
@ -137,7 +138,10 @@ pub trait IOwrite<Ctx: Copy>: Write
/// assert_eq!(bytes.into_inner(), [0xde, 0xad, 0xbe, 0xef,]);
/// ```
#[inline]
fn iowrite<N: SizeWith<Ctx> + IntoCtx<Ctx>>(&mut self, n: N) -> Result<()> where Ctx: Default {
fn iowrite<N: SizeWith<Ctx> + IntoCtx<Ctx>>(&mut self, n: N) -> Result<()>
where
Ctx: Default,
{
let ctx = Ctx::default();
self.iowrite_with(n, ctx)
}
109
third_party/rust/scroll/src/lib.rs
vendored
@ -61,7 +61,7 @@
//! #[cfg(target_endian = "big")]
//! let bytes: [u8; 4] = [0xef, 0xbe, 0xad, 0xde];
//!
//! // We can read a u32 from the array `bytes` at offset 0.
//! // We can read a u32 from the array `bytes` at offset 0.
//! // This will use a default context for the type being parsed;
//! // in the case of u32 this defines to use the host's endianess.
//! let number = bytes.pread::<u32>(0).unwrap();
@ -71,16 +71,25 @@
//! // Similarly we can also read a single byte at offset 2
//! // This time using type ascription instead of the turbofish (::<>) operator.
//! let byte: u8 = bytes.pread(2).unwrap();
//! #[cfg(target_endian = "little")]
//! assert_eq!(byte, 0xbe);
//! #[cfg(target_endian = "big")]
//! assert_eq!(byte, 0xad);
//!
//!
//! // If required we can also provide a specific parsing context; e.g. if we want to explicitly
//! // define the endianess to use:
//! let be_number: u32 = bytes.pread_with(0, scroll::BE).unwrap();
//! #[cfg(target_endian = "little")]
//! assert_eq!(be_number, 0xdeadbeef);
//! #[cfg(target_endian = "big")]
//! assert_eq!(be_number, 0xefbeadde);
//!
//! let be_number16 = bytes.pread_with::<u16>(1, scroll::BE).unwrap();
//! #[cfg(target_endian = "little")]
//! assert_eq!(be_number16, 0xadbe);
//! #[cfg(target_endian = "big")]
//! assert_eq!(be_number16, 0xbead);
//!
//!
//! // Reads may fail; in this example due to a too large read for the given container.
@ -174,7 +183,7 @@
//! }
//!
//! // To allow for safe zero-copying scroll allows to specify lifetimes explicitly:
//! // The context
//! // The context
//! impl<'a> ctx::TryFromCtx<'a, Context> for Data<'a> {
//! // If necessary you can set a custom error type here, which will be returned by Pread/Pwrite
//! type Error = scroll::Error;
@ -183,7 +192,7 @@
//! // its source buffer without having to resort to copying.
//! fn try_from_ctx (src: &'a [u8], ctx: Context)
//! // the `usize` returned here is the amount of bytes read.
//! -> Result<(Self, usize), Self::Error>
//! -> Result<(Self, usize), Self::Error>
//! {
//! let offset = &mut 0;
//!
@ -214,37 +223,36 @@

#[cfg(feature = "derive")]
#[allow(unused_imports)]
pub use scroll_derive::{Pread, Pwrite, SizeWith, IOread, IOwrite};
pub use scroll_derive::{IOread, IOwrite, Pread, Pwrite, SizeWith};

#[cfg(feature = "std")]
extern crate core;

pub mod ctx;
mod pread;
mod pwrite;
mod greater;
mod error;
mod endian;
mod error;
mod greater;
mod leb128;
#[cfg(feature = "std")]
mod lesser;
mod pread;
mod pwrite;

pub use crate::endian::*;
pub use crate::pread::*;
pub use crate::pwrite::*;
pub use crate::greater::*;
pub use crate::error::*;
pub use crate::greater::*;
pub use crate::leb128::*;
#[cfg(feature = "std")]
pub use crate::lesser::*;
pub use crate::pread::*;
pub use crate::pwrite::*;

#[doc(hidden)]
pub mod export {
pub use ::core::result;
pub use ::core::mem;
pub use ::core::result;
}

#[allow(unused)]
macro_rules! doc_comment {
($x:expr) => {
@ -260,7 +268,7 @@ doc_comment!(include_str!("../README.md"));
#[cfg(test)]
mod tests {
#[allow(overflowing_literals)]
use super::{LE};
use super::LE;

#[test]
fn test_measure_with_bytes() {
@ -283,7 +291,7 @@ mod tests {
($write:ident, $read:ident, $deadbeef:expr) => {
#[test]
fn $write() {
use super::{Pwrite, Pread, BE};
use super::{Pread, Pwrite, BE};
let mut bytes: [u8; 8] = [0, 0, 0, 0, 0, 0, 0, 0];
let b = &mut bytes[..];
b.pwrite_with::<$read>($deadbeef, 0, LE).unwrap();
@ -291,7 +299,7 @@ mod tests {
b.pwrite_with::<$read>($deadbeef, 0, BE).unwrap();
assert_eq!(b.pread_with::<$read>(0, BE).unwrap(), $deadbeef);
}
}
};
}

pwrite_test!(pwrite_and_pread_roundtrip_u16, u16, 0xbeef);
@ -303,7 +311,7 @@ mod tests {

#[test]
fn pread_with_be() {
use super::{Pread};
use super::Pread;
let bytes: [u8; 2] = [0x7e, 0xef];
let b = &bytes[..];
let byte: u16 = b.pread_with(0, super::BE).unwrap();
@ -315,7 +323,7 @@ mod tests {

#[test]
fn pread() {
use super::{Pread};
use super::Pread;
let bytes: [u8; 2] = [0x7e, 0xef];
let b = &bytes[..];
let byte: u16 = b.pread(0).unwrap();
@ -327,11 +335,11 @@ mod tests {

#[test]
fn pread_slice() {
use super::{Pread};
use super::ctx::StrCtx;
use super::Pread;
let bytes: [u8; 2] = [0x7e, 0xef];
let b = &bytes[..];
let iserr: Result<&str, _> = b.pread_with(0, StrCtx::Length(3));
let iserr: Result<&str, _> = b.pread_with(0, StrCtx::Length(3));
assert!(iserr.is_err());
// let bytes2: &[u8] = b.pread_with(0, 2).unwrap();
// assert_eq!(bytes2.len(), bytes[..].len());
@ -342,11 +350,11 @@ mod tests {

#[test]
fn pread_str() {
use super::Pread;
use super::ctx::*;
use super::Pread;
let bytes: [u8; 2] = [0x2e, 0x0];
let b = &bytes[..];
let s: &str = b.pread(0).unwrap();
let s: &str = b.pread(0).unwrap();
println!("str: {}", s);
assert_eq!(s.len(), bytes[..].len() - 1);
let bytes: &[u8] = b"hello, world!\0some_other_things";
@ -365,8 +373,8 @@ mod tests {

#[test]
fn pread_str_weird() {
use super::Pread;
use super::ctx::*;
use super::Pread;
let bytes: &[u8] = b"";
let hello_world = bytes.pread_with::<&str>(0, StrCtx::Delimiter(NULL));
println!("1 {:?}", &hello_world);
@ -375,27 +383,33 @@ mod tests {
println!("2 {:?}", &error);
assert!(error.is_err());
let bytes: &[u8] = b"\0";
let null = bytes.pread::<&str>(0).unwrap();
let null = bytes.pread::<&str>(0).unwrap();
println!("3 {:?}", &null);
assert_eq!(null.len(), 0);
}

#[test]
fn pwrite_str_and_bytes() {
use super::{Pread, Pwrite};
use super::ctx::*;
use super::{Pread, Pwrite};
let astring: &str = "lol hello_world lal\0ala imabytes";
let mut buffer = [0u8; 33];
buffer.pwrite(astring, 0).unwrap();
{
let hello_world = buffer.pread_with::<&str>(4, StrCtx::Delimiter(SPACE)).unwrap();
let hello_world = buffer
.pread_with::<&str>(4, StrCtx::Delimiter(SPACE))
.unwrap();
assert_eq!(hello_world, "hello_world");
}
let bytes: &[u8] = b"more\0bytes";
buffer.pwrite(bytes, 0).unwrap();
let more = bytes.pread_with::<&str>(0, StrCtx::Delimiter(NULL)).unwrap();
let more = bytes
.pread_with::<&str>(0, StrCtx::Delimiter(NULL))
.unwrap();
assert_eq!(more, "more");
let bytes = bytes.pread_with::<&str>(more.len() + 1, StrCtx::Delimiter(NULL)).unwrap();
let bytes = bytes
.pread_with::<&str>(more.len() + 1, StrCtx::Delimiter(NULL))
.unwrap();
assert_eq!(bytes, "bytes");
}

@ -415,14 +429,16 @@ mod tests {
fn description(&self) -> &str {
"ExternalError"
}
fn cause(&self) -> Option<&dyn error::Error> { None}
fn cause(&self) -> Option<&dyn error::Error> {
None
}
}

impl From<super::Error> for ExternalError {
fn from(err: super::Error) -> Self {
//use super::Error::*;
match err {
_ => ExternalError{},
_ => ExternalError {},
}
}
}
@ -434,7 +450,9 @@ mod tests {
type Error = ExternalError;
fn try_into_ctx(self, this: &mut [u8], le: super::Endian) -> Result<usize, Self::Error> {
use super::Pwrite;
if this.len() < 2 { return Err((ExternalError {}).into()) }
if this.len() < 2 {
return Err((ExternalError {}).into());
}
this.pwrite_with(self.0, 0, le)?;
Ok(2)
}
@ -444,7 +462,9 @@ mod tests {
type Error = ExternalError;
fn try_from_ctx(this: &'a [u8], le: super::Endian) -> Result<(Self, usize), Self::Error> {
use super::Pread;
if this.len() > 2 { return Err((ExternalError {}).into()) }
if this.len() > 2 {
return Err((ExternalError {}).into());
}
let n = this.pread_with(0, le)?;
Ok((Foo(n), 2))
}
@ -452,7 +472,7 @@ mod tests {

#[test]
fn pread_with_iter_bytes() {
use super::{Pread};
use super::Pread;
let mut bytes_to: [u8; 8] = [0, 0, 0, 0, 0, 0, 0, 0];
let bytes_from: [u8; 8] = [1, 2, 3, 4, 5, 6, 7, 8];
let bytes_to = &mut bytes_to[..];
@ -481,7 +501,7 @@ mod tests {
assert_eq!(deadbeef, $deadbeef as $typ);
assert_eq!(offset, ::std::mem::size_of::<$typ>());
}
}
};
}

g_test!(simple_gread_u16, 0xe0f, u16);
@ -510,7 +530,7 @@ mod tests {
($read:ident, $val:expr, $typ:ty) => {
#[test]
fn $read() {
use super::{LE, BE, Pread, Pwrite};
use super::{Pread, Pwrite, BE, LE};
let mut buffer = [0u8; 16];
let offset = &mut 0;
buffer.gwrite_with($val.clone(), offset, LE).unwrap();
@ -546,7 +566,7 @@ mod tests {
// useful for ferreting out problems with impls
#[test]
fn gread_with_iter_bytes() {
use super::{Pread};
use super::Pread;
let mut bytes_to: [u8; 8] = [0, 0, 0, 0, 0, 0, 0, 0];
let bytes_from: [u8; 8] = [1, 2, 3, 4, 5, 6, 7, 8];
let bytes_to = &mut bytes_to[..];
@ -561,7 +581,7 @@ mod tests {

#[test]
fn gread_inout() {
use super::{Pread};
use super::Pread;
let mut bytes_to: [u8; 8] = [0, 0, 0, 0, 0, 0, 0, 0];
let bytes_from: [u8; 8] = [1, 2, 3, 4, 5, 6, 7, 8];
let bytes = &bytes_from[..];
@ -573,7 +593,7 @@ mod tests {

#[test]
fn gread_with_byte() {
use super::{Pread};
use super::Pread;
let bytes: [u8; 1] = [0x7f];
let b = &bytes[..];
let offset = &mut 0;
@ -584,8 +604,8 @@ mod tests {

#[test]
fn gread_slice() {
use super::{Pread};
use super::ctx::{StrCtx};
use super::ctx::StrCtx;
use super::Pread;
let bytes: [u8; 2] = [0x7e, 0xef];
let b = &bytes[..];
let offset = &mut 0;
@ -595,12 +615,15 @@ mod tests {
let astring: [u8; 3] = [0x45, 042, 0x44];
let string = astring.gread_with::<&str>(offset, StrCtx::Length(2));
match &string {
&Ok(_) => {},
&Err(ref err) => {println!("{}", &err); panic!();}
&Ok(_) => {}
&Err(ref err) => {
println!("{}", &err);
panic!();
}
}
assert_eq!(string.unwrap(), "E*");
*offset = 0;
let bytes2: &[u8] = b.gread_with(offset, 2).unwrap();
let bytes2: &[u8] = b.gread_with(offset, 2).unwrap();
assert_eq!(*offset, 2);
assert_eq!(bytes2.len(), bytes[..].len());
for i in 0..bytes2.len() {
117
third_party/rust/scroll/src/pread.rs
vendored
@ -1,10 +1,9 @@
use core::result;
use core::ops::{Index, RangeFrom};

use crate::ctx::{TryFromCtx, MeasureWith};
use crate::ctx::TryFromCtx;
use crate::error;

/// A very generic, contextual pread interface in Rust.
/// A very generic, contextual pread interface in Rust.
///
/// Like [Pwrite](trait.Pwrite.html) — but for reading!
///
@ -39,14 +38,10 @@ use crate::error;
/// the resulting type. scroll defaults to `&[u8]` here.
///
/// Unless you need to implement your own data store — that is either can't convert to `&[u8]` or
/// have a data that is not `&[u8]` — you will probably want to implement
/// have a data that does not expose a `&[u8]` — you will probably want to implement
/// [TryFromCtx](ctx/trait.TryFromCtx.html) on your Rust types to be extracted.
///
pub trait Pread<Ctx, E> : Index<usize> + Index<RangeFrom<usize>> + MeasureWith<Ctx>
where
Ctx: Copy,
E: From<error::Error>,
{
pub trait Pread<Ctx: Copy, E> {
#[inline]
/// Reads a value from `self` at `offset` with a default `Ctx`. For the primitive numeric values, this will read at the machine's endianness.
/// # Example
@ -54,9 +49,16 @@ pub trait Pread<Ctx, E> : Index<usize> + Index<RangeFrom<usize>> + MeasureWith<C
/// use scroll::Pread;
/// let bytes = [0x7fu8; 0x01];
/// let byte = bytes.pread::<u8>(0).unwrap();
fn pread<'a, N: TryFromCtx<'a, Ctx, <Self as Index<RangeFrom<usize>>>::Output, Error = E>>(&'a self, offset: usize) -> result::Result<N, E> where <Self as Index<RangeFrom<usize>>>::Output: 'a, Ctx: Default {
fn pread<'a, N: TryFromCtx<'a, Ctx, Self, Error = E>>(
&'a self,
offset: usize,
) -> result::Result<N, E>
where
Ctx: Default,
{
self.pread_with(offset, Ctx::default())
}

#[inline]
/// Reads a value from `self` at `offset` with the given `ctx`
/// # Example
@ -65,13 +67,15 @@ pub trait Pread<Ctx, E> : Index<usize> + Index<RangeFrom<usize>> + MeasureWith<C
/// let bytes: [u8; 2] = [0xde, 0xad];
/// let dead: u16 = bytes.pread_with(0, scroll::BE).unwrap();
/// assert_eq!(dead, 0xdeadu16);
fn pread_with<'a, N: TryFromCtx<'a, Ctx, <Self as Index<RangeFrom<usize>>>::Output, Error = E>>(&'a self, offset: usize, ctx: Ctx) -> result::Result<N, E> where <Self as Index<RangeFrom<usize>>>::Output: 'a {
let len = self.measure_with(&ctx);
if offset >= len {
return Err(error::Error::BadOffset(offset).into())
}
N::try_from_ctx(&self[offset..], ctx).and_then(|(n, _)| Ok(n))
fn pread_with<'a, N: TryFromCtx<'a, Ctx, Self, Error = E>>(
&'a self,
offset: usize,
ctx: Ctx,
) -> result::Result<N, E> {
let mut ignored = offset;
self.gread_with(&mut ignored, ctx)
}

#[inline]
/// Reads a value from `self` at `offset` with a default `Ctx`. For the primitive numeric values, this will read at the machine's endianness. Updates the offset
/// # Example
@ -81,10 +85,17 @@ pub trait Pread<Ctx, E> : Index<usize> + Index<RangeFrom<usize>> + MeasureWith<C
/// let bytes = [0x7fu8; 0x01];
/// let byte = bytes.gread::<u8>(offset).unwrap();
/// assert_eq!(*offset, 1);
fn gread<'a, N: TryFromCtx<'a, Ctx, <Self as Index<RangeFrom<usize>>>::Output, Error = E>>(&'a self, offset: &mut usize) -> result::Result<N, E> where Ctx: Default, <Self as Index<RangeFrom<usize>>>::Output: 'a {
fn gread<'a, N: TryFromCtx<'a, Ctx, Self, Error = E>>(
&'a self,
offset: &mut usize,
) -> result::Result<N, E>
where
Ctx: Default,
{
let ctx = Ctx::default();
self.gread_with(offset, ctx)
}

/// Reads a value from `self` at `offset` with the given `ctx`, and updates the offset.
/// # Example
/// ```rust
@ -94,28 +105,13 @@ pub trait Pread<Ctx, E> : Index<usize> + Index<RangeFrom<usize>> + MeasureWith<C
/// let dead: u16 = bytes.gread_with(offset, scroll::BE).unwrap();
/// assert_eq!(dead, 0xdeadu16);
/// assert_eq!(*offset, 2);
#[inline]
fn gread_with<'a, N: TryFromCtx<'a, Ctx, <Self as Index<RangeFrom<usize>>>::Output, Error = E>>
(&'a self, offset: &mut usize, ctx: Ctx) ->
result::Result<N, E>
where <Self as Index<RangeFrom<usize>>>::Output: 'a
{
let o = *offset;
// self.pread_with(o, ctx).and_then(|(n, size)| {
// *offset += size;
// Ok(n)
// })
let len = self.measure_with(&ctx);
if o >= len {
return Err(error::Error::BadOffset(o).into())
}
N::try_from_ctx(&self[o..], ctx).and_then(|(n, size)| {
*offset += size;
Ok(n)
})
}
fn gread_with<'a, N: TryFromCtx<'a, Ctx, Self, Error = E>>(
&'a self,
offset: &mut usize,
ctx: Ctx,
) -> result::Result<N, E>;

/// Trys to write `inout.len()` `N`s into `inout` from `Self` starting at `offset`, using the default context for `N`, and updates the offset.
/// Tries to write `inout.len()` `N`s into `inout` from `Self` starting at `offset`, using the default context for `N`, and updates the offset.
/// # Example
/// ```rust
/// use scroll::Pread;
@ -126,11 +122,13 @@ pub trait Pread<Ctx, E> : Index<usize> + Index<RangeFrom<usize>> + MeasureWith<C
/// assert_eq!(&bytes, &bytes_from);
/// assert_eq!(*offset, 2);
#[inline]
fn gread_inout<'a, N>(&'a self, offset: &mut usize, inout: &mut [N]) -> result::Result<(), E>
where
N: TryFromCtx<'a, Ctx, <Self as Index<RangeFrom<usize>>>::Output, Error = E>,
Ctx: Default,
<Self as Index<RangeFrom<usize>>>::Output: 'a
fn gread_inout<'a, N: TryFromCtx<'a, Ctx, Self, Error = E>>(
&'a self,
offset: &mut usize,
inout: &mut [N],
) -> result::Result<(), E>
where
Ctx: Default,
{
for i in inout.iter_mut() {
*i = self.gread(offset)?;
@ -138,7 +136,7 @@ pub trait Pread<Ctx, E> : Index<usize> + Index<RangeFrom<usize>> + MeasureWith<C
Ok(())
}

/// Trys to write `inout.len()` `N`s into `inout` from `Self` starting at `offset`, using the context `ctx`
/// Tries to write `inout.len()` `N`s into `inout` from `Self` starting at `offset`, using the context `ctx`
/// # Example
/// ```rust
/// use scroll::{ctx, LE, Pread};
@ -149,11 +147,12 @@ pub trait Pread<Ctx, E> : Index<usize> + Index<RangeFrom<usize>> + MeasureWith<C
/// assert_eq!(&bytes, &bytes_from);
/// assert_eq!(*offset, 2);
#[inline]
fn gread_inout_with<'a, N>(&'a self, offset: &mut usize, inout: &mut [N], ctx: Ctx) -> result::Result<(), E>
where
N: TryFromCtx<'a, Ctx, <Self as Index<RangeFrom<usize>>>::Output, Error = E>,
<Self as Index<RangeFrom<usize>>>::Output: 'a
{
fn gread_inout_with<'a, N: TryFromCtx<'a, Ctx, Self, Error = E>>(
&'a self,
offset: &mut usize,
inout: &mut [N],
ctx: Ctx,
) -> result::Result<(), E> {
for i in inout.iter_mut() {
*i = self.gread_with(offset, ctx)?;
}
@ -161,7 +160,19 @@ pub trait Pread<Ctx, E> : Index<usize> + Index<RangeFrom<usize>> + MeasureWith<C
}
}

impl<Ctx: Copy,
E: From<error::Error>,
R: ?Sized + Index<usize> + Index<RangeFrom<usize>> + MeasureWith<Ctx>>
Pread<Ctx, E> for R {}
impl<Ctx: Copy, E: From<error::Error>> Pread<Ctx, E> for [u8] {
fn gread_with<'a, N: TryFromCtx<'a, Ctx, Self, Error = E>>(
&'a self,
offset: &mut usize,
ctx: Ctx,
) -> result::Result<N, E> {
let start = *offset;
if start >= self.len() {
return Err(error::Error::BadOffset(start).into());
}
N::try_from_ctx(&self[start..], ctx).map(|(n, size)| {
*offset += size;
n
})
}
}
98
third_party/rust/scroll/src/pwrite.rs
vendored
|
|
@ -1,7 +1,6 @@
|
|||
use core::result;
|
||||
use core::ops::{Index, IndexMut, RangeFrom};
|
||||
|
||||
use crate::ctx::{TryIntoCtx, MeasureWith};
|
||||
use crate::ctx::TryIntoCtx;
|
||||
use crate::error;
|
||||
|
||||
/// A very generic, contextual pwrite interface in Rust.
|
||||
|
|
@ -21,17 +20,22 @@ use crate::error;
|
|||
/// should read the documentation of `Pread` first.
|
||||
///
|
||||
/// Unless you need to implement your own data store — that is either can't convert to `&[u8]` or
|
||||
/// have a data that is not `&[u8]` — you will probably want to implement
|
||||
/// have a data that does not expose a `&mut [u8]` — you will probably want to implement
|
||||
/// [TryIntoCtx](ctx/trait.TryIntoCtx.html) on your Rust types to be written.
|
||||
///
|
||||
pub trait Pwrite<Ctx, E> : Index<usize> + IndexMut<RangeFrom<usize>> + MeasureWith<Ctx>
|
||||
where
|
||||
Ctx: Copy,
|
||||
E: From<error::Error>,
|
||||
{
|
    fn pwrite<N: TryIntoCtx<Ctx, <Self as Index<RangeFrom<usize>>>::Output, Error = E>>(&mut self, n: N, offset: usize) -> result::Result<usize, E> where Ctx: Default {
///
pub trait Pwrite<Ctx: Copy, E> {
    #[inline]
    fn pwrite<N: TryIntoCtx<Ctx, Self, Error = E>>(
        &mut self,
        n: N,
        offset: usize,
    ) -> result::Result<usize, E>
    where
        Ctx: Default,
    {
        self.pwrite_with(n, offset, Ctx::default())
    }

    /// Write `N` at offset `I` with context `Ctx`
    /// # Example
    /// ```

@@ -39,36 +43,54 @@ pub trait Pwrite<Ctx, E> : Index<usize> + IndexMut<RangeFrom<usize>> + MeasureWi

    /// let mut bytes: [u8; 8] = [0, 0, 0, 0, 0, 0, 0, 0];
    /// bytes.pwrite_with::<u32>(0xbeefbeef, 0, LE).unwrap();
    /// assert_eq!(bytes.pread_with::<u32>(0, LE).unwrap(), 0xbeefbeef);
    fn pwrite_with<N: TryIntoCtx<Ctx, <Self as Index<RangeFrom<usize>>>::Output, Error = E>>(&mut self, n: N, offset: usize, ctx: Ctx) -> result::Result<usize, E> {
        let len = self.measure_with(&ctx);
        if offset >= len {
            return Err(error::Error::BadOffset(offset).into())
    fn pwrite_with<N: TryIntoCtx<Ctx, Self, Error = E>>(
        &mut self,
        n: N,
        offset: usize,
        ctx: Ctx,
    ) -> result::Result<usize, E>;

    /// Write `n` into `self` at `offset`, with a default `Ctx`. Updates the offset.
    #[inline]
    fn gwrite<N: TryIntoCtx<Ctx, Self, Error = E>>(
        &mut self,
        n: N,
        offset: &mut usize,
    ) -> result::Result<usize, E>
    where
        Ctx: Default,
    {
        let ctx = Ctx::default();
        self.gwrite_with(n, offset, ctx)
    }

    /// Write `n` into `self` at `offset`, with the `ctx`. Updates the offset.
    #[inline]
    fn gwrite_with<N: TryIntoCtx<Ctx, Self, Error = E>>(
        &mut self,
        n: N,
        offset: &mut usize,
        ctx: Ctx,
    ) -> result::Result<usize, E> {
        let o = *offset;
        self.pwrite_with(n, o, ctx).map(|size| {
            *offset += size;
            size
        })
    }
}

impl<Ctx: Copy, E: From<error::Error>> Pwrite<Ctx, E> for [u8] {
    fn pwrite_with<N: TryIntoCtx<Ctx, Self, Error = E>>(
        &mut self,
        n: N,
        offset: usize,
        ctx: Ctx,
    ) -> result::Result<usize, E> {
        if offset >= self.len() {
            return Err(error::Error::BadOffset(offset).into());
        }
        let dst = &mut self[offset..];
        n.try_into_ctx(dst, ctx)
    }
    /// Write `n` into `self` at `offset`, with a default `Ctx`. Updates the offset.
    #[inline]
    fn gwrite<N: TryIntoCtx<Ctx, <Self as Index<RangeFrom<usize>>>::Output, Error = E>>(&mut self, n: N, offset: &mut usize) -> result::Result<usize, E> where
        Ctx: Default {
        let ctx = Ctx::default();
        self.gwrite_with(n, offset, ctx)
    }
    /// Write `n` into `self` at `offset`, with the `ctx`. Updates the offset.
    #[inline]
    fn gwrite_with<N: TryIntoCtx<Ctx, <Self as Index<RangeFrom<usize>>>::Output, Error = E>>(&mut self, n: N, offset: &mut usize, ctx: Ctx) -> result::Result<usize, E> {
        let o = *offset;
        match self.pwrite_with(n, o, ctx) {
            Ok(size) => {
                *offset += size;
                Ok(size)
            },
            err => err
        }
    }
}

impl<Ctx: Copy,
    E: From<error::Error>,
    R: ?Sized + Index<usize> + IndexMut<RangeFrom<usize>> + MeasureWith<Ctx>>
    Pwrite<Ctx, E> for R {}
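To show how the reworked `Pwrite` surface reads at a call site, here is a minimal usage sketch; it is not part of the patch and simply assumes the scroll 0.11 API vendored above:

use scroll::{Pread, Pwrite, LE};

fn main() {
    let mut bytes = [0u8; 8];
    // pwrite_with writes a value at a fixed offset with an explicit endianness context.
    bytes.pwrite_with::<u32>(0xdead_beef, 0, LE).unwrap();
    // gwrite_with does the same, but advances the offset for sequential writes.
    let mut offset = 4;
    bytes.gwrite_with::<u32>(0xcafe_babe, &mut offset, LE).unwrap();
    assert_eq!(bytes.pread_with::<u32>(0, LE).unwrap(), 0xdead_beef);
    assert_eq!(offset, 8);
}

The `where Ctx: Default` bound on the plain `pwrite`/`gwrite` variants is what lets callers omit the context argument and fall back to `Ctx::default()`.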
127 third_party/rust/scroll/tests/api.rs vendored
@@ -2,26 +2,26 @@

// guard against potential undefined behaviour when borrowing from
// packed structs. See https://github.com/rust-lang/rust/issues/46043
#![deny(safe_packed_borrows)]
#![deny(unaligned_references)]

// #[macro_use] extern crate scroll_derive;

use std::ops::{Deref, DerefMut};
use scroll::{ctx, Result, Cread, Pread};
use scroll::ctx::SizeWith;
use scroll::{ctx, Cread, Pread, Result};
use std::ops::{Deref, DerefMut};

#[derive(Default)]
pub struct Section<'a> {
    pub sectname: [u8; 16],
    pub segname: [u8; 16],
    pub addr: u64,
    pub size: u64,
    pub offset: u32,
    pub align: u32,
    pub reloff: u32,
    pub nreloc: u32,
    pub flags: u32,
    pub data: &'a [u8],
    pub sectname: [u8; 16],
    pub segname: [u8; 16],
    pub addr: u64,
    pub size: u64,
    pub offset: u32,
    pub align: u32,
    pub reloff: u32,
    pub nreloc: u32,
    pub flags: u32,
    pub data: &'a [u8],
}

impl<'a> Section<'a> {

@@ -44,42 +44,45 @@ impl<'a> ctx::SizeWith for Section<'a> {

//#[derive(Debug, Clone, Copy, Pread, Pwrite)]
#[derive(Debug, Clone, Copy)]
pub struct Section32 {
    pub sectname: [u8; 16],
    pub segname: [u8; 16],
    pub addr: u32,
    pub size: u32,
    pub offset: u32,
    pub align: u32,
    pub reloff: u32,
    pub nreloc: u32,
    pub flags: u32,
    pub sectname: [u8; 16],
    pub segname: [u8; 16],
    pub addr: u32,
    pub size: u32,
    pub offset: u32,
    pub align: u32,
    pub reloff: u32,
    pub nreloc: u32,
    pub flags: u32,
    pub reserved1: u32,
    pub reserved2: u32,
}

impl<'a> ctx::TryFromCtx<'a, ()> for Section<'a> {
    type Error = scroll::Error;
    fn try_from_ctx(_bytes: &'a [u8], _ctx: ()) -> ::std::result::Result<(Self, usize), Self::Error> {
    fn try_from_ctx(
        _bytes: &'a [u8],
        _ctx: (),
    ) -> ::std::result::Result<(Self, usize), Self::Error> {
        let section = Section::default();
        Ok((section, ::std::mem::size_of::<Section>()))
    }
}

pub struct Segment<'a> {
    pub cmd: u32,
    pub cmdsize: u32,
    pub segname: [u8; 16],
    pub vmaddr: u64,
    pub vmsize: u64,
    pub fileoff: u64,
    pub cmd: u32,
    pub cmdsize: u32,
    pub segname: [u8; 16],
    pub vmaddr: u64,
    pub vmsize: u64,
    pub fileoff: u64,
    pub filesize: u64,
    pub maxprot: u32,
    pub maxprot: u32,
    pub initprot: u32,
    pub nsects: u32,
    pub flags: u32,
    pub data: &'a [u8],
    offset: usize,
    raw_data: &'a [u8],
    pub nsects: u32,
    pub flags: u32,
    pub data: &'a [u8],
    offset: usize,
    raw_data: &'a [u8],
}

impl<'a> Segment<'a> {
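The `Section` fixture above implements `ctx::TryFromCtx` by hand so the type can be read back through `Pread`. As a more complete, self-contained sketch of that pattern (the `Header` type and its layout are illustrative, not taken from the vendored tests):

use scroll::{ctx, Endian, Pread, LE};

#[derive(Debug, PartialEq)]
struct Header {
    magic: u32,
    count: u16,
}

// TryFromCtx is what Pread::pread_with/gread_with dispatch to for custom types.
impl<'a> ctx::TryFromCtx<'a, Endian> for Header {
    type Error = scroll::Error;
    fn try_from_ctx(src: &'a [u8], endian: Endian) -> Result<(Self, usize), Self::Error> {
        let offset = &mut 0;
        let magic = src.gread_with::<u32>(offset, endian)?;
        let count = src.gread_with::<u16>(offset, endian)?;
        // Return the parsed value plus how many bytes were consumed.
        Ok((Header { magic, count }, *offset))
    }
}

fn main() {
    let bytes = [0xef, 0xbe, 0xad, 0xde, 0x02, 0x00];
    let header: Header = bytes.pread_with(0, LE).unwrap();
    assert_eq!(header, Header { magic: 0xdead_beef, count: 2 });
}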
@@ -176,7 +179,10 @@ struct Foo {

impl scroll::ctx::FromCtx<scroll::Endian> for Foo {
    fn from_ctx(bytes: &[u8], ctx: scroll::Endian) -> Self {
        Foo { foo: bytes.cread_with::<i64>(0, ctx), bar: bytes.cread_with::<u32>(8, ctx) }
        Foo {
            foo: bytes.cread_with::<i64>(0, ctx),
            bar: bytes.cread_with::<u32>(8, ctx),
        }
    }
}

@@ -188,9 +194,11 @@ impl scroll::ctx::SizeWith<scroll::Endian> for Foo {

#[test]
fn ioread_api() {
    use scroll::{IOread, LE};
    use std::io::Cursor;
    use scroll::{LE, IOread};
    let bytes_ = [0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0xef,0xbe,0x00,0x00,];
    let bytes_ = [
        0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xef, 0xbe, 0x00, 0x00,
    ];
    let mut bytes = Cursor::new(bytes_);
    let foo = bytes.ioread_with::<i64>(LE).unwrap();
    let bar = bytes.ioread_with::<u32>(LE).unwrap();

@@ -200,8 +208,8 @@ fn ioread_api() {

    assert!(error.is_err());
    let mut bytes = Cursor::new(bytes_);
    let foo_ = bytes.ioread_with::<Foo>(LE).unwrap();
    assert_eq!({foo_.foo}, foo);
    assert_eq!({foo_.bar}, bar);
    assert_eq!({ foo_.foo }, foo);
    assert_eq!({ foo_.bar }, bar);
}

#[repr(packed)]
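The `ioread_api` test above drives the same conversions through `std::io` streams. A small companion sketch (again illustrative, assuming scroll's `IOread`/`IOwrite` traits) of writing values into an in-memory cursor and reading them back:

use scroll::{IOread, IOwrite, LE};
use std::io::Cursor;

fn main() {
    // Cursor<Vec<u8>> implements Write, so IOwrite's methods are available on it.
    let mut cursor = Cursor::new(vec![0u8; 12]);
    cursor.iowrite_with(0xdead_beef_u64, LE).unwrap();
    cursor.iowrite_with(0xcafe_u32, LE).unwrap();

    // Rewind and read the values back through IOread.
    cursor.set_position(0);
    let first: u64 = cursor.ioread_with(LE).unwrap();
    let second: u32 = cursor.ioread_with(LE).unwrap();
    assert_eq!(first, 0xdead_beef);
    assert_eq!(second, 0xcafe);
}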
@@ -212,14 +220,19 @@ struct Bar {

impl scroll::ctx::FromCtx<scroll::Endian> for Bar {
    fn from_ctx(bytes: &[u8], ctx: scroll::Endian) -> Self {
        Bar { foo: bytes.cread_with(0, ctx), bar: bytes.cread_with(4, ctx) }
        Bar {
            foo: bytes.cread_with(0, ctx),
            bar: bytes.cread_with(4, ctx),
        }
    }
}

#[test]
fn cread_api() {
    use scroll::{LE, Cread};
    let bytes = [0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0xef,0xbe,0x00,0x00,];
    use scroll::{Cread, LE};
    let bytes = [
        0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xef, 0xbe, 0x00, 0x00,
    ];
    let foo = bytes.cread_with::<u64>(0, LE);
    let bar = bytes.cread_with::<u32>(8, LE);
    assert_eq!(foo, 1);

@@ -228,25 +241,27 @@ fn cread_api() {

#[test]
fn cread_api_customtype() {
    use scroll::{LE, Cread};
    let bytes = [0xff, 0xff, 0xff, 0xff, 0xef,0xbe,0xad,0xde,];
    use scroll::{Cread, LE};
    let bytes = [0xff, 0xff, 0xff, 0xff, 0xef, 0xbe, 0xad, 0xde];
    let bar = &bytes[..].cread_with::<Bar>(0, LE);
    assert_eq!({bar.foo}, -1);
    assert_eq!({bar.bar}, 0xdeadbeef);
    assert_eq!({ bar.foo }, -1);
    assert_eq!({ bar.bar }, 0xdeadbeef);
}

#[test]
#[should_panic]
fn cread_api_badindex() {
    use scroll::Cread;
    let bytes = [0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0xef,0xbe,0xad,0xde,];
    let bytes = [
        0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xef, 0xbe, 0xad, 0xde,
    ];
    let _foo = bytes.cread::<i64>(1_000_000);
}

#[test]
fn cwrite_api() {
    use scroll::Cwrite;
    use scroll::Cread;
    use scroll::Cwrite;
    let mut bytes = [0x0; 16];
    bytes.cwrite::<u64>(42, 0);
    bytes.cwrite::<u32>(0xdeadbeef, 8);

@@ -264,12 +279,14 @@ impl scroll::ctx::IntoCtx<scroll::Endian> for Bar {

#[test]
fn cwrite_api_customtype() {
    use scroll::{Cwrite, Cread};
    let bar = Bar { foo: -1, bar: 0xdeadbeef };
    use scroll::{Cread, Cwrite};
    let bar = Bar {
        foo: -1,
        bar: 0xdeadbeef,
    };
    let mut bytes = [0x0; 16];
    &bytes[..].cwrite::<Bar>(bar, 0);
    let _ = &bytes[..].cwrite::<Bar>(bar, 0);
    let bar = bytes.cread::<Bar>(0);
    assert_eq!({bar.foo}, -1);
    assert_eq!({bar.bar}, 0xdeadbeef);
    assert_eq!({ bar.foo }, -1);
    assert_eq!({ bar.bar }, 0xdeadbeef);
}
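The hunk header above names the test file's `IntoCtx` impl for `Bar`, which is what `cwrite`/`cwrite_with` dispatch to when writing a custom type. A hedged sketch of that shape, using an illustrative `Pair` type rather than the real `Bar` impl:

use scroll::{ctx, Cread, Cwrite, Endian, LE};

#[derive(Debug, Clone, Copy)]
struct Pair {
    foo: i32,
    bar: u32,
}

// Cwrite::cwrite_with needs an IntoCtx impl describing how to lay the type out in bytes.
impl ctx::IntoCtx<Endian> for Pair {
    fn into_ctx(self, bytes: &mut [u8], ctx: Endian) {
        bytes.cwrite_with(self.foo, 0, ctx);
        bytes.cwrite_with(self.bar, 4, ctx);
    }
}

fn main() {
    let pair = Pair { foo: -1, bar: 0xdead_beef };
    let mut bytes = [0u8; 8];
    bytes.cwrite_with(pair, 0, LE);
    assert_eq!(bytes.cread_with::<i32>(0, LE), -1);
    assert_eq!(bytes.cread_with::<u32>(4, LE), 0xdead_beef);
}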
third_party/rust/scroll_derive/.cargo-checksum.json vendored

@@ -1 +1 @@
{"files":{"Cargo.lock":"a2485772b983e64ef7c68ccc3565777766a33f5f4b4981ea3f3dab526be152b9","Cargo.toml":"65e5b6aa01b53fcedace46f5de0038f82de578e34f9b3e799ec700d35aecacd3","LICENSE":"afb11426e09da40a1ae4f8fa17ddcc6b6a52d14df04c29bc5bcd06eb8730624d","README.md":"f89c7768454b0d2b9db816afe05db3a4cea1125bef87f08ed3eefd65e9e2b180","examples/main.rs":"dc2f7f6ba45dcba4e6fe7c8ac100df0c101cb091ddd34f7dfc6599e58cc9e9a7","src/lib.rs":"a9cabe3c0b373f352357745b817f188ab841e9445056014dee9cc83c4d167483","tests/tests.rs":"ab4e6955d2e3bedd003b53b8f3423a6fc48424e37218ca989bf7e0debdf3c3f9"},"package":"aaaae8f38bb311444cfb7f1979af0bc9240d95795f75f9ceddf6a59b79ceffa0"}
{"files":{"Cargo.lock":"1c75a1216efdf3c7b4726138eed43fadf9325750c8d01d2358b4cf4ad742f8c1","Cargo.toml":"36cd9b38e6f4ed4bd807208da809d3803af3b134264fd5c90a29a6f064b3b4d9","LICENSE":"afb11426e09da40a1ae4f8fa17ddcc6b6a52d14df04c29bc5bcd06eb8730624d","README.md":"f89c7768454b0d2b9db816afe05db3a4cea1125bef87f08ed3eefd65e9e2b180","examples/main.rs":"dc2f7f6ba45dcba4e6fe7c8ac100df0c101cb091ddd34f7dfc6599e58cc9e9a7","src/lib.rs":"a9cabe3c0b373f352357745b817f188ab841e9445056014dee9cc83c4d167483","tests/tests.rs":"ab4e6955d2e3bedd003b53b8f3423a6fc48424e37218ca989bf7e0debdf3c3f9"},"package":"bdbda6ac5cd1321e724fa9cee216f3a61885889b896f073b8f82322789c5250e"}
10 third_party/rust/scroll_derive/Cargo.lock generated vendored
@@ -11,9 +11,9 @@ dependencies = [

[[package]]
name = "quote"
version = "1.0.7"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
checksum = "991431c3519a3f36861882da93630ce66b52918dcf1b8e2fd66b397fc96f28df"
dependencies = [
"proc-macro2",
]

@@ -26,7 +26,7 @@ checksum = "fda28d4b4830b807a8b43f7b0e6b5df875311b3e7621d84577188c175b6ec1ec"

[[package]]
name = "scroll_derive"
version = "0.10.5"
version = "0.11.0"
dependencies = [
"proc-macro2",
"quote",

@@ -36,9 +36,9 @@ dependencies = [

[[package]]
name = "syn"
version = "1.0.42"
version = "1.0.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c51d92969d209b54a98397e1b91c8ae82d8c87a7bb87df0b29aa2ad81454228"
checksum = "c700597eca8a5a762beb35753ef6b94df201c81cca676604f547495a0d7f0081"
dependencies = [
"proc-macro2",
"quote",
11 third_party/rust/scroll_derive/Cargo.toml vendored
@@ -3,17 +3,16 @@

# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2018"
name = "scroll_derive"
version = "0.10.5"
version = "0.11.0"
authors = ["m4b <m4b.github.io@gmail.com>", "Ted Mielczarek <ted@mielczarek.org>", "Systemcluster <me@systemcluster.me>"]
description = "A macros 1.1 derive implementation for Pread and Pwrite traits from the scroll crate"
documentation = "https://docs.rs/scroll_derive"
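As the description field says, this crate supplies the derive macros for scroll's traits. A minimal sketch of consuming them (the `Data` struct is illustrative, assuming the scroll_derive 0.11 derives; `Pwrite`, `IOread`, `IOwrite`, and `SizeWith` are derivable the same way):

use scroll::{Pread, LE};
use scroll_derive::Pread;

// The derive generates the TryFromCtx impl that tests/api.rs writes by hand above.
#[derive(Debug, PartialEq, Pread)]
#[repr(C)]
struct Data {
    id: u32,
    timestamp: u64,
}

fn main() {
    let bytes: [u8; 12] = [42, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0];
    let data: Data = bytes.pread_with(0, LE).unwrap();
    assert_eq!(data, Data { id: 42, timestamp: 1 });
}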