update rust crate
This commit is contained in:
198
Cargo.lock
generated
198
Cargo.lock
generated
@@ -22,9 +22,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "aho-corasick"
|
name = "aho-corasick"
|
||||||
version = "1.1.3"
|
version = "1.1.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
|
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"memchr",
|
"memchr",
|
||||||
]
|
]
|
||||||
@@ -78,10 +78,16 @@ dependencies = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "autocfg"
|
name = "anyhow"
|
||||||
version = "1.2.0"
|
version = "1.0.82"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80"
|
checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "autocfg"
|
||||||
|
version = "1.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "basic_script"
|
name = "basic_script"
|
||||||
@@ -110,32 +116,32 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bindgen"
|
name = "bindgen"
|
||||||
version = "0.68.1"
|
version = "0.69.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "726e4313eb6ec35d2730258ad4e15b547ee75d6afaa1361a922e78e59b7d8078"
|
checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags",
|
||||||
"cexpr",
|
"cexpr",
|
||||||
"clang-sys",
|
"clang-sys",
|
||||||
|
"itertools",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"lazycell",
|
"lazycell",
|
||||||
"log",
|
"log",
|
||||||
"peeking_take_while",
|
|
||||||
"prettyplease",
|
"prettyplease",
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"regex",
|
"regex",
|
||||||
"rustc-hash",
|
"rustc-hash",
|
||||||
"shlex",
|
"shlex",
|
||||||
"syn 2.0.57",
|
"syn 2.0.52",
|
||||||
"which",
|
"which",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bitflags"
|
name = "bitflags"
|
||||||
version = "2.5.0"
|
version = "2.4.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
|
checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "byteorder"
|
name = "byteorder"
|
||||||
@@ -143,6 +149,18 @@ version = "1.5.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
|
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cab"
|
||||||
|
version = "0.4.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ae6b4de23c7d39c0631fd3cc952d87951c86c75a13812d7247cb7a896e7b3551"
|
||||||
|
dependencies = [
|
||||||
|
"byteorder",
|
||||||
|
"flate2",
|
||||||
|
"lzxd",
|
||||||
|
"time",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cexpr"
|
name = "cexpr"
|
||||||
version = "0.6.0"
|
version = "0.6.0"
|
||||||
@@ -171,9 +189,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap"
|
name = "clap"
|
||||||
version = "4.5.4"
|
version = "4.5.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0"
|
checksum = "b230ab84b0ffdf890d5a10abdbc8b83ae1c4918275daea1ab8801f71536b2651"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"clap_builder",
|
"clap_builder",
|
||||||
"clap_derive",
|
"clap_derive",
|
||||||
@@ -193,14 +211,14 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap_derive"
|
name = "clap_derive"
|
||||||
version = "4.5.4"
|
version = "4.5.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64"
|
checksum = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"heck",
|
"heck",
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn 2.0.57",
|
"syn 2.0.52",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -215,6 +233,15 @@ version = "1.0.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
|
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cpp_demangle"
|
||||||
|
version = "0.4.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7e8227005286ec39567949b33df9896bcadfa6051bccca2488129f108ca23119"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "crc32fast"
|
name = "crc32fast"
|
||||||
version = "1.4.0"
|
version = "1.4.0"
|
||||||
@@ -301,9 +328,12 @@ name = "dwarf_import"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"binaryninja",
|
"binaryninja",
|
||||||
|
"cpp_demangle",
|
||||||
"dwarfreader",
|
"dwarfreader",
|
||||||
"gimli",
|
"gimli",
|
||||||
|
"iset",
|
||||||
"log",
|
"log",
|
||||||
|
"regex",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -354,6 +384,12 @@ dependencies = [
|
|||||||
"windows-sys",
|
"windows-sys",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fallible-iterator"
|
||||||
|
version = "0.2.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "fallible-iterator"
|
name = "fallible-iterator"
|
||||||
version = "0.3.0"
|
version = "0.3.0"
|
||||||
@@ -379,11 +415,11 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "gimli"
|
name = "gimli"
|
||||||
version = "0.28.1"
|
version = "0.31.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"
|
checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"fallible-iterator",
|
"fallible-iterator 0.3.0",
|
||||||
"indexmap",
|
"indexmap",
|
||||||
"stable_deref_trait",
|
"stable_deref_trait",
|
||||||
]
|
]
|
||||||
@@ -405,9 +441,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "heck"
|
name = "heck"
|
||||||
version = "0.5.0"
|
version = "0.4.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
|
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "hlil_lifter"
|
name = "hlil_lifter"
|
||||||
@@ -443,10 +479,25 @@ dependencies = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "itoa"
|
name = "iset"
|
||||||
version = "1.0.11"
|
version = "0.2.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
|
checksum = "d0716a0d7080cb7b20b9426276315e6ff5ed537bd920af47417b16de07f9ac76"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "itertools"
|
||||||
|
version = "0.11.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
|
||||||
|
dependencies = [
|
||||||
|
"either",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "itoa"
|
||||||
|
version = "1.0.10"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lazy_static"
|
name = "lazy_static"
|
||||||
@@ -489,10 +540,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
|
checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "memchr"
|
name = "lzxd"
|
||||||
version = "2.7.2"
|
version = "0.1.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
|
checksum = "784462f20dddd9dfdb45de963fa4ad4a288cb10a7889ac5d2c34fb6481c6b213"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "memchr"
|
||||||
|
version = "2.7.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "memmap2"
|
name = "memmap2"
|
||||||
@@ -599,7 +656,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn 2.0.57",
|
"syn 2.0.52",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -632,16 +689,47 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "peeking_take_while"
|
name = "pdb"
|
||||||
version = "0.1.2"
|
version = "0.8.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
dependencies = [
|
||||||
checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
|
"fallible-iterator 0.2.0",
|
||||||
|
"scroll",
|
||||||
|
"uuid",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pdb-import-plugin"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"binaryninja",
|
||||||
|
"cab",
|
||||||
|
"home",
|
||||||
|
"itertools",
|
||||||
|
"log",
|
||||||
|
"pdb",
|
||||||
|
"regex",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pdb-import-plugin-static"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"binaryninja",
|
||||||
|
"cab",
|
||||||
|
"home",
|
||||||
|
"itertools",
|
||||||
|
"log",
|
||||||
|
"pdb",
|
||||||
|
"regex",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pin-project-lite"
|
name = "pin-project-lite"
|
||||||
version = "0.2.14"
|
version = "0.2.13"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02"
|
checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "powerfmt"
|
name = "powerfmt"
|
||||||
@@ -651,12 +739,12 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "prettyplease"
|
name = "prettyplease"
|
||||||
version = "0.2.17"
|
version = "0.2.16"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8d3928fb5db768cb86f891ff014f0144589297e3c6a1aba6ed7cecfdace270c7"
|
checksum = "a41cf62165e97c7f814d2221421dbb9afcbcdb0a88068e5ea206e19951c2cbb5"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"syn 2.0.57",
|
"syn 2.0.52",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -688,9 +776,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rayon"
|
name = "rayon"
|
||||||
version = "1.10.0"
|
version = "1.9.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
|
checksum = "e4963ed1bc86e4f3ee217022bd855b297cef07fb9eac5dfa1f788b220b49b3bd"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"either",
|
"either",
|
||||||
"rayon-core",
|
"rayon-core",
|
||||||
@@ -708,9 +796,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex"
|
name = "regex"
|
||||||
version = "1.10.4"
|
version = "1.10.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c"
|
checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aho-corasick",
|
"aho-corasick",
|
||||||
"memchr",
|
"memchr",
|
||||||
@@ -731,9 +819,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex-syntax"
|
name = "regex-syntax"
|
||||||
version = "0.8.3"
|
version = "0.8.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56"
|
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rustc-hash"
|
name = "rustc-hash"
|
||||||
@@ -743,9 +831,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rustix"
|
name = "rustix"
|
||||||
version = "0.38.32"
|
version = "0.38.31"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89"
|
checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags",
|
||||||
"errno",
|
"errno",
|
||||||
@@ -782,7 +870,7 @@ checksum = "1db149f81d46d2deba7cd3c50772474707729550221e69588478ebf9ada425ae"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn 2.0.57",
|
"syn 2.0.52",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -802,7 +890,7 @@ checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn 2.0.57",
|
"syn 2.0.52",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -819,7 +907,7 @@ checksum = "0eb01866308440fc64d6c44d9e86c5cc17adfe33c4d6eed55da9145044d0ffc1"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn 2.0.57",
|
"syn 2.0.52",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -853,9 +941,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn"
|
name = "syn"
|
||||||
version = "2.0.57"
|
version = "2.0.52"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "11a6ae1e52eb25aab8f3fb9fca13be982a373b8f1157ca14b897a825ba4a2d35"
|
checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
@@ -886,7 +974,7 @@ checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn 2.0.57",
|
"syn 2.0.52",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -940,7 +1028,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn 2.0.57",
|
"syn 2.0.52",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -976,9 +1064,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "uuid"
|
name = "uuid"
|
||||||
version = "1.8.0"
|
version = "1.7.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0"
|
checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "version_check"
|
name = "version_check"
|
||||||
@@ -1081,5 +1169,5 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn 2.0.57",
|
"syn 2.0.52",
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ name = "binaryninja"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
authors = ["Ryan Snyder <ryan@vector35.com>", "Kyle Martin <kyle@vector35.com>"]
|
authors = ["Ryan Snyder <ryan@vector35.com>", "Kyle Martin <kyle@vector35.com>"]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
rust-version = "1.77.0"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
noexports = []
|
noexports = []
|
||||||
@@ -14,6 +15,10 @@ libc = "0.2"
|
|||||||
rayon = { version = "1.8", optional = true }
|
rayon = { version = "1.8", optional = true }
|
||||||
binaryninjacore-sys = { path = "binaryninjacore-sys" }
|
binaryninjacore-sys = { path = "binaryninjacore-sys" }
|
||||||
|
|
||||||
|
[patch.crates-io]
|
||||||
|
# Patched pdb crate to implement some extra structures
|
||||||
|
pdb = { path = "./examples/pdb-ng/pdb-0.8.0-patched" }
|
||||||
|
|
||||||
[workspace]
|
[workspace]
|
||||||
members = [
|
members = [
|
||||||
"examples/basic_script",
|
"examples/basic_script",
|
||||||
@@ -28,6 +33,8 @@ members = [
|
|||||||
"examples/mlil_lifter",
|
"examples/mlil_lifter",
|
||||||
"examples/hlil_visitor",
|
"examples/hlil_visitor",
|
||||||
"examples/hlil_lifter",
|
"examples/hlil_lifter",
|
||||||
|
"examples/pdb-ng",
|
||||||
|
"examples/pdb-ng/demo",
|
||||||
"examples/template"
|
"examples/template"
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|||||||
@@ -5,4 +5,4 @@ authors = ["Ryan Snyder <ryan@vector35.com>", "Kyle Martin <kyle@vector35.com>"]
|
|||||||
build = "build.rs"
|
build = "build.rs"
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
bindgen = "^0.68"
|
bindgen = "^0.69.2"
|
||||||
|
|||||||
@@ -83,9 +83,9 @@ fn main() {
|
|||||||
for line in BufReader::new(file).lines() {
|
for line in BufReader::new(file).lines() {
|
||||||
let line = line.unwrap();
|
let line = line.unwrap();
|
||||||
if let Some(version) = line.strip_prefix(current_line) {
|
if let Some(version) = line.strip_prefix(current_line) {
|
||||||
current_version = version.to_owned();
|
version.clone_into(&mut current_version);
|
||||||
} else if let Some(version) = line.strip_prefix(minimum_line) {
|
} else if let Some(version) = line.strip_prefix(minimum_line) {
|
||||||
minimum_version = version.to_owned();
|
version.clone_into(&mut minimum_version);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -96,6 +96,9 @@ fn main() {
|
|||||||
.clang_arg("c++")
|
.clang_arg("c++")
|
||||||
.size_t_is_usize(true)
|
.size_t_is_usize(true)
|
||||||
.generate_comments(false)
|
.generate_comments(false)
|
||||||
|
.derive_default(true)
|
||||||
|
.generate_comments(true)
|
||||||
|
.clang_arg("-fparse-all-comments")
|
||||||
.allowlist_function("BN.*")
|
.allowlist_function("BN.*")
|
||||||
.allowlist_var("BN_CURRENT_CORE_ABI_VERSION")
|
.allowlist_var("BN_CURRENT_CORE_ABI_VERSION")
|
||||||
.allowlist_var("BN_MINIMUM_CORE_ABI_VERSION")
|
.allowlist_var("BN_MINIMUM_CORE_ABI_VERSION")
|
||||||
|
|||||||
@@ -26,9 +26,7 @@ fn main() {
|
|||||||
.get_data(),
|
.get_data(),
|
||||||
addr,
|
addr,
|
||||||
) {
|
) {
|
||||||
tokens
|
tokens.iter().for_each(|token| print!("{}", token.text()));
|
||||||
.iter()
|
|
||||||
.for_each(|token| print!("{}", token.text().as_str()));
|
|
||||||
println!();
|
println!();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ fn decompile_to_c(view: &BinaryView, func: &Function) {
|
|||||||
let last = view.get_next_linear_disassembly_lines(&mut cursor.duplicate());
|
let last = view.get_next_linear_disassembly_lines(&mut cursor.duplicate());
|
||||||
let first = view.get_previous_linear_disassembly_lines(&mut cursor);
|
let first = view.get_previous_linear_disassembly_lines(&mut cursor);
|
||||||
|
|
||||||
let lines = first.into_iter().chain(last.into_iter());
|
let lines = first.into_iter().chain(&last);
|
||||||
|
|
||||||
for line in lines {
|
for line in lines {
|
||||||
println!("{}", line.as_ref());
|
println!("{}", line.as_ref());
|
||||||
|
|||||||
@@ -8,6 +8,6 @@ crate-type = ["cdylib"]
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
binaryninja = {path="../../../"}
|
binaryninja = {path="../../../"}
|
||||||
gimli = "^0.28"
|
gimli = "^0.31"
|
||||||
log = "^0.4"
|
log = "^0.4"
|
||||||
object = { version = "0.32.1", features = ["write"] }
|
object = { version = "0.32.1", features = ["write"] }
|
||||||
|
|||||||
@@ -522,13 +522,11 @@ fn export_data_vars(
|
|||||||
|
|
||||||
for data_variable in &bv.data_variables() {
|
for data_variable in &bv.data_variables() {
|
||||||
if let Some(symbol) = data_variable.symbol(bv) {
|
if let Some(symbol) = data_variable.symbol(bv) {
|
||||||
if symbol.sym_type() == SymbolType::External {
|
if let SymbolType::External
|
||||||
continue;
|
| SymbolType::Function
|
||||||
} else if symbol.sym_type() == SymbolType::Function {
|
| SymbolType::ImportedFunction
|
||||||
continue;
|
| SymbolType::LibraryFunction = symbol.sym_type()
|
||||||
} else if symbol.sym_type() == SymbolType::ImportedFunction {
|
{
|
||||||
continue;
|
|
||||||
} else if symbol.sym_type() == SymbolType::LibraryFunction {
|
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -551,7 +549,7 @@ fn export_data_vars(
|
|||||||
dwarf.unit.get_mut(var_die_uid).set(
|
dwarf.unit.get_mut(var_die_uid).set(
|
||||||
gimli::DW_AT_name,
|
gimli::DW_AT_name,
|
||||||
AttributeValue::String(
|
AttributeValue::String(
|
||||||
format!("data_{:x}", data_variable.address)
|
format!("data_{:x}", data_variable.address())
|
||||||
.as_bytes()
|
.as_bytes()
|
||||||
.to_vec(),
|
.to_vec(),
|
||||||
),
|
),
|
||||||
@@ -559,15 +557,15 @@ fn export_data_vars(
|
|||||||
}
|
}
|
||||||
|
|
||||||
let mut variable_location = Expression::new();
|
let mut variable_location = Expression::new();
|
||||||
variable_location.op_addr(Address::Constant(data_variable.address));
|
variable_location.op_addr(Address::Constant(data_variable.address()));
|
||||||
dwarf.unit.get_mut(var_die_uid).set(
|
dwarf.unit.get_mut(var_die_uid).set(
|
||||||
gimli::DW_AT_location,
|
gimli::DW_AT_location,
|
||||||
AttributeValue::Exprloc(variable_location),
|
AttributeValue::Exprloc(variable_location),
|
||||||
);
|
);
|
||||||
|
|
||||||
if let Some(target_die_uid) = export_type(
|
if let Some(target_die_uid) = export_type(
|
||||||
format!("{}", data_variable.t.contents),
|
format!("{}", data_variable.t()),
|
||||||
data_variable.t.contents.as_ref(),
|
data_variable.t(),
|
||||||
bv,
|
bv,
|
||||||
defined_types,
|
defined_types,
|
||||||
dwarf,
|
dwarf,
|
||||||
@@ -739,7 +737,7 @@ fn export_dwarf(bv: &BinaryView) {
|
|||||||
} else {
|
} else {
|
||||||
BnString::new("Unknown")
|
BnString::new("Unknown")
|
||||||
};
|
};
|
||||||
let responses = present_form(&arch_name);
|
let responses = present_form(arch_name.as_str());
|
||||||
|
|
||||||
let encoding = gimli::Encoding {
|
let encoding = gimli::Encoding {
|
||||||
format: gimli::Format::Dwarf32,
|
format: gimli::Format::Dwarf32,
|
||||||
|
|||||||
@@ -10,5 +10,8 @@ crate-type = ["cdylib"]
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
dwarfreader = { path = "../shared/" }
|
dwarfreader = { path = "../shared/" }
|
||||||
binaryninja = { path = "../../../" }
|
binaryninja = { path = "../../../" }
|
||||||
gimli = "0.28"
|
gimli = "0.31"
|
||||||
log = "0.4.20"
|
log = "0.4.20"
|
||||||
|
iset = "0.2.2"
|
||||||
|
cpp_demangle = "0.4.3"
|
||||||
|
regex = "1"
|
||||||
|
|||||||
@@ -13,7 +13,7 @@
|
|||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
use crate::dwarfdebuginfo::{DebugInfoBuilder, DebugInfoBuilderContext, TypeUID};
|
use crate::dwarfdebuginfo::{DebugInfoBuilder, DebugInfoBuilderContext, TypeUID};
|
||||||
use crate::helpers::*;
|
use crate::{helpers::*, ReaderType};
|
||||||
use crate::types::get_type;
|
use crate::types::get_type;
|
||||||
|
|
||||||
use binaryninja::{
|
use binaryninja::{
|
||||||
@@ -21,9 +21,11 @@ use binaryninja::{
|
|||||||
types::{EnumerationBuilder, FunctionParameter, ReferenceType, Type, TypeBuilder},
|
types::{EnumerationBuilder, FunctionParameter, ReferenceType, Type, TypeBuilder},
|
||||||
};
|
};
|
||||||
|
|
||||||
use gimli::{constants, AttributeValue::Encoding, DebuggingInformationEntry, Reader, Unit};
|
use gimli::Dwarf;
|
||||||
|
use gimli::{constants, AttributeValue::Encoding, DebuggingInformationEntry, Unit};
|
||||||
|
|
||||||
pub(crate) fn handle_base_type<R: Reader<Offset = usize>>(
|
pub(crate) fn handle_base_type<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
||||||
@@ -37,7 +39,7 @@ pub(crate) fn handle_base_type<R: Reader<Offset = usize>>(
|
|||||||
// *Some indication of signedness?
|
// *Some indication of signedness?
|
||||||
// * = Optional
|
// * = Optional
|
||||||
|
|
||||||
let name = debug_info_builder_context.get_name(unit, entry)?;
|
let name = debug_info_builder_context.get_name(dwarf, unit, entry)?;
|
||||||
let size = get_size_as_usize(entry)?;
|
let size = get_size_as_usize(entry)?;
|
||||||
match entry.attr_value(constants::DW_AT_encoding) {
|
match entry.attr_value(constants::DW_AT_encoding) {
|
||||||
Ok(Some(Encoding(encoding))) => {
|
Ok(Some(Encoding(encoding))) => {
|
||||||
@@ -69,7 +71,8 @@ pub(crate) fn handle_base_type<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn handle_enum<R: Reader<Offset = usize>>(
|
pub(crate) fn handle_enum<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
||||||
@@ -107,17 +110,18 @@ pub(crate) fn handle_enum<R: Reader<Offset = usize>>(
|
|||||||
let mut children = tree.root().unwrap().children();
|
let mut children = tree.root().unwrap().children();
|
||||||
while let Ok(Some(child)) = children.next() {
|
while let Ok(Some(child)) = children.next() {
|
||||||
if child.entry().tag() == constants::DW_TAG_enumerator {
|
if child.entry().tag() == constants::DW_TAG_enumerator {
|
||||||
let name = debug_info_builder_context.get_name(unit, child.entry())?;
|
let name = debug_info_builder_context.get_name(dwarf, unit, child.entry())?;
|
||||||
let value = get_attr_as_u64(
|
let attr = &child
|
||||||
&child
|
|
||||||
.entry()
|
.entry()
|
||||||
.attr(constants::DW_AT_const_value)
|
.attr(constants::DW_AT_const_value)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.unwrap(),
|
|
||||||
)
|
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
if let Some(value) = get_attr_as_u64(attr) {
|
||||||
enumeration_builder.insert(name, value);
|
enumeration_builder.insert(name, value);
|
||||||
|
} else {
|
||||||
|
log::error!("Unhandled enum member value type - please report this");
|
||||||
|
return None;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -131,7 +135,7 @@ pub(crate) fn handle_enum<R: Reader<Offset = usize>>(
|
|||||||
pub(crate) fn handle_typedef(
|
pub(crate) fn handle_typedef(
|
||||||
debug_info_builder: &mut DebugInfoBuilder,
|
debug_info_builder: &mut DebugInfoBuilder,
|
||||||
entry_type: Option<TypeUID>,
|
entry_type: Option<TypeUID>,
|
||||||
typedef_name: String,
|
typedef_name: &String,
|
||||||
) -> (Option<Ref<Type>>, bool) {
|
) -> (Option<Ref<Type>>, bool) {
|
||||||
// All base types have:
|
// All base types have:
|
||||||
// DW_AT_name
|
// DW_AT_name
|
||||||
@@ -140,12 +144,8 @@ pub(crate) fn handle_typedef(
|
|||||||
|
|
||||||
// This will fail in the case where we have a typedef to a type that doesn't exist (failed to parse, incomplete, etc)
|
// This will fail in the case where we have a typedef to a type that doesn't exist (failed to parse, incomplete, etc)
|
||||||
if let Some(entry_type_offset) = entry_type {
|
if let Some(entry_type_offset) = entry_type {
|
||||||
if let Some((name, t)) = debug_info_builder.get_type(entry_type_offset) {
|
if let Some(t) = debug_info_builder.get_type(entry_type_offset) {
|
||||||
if typedef_name == name {
|
return (Some(t.get_type()), typedef_name != t.get_name());
|
||||||
return (Some(t), false);
|
|
||||||
} else if typedef_name != name {
|
|
||||||
return (Some(t), true);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -153,7 +153,7 @@ pub(crate) fn handle_typedef(
|
|||||||
(None, false)
|
(None, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn handle_pointer<R: Reader<Offset = usize>>(
|
pub(crate) fn handle_pointer<R: ReaderType>(
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
||||||
debug_info_builder: &mut DebugInfoBuilder,
|
debug_info_builder: &mut DebugInfoBuilder,
|
||||||
@@ -172,7 +172,7 @@ pub(crate) fn handle_pointer<R: Reader<Offset = usize>>(
|
|||||||
|
|
||||||
if let Some(pointer_size) = get_size_as_usize(entry) {
|
if let Some(pointer_size) = get_size_as_usize(entry) {
|
||||||
if let Some(entry_type_offset) = entry_type {
|
if let Some(entry_type_offset) = entry_type {
|
||||||
let parent_type = debug_info_builder.get_type(entry_type_offset).unwrap().1;
|
let parent_type = debug_info_builder.get_type(entry_type_offset).unwrap().get_type();
|
||||||
Some(Type::pointer_of_width(
|
Some(Type::pointer_of_width(
|
||||||
parent_type.as_ref(),
|
parent_type.as_ref(),
|
||||||
pointer_size,
|
pointer_size,
|
||||||
@@ -190,7 +190,7 @@ pub(crate) fn handle_pointer<R: Reader<Offset = usize>>(
|
|||||||
))
|
))
|
||||||
}
|
}
|
||||||
} else if let Some(entry_type_offset) = entry_type {
|
} else if let Some(entry_type_offset) = entry_type {
|
||||||
let parent_type = debug_info_builder.get_type(entry_type_offset).unwrap().1;
|
let parent_type = debug_info_builder.get_type(entry_type_offset).unwrap().get_type();
|
||||||
Some(Type::pointer_of_width(
|
Some(Type::pointer_of_width(
|
||||||
parent_type.as_ref(),
|
parent_type.as_ref(),
|
||||||
debug_info_builder_context.default_address_size(),
|
debug_info_builder_context.default_address_size(),
|
||||||
@@ -209,7 +209,7 @@ pub(crate) fn handle_pointer<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn handle_array<R: Reader<Offset = usize>>(
|
pub(crate) fn handle_array<R: ReaderType>(
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder: &mut DebugInfoBuilder,
|
debug_info_builder: &mut DebugInfoBuilder,
|
||||||
@@ -228,7 +228,7 @@ pub(crate) fn handle_array<R: Reader<Offset = usize>>(
|
|||||||
// For multidimensional arrays, DW_TAG_subrange_type or DW_TAG_enumeration_type
|
// For multidimensional arrays, DW_TAG_subrange_type or DW_TAG_enumeration_type
|
||||||
|
|
||||||
if let Some(entry_type_offset) = entry_type {
|
if let Some(entry_type_offset) = entry_type {
|
||||||
let parent_type = debug_info_builder.get_type(entry_type_offset).unwrap().1;
|
let parent_type = debug_info_builder.get_type(entry_type_offset).unwrap().get_type();
|
||||||
|
|
||||||
let mut tree = unit.entries_tree(Some(entry.offset())).unwrap();
|
let mut tree = unit.entries_tree(Some(entry.offset())).unwrap();
|
||||||
let mut children = tree.root().unwrap().children();
|
let mut children = tree.root().unwrap().children();
|
||||||
@@ -255,7 +255,8 @@ pub(crate) fn handle_array<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn handle_function<R: Reader<Offset = usize>>(
|
pub(crate) fn handle_function<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
||||||
@@ -289,29 +290,25 @@ pub(crate) fn handle_function<R: Reader<Offset = usize>>(
|
|||||||
debug_info_builder
|
debug_info_builder
|
||||||
.get_type(entry_type_offset)
|
.get_type(entry_type_offset)
|
||||||
.expect("Subroutine return type was not processed")
|
.expect("Subroutine return type was not processed")
|
||||||
.1
|
.get_type()
|
||||||
}
|
}
|
||||||
None => Type::void(),
|
None => Type::void(),
|
||||||
};
|
};
|
||||||
|
|
||||||
// Alias function type in the case that it contains itself
|
// Alias function type in the case that it contains itself
|
||||||
if let Some(name) = debug_info_builder_context.get_name(unit, entry) {
|
if let Some(name) = debug_info_builder_context.get_name(dwarf, unit, entry) {
|
||||||
debug_info_builder.add_type(
|
debug_info_builder.add_type(
|
||||||
get_uid(unit, entry),
|
get_uid(dwarf, unit, entry),
|
||||||
name.clone(),
|
&name,
|
||||||
Type::named_type_from_type(
|
Type::named_type_from_type(
|
||||||
name,
|
&name,
|
||||||
&Type::function::<String, &binaryninja::types::Type>(
|
&Type::function::<&binaryninja::types::Type>(return_type.as_ref(), &[], false),
|
||||||
return_type.as_ref(),
|
|
||||||
&[],
|
|
||||||
false,
|
|
||||||
),
|
|
||||||
),
|
),
|
||||||
false,
|
false,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut parameters: Vec<FunctionParameter<String>> = vec![];
|
let mut parameters: Vec<FunctionParameter> = vec![];
|
||||||
let mut variable_arguments = false;
|
let mut variable_arguments = false;
|
||||||
|
|
||||||
// Get all the children and populate
|
// Get all the children and populate
|
||||||
@@ -322,15 +319,16 @@ pub(crate) fn handle_function<R: Reader<Offset = usize>>(
|
|||||||
if let (Some(child_uid), Some(name)) = {
|
if let (Some(child_uid), Some(name)) = {
|
||||||
(
|
(
|
||||||
get_type(
|
get_type(
|
||||||
|
dwarf,
|
||||||
unit,
|
unit,
|
||||||
child.entry(),
|
child.entry(),
|
||||||
debug_info_builder_context,
|
debug_info_builder_context,
|
||||||
debug_info_builder,
|
debug_info_builder,
|
||||||
),
|
),
|
||||||
debug_info_builder_context.get_name(unit, child.entry()),
|
debug_info_builder_context.get_name(dwarf, unit, child.entry()),
|
||||||
)
|
)
|
||||||
} {
|
} {
|
||||||
let child_type = debug_info_builder.get_type(child_uid).unwrap().1;
|
let child_type = debug_info_builder.get_type(child_uid).unwrap().get_type();
|
||||||
parameters.push(FunctionParameter::new(child_type, name, None));
|
parameters.push(FunctionParameter::new(child_type, name, None));
|
||||||
}
|
}
|
||||||
} else if child.entry().tag() == constants::DW_TAG_unspecified_parameters {
|
} else if child.entry().tag() == constants::DW_TAG_unspecified_parameters {
|
||||||
@@ -338,8 +336,8 @@ pub(crate) fn handle_function<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if debug_info_builder_context.get_name(unit, entry).is_some() {
|
if debug_info_builder_context.get_name(dwarf, unit, entry).is_some() {
|
||||||
debug_info_builder.remove_type(get_uid(unit, entry));
|
debug_info_builder.remove_type(get_uid(dwarf, unit, entry));
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(Type::function(
|
Some(Type::function(
|
||||||
@@ -362,7 +360,7 @@ pub(crate) fn handle_const(
|
|||||||
// ?DW_AT_type
|
// ?DW_AT_type
|
||||||
|
|
||||||
if let Some(entry_type_offset) = entry_type {
|
if let Some(entry_type_offset) = entry_type {
|
||||||
let parent_type = debug_info_builder.get_type(entry_type_offset).unwrap().1;
|
let parent_type = debug_info_builder.get_type(entry_type_offset).unwrap().get_type();
|
||||||
Some((*parent_type).to_builder().set_const(true).finalize())
|
Some((*parent_type).to_builder().set_const(true).finalize())
|
||||||
} else {
|
} else {
|
||||||
Some(TypeBuilder::void().set_const(true).finalize())
|
Some(TypeBuilder::void().set_const(true).finalize())
|
||||||
@@ -382,7 +380,7 @@ pub(crate) fn handle_volatile(
|
|||||||
// ?DW_AT_type
|
// ?DW_AT_type
|
||||||
|
|
||||||
if let Some(entry_type_offset) = entry_type {
|
if let Some(entry_type_offset) = entry_type {
|
||||||
let parent_type = debug_info_builder.get_type(entry_type_offset).unwrap().1;
|
let parent_type = debug_info_builder.get_type(entry_type_offset).unwrap().get_type();
|
||||||
Some((*parent_type).to_builder().set_volatile(true).finalize())
|
Some((*parent_type).to_builder().set_volatile(true).finalize())
|
||||||
} else {
|
} else {
|
||||||
Some(TypeBuilder::void().set_volatile(true).finalize())
|
Some(TypeBuilder::void().set_volatile(true).finalize())
|
||||||
|
|||||||
@@ -12,7 +12,7 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
use crate::helpers::{get_uid, resolve_specification, DieReference};
|
use crate::{helpers::{get_uid, resolve_specification, DieReference}, ReaderType};
|
||||||
|
|
||||||
use binaryninja::{
|
use binaryninja::{
|
||||||
binaryview::{BinaryView, BinaryViewBase, BinaryViewExt},
|
binaryview::{BinaryView, BinaryViewBase, BinaryViewExt},
|
||||||
@@ -21,13 +21,14 @@ use binaryninja::{
|
|||||||
rc::*,
|
rc::*,
|
||||||
symbol::SymbolType,
|
symbol::SymbolType,
|
||||||
templatesimplifier::simplify_str_to_fqn,
|
templatesimplifier::simplify_str_to_fqn,
|
||||||
types::{Conf, FunctionParameter, Type},
|
types::{Conf, FunctionParameter, NamedTypedVariable, Type, Variable, VariableSourceType},
|
||||||
};
|
};
|
||||||
|
|
||||||
use gimli::{DebuggingInformationEntry, Dwarf, Reader, Unit};
|
use gimli::{DebuggingInformationEntry, Dwarf, Unit};
|
||||||
|
|
||||||
use log::{error, warn};
|
use log::{debug, error, warn};
|
||||||
use std::{
|
use std::{
|
||||||
|
cmp::Ordering,
|
||||||
collections::{hash_map::Values, HashMap},
|
collections::{hash_map::Values, HashMap},
|
||||||
hash::Hash,
|
hash::Hash,
|
||||||
};
|
};
|
||||||
@@ -46,6 +47,8 @@ pub(crate) struct FunctionInfoBuilder {
|
|||||||
pub(crate) address: Option<u64>,
|
pub(crate) address: Option<u64>,
|
||||||
pub(crate) parameters: Vec<Option<(String, TypeUID)>>,
|
pub(crate) parameters: Vec<Option<(String, TypeUID)>>,
|
||||||
pub(crate) platform: Option<Ref<Platform>>,
|
pub(crate) platform: Option<Ref<Platform>>,
|
||||||
|
pub(crate) variable_arguments: bool,
|
||||||
|
pub(crate) stack_variables: Vec<NamedTypedVariable>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FunctionInfoBuilder {
|
impl FunctionInfoBuilder {
|
||||||
@@ -55,7 +58,7 @@ impl FunctionInfoBuilder {
|
|||||||
raw_name: Option<String>,
|
raw_name: Option<String>,
|
||||||
return_type: Option<TypeUID>,
|
return_type: Option<TypeUID>,
|
||||||
address: Option<u64>,
|
address: Option<u64>,
|
||||||
parameters: Vec<Option<(String, TypeUID)>>,
|
parameters: &Vec<Option<(String, TypeUID)>>,
|
||||||
) {
|
) {
|
||||||
if full_name.is_some() {
|
if full_name.is_some() {
|
||||||
self.full_name = full_name;
|
self.full_name = full_name;
|
||||||
@@ -75,13 +78,13 @@ impl FunctionInfoBuilder {
|
|||||||
|
|
||||||
for (i, new_parameter) in parameters.into_iter().enumerate() {
|
for (i, new_parameter) in parameters.into_iter().enumerate() {
|
||||||
match self.parameters.get(i) {
|
match self.parameters.get(i) {
|
||||||
Some(None) => self.parameters[i] = new_parameter,
|
Some(None) => self.parameters[i] = new_parameter.clone(),
|
||||||
Some(Some(_)) => (),
|
Some(Some(_)) => (),
|
||||||
// Some(Some((name, _))) if name.as_bytes().is_empty() => {
|
// Some(Some((name, _))) if name.as_bytes().is_empty() => {
|
||||||
// self.parameters[i] = new_parameter
|
// self.parameters[i] = new_parameter
|
||||||
// }
|
// }
|
||||||
// Some(Some((_, uid))) if *uid == 0 => self.parameters[i] = new_parameter, // TODO : This is a placebo....void types aren't actually UID 0
|
// Some(Some((_, uid))) if *uid == 0 => self.parameters[i] = new_parameter, // TODO : This is a placebo....void types aren't actually UID 0
|
||||||
_ => self.parameters.push(new_parameter),
|
_ => self.parameters.push(new_parameter.clone()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -97,16 +100,27 @@ pub(crate) struct DebugType {
|
|||||||
commit: bool,
|
commit: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) struct DebugInfoBuilderContext<R: Reader<Offset = usize>> {
|
impl DebugType {
|
||||||
dwarf: Dwarf<R>,
|
pub fn get_name(&self) -> &String {
|
||||||
|
&self.name
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_type(&self) -> Ref<Type> {
|
||||||
|
self.t.clone()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) struct DebugInfoBuilderContext<R: ReaderType> {
|
||||||
units: Vec<Unit<R>>,
|
units: Vec<Unit<R>>,
|
||||||
|
sup_units: Vec<Unit<R>>,
|
||||||
names: HashMap<TypeUID, String>,
|
names: HashMap<TypeUID, String>,
|
||||||
default_address_size: usize,
|
default_address_size: usize,
|
||||||
pub(crate) total_die_count: usize,
|
pub(crate) total_die_count: usize,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<R: Reader<Offset = usize>> DebugInfoBuilderContext<R> {
|
impl<R: ReaderType> DebugInfoBuilderContext<R> {
|
||||||
pub(crate) fn new(view: &BinaryView, dwarf: Dwarf<R>) -> Option<Self> {
|
pub(crate) fn new(view: &BinaryView, dwarf: &Dwarf<R>) -> Option<Self> {
|
||||||
|
|
||||||
let mut units = vec![];
|
let mut units = vec![];
|
||||||
let mut iter = dwarf.units();
|
let mut iter = dwarf.units();
|
||||||
while let Ok(Some(header)) = iter.next() {
|
while let Ok(Some(header)) = iter.next() {
|
||||||
@@ -118,40 +132,56 @@ impl<R: Reader<Offset = usize>> DebugInfoBuilderContext<R> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let mut sup_units = vec![];
|
||||||
|
if let Some(sup_dwarf) = dwarf.sup() {
|
||||||
|
let mut sup_iter = sup_dwarf.units();
|
||||||
|
while let Ok(Some(header)) = sup_iter.next() {
|
||||||
|
if let Ok(unit) = sup_dwarf.unit(header) {
|
||||||
|
sup_units.push(unit);
|
||||||
|
} else {
|
||||||
|
error!("Unable to read supplementary DWARF information. File may be malformed or corrupted. Not applying debug info.");
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Some(Self {
|
Some(Self {
|
||||||
dwarf,
|
|
||||||
units,
|
units,
|
||||||
|
sup_units,
|
||||||
names: HashMap::new(),
|
names: HashMap::new(),
|
||||||
default_address_size: view.address_size(),
|
default_address_size: view.address_size(),
|
||||||
total_die_count: 0,
|
total_die_count: 0,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn dwarf(&self) -> &Dwarf<R> {
|
|
||||||
&self.dwarf
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn units(&self) -> &[Unit<R>] {
|
pub(crate) fn units(&self) -> &[Unit<R>] {
|
||||||
&self.units
|
&self.units
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn sup_units(&self) -> &[Unit<R>] {
|
||||||
|
&self.sup_units
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn default_address_size(&self) -> usize {
|
pub(crate) fn default_address_size(&self) -> usize {
|
||||||
self.default_address_size
|
self.default_address_size
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn set_name(&mut self, die_uid: TypeUID, name: String) {
|
pub(crate) fn set_name(&mut self, die_uid: TypeUID, name: String) {
|
||||||
|
// die_uids need to be unique here
|
||||||
assert!(self.names.insert(die_uid, name).is_none());
|
assert!(self.names.insert(die_uid, name).is_none());
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn get_name(
|
pub(crate) fn get_name(
|
||||||
&self,
|
&self,
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
) -> Option<String> {
|
) -> Option<String> {
|
||||||
match resolve_specification(unit, entry, self) {
|
match resolve_specification(dwarf, unit, entry, self) {
|
||||||
DieReference::UnitAndOffset((entry_unit, entry_offset)) => self
|
DieReference::UnitAndOffset((dwarf, entry_unit, entry_offset)) => self
|
||||||
.names
|
.names
|
||||||
.get(&get_uid(
|
.get(&get_uid(
|
||||||
|
dwarf,
|
||||||
entry_unit,
|
entry_unit,
|
||||||
&entry_unit.entry(entry_offset).unwrap(),
|
&entry_unit.entry(entry_offset).unwrap(),
|
||||||
))
|
))
|
||||||
@@ -166,19 +196,29 @@ impl<R: Reader<Offset = usize>> DebugInfoBuilderContext<R> {
|
|||||||
// info and types to one DIE's UID (T) before adding the completed info to BN's debug info
|
// info and types to one DIE's UID (T) before adding the completed info to BN's debug info
|
||||||
pub(crate) struct DebugInfoBuilder {
|
pub(crate) struct DebugInfoBuilder {
|
||||||
functions: Vec<FunctionInfoBuilder>,
|
functions: Vec<FunctionInfoBuilder>,
|
||||||
|
raw_function_name_indices: HashMap<String, usize>,
|
||||||
|
full_function_name_indices: HashMap<String, usize>,
|
||||||
types: HashMap<TypeUID, DebugType>,
|
types: HashMap<TypeUID, DebugType>,
|
||||||
data_variables: HashMap<u64, (Option<String>, TypeUID)>,
|
data_variables: HashMap<u64, (Option<String>, TypeUID)>,
|
||||||
|
range_data_offsets: iset::IntervalMap<u64, i64>
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DebugInfoBuilder {
|
impl DebugInfoBuilder {
|
||||||
pub(crate) fn new() -> Self {
|
pub(crate) fn new() -> Self {
|
||||||
Self {
|
Self {
|
||||||
functions: vec![],
|
functions: vec![],
|
||||||
|
raw_function_name_indices: HashMap::new(),
|
||||||
|
full_function_name_indices: HashMap::new(),
|
||||||
types: HashMap::new(),
|
types: HashMap::new(),
|
||||||
data_variables: HashMap::new(),
|
data_variables: HashMap::new(),
|
||||||
|
range_data_offsets: iset::IntervalMap::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn set_range_data_offsets(&mut self, offsets: iset::IntervalMap<u64, i64>) {
|
||||||
|
self.range_data_offsets = offsets
|
||||||
|
}
|
||||||
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
#[allow(clippy::too_many_arguments)]
|
||||||
pub(crate) fn insert_function(
|
pub(crate) fn insert_function(
|
||||||
&mut self,
|
&mut self,
|
||||||
@@ -186,32 +226,87 @@ impl DebugInfoBuilder {
|
|||||||
raw_name: Option<String>,
|
raw_name: Option<String>,
|
||||||
return_type: Option<TypeUID>,
|
return_type: Option<TypeUID>,
|
||||||
address: Option<u64>,
|
address: Option<u64>,
|
||||||
parameters: Vec<Option<(String, TypeUID)>>,
|
parameters: &Vec<Option<(String, TypeUID)>>,
|
||||||
) {
|
variable_arguments: bool,
|
||||||
|
) -> Option<usize> {
|
||||||
|
// Returns the index of the function
|
||||||
// Raw names should be the primary key, but if they don't exist, use the full name
|
// Raw names should be the primary key, but if they don't exist, use the full name
|
||||||
// TODO : Consider further falling back on address/architecture
|
// TODO : Consider further falling back on address/architecture
|
||||||
if let Some(function) = self
|
|
||||||
.functions
|
/*
|
||||||
.iter_mut()
|
If it has a raw_name and we know it, update it and return
|
||||||
.find(|func| func.raw_name.is_some() && func.raw_name == raw_name)
|
Else if it has a full_name and we know it, update it and return
|
||||||
{
|
Else Add a new entry if we don't know the full_name or raw_name
|
||||||
|
*/
|
||||||
|
|
||||||
|
if let Some(ident) = &raw_name {
|
||||||
|
// check if we already know about this raw name's index
|
||||||
|
// if we do, and the full name will change, remove the known full index if it exists
|
||||||
|
// update the function
|
||||||
|
// if the full name exists, update the stored index for the full name
|
||||||
|
if let Some(idx) = self.raw_function_name_indices.get(ident) {
|
||||||
|
let function = self.functions.get_mut(*idx).unwrap();
|
||||||
|
|
||||||
|
if function.full_name.is_some() && function.full_name != full_name {
|
||||||
|
self.full_function_name_indices.remove(function.full_name.as_ref().unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
function.update(full_name, raw_name, return_type, address, parameters);
|
function.update(full_name, raw_name, return_type, address, parameters);
|
||||||
} else if let Some(function) = self.functions.iter_mut().find(|func| {
|
|
||||||
(func.raw_name.is_none() || raw_name.is_none())
|
if function.full_name.is_some() {
|
||||||
&& func.full_name.is_some()
|
self.full_function_name_indices.insert(function.full_name.clone().unwrap(), *idx);
|
||||||
&& func.full_name == full_name
|
}
|
||||||
}) {
|
|
||||||
|
return Some(*idx);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if let Some(ident) = &full_name {
|
||||||
|
// check if we already know about this full name's index
|
||||||
|
// if we do, and the raw name will change, remove the known raw index if it exists
|
||||||
|
// update the function
|
||||||
|
// if the raw name exists, update the stored index for the raw name
|
||||||
|
if let Some(idx) = self.full_function_name_indices.get(ident) {
|
||||||
|
let function = self.functions.get_mut(*idx).unwrap();
|
||||||
|
|
||||||
|
if function.raw_name.is_some() && function.raw_name != raw_name {
|
||||||
|
self.raw_function_name_indices.remove(function.raw_name.as_ref().unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
function.update(full_name, raw_name, return_type, address, parameters);
|
function.update(full_name, raw_name, return_type, address, parameters);
|
||||||
} else {
|
|
||||||
self.functions.push(FunctionInfoBuilder {
|
if function.raw_name.is_some() {
|
||||||
|
self.raw_function_name_indices.insert(function.raw_name.clone().unwrap(), *idx);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Some(*idx);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
debug!("Function entry in DWARF without full or raw name.");
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let function = FunctionInfoBuilder {
|
||||||
full_name,
|
full_name,
|
||||||
raw_name,
|
raw_name,
|
||||||
return_type,
|
return_type,
|
||||||
address,
|
address,
|
||||||
parameters,
|
parameters: parameters.clone(),
|
||||||
platform: None,
|
platform: None,
|
||||||
});
|
variable_arguments,
|
||||||
|
stack_variables: vec![],
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(n) = &function.full_name {
|
||||||
|
self.full_function_name_indices.insert(n.clone(), self.functions.len());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(n) = &function.raw_name {
|
||||||
|
self.raw_function_name_indices.insert(n.clone(), self.functions.len());
|
||||||
|
}
|
||||||
|
|
||||||
|
self.functions.push(function);
|
||||||
|
Some(self.functions.len()-1)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn functions(&self) -> &[FunctionInfoBuilder] {
|
pub(crate) fn functions(&self) -> &[FunctionInfoBuilder] {
|
||||||
@@ -222,13 +317,7 @@ impl DebugInfoBuilder {
|
|||||||
self.types.values()
|
self.types.values()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn add_type(
|
pub(crate) fn add_type(&mut self, type_uid: TypeUID, name: &String, t: Ref<Type>, commit: bool) {
|
||||||
&mut self,
|
|
||||||
type_uid: TypeUID,
|
|
||||||
name: String,
|
|
||||||
t: Ref<Type>,
|
|
||||||
commit: bool,
|
|
||||||
) {
|
|
||||||
if let Some(DebugType {
|
if let Some(DebugType {
|
||||||
name: existing_name,
|
name: existing_name,
|
||||||
t: existing_type,
|
t: existing_type,
|
||||||
@@ -242,7 +331,7 @@ impl DebugInfoBuilder {
|
|||||||
},
|
},
|
||||||
) {
|
) {
|
||||||
if existing_type != t && commit {
|
if existing_type != t && commit {
|
||||||
error!("DWARF info contains duplicate type definition. Overwriting type `{}` (named `{:?}`) with `{}` (named `{:?}`)",
|
warn!("DWARF info contains duplicate type definition. Overwriting type `{}` (named `{:?}`) with `{}` (named `{:?}`)",
|
||||||
existing_type,
|
existing_type,
|
||||||
existing_name,
|
existing_name,
|
||||||
t,
|
t,
|
||||||
@@ -256,15 +345,76 @@ impl DebugInfoBuilder {
|
|||||||
self.types.remove(&type_uid);
|
self.types.remove(&type_uid);
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO : Non-copy?
|
pub(crate) fn get_type(&self, type_uid: TypeUID) -> Option<&DebugType> {
|
||||||
pub(crate) fn get_type(&self, type_uid: TypeUID) -> Option<(String, Ref<Type>)> {
|
self.types.get(&type_uid)
|
||||||
self.types
|
|
||||||
.get(&type_uid)
|
|
||||||
.map(|type_ref_ref| (type_ref_ref.name.clone(), type_ref_ref.t.clone()))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn contains_type(&self, type_uid: TypeUID) -> bool {
|
pub(crate) fn contains_type(&self, type_uid: TypeUID) -> bool {
|
||||||
self.types.get(&type_uid).is_some()
|
self.types.contains_key(&type_uid)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
pub(crate) fn add_stack_variable(
|
||||||
|
&mut self,
|
||||||
|
fn_idx: Option<usize>,
|
||||||
|
offset: i64,
|
||||||
|
name: Option<String>,
|
||||||
|
type_uid: Option<TypeUID>,
|
||||||
|
) {
|
||||||
|
let name = match name {
|
||||||
|
Some(x) => {
|
||||||
|
if x.len() == 1 && x.chars().next() == Some('\x00') {
|
||||||
|
// Anonymous variable, generate name
|
||||||
|
format!("debug_var_{}", offset)
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
x
|
||||||
|
}
|
||||||
|
},
|
||||||
|
None => {
|
||||||
|
// Anonymous variable, generate name
|
||||||
|
format!("debug_var_{}", offset)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let Some(function_index) = fn_idx else {
|
||||||
|
// If we somehow lost track of what subprogram we're in or we're not actually in a subprogram
|
||||||
|
error!("Trying to add a local variable outside of a subprogram. Please report this issue.");
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Either get the known type or use a 0 confidence void type so we at least get the name applied
|
||||||
|
let t = match type_uid {
|
||||||
|
Some(uid) => Conf::new(self.get_type(uid).unwrap().get_type(), 128),
|
||||||
|
None => Conf::new(Type::void(), 0)
|
||||||
|
};
|
||||||
|
let function = &mut self.functions[function_index];
|
||||||
|
|
||||||
|
// TODO: If we can't find a known offset can we try to guess somehow?
|
||||||
|
|
||||||
|
let Some(func_addr) = function.address else {
|
||||||
|
// If we somehow are processing a function's variables before the function is created
|
||||||
|
error!("Trying to add a local variable without a known function start. Please report this issue.");
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
|
||||||
|
let Some(offset_adjustment) = self.range_data_offsets.values_overlap(func_addr).next() else {
|
||||||
|
// Unknown why, but this is happening with MachO + external dSYM
|
||||||
|
debug!("Refusing to add a local variable ({}@{}) to function at {} without a known CIE offset.", name, offset, func_addr);
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
|
||||||
|
let adjusted_offset = offset - offset_adjustment;
|
||||||
|
|
||||||
|
if adjusted_offset > 0 {
|
||||||
|
// If we somehow end up with a positive sp offset
|
||||||
|
error!("Trying to add a local variable at positive storage offset {}. Please report this issue.", adjusted_offset);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let var = Variable::new(VariableSourceType::StackVariableSourceType, 0, adjusted_offset);
|
||||||
|
function.stack_variables.push(NamedTypedVariable::new(var, name, t, false));
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn add_data_variable(
|
pub(crate) fn add_data_variable(
|
||||||
@@ -276,14 +426,14 @@ impl DebugInfoBuilder {
|
|||||||
if let Some((_existing_name, existing_type_uid)) =
|
if let Some((_existing_name, existing_type_uid)) =
|
||||||
self.data_variables.insert(address, (name, type_uid))
|
self.data_variables.insert(address, (name, type_uid))
|
||||||
{
|
{
|
||||||
let existing_type = self.get_type(existing_type_uid).unwrap().1;
|
let existing_type = self.get_type(existing_type_uid).unwrap().get_type();
|
||||||
let new_type = self.get_type(type_uid).unwrap().1;
|
let new_type = self.get_type(type_uid).unwrap().get_type();
|
||||||
|
|
||||||
if existing_type_uid != type_uid || existing_type != new_type {
|
if existing_type_uid != type_uid || existing_type != new_type {
|
||||||
error!("DWARF info contains duplicate data variable definition. Overwriting data variable at 0x{:08x} (`{}`) with `{}`",
|
warn!("DWARF info contains duplicate data variable definition. Overwriting data variable at 0x{:08x} (`{}`) with `{}`",
|
||||||
address,
|
address,
|
||||||
self.get_type(existing_type_uid).unwrap().1,
|
existing_type,
|
||||||
self.get_type(type_uid).unwrap().1
|
new_type
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -303,7 +453,7 @@ impl DebugInfoBuilder {
|
|||||||
for (&address, (name, type_uid)) in &self.data_variables {
|
for (&address, (name, type_uid)) in &self.data_variables {
|
||||||
assert!(debug_info.add_data_variable(
|
assert!(debug_info.add_data_variable(
|
||||||
address,
|
address,
|
||||||
&self.get_type(*type_uid).unwrap().1,
|
&self.get_type(*type_uid).unwrap().t,
|
||||||
name.clone(),
|
name.clone(),
|
||||||
&[] // TODO : Components
|
&[] // TODO : Components
|
||||||
));
|
));
|
||||||
@@ -312,17 +462,17 @@ impl DebugInfoBuilder {
|
|||||||
|
|
||||||
fn get_function_type(&self, function: &FunctionInfoBuilder) -> Ref<Type> {
|
fn get_function_type(&self, function: &FunctionInfoBuilder) -> Ref<Type> {
|
||||||
let return_type = match function.return_type {
|
let return_type = match function.return_type {
|
||||||
Some(return_type_id) => Conf::new(self.get_type(return_type_id).unwrap().1.clone(), 0),
|
Some(return_type_id) => Conf::new(self.get_type(return_type_id).unwrap().get_type(), 128),
|
||||||
_ => Conf::new(binaryninja::types::Type::void(), 0),
|
_ => Conf::new(binaryninja::types::Type::void(), 0),
|
||||||
};
|
};
|
||||||
|
|
||||||
let parameters: Vec<FunctionParameter<String>> = function
|
let parameters: Vec<FunctionParameter> = function
|
||||||
.parameters
|
.parameters
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|parameter| match parameter {
|
.filter_map(|parameter| match parameter {
|
||||||
Some((name, 0)) => Some(FunctionParameter::new(Type::void(), name.clone(), None)),
|
Some((name, 0)) => Some(FunctionParameter::new(Type::void(), name.clone(), None)),
|
||||||
Some((name, uid)) => Some(FunctionParameter::new(
|
Some((name, uid)) => Some(FunctionParameter::new(
|
||||||
self.get_type(*uid).unwrap().1,
|
self.get_type(*uid).unwrap().get_type(),
|
||||||
name.clone(),
|
name.clone(),
|
||||||
None,
|
None,
|
||||||
)),
|
)),
|
||||||
@@ -330,10 +480,7 @@ impl DebugInfoBuilder {
|
|||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
// TODO : Handle
|
binaryninja::types::Type::function(&return_type, ¶meters, function.variable_arguments)
|
||||||
let variable_parameters = false;
|
|
||||||
|
|
||||||
binaryninja::types::Type::function(&return_type, ¶meters, variable_parameters)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn commit_functions(&self, debug_info: &mut DebugInfo) {
|
fn commit_functions(&self, debug_info: &mut DebugInfo) {
|
||||||
@@ -348,12 +495,12 @@ impl DebugInfoBuilder {
|
|||||||
function.address,
|
function.address,
|
||||||
function.platform.clone(),
|
function.platform.clone(),
|
||||||
vec![], // TODO : Components
|
vec![], // TODO : Components
|
||||||
|
function.stack_variables.clone(), // TODO: local non-stack variables
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn post_process(&mut self, bv: &BinaryView, _debug_info: &mut DebugInfo) -> &Self {
|
pub(crate) fn post_process(&mut self, bv: &BinaryView, _debug_info: &mut DebugInfo) -> &Self {
|
||||||
// TODO : We don't need post-processing if we process correctly the first time....
|
|
||||||
// When originally resolving names, we need to check:
|
// When originally resolving names, we need to check:
|
||||||
// If there's already a name from binja that's "more correct" than what we found (has more namespaces)
|
// If there's already a name from binja that's "more correct" than what we found (has more namespaces)
|
||||||
// If there's no name for the DIE, but there's a linkage name that's resolved in binja to a usable name
|
// If there's no name for the DIE, but there's a linkage name that's resolved in binja to a usable name
|
||||||
@@ -379,19 +526,22 @@ impl DebugInfoBuilder {
|
|||||||
if simplify_str_to_fqn(func_full_name, true).len()
|
if simplify_str_to_fqn(func_full_name, true).len()
|
||||||
< simplify_str_to_fqn(symbol_full_name.clone(), true).len()
|
< simplify_str_to_fqn(symbol_full_name.clone(), true).len()
|
||||||
{
|
{
|
||||||
func.full_name =
|
func.full_name = Some(symbol_full_name.to_string());
|
||||||
Some(symbol_full_name.to_string());
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(address) = func.address {
|
if let Some(address) = func.address.as_mut() {
|
||||||
let existing_functions = bv.functions_at(address);
|
let diff = bv.start() - bv.original_image_base();
|
||||||
if existing_functions.len() > 1 {
|
*address += diff; // rebase the address
|
||||||
|
let existing_functions = bv.functions_at(*address);
|
||||||
|
match existing_functions.len().cmp(&1) {
|
||||||
|
Ordering::Greater => {
|
||||||
warn!("Multiple existing functions at address {address:08x}. One or more functions at this address may have the wrong platform information. Please report this binary.");
|
warn!("Multiple existing functions at address {address:08x}. One or more functions at this address may have the wrong platform information. Please report this binary.");
|
||||||
} else if existing_functions.len() == 1 {
|
}
|
||||||
func.platform = Some(existing_functions.get(0).platform());
|
Ordering::Equal => func.platform = Some(existing_functions.get(0).platform()),
|
||||||
|
Ordering::Less => {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,32 +12,45 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
|
use std::sync::OnceLock;
|
||||||
|
|
||||||
use crate::dwarfdebuginfo::{DebugInfoBuilder, DebugInfoBuilderContext, TypeUID};
|
use crate::dwarfdebuginfo::{DebugInfoBuilder, DebugInfoBuilderContext, TypeUID};
|
||||||
use crate::helpers::*;
|
use crate::{helpers::*, ReaderType};
|
||||||
use crate::types::get_type;
|
use crate::types::get_type;
|
||||||
|
|
||||||
use gimli::{constants, DebuggingInformationEntry, Reader, Unit};
|
use binaryninja::templatesimplifier::simplify_str_to_str;
|
||||||
|
use cpp_demangle::DemangleOptions;
|
||||||
|
use gimli::{constants, DebuggingInformationEntry, Dwarf, Unit};
|
||||||
|
use log::debug;
|
||||||
|
use regex::Regex;
|
||||||
|
|
||||||
fn get_parameters<R: Reader<Offset = usize>>(
|
fn get_parameters<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
||||||
debug_info_builder: &mut DebugInfoBuilder,
|
debug_info_builder: &mut DebugInfoBuilder,
|
||||||
) -> Vec<Option<(String, TypeUID)>> {
|
) -> (Vec<Option<(String, TypeUID)>>, bool) {
|
||||||
if !entry.has_children() {
|
if !entry.has_children() {
|
||||||
vec![]
|
return (vec![], false);
|
||||||
} else {
|
}
|
||||||
|
|
||||||
// We make a new tree from the current entry to iterate over its children
|
// We make a new tree from the current entry to iterate over its children
|
||||||
let mut sub_die_tree = unit.entries_tree(Some(entry.offset())).unwrap();
|
let mut sub_die_tree = unit.entries_tree(Some(entry.offset())).unwrap();
|
||||||
let root = sub_die_tree.root().unwrap();
|
let root = sub_die_tree.root().unwrap();
|
||||||
|
|
||||||
|
let mut variable_arguments = false;
|
||||||
let mut result = vec![];
|
let mut result = vec![];
|
||||||
let mut children = root.children();
|
let mut children = root.children();
|
||||||
while let Some(child) = children.next().unwrap() {
|
while let Some(child) = children.next().unwrap() {
|
||||||
match child.entry().tag() {
|
match child.entry().tag() {
|
||||||
constants::DW_TAG_formal_parameter => {
|
constants::DW_TAG_formal_parameter => {
|
||||||
let name = debug_info_builder_context.get_name(unit, child.entry());
|
//TODO: if the param type is a typedef to an anonymous struct (typedef struct {...} foo) then this is reoslved to an anonymous struct instead of foo
|
||||||
|
// We should still recurse to make sure we load all types this param type depends on, but
|
||||||
|
let name = debug_info_builder_context.get_name(dwarf, unit, child.entry());
|
||||||
|
|
||||||
let type_ = get_type(
|
let type_ = get_type(
|
||||||
|
dwarf,
|
||||||
unit,
|
unit,
|
||||||
child.entry(),
|
child.entry(),
|
||||||
debug_info_builder_context,
|
debug_info_builder_context,
|
||||||
@@ -53,26 +66,65 @@ fn get_parameters<R: Reader<Offset = usize>>(
|
|||||||
result.push(None)
|
result.push(None)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
constants::DW_TAG_unspecified_parameters => (),
|
constants::DW_TAG_unspecified_parameters => variable_arguments = true,
|
||||||
_ => (),
|
_ => (),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
result
|
(result, variable_arguments)
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn parse_function_entry<R: Reader<Offset = usize>>(
|
pub(crate) fn parse_function_entry<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
||||||
debug_info_builder: &mut DebugInfoBuilder,
|
debug_info_builder: &mut DebugInfoBuilder,
|
||||||
) {
|
) -> Option<usize> {
|
||||||
// Collect function properties (if they exist in this DIE)
|
// Collect function properties (if they exist in this DIE)
|
||||||
let full_name = debug_info_builder_context.get_name(unit, entry);
|
let raw_name = get_raw_name(dwarf, unit, entry);
|
||||||
let raw_name = get_raw_name(unit, entry, debug_info_builder_context);
|
let return_type = get_type(dwarf, unit, entry, debug_info_builder_context, debug_info_builder);
|
||||||
let return_type = get_type(unit, entry, debug_info_builder_context, debug_info_builder);
|
let address = get_start_address(dwarf, unit, entry);
|
||||||
let address = get_start_address(unit, entry, debug_info_builder_context);
|
let (parameters, variable_arguments) = get_parameters(dwarf, unit, entry, debug_info_builder_context, debug_info_builder);
|
||||||
let parameters = get_parameters(unit, entry, debug_info_builder_context, debug_info_builder);
|
|
||||||
|
|
||||||
debug_info_builder.insert_function(full_name, raw_name, return_type, address, parameters);
|
// If we have a raw name, it might be mangled, see if we can demangle it into full_name
|
||||||
|
// raw_name should contain a superset of the info we have in full_name
|
||||||
|
let mut full_name = None;
|
||||||
|
if let Some(possibly_mangled_name) = &raw_name {
|
||||||
|
if possibly_mangled_name.starts_with('_') {
|
||||||
|
static OPTIONS_MEM: OnceLock<DemangleOptions> = OnceLock::new();
|
||||||
|
let demangle_options = OPTIONS_MEM.get_or_init(|| {
|
||||||
|
DemangleOptions::new()
|
||||||
|
.no_return_type()
|
||||||
|
.hide_expression_literal_types()
|
||||||
|
.no_params()
|
||||||
|
});
|
||||||
|
|
||||||
|
static ABI_REGEX_MEM: OnceLock<Regex> = OnceLock::new();
|
||||||
|
let abi_regex = ABI_REGEX_MEM.get_or_init(|| {
|
||||||
|
Regex::new(r"\[abi:v\d+\]").unwrap()
|
||||||
|
});
|
||||||
|
if let Ok(sym) = cpp_demangle::Symbol::new(possibly_mangled_name) {
|
||||||
|
if let Ok(demangled) = sym.demangle(demangle_options) {
|
||||||
|
let cleaned = abi_regex.replace_all(&demangled, "");
|
||||||
|
let simplified = simplify_str_to_str(&cleaned);
|
||||||
|
full_name = Some(simplified.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we didn't demangle the raw name, fetch the name given
|
||||||
|
if full_name.is_none() {
|
||||||
|
full_name = debug_info_builder_context.get_name(dwarf, unit, entry)
|
||||||
|
}
|
||||||
|
|
||||||
|
if raw_name.is_none() && full_name.is_none() {
|
||||||
|
debug!(
|
||||||
|
"Function entry in DWARF without full or raw name: .debug_info offset {:?}",
|
||||||
|
entry.offset().to_debug_info_offset(&unit.header)
|
||||||
|
);
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
debug_info_builder.insert_function(full_name, raw_name, return_type, address, ¶meters, variable_arguments)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,124 +12,179 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
use crate::DebugInfoBuilderContext;
|
use std::path::PathBuf;
|
||||||
|
use std::{
|
||||||
|
collections::HashMap,
|
||||||
|
ops::Deref,
|
||||||
|
sync::mpsc,
|
||||||
|
str::FromStr
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{DebugInfoBuilderContext, ReaderType};
|
||||||
|
use binaryninja::binaryview::BinaryViewBase;
|
||||||
|
use binaryninja::filemetadata::FileMetadata;
|
||||||
|
use binaryninja::Endianness;
|
||||||
|
use binaryninja::{binaryview::{BinaryView, BinaryViewExt}, downloadprovider::{DownloadInstanceInputOutputCallbacks, DownloadProvider}, rc::Ref, settings::Settings};
|
||||||
|
use gimli::Dwarf;
|
||||||
use gimli::{
|
use gimli::{
|
||||||
constants, Attribute, AttributeValue,
|
constants, Attribute, AttributeValue,
|
||||||
AttributeValue::{DebugInfoRef, UnitRef},
|
AttributeValue::{DebugInfoRef, DebugInfoRefSup, UnitRef},
|
||||||
DebuggingInformationEntry, Operation, Reader, Unit, UnitOffset, UnitSectionOffset,
|
DebuggingInformationEntry, Operation, Unit, UnitOffset, UnitSectionOffset,
|
||||||
};
|
};
|
||||||
|
|
||||||
use log::warn;
|
use log::warn;
|
||||||
|
|
||||||
pub(crate) fn get_uid<R: Reader<Offset = usize>>(
|
pub(crate) fn get_uid<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
) -> usize {
|
) -> usize {
|
||||||
match entry.offset().to_unit_section_offset(unit) {
|
// We set a large gap between supplementary and main entries
|
||||||
|
let adj = dwarf.sup().map_or(0, |_| 0x1000000000000000);
|
||||||
|
let entry_offset = match entry.offset().to_unit_section_offset(unit) {
|
||||||
UnitSectionOffset::DebugInfoOffset(o) => o.0,
|
UnitSectionOffset::DebugInfoOffset(o) => o.0,
|
||||||
UnitSectionOffset::DebugTypesOffset(o) => o.0,
|
UnitSectionOffset::DebugTypesOffset(o) => o.0,
|
||||||
}
|
};
|
||||||
|
entry_offset + adj
|
||||||
}
|
}
|
||||||
|
|
||||||
////////////////////////////////////
|
////////////////////////////////////
|
||||||
// DIE attr convenience functions
|
// DIE attr convenience functions
|
||||||
|
|
||||||
pub(crate) enum DieReference<'a, R: Reader<Offset = usize>> {
|
pub(crate) enum DieReference<'a, R: ReaderType> {
|
||||||
UnitAndOffset((&'a Unit<R>, UnitOffset)),
|
UnitAndOffset((&'a Dwarf<R>, &'a Unit<R>, UnitOffset)),
|
||||||
Err,
|
Err,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn get_attr_die<'a, R: Reader<Offset = usize>>(
|
pub(crate) fn get_attr_die<'a, R: ReaderType>(
|
||||||
|
dwarf: &'a Dwarf<R>,
|
||||||
unit: &'a Unit<R>,
|
unit: &'a Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &'a DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &'a DebugInfoBuilderContext<R>,
|
||||||
attr: constants::DwAt,
|
attr: constants::DwAt,
|
||||||
) -> Option<DieReference<'a, R>> {
|
) -> Option<DieReference<'a, R>> {
|
||||||
match entry.attr_value(attr) {
|
match entry.attr_value(attr) {
|
||||||
Ok(Some(UnitRef(offset))) => Some(DieReference::UnitAndOffset((unit, offset))),
|
Ok(Some(UnitRef(offset))) => Some(DieReference::UnitAndOffset((dwarf, unit, offset))),
|
||||||
Ok(Some(DebugInfoRef(offset))) => {
|
Ok(Some(DebugInfoRef(offset))) => {
|
||||||
|
if dwarf.sup().is_some() {
|
||||||
for source_unit in debug_info_builder_context.units() {
|
for source_unit in debug_info_builder_context.units() {
|
||||||
if let Some(new_offset) = offset.to_unit_offset(&source_unit.header) {
|
if let Some(new_offset) = offset.to_unit_offset(&source_unit.header) {
|
||||||
return Some(DieReference::UnitAndOffset((source_unit, new_offset)));
|
return Some(DieReference::UnitAndOffset((dwarf, source_unit, new_offset)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
warn!("Failed to fetch DIE. Debug information may be incomplete.");
|
}
|
||||||
|
else {
|
||||||
|
// This could either have no supplementary file because it is one or because it just doesn't have one
|
||||||
|
// operate on supplementary file if dwarf is a supplementary file, else self
|
||||||
|
|
||||||
|
// It's possible this is a reference in the supplementary file to itself
|
||||||
|
for source_unit in debug_info_builder_context.sup_units() {
|
||||||
|
if let Some(new_offset) = offset.to_unit_offset(&source_unit.header) {
|
||||||
|
return Some(DieReference::UnitAndOffset((dwarf, source_unit, new_offset)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ... or it just doesn't have a supplementary file
|
||||||
|
for source_unit in debug_info_builder_context.units() {
|
||||||
|
if let Some(new_offset) = offset.to_unit_offset(&source_unit.header) {
|
||||||
|
return Some(DieReference::UnitAndOffset((dwarf, source_unit, new_offset)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
None
|
None
|
||||||
|
},
|
||||||
|
Ok(Some(DebugInfoRefSup(offset))) => {
|
||||||
|
for source_unit in debug_info_builder_context.sup_units() {
|
||||||
|
if let Some(new_offset) = offset.to_unit_offset(&source_unit.header) {
|
||||||
|
return Some(DieReference::UnitAndOffset((dwarf.sup().unwrap(), source_unit, new_offset)));
|
||||||
}
|
}
|
||||||
// Ok(Some(DebugInfoRefSup(offset))) TODO - dwarf 5 stuff
|
}
|
||||||
|
warn!("Failed to fetch DIE. Supplementary debug information may be incomplete.");
|
||||||
|
None
|
||||||
|
},
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn resolve_specification<'a, R: Reader<Offset = usize>>(
|
pub(crate) fn resolve_specification<'a, R: ReaderType>(
|
||||||
|
dwarf: &'a Dwarf<R>,
|
||||||
unit: &'a Unit<R>,
|
unit: &'a Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &'a DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &'a DebugInfoBuilderContext<R>,
|
||||||
) -> DieReference<'a, R> {
|
) -> DieReference<'a, R> {
|
||||||
if let Some(die_reference) = get_attr_die(
|
if let Some(die_reference) = get_attr_die(
|
||||||
|
dwarf,
|
||||||
unit,
|
unit,
|
||||||
entry,
|
entry,
|
||||||
debug_info_builder_context,
|
debug_info_builder_context,
|
||||||
constants::DW_AT_specification,
|
constants::DW_AT_specification,
|
||||||
) {
|
) {
|
||||||
match die_reference {
|
match die_reference {
|
||||||
DieReference::UnitAndOffset((entry_unit, entry_offset)) => {
|
DieReference::UnitAndOffset((dwarf, entry_unit, entry_offset)) => {
|
||||||
if let Ok(entry) = entry_unit.entry(entry_offset) {
|
if let Ok(entry) = entry_unit.entry(entry_offset) {
|
||||||
resolve_specification(entry_unit, &entry, debug_info_builder_context)
|
resolve_specification(dwarf, entry_unit, &entry, debug_info_builder_context)
|
||||||
} else {
|
} else {
|
||||||
warn!("Failed to fetch DIE. Debug information may be incomplete.");
|
warn!("Failed to fetch DIE for attr DW_AT_specification. Debug information may be incomplete.");
|
||||||
DieReference::Err
|
DieReference::Err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
DieReference::Err => DieReference::Err,
|
DieReference::Err => DieReference::Err,
|
||||||
}
|
}
|
||||||
} else if let Some(die_reference) = get_attr_die(
|
} else if let Some(die_reference) = get_attr_die(
|
||||||
|
dwarf,
|
||||||
unit,
|
unit,
|
||||||
entry,
|
entry,
|
||||||
debug_info_builder_context,
|
debug_info_builder_context,
|
||||||
constants::DW_AT_abstract_origin,
|
constants::DW_AT_abstract_origin,
|
||||||
) {
|
) {
|
||||||
match die_reference {
|
match die_reference {
|
||||||
DieReference::UnitAndOffset((entry_unit, entry_offset)) => {
|
DieReference::UnitAndOffset((dwarf, entry_unit, entry_offset)) => {
|
||||||
if entry_offset == entry.offset() {
|
if entry_offset == entry.offset() && unit.header.offset() == entry_unit.header.offset() {
|
||||||
warn!("DWARF information is invalid (infinite abstract origin reference cycle). Debug information may be incomplete.");
|
warn!("DWARF information is invalid (infinite abstract origin reference cycle). Debug information may be incomplete.");
|
||||||
DieReference::Err
|
DieReference::Err
|
||||||
} else if let Ok(new_entry) = entry_unit.entry(entry_offset) {
|
} else if let Ok(new_entry) = entry_unit.entry(entry_offset) {
|
||||||
resolve_specification(entry_unit, &new_entry, debug_info_builder_context)
|
resolve_specification(dwarf, entry_unit, &new_entry, debug_info_builder_context)
|
||||||
} else {
|
} else {
|
||||||
warn!("Failed to fetch DIE. Debug information may be incomplete.");
|
warn!("Failed to fetch DIE for attr DW_AT_abstract_origin. Debug information may be incomplete.");
|
||||||
DieReference::Err
|
DieReference::Err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
DieReference::Err => DieReference::Err,
|
DieReference::Err => DieReference::Err,
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
DieReference::UnitAndOffset((unit, entry.offset()))
|
DieReference::UnitAndOffset((dwarf, unit, entry.offset()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get name from DIE, or referenced dependencies
|
// Get name from DIE, or referenced dependencies
|
||||||
pub(crate) fn get_name<R: Reader<Offset = usize>>(
|
pub(crate) fn get_name<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
||||||
) -> Option<String> {
|
) -> Option<String> {
|
||||||
match resolve_specification(unit, entry, debug_info_builder_context) {
|
match resolve_specification(dwarf, unit, entry, debug_info_builder_context) {
|
||||||
DieReference::UnitAndOffset((entry_unit, entry_offset)) => {
|
DieReference::UnitAndOffset((dwarf, entry_unit, entry_offset)) => {
|
||||||
if let Ok(Some(attr_val)) = entry_unit
|
if let Ok(Some(attr_val)) = entry_unit
|
||||||
.entry(entry_offset)
|
.entry(entry_offset)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.attr_value(constants::DW_AT_name)
|
.attr_value(constants::DW_AT_name)
|
||||||
{
|
{
|
||||||
if let Ok(attr_string) = debug_info_builder_context
|
if let Ok(attr_string) = dwarf.attr_string(entry_unit, attr_val.clone())
|
||||||
.dwarf()
|
|
||||||
.attr_string(entry_unit, attr_val)
|
|
||||||
{
|
{
|
||||||
if let Ok(attr_string) = attr_string.to_string() {
|
if let Ok(attr_string) = attr_string.to_string() {
|
||||||
return Some(attr_string.to_string());
|
return Some(attr_string.to_string());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
else if let Some(dwarf) = &dwarf.sup {
|
||||||
|
if let Ok(attr_string) = dwarf.attr_string(entry_unit, attr_val)
|
||||||
|
{
|
||||||
|
if let Ok(attr_string) = attr_string.to_string() {
|
||||||
|
return Some(attr_string.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// if let Some(raw_name) = get_raw_name(unit, entry, debug_info_builder_context) {
|
// if let Some(raw_name) = get_raw_name(unit, entry, debug_info_builder_context) {
|
||||||
@@ -146,26 +201,32 @@ pub(crate) fn get_name<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get raw name from DIE, or referenced dependencies
|
// Get raw name from DIE, or referenced dependencies
|
||||||
pub(crate) fn get_raw_name<R: Reader<Offset = usize>>(
|
pub(crate) fn get_raw_name<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
|
||||||
) -> Option<String> {
|
) -> Option<String> {
|
||||||
if let Ok(Some(attr_val)) = entry.attr_value(constants::DW_AT_linkage_name) {
|
if let Ok(Some(attr_val)) = entry.attr_value(constants::DW_AT_linkage_name) {
|
||||||
if let Ok(attr_string) = debug_info_builder_context
|
if let Ok(attr_string) = dwarf.attr_string(unit, attr_val.clone())
|
||||||
.dwarf()
|
|
||||||
.attr_string(unit, attr_val)
|
|
||||||
{
|
{
|
||||||
if let Ok(attr_string) = attr_string.to_string() {
|
if let Ok(attr_string) = attr_string.to_string() {
|
||||||
return Some(attr_string.to_string());
|
return Some(attr_string.to_string());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
else if let Some(dwarf) = dwarf.sup() {
|
||||||
|
if let Ok(attr_string) = dwarf.attr_string(unit, attr_val)
|
||||||
|
{
|
||||||
|
if let Ok(attr_string) = attr_string.to_string() {
|
||||||
|
return Some(attr_string.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get the size of an object as a usize
|
// Get the size of an object as a usize
|
||||||
pub(crate) fn get_size_as_usize<R: Reader<Offset = usize>>(
|
pub(crate) fn get_size_as_usize<R: ReaderType>(
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
) -> Option<usize> {
|
) -> Option<usize> {
|
||||||
if let Ok(Some(attr)) = entry.attr(constants::DW_AT_byte_size) {
|
if let Ok(Some(attr)) = entry.attr(constants::DW_AT_byte_size) {
|
||||||
@@ -178,7 +239,7 @@ pub(crate) fn get_size_as_usize<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get the size of an object as a u64
|
// Get the size of an object as a u64
|
||||||
pub(crate) fn get_size_as_u64<R: Reader<Offset = usize>>(
|
pub(crate) fn get_size_as_u64<R: ReaderType>(
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
) -> Option<u64> {
|
) -> Option<u64> {
|
||||||
if let Ok(Some(attr)) = entry.attr(constants::DW_AT_byte_size) {
|
if let Ok(Some(attr)) = entry.attr(constants::DW_AT_byte_size) {
|
||||||
@@ -191,7 +252,7 @@ pub(crate) fn get_size_as_u64<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get the size of a subrange as a u64
|
// Get the size of a subrange as a u64
|
||||||
pub(crate) fn get_subrange_size<R: Reader<Offset = usize>>(
|
pub(crate) fn get_subrange_size<R: ReaderType>(
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
) -> u64 {
|
) -> u64 {
|
||||||
if let Ok(Some(attr)) = entry.attr(constants::DW_AT_upper_bound) {
|
if let Ok(Some(attr)) = entry.attr(constants::DW_AT_upper_bound) {
|
||||||
@@ -206,35 +267,27 @@ pub(crate) fn get_subrange_size<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get the start address of a function
|
// Get the start address of a function
|
||||||
pub(crate) fn get_start_address<R: Reader<Offset = usize>>(
|
pub(crate) fn get_start_address<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
|
||||||
) -> Option<u64> {
|
) -> Option<u64> {
|
||||||
if let Ok(Some(attr_val)) = entry.attr_value(constants::DW_AT_low_pc) {
|
if let Ok(Some(attr_val)) = entry.attr_value(constants::DW_AT_low_pc) {
|
||||||
match debug_info_builder_context
|
match dwarf.attr_address(unit, attr_val)
|
||||||
.dwarf()
|
|
||||||
.attr_address(unit, attr_val)
|
|
||||||
{
|
{
|
||||||
Ok(Some(val)) => Some(val),
|
Ok(Some(val)) => Some(val),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
} else if let Ok(Some(attr_val)) = entry.attr_value(constants::DW_AT_entry_pc) {
|
} else if let Ok(Some(attr_val)) = entry.attr_value(constants::DW_AT_entry_pc) {
|
||||||
match debug_info_builder_context
|
match dwarf.attr_address(unit, attr_val)
|
||||||
.dwarf()
|
|
||||||
.attr_address(unit, attr_val)
|
|
||||||
{
|
{
|
||||||
Ok(Some(val)) => Some(val),
|
Ok(Some(val)) => Some(val),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
} else if let Ok(Some(attr_value)) = entry.attr_value(constants::DW_AT_ranges) {
|
} else if let Ok(Some(attr_value)) = entry.attr_value(constants::DW_AT_ranges) {
|
||||||
if let Ok(Some(ranges_offset)) = debug_info_builder_context
|
if let Ok(Some(ranges_offset)) = dwarf.attr_ranges_offset(unit, attr_value)
|
||||||
.dwarf()
|
|
||||||
.attr_ranges_offset(unit, attr_value)
|
|
||||||
{
|
{
|
||||||
if let Ok(mut ranges) = debug_info_builder_context
|
if let Ok(mut ranges) = dwarf.ranges(unit, ranges_offset)
|
||||||
.dwarf()
|
|
||||||
.ranges(unit, ranges_offset)
|
|
||||||
{
|
{
|
||||||
if let Ok(Some(range)) = ranges.next() {
|
if let Ok(Some(range)) = ranges.next() {
|
||||||
return Some(range.begin);
|
return Some(range.begin);
|
||||||
@@ -248,20 +301,26 @@ pub(crate) fn get_start_address<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get an attribute value as a u64 if it can be coerced
|
// Get an attribute value as a u64 if it can be coerced
|
||||||
pub(crate) fn get_attr_as_u64<R: Reader<Offset = usize>>(attr: &Attribute<R>) -> Option<u64> {
|
pub(crate) fn get_attr_as_u64<R: ReaderType>(attr: &Attribute<R>) -> Option<u64> {
|
||||||
if let Some(value) = attr.u8_value() {
|
if let Some(value) = attr.udata_value() {
|
||||||
Some(value.into())
|
|
||||||
} else if let Some(value) = attr.u16_value() {
|
|
||||||
Some(value.into())
|
|
||||||
} else if let Some(value) = attr.udata_value() {
|
|
||||||
Some(value)
|
Some(value)
|
||||||
|
} else if let Some(value) = attr.sdata_value() {
|
||||||
|
Some(value as u64)
|
||||||
|
} else if let AttributeValue::Block(mut data) = attr.value() {
|
||||||
|
match data.len() {
|
||||||
|
1 => data.read_u8().map(u64::from).ok(),
|
||||||
|
2 => data.read_u16().map(u64::from).ok(),
|
||||||
|
4 => data.read_u32().map(u64::from).ok(),
|
||||||
|
8 => data.read_u64().ok(),
|
||||||
|
_ => None
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
attr.sdata_value().map(|value| value as u64)
|
None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get an attribute value as a usize if it can be coerced
|
// Get an attribute value as a usize if it can be coerced
|
||||||
pub(crate) fn get_attr_as_usize<R: Reader<Offset = usize>>(attr: Attribute<R>) -> Option<usize> {
|
pub(crate) fn get_attr_as_usize<R: ReaderType>(attr: Attribute<R>) -> Option<usize> {
|
||||||
if let Some(value) = attr.u8_value() {
|
if let Some(value) = attr.u8_value() {
|
||||||
Some(value.into())
|
Some(value.into())
|
||||||
} else if let Some(value) = attr.u16_value() {
|
} else if let Some(value) = attr.u16_value() {
|
||||||
@@ -275,7 +334,7 @@ pub(crate) fn get_attr_as_usize<R: Reader<Offset = usize>>(attr: Attribute<R>) -
|
|||||||
|
|
||||||
// Get an attribute value as a usize if it can be coerced
|
// Get an attribute value as a usize if it can be coerced
|
||||||
// Parses DW_OP_address, DW_OP_const
|
// Parses DW_OP_address, DW_OP_const
|
||||||
pub(crate) fn get_expr_value<R: Reader<Offset = usize>>(
|
pub(crate) fn get_expr_value<R: ReaderType>(
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
attr: Attribute<R>,
|
attr: Attribute<R>,
|
||||||
) -> Option<u64> {
|
) -> Option<u64> {
|
||||||
@@ -285,9 +344,252 @@ pub(crate) fn get_expr_value<R: Reader<Offset = usize>>(
|
|||||||
Ok(Operation::UnsignedConstant { value }) => Some(value),
|
Ok(Operation::UnsignedConstant { value }) => Some(value),
|
||||||
Ok(Operation::Address { address: 0 }) => None,
|
Ok(Operation::Address { address: 0 }) => None,
|
||||||
Ok(Operation::Address { address }) => Some(address),
|
Ok(Operation::Address { address }) => Some(address),
|
||||||
_ => None,
|
_ => None
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
pub(crate) fn get_build_id(view: &BinaryView) -> Result<String, String> {
|
||||||
|
let mut build_id: Option<String> = None;
|
||||||
|
|
||||||
|
if let Ok(raw_view) = view.raw_view() {
|
||||||
|
if let Ok(build_id_section) = raw_view.section_by_name(".note.gnu.build-id") {
|
||||||
|
// Name size - 4 bytes
|
||||||
|
// Desc size - 4 bytes
|
||||||
|
// Type - 4 bytes
|
||||||
|
// Name - n bytes
|
||||||
|
// Desc - n bytes
|
||||||
|
let build_id_bytes = raw_view.read_vec(build_id_section.start(), build_id_section.len());
|
||||||
|
if build_id_bytes.len() < 12 {
|
||||||
|
return Err("Build id section must be at least 12 bytes".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
let name_len: u32;
|
||||||
|
let desc_len: u32;
|
||||||
|
let note_type: u32;
|
||||||
|
match raw_view.default_endianness() {
|
||||||
|
Endianness::LittleEndian => {
|
||||||
|
name_len = u32::from_le_bytes(build_id_bytes[0..4].try_into().unwrap());
|
||||||
|
desc_len = u32::from_le_bytes(build_id_bytes[4..8].try_into().unwrap());
|
||||||
|
note_type = u32::from_le_bytes(build_id_bytes[8..12].try_into().unwrap());
|
||||||
|
},
|
||||||
|
Endianness::BigEndian => {
|
||||||
|
name_len = u32::from_be_bytes(build_id_bytes[0..4].try_into().unwrap());
|
||||||
|
desc_len = u32::from_be_bytes(build_id_bytes[4..8].try_into().unwrap());
|
||||||
|
note_type = u32::from_be_bytes(build_id_bytes[8..12].try_into().unwrap());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if note_type != 3 {
|
||||||
|
return Err(format!("Build id section has wrong type: {}", note_type));
|
||||||
|
}
|
||||||
|
|
||||||
|
let expected_len = (12 + name_len + desc_len) as usize;
|
||||||
|
|
||||||
|
if build_id_bytes.len() < expected_len {
|
||||||
|
return Err(format!("Build id section not expected length: expected {}, got {}", expected_len, build_id_bytes.len()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let desc: &[u8] = &build_id_bytes[(12+name_len as usize)..expected_len];
|
||||||
|
build_id = Some(desc.iter().map(|b| format!("{:02x}", b)).collect());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(x) = build_id {
|
||||||
|
Ok(x)
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
Err("Failed to get build id".to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
pub(crate) fn download_debug_info(build_id: &String, view: &BinaryView) -> Result<Ref<BinaryView>, String> {
|
||||||
|
let settings = Settings::new("");
|
||||||
|
|
||||||
|
let debug_server_urls = settings.get_string_list("network.debuginfodServers", Some(view), None);
|
||||||
|
|
||||||
|
for debug_server_url in debug_server_urls.iter() {
|
||||||
|
let artifact_url = format!("{}/buildid/{}/debuginfo", debug_server_url, build_id);
|
||||||
|
|
||||||
|
// Download from remote
|
||||||
|
let (tx, rx) = mpsc::channel();
|
||||||
|
let write = move |data: &[u8]| -> usize {
|
||||||
|
if let Ok(_) = tx.send(Vec::from(data)) {
|
||||||
|
data.len()
|
||||||
|
} else {
|
||||||
|
0
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let dp = DownloadProvider::try_default().map_err(|_| "No default download provider")?;
|
||||||
|
let mut inst = dp
|
||||||
|
.create_instance()
|
||||||
|
.map_err(|_| "Couldn't create download instance")?;
|
||||||
|
let result = inst
|
||||||
|
.perform_custom_request(
|
||||||
|
"GET",
|
||||||
|
artifact_url,
|
||||||
|
HashMap::<String, String>::new(),
|
||||||
|
DownloadInstanceInputOutputCallbacks {
|
||||||
|
read: None,
|
||||||
|
write: Some(Box::new(write)),
|
||||||
|
progress: None,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.map_err(|e| e.to_string())?;
|
||||||
|
if result.status_code != 200 {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut expected_length = None;
|
||||||
|
for (k, v) in result.headers.iter() {
|
||||||
|
if k.to_lowercase() == "content-length" {
|
||||||
|
expected_length = Some(usize::from_str(v).map_err(|e| e.to_string())?);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut data = vec![];
|
||||||
|
while let Ok(packet) = rx.try_recv() {
|
||||||
|
data.extend(packet.into_iter());
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(length) = expected_length {
|
||||||
|
if data.len() != length {
|
||||||
|
return Err(format!(
|
||||||
|
"Bad length: expected {} got {}",
|
||||||
|
length,
|
||||||
|
data.len()
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let options = "{\"analysis.debugInfo.internal\": false}";
|
||||||
|
let bv = BinaryView::from_data(FileMetadata::new().deref(), &data)
|
||||||
|
.map_err(|_| "Unable to create binary view from downloaded data".to_string())?;
|
||||||
|
|
||||||
|
return binaryninja::load_view(bv.deref(), false, Some(options))
|
||||||
|
.ok_or("Unable to load binary view from downloaded data".to_string());
|
||||||
|
}
|
||||||
|
return Err("Could not find a server with debug info for this file".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
pub(crate) fn find_local_debug_file_for_build_id(build_id: &String, view: &BinaryView) -> Option<String> {
|
||||||
|
let settings = Settings::new("");
|
||||||
|
let debug_dirs_enabled = settings.get_bool("analysis.debugInfo.enableDebugDirectories", Some(view), None);
|
||||||
|
|
||||||
|
if !debug_dirs_enabled {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let debug_info_paths = settings.get_string_list("analysis.debugInfo.debugDirectories", Some(view), None);
|
||||||
|
|
||||||
|
if debug_info_paths.is_empty() {
|
||||||
|
return None
|
||||||
|
}
|
||||||
|
|
||||||
|
for debug_info_path in debug_info_paths.into_iter() {
|
||||||
|
if let Ok(path) = PathBuf::from_str(&debug_info_path.to_string())
|
||||||
|
{
|
||||||
|
let elf_path = path
|
||||||
|
.join(&build_id[..2])
|
||||||
|
.join(&build_id[2..])
|
||||||
|
.join("elf");
|
||||||
|
|
||||||
|
let debug_ext_path = path
|
||||||
|
.join(&build_id[..2])
|
||||||
|
.join(format!("{}.debug", &build_id[2..]));
|
||||||
|
|
||||||
|
let final_path = if debug_ext_path.exists() {
|
||||||
|
debug_ext_path
|
||||||
|
}
|
||||||
|
else if elf_path.exists() {
|
||||||
|
elf_path
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// No paths exist in this dir, try the next one
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
return final_path
|
||||||
|
.to_str()
|
||||||
|
.and_then(|x| Some(x.to_string()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
pub(crate) fn load_debug_info_for_build_id(build_id: &String, view: &BinaryView) -> (Option<Ref<BinaryView>>, bool) {
|
||||||
|
if let Some(debug_file_path) = find_local_debug_file_for_build_id(build_id, view) {
|
||||||
|
return
|
||||||
|
(
|
||||||
|
binaryninja::load_with_options(
|
||||||
|
debug_file_path,
|
||||||
|
false,
|
||||||
|
Some("{\"analysis.debugInfo.internal\": false}")
|
||||||
|
),
|
||||||
|
false
|
||||||
|
);
|
||||||
|
}
|
||||||
|
else if Settings::new("").get_bool("network.enableDebuginfod", Some(view), None) {
|
||||||
|
return (
|
||||||
|
download_debug_info(build_id, view).ok(),
|
||||||
|
true
|
||||||
|
);
|
||||||
|
}
|
||||||
|
(None, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
pub(crate) fn find_sibling_debug_file(view: &BinaryView) -> Option<String> {
|
||||||
|
let settings = Settings::new("");
|
||||||
|
let load_sibling_debug = settings.get_bool("analysis.debugInfo.loadSiblingDebugFiles", Some(view), None);
|
||||||
|
|
||||||
|
if !load_sibling_debug {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let filename = view.file().filename().to_string();
|
||||||
|
|
||||||
|
let debug_file = PathBuf::from(format!("{}.debug", filename));
|
||||||
|
let dsym_folder = PathBuf::from(format!("{}.dSYM/", filename));
|
||||||
|
if debug_file.exists() && debug_file.is_file() {
|
||||||
|
return Some(debug_file.to_string_lossy().to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
if dsym_folder.exists() && dsym_folder.is_dir() {
|
||||||
|
let dsym_file = dsym_folder
|
||||||
|
.join("Contents/Resources/DWARF/")
|
||||||
|
.join(filename); // TODO: should this just pull any file out? Can there be multiple files?
|
||||||
|
if dsym_file.exists() {
|
||||||
|
return Some(dsym_file.to_string_lossy().to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
pub(crate) fn load_sibling_debug_file(view: &BinaryView) -> (Option<Ref<BinaryView>>, bool) {
|
||||||
|
let Some(debug_file) = find_sibling_debug_file(view) else {
|
||||||
|
return (None, false);
|
||||||
|
};
|
||||||
|
|
||||||
|
let load_settings = match view.default_platform() {
|
||||||
|
Some(plat) => format!("{{\"analysis.debugInfo.internal\": false, \"loader.platform\": \"{}\"}}", plat.name()),
|
||||||
|
None => "{\"analysis.debugInfo.internal\": false}".to_string()
|
||||||
|
};
|
||||||
|
|
||||||
|
(
|
||||||
|
binaryninja::load_with_options(
|
||||||
|
debug_file,
|
||||||
|
false,
|
||||||
|
Some(load_settings)
|
||||||
|
),
|
||||||
|
false
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|||||||
@@ -18,32 +18,60 @@ mod functions;
|
|||||||
mod helpers;
|
mod helpers;
|
||||||
mod types;
|
mod types;
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
use crate::dwarfdebuginfo::{DebugInfoBuilder, DebugInfoBuilderContext};
|
use crate::dwarfdebuginfo::{DebugInfoBuilder, DebugInfoBuilderContext};
|
||||||
use crate::functions::parse_function_entry;
|
use crate::functions::parse_function_entry;
|
||||||
use crate::helpers::{get_attr_die, get_name, get_uid, DieReference};
|
use crate::helpers::{get_attr_die, get_name, get_uid, DieReference};
|
||||||
use crate::types::parse_data_variable;
|
use crate::types::parse_variable;
|
||||||
|
|
||||||
|
use binaryninja::binaryview::BinaryViewBase;
|
||||||
use binaryninja::{
|
use binaryninja::{
|
||||||
binaryview::{BinaryView, BinaryViewExt},
|
binaryview::{BinaryView, BinaryViewExt},
|
||||||
debuginfo::{CustomDebugInfoParser, DebugInfo, DebugInfoParser},
|
debuginfo::{CustomDebugInfoParser, DebugInfo, DebugInfoParser},
|
||||||
logger,
|
logger,
|
||||||
|
settings::Settings,
|
||||||
templatesimplifier::simplify_str_to_str,
|
templatesimplifier::simplify_str_to_str,
|
||||||
};
|
};
|
||||||
use dwarfreader::{
|
use dwarfreader::{
|
||||||
create_section_reader, get_endian, is_dwo_dwarf, is_non_dwo_dwarf, is_raw_dwo_dwarf,
|
create_section_reader, get_endian, is_dwo_dwarf, is_non_dwo_dwarf, is_raw_dwo_dwarf,
|
||||||
};
|
};
|
||||||
|
|
||||||
use gimli::{constants, DebuggingInformationEntry, Dwarf, DwarfFileType, Reader, SectionId, Unit};
|
use gimli::{constants, DebuggingInformationEntry, Dwarf, DwarfFileType, Reader, Section, SectionId, Unit, UnwindSection};
|
||||||
|
|
||||||
|
use helpers::{get_build_id, load_debug_info_for_build_id};
|
||||||
use log::{error, warn, LevelFilter};
|
use log::{error, warn, LevelFilter};
|
||||||
|
|
||||||
fn recover_names<R: Reader<Offset = usize>>(
|
|
||||||
|
trait ReaderType: Reader<Offset = usize> {}
|
||||||
|
impl<T: Reader<Offset = usize>> ReaderType for T {}
|
||||||
|
|
||||||
|
|
||||||
|
fn recover_names<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
debug_info_builder_context: &mut DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &mut DebugInfoBuilderContext<R>,
|
||||||
progress: &dyn Fn(usize, usize) -> Result<(), ()>,
|
progress: &dyn Fn(usize, usize) -> Result<(), ()>,
|
||||||
) -> bool {
|
) -> bool {
|
||||||
let mut iter = debug_info_builder_context.dwarf().units();
|
|
||||||
|
let mut res = true;
|
||||||
|
if let Some(sup_dwarf) = dwarf.sup() {
|
||||||
|
res = recover_names_internal(sup_dwarf, debug_info_builder_context, progress);
|
||||||
|
}
|
||||||
|
|
||||||
|
if res {
|
||||||
|
res = recover_names_internal(dwarf, debug_info_builder_context, progress);
|
||||||
|
}
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
fn recover_names_internal<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
|
debug_info_builder_context: &mut DebugInfoBuilderContext<R>,
|
||||||
|
progress: &dyn Fn(usize, usize) -> Result<(), ()>,
|
||||||
|
) -> bool {
|
||||||
|
let mut iter = dwarf.units();
|
||||||
while let Ok(Some(header)) = iter.next() {
|
while let Ok(Some(header)) = iter.next() {
|
||||||
let unit = debug_info_builder_context.dwarf().unit(header).unwrap();
|
let unit = dwarf.unit(header).unwrap();
|
||||||
let mut namespace_qualifiers: Vec<(isize, String)> = vec![];
|
let mut namespace_qualifiers: Vec<(isize, String)> = vec![];
|
||||||
let mut entries = unit.entries();
|
let mut entries = unit.entries();
|
||||||
let mut depth = 0;
|
let mut depth = 0;
|
||||||
@@ -72,7 +100,8 @@ fn recover_names<R: Reader<Offset = usize>>(
|
|||||||
|
|
||||||
match entry.tag() {
|
match entry.tag() {
|
||||||
constants::DW_TAG_namespace => {
|
constants::DW_TAG_namespace => {
|
||||||
fn resolve_namespace_name<R: Reader<Offset = usize>>(
|
fn resolve_namespace_name<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
||||||
@@ -80,18 +109,20 @@ fn recover_names<R: Reader<Offset = usize>>(
|
|||||||
depth: isize,
|
depth: isize,
|
||||||
) {
|
) {
|
||||||
if let Some(namespace_qualifier) =
|
if let Some(namespace_qualifier) =
|
||||||
get_name(unit, entry, debug_info_builder_context)
|
get_name(dwarf, unit, entry, debug_info_builder_context)
|
||||||
{
|
{
|
||||||
namespace_qualifiers.push((depth, namespace_qualifier));
|
namespace_qualifiers.push((depth, namespace_qualifier));
|
||||||
} else if let Some(die_reference) = get_attr_die(
|
} else if let Some(die_reference) = get_attr_die(
|
||||||
|
dwarf,
|
||||||
unit,
|
unit,
|
||||||
entry,
|
entry,
|
||||||
debug_info_builder_context,
|
debug_info_builder_context,
|
||||||
constants::DW_AT_extension,
|
constants::DW_AT_extension,
|
||||||
) {
|
) {
|
||||||
match die_reference {
|
match die_reference {
|
||||||
DieReference::UnitAndOffset((entry_unit, entry_offset)) => {
|
DieReference::UnitAndOffset((dwarf, entry_unit, entry_offset)) => {
|
||||||
resolve_namespace_name(
|
resolve_namespace_name(
|
||||||
|
dwarf,
|
||||||
entry_unit,
|
entry_unit,
|
||||||
&entry_unit.entry(entry_offset).unwrap(),
|
&entry_unit.entry(entry_offset).unwrap(),
|
||||||
debug_info_builder_context,
|
debug_info_builder_context,
|
||||||
@@ -101,17 +132,17 @@ fn recover_names<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
DieReference::Err => {
|
DieReference::Err => {
|
||||||
warn!(
|
warn!(
|
||||||
"Failed to fetch DIE. Debug information may be incomplete."
|
"Failed to fetch DIE when resolving namespace. Debug information may be incomplete."
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
namespace_qualifiers
|
namespace_qualifiers.push((depth, "anonymous_namespace".to_string()));
|
||||||
.push((depth, "anonymous_namespace".to_string()));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
resolve_namespace_name(
|
resolve_namespace_name(
|
||||||
|
dwarf,
|
||||||
&unit,
|
&unit,
|
||||||
entry,
|
entry,
|
||||||
debug_info_builder_context,
|
debug_info_builder_context,
|
||||||
@@ -122,21 +153,23 @@ fn recover_names<R: Reader<Offset = usize>>(
|
|||||||
constants::DW_TAG_class_type
|
constants::DW_TAG_class_type
|
||||||
| constants::DW_TAG_structure_type
|
| constants::DW_TAG_structure_type
|
||||||
| constants::DW_TAG_union_type => {
|
| constants::DW_TAG_union_type => {
|
||||||
if let Some(name) = get_name(&unit, entry, debug_info_builder_context) {
|
if let Some(name) = get_name(dwarf, &unit, entry, debug_info_builder_context) {
|
||||||
namespace_qualifiers.push((depth, name))
|
namespace_qualifiers.push((depth, name))
|
||||||
} else {
|
} else {
|
||||||
namespace_qualifiers.push((
|
namespace_qualifiers.push((
|
||||||
depth,
|
depth,
|
||||||
match entry.tag() {
|
match entry.tag() {
|
||||||
constants::DW_TAG_class_type => "anonymous_class".to_string(),
|
constants::DW_TAG_class_type => "anonymous_class".to_string(),
|
||||||
constants::DW_TAG_structure_type => "anonymous_structure".to_string(),
|
constants::DW_TAG_structure_type => {
|
||||||
|
"anonymous_structure".to_string()
|
||||||
|
}
|
||||||
constants::DW_TAG_union_type => "anonymous_union".to_string(),
|
constants::DW_TAG_union_type => "anonymous_union".to_string(),
|
||||||
_ => unreachable!(),
|
_ => unreachable!(),
|
||||||
}
|
},
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
debug_info_builder_context.set_name(
|
debug_info_builder_context.set_name(
|
||||||
get_uid(&unit, entry),
|
get_uid(dwarf, &unit, entry),
|
||||||
simplify_str_to_str(
|
simplify_str_to_str(
|
||||||
namespace_qualifiers
|
namespace_qualifiers
|
||||||
.iter()
|
.iter()
|
||||||
@@ -150,16 +183,14 @@ fn recover_names<R: Reader<Offset = usize>>(
|
|||||||
constants::DW_TAG_typedef
|
constants::DW_TAG_typedef
|
||||||
| constants::DW_TAG_subprogram
|
| constants::DW_TAG_subprogram
|
||||||
| constants::DW_TAG_enumeration_type => {
|
| constants::DW_TAG_enumeration_type => {
|
||||||
if let Some(name) = get_name(&unit, entry, debug_info_builder_context) {
|
if let Some(name) = get_name(dwarf, &unit, entry, debug_info_builder_context) {
|
||||||
debug_info_builder_context.set_name(
|
debug_info_builder_context.set_name(
|
||||||
get_uid(&unit, entry),
|
get_uid(dwarf, &unit, entry),
|
||||||
simplify_str_to_str(
|
simplify_str_to_str(
|
||||||
namespace_qualifiers
|
namespace_qualifiers
|
||||||
.iter()
|
.iter()
|
||||||
.chain(vec![&(-1, name)].into_iter())
|
.chain(vec![&(-1, name)].into_iter())
|
||||||
.map(|(_, namespace)| {
|
.map(|(_, namespace)| namespace.to_owned())
|
||||||
namespace.to_owned()
|
|
||||||
})
|
|
||||||
.collect::<Vec<String>>()
|
.collect::<Vec<String>>()
|
||||||
.join("::"),
|
.join("::"),
|
||||||
)
|
)
|
||||||
@@ -168,8 +199,8 @@ fn recover_names<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
if let Some(name) = get_name(&unit, entry, debug_info_builder_context) {
|
if let Some(name) = get_name(dwarf, &unit, entry, debug_info_builder_context) {
|
||||||
debug_info_builder_context.set_name(get_uid(&unit, entry), name);
|
debug_info_builder_context.set_name(get_uid(dwarf, &unit, entry), name);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -179,7 +210,8 @@ fn recover_names<R: Reader<Offset = usize>>(
|
|||||||
true
|
true
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_unit<R: Reader<Offset = usize>>(
|
fn parse_unit<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
||||||
debug_info_builder: &mut DebugInfoBuilder,
|
debug_info_builder: &mut DebugInfoBuilder,
|
||||||
@@ -188,9 +220,12 @@ fn parse_unit<R: Reader<Offset = usize>>(
|
|||||||
) {
|
) {
|
||||||
let mut entries = unit.entries();
|
let mut entries = unit.entries();
|
||||||
|
|
||||||
|
let mut current_depth: isize = 0;
|
||||||
|
let mut functions_by_depth: Vec<(Option<usize>, isize)> = vec![];
|
||||||
|
|
||||||
// Really all we care about as we iterate the entries in a given unit is how they modify state (our perception of the file)
|
// Really all we care about as we iterate the entries in a given unit is how they modify state (our perception of the file)
|
||||||
// There's a lot of junk we don't care about in DWARF info, so we choose a couple DIEs and mutate state (add functions (which adds the types it uses) and keep track of what namespace we're in)
|
// There's a lot of junk we don't care about in DWARF info, so we choose a couple DIEs and mutate state (add functions (which adds the types it uses) and keep track of what namespace we're in)
|
||||||
while let Ok(Some((_, entry))) = entries.next_dfs() {
|
while let Ok(Some((depth_delta, entry))) = entries.next_dfs() {
|
||||||
*current_die_number += 1;
|
*current_die_number += 1;
|
||||||
if (*progress)(
|
if (*progress)(
|
||||||
*current_die_number,
|
*current_die_number,
|
||||||
@@ -201,34 +236,147 @@ fn parse_unit<R: Reader<Offset = usize>>(
|
|||||||
return; // Parsing canceled
|
return; // Parsing canceled
|
||||||
}
|
}
|
||||||
|
|
||||||
|
current_depth = current_depth.saturating_add(depth_delta);
|
||||||
|
|
||||||
|
loop {
|
||||||
|
if let Some((_fn_idx, depth)) = functions_by_depth.last() {
|
||||||
|
if current_depth <= *depth {
|
||||||
|
functions_by_depth.pop();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
match entry.tag() {
|
match entry.tag() {
|
||||||
constants::DW_TAG_subprogram => {
|
constants::DW_TAG_subprogram => {
|
||||||
parse_function_entry(unit, entry, debug_info_builder_context, debug_info_builder)
|
let fn_idx = parse_function_entry(dwarf, unit, entry, debug_info_builder_context, debug_info_builder);
|
||||||
}
|
functions_by_depth.push((fn_idx, current_depth));
|
||||||
|
},
|
||||||
constants::DW_TAG_variable => {
|
constants::DW_TAG_variable => {
|
||||||
parse_data_variable(unit, entry, debug_info_builder_context, debug_info_builder)
|
let current_fn_idx = functions_by_depth.last().and_then(|x| x.0);
|
||||||
}
|
parse_variable(dwarf, unit, entry, debug_info_builder_context, debug_info_builder, current_fn_idx)
|
||||||
|
},
|
||||||
|
constants::DW_TAG_class_type |
|
||||||
|
constants::DW_TAG_enumeration_type |
|
||||||
|
constants::DW_TAG_structure_type |
|
||||||
|
constants::DW_TAG_union_type |
|
||||||
|
constants::DW_TAG_typedef => {
|
||||||
|
// Ensure types are loaded even if they're unused
|
||||||
|
types::get_type(dwarf, unit, entry, debug_info_builder_context, debug_info_builder);
|
||||||
|
},
|
||||||
_ => (),
|
_ => (),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_dwarf(
|
fn parse_eh_frame<R: Reader>(
|
||||||
view: &BinaryView,
|
view: &BinaryView,
|
||||||
|
mut eh_frame: gimli::EhFrame<R>,
|
||||||
|
) -> gimli::Result<iset::IntervalMap<u64, i64>> {
|
||||||
|
eh_frame.set_address_size(view.address_size() as u8);
|
||||||
|
|
||||||
|
let mut bases = gimli::BaseAddresses::default();
|
||||||
|
if let Ok(section) = view.section_by_name(".eh_frame_hdr").or(view.section_by_name("__eh_frame_hdr")) {
|
||||||
|
bases = bases.set_eh_frame_hdr(section.start());
|
||||||
|
}
|
||||||
|
if let Ok(section) = view.section_by_name(".eh_frame").or(view.section_by_name("__eh_frame")) {
|
||||||
|
bases = bases.set_eh_frame(section.start());
|
||||||
|
}
|
||||||
|
if let Ok(section) = view.section_by_name(".text").or(view.section_by_name("__text")) {
|
||||||
|
bases = bases.set_text(section.start());
|
||||||
|
}
|
||||||
|
if let Ok(section) = view.section_by_name(".got").or(view.section_by_name("__got")) {
|
||||||
|
bases = bases.set_got(section.start());
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut cies = HashMap::new();
|
||||||
|
let mut cie_data_offsets = iset::IntervalMap::new();
|
||||||
|
|
||||||
|
let mut entries = eh_frame.entries(&bases);
|
||||||
|
loop {
|
||||||
|
match entries.next()? {
|
||||||
|
None => return Ok(cie_data_offsets),
|
||||||
|
Some(gimli::CieOrFde::Cie(_cie)) => {
|
||||||
|
// TODO: do we want to do anything with standalone CIEs?
|
||||||
|
}
|
||||||
|
Some(gimli::CieOrFde::Fde(partial)) => {
|
||||||
|
let fde = match partial.parse(|_, bases, o| {
|
||||||
|
cies.entry(o)
|
||||||
|
.or_insert_with(|| eh_frame.cie_from_offset(bases, o))
|
||||||
|
.clone()
|
||||||
|
}) {
|
||||||
|
Ok(fde) => fde,
|
||||||
|
Err(e) => {
|
||||||
|
error!("Failed to parse FDE: {}", e);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if fde.len() == 0 {
|
||||||
|
// This FDE is a terminator
|
||||||
|
return Ok(cie_data_offsets);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store CIE offset for FDE range
|
||||||
|
cie_data_offsets.insert(
|
||||||
|
fde.initial_address()..fde.initial_address()+fde.len(),
|
||||||
|
fde.cie().data_alignment_factor()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_supplementary_build_id(bv: &BinaryView) -> Option<String> {
|
||||||
|
let raw_view = bv.raw_view().ok()?;
|
||||||
|
if let Ok(section) = raw_view.section_by_name(".gnu_debugaltlink") {
|
||||||
|
let start = section.start();
|
||||||
|
let len = section.len();
|
||||||
|
|
||||||
|
if len < 20 {
|
||||||
|
// Not large enough to hold a build id
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
raw_view
|
||||||
|
.read_vec(start, len)
|
||||||
|
.splitn(2, |x| *x == 0)
|
||||||
|
.last()
|
||||||
|
.map(|a| {
|
||||||
|
a.iter().map(|b| format!("{:02x}", b)).collect()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_dwarf(
|
||||||
|
bv: &BinaryView,
|
||||||
|
debug_bv: &BinaryView,
|
||||||
|
supplementary_bv: Option<&BinaryView>,
|
||||||
progress: Box<dyn Fn(usize, usize) -> Result<(), ()>>,
|
progress: Box<dyn Fn(usize, usize) -> Result<(), ()>>,
|
||||||
) -> DebugInfoBuilder {
|
) -> Result<DebugInfoBuilder, ()> {
|
||||||
|
// TODO: warn if no supplementary file and .gnu_debugaltlink section present
|
||||||
|
|
||||||
// Determine if this is a DWO
|
// Determine if this is a DWO
|
||||||
// TODO : Make this more robust...some DWOs follow non-DWO conventions
|
// TODO : Make this more robust...some DWOs follow non-DWO conventions
|
||||||
let dwo_file = is_dwo_dwarf(view) || is_raw_dwo_dwarf(view);
|
|
||||||
|
|
||||||
// Figure out if it's the given view or the raw view that has the dwarf info in it
|
// Figure out if it's the given view or the raw view that has the dwarf info in it
|
||||||
let raw_view = &view.raw_view().unwrap();
|
let raw_view = &debug_bv.raw_view().unwrap();
|
||||||
let view = if is_dwo_dwarf(view) || is_non_dwo_dwarf(view) {
|
let view = if is_dwo_dwarf(debug_bv) || is_non_dwo_dwarf(debug_bv) {
|
||||||
view
|
debug_bv
|
||||||
} else {
|
} else {
|
||||||
raw_view
|
raw_view
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let dwo_file = is_dwo_dwarf(view) || is_raw_dwo_dwarf(view);
|
||||||
|
|
||||||
// gimli setup
|
// gimli setup
|
||||||
let endian = get_endian(view);
|
let endian = get_endian(view);
|
||||||
let mut section_reader =
|
let mut section_reader =
|
||||||
@@ -237,24 +385,60 @@ fn parse_dwarf(
|
|||||||
if dwo_file {
|
if dwo_file {
|
||||||
dwarf.file_type = DwarfFileType::Dwo;
|
dwarf.file_type = DwarfFileType::Dwo;
|
||||||
}
|
}
|
||||||
|
else {
|
||||||
|
dwarf.file_type = DwarfFileType::Main;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(sup_bv) = supplementary_bv {
|
||||||
|
let sup_endian = get_endian(sup_bv);
|
||||||
|
let sup_dwo_file = is_dwo_dwarf(sup_bv) || is_raw_dwo_dwarf(sup_bv);
|
||||||
|
let sup_section_reader =
|
||||||
|
|section_id: SectionId| -> _ { create_section_reader(section_id, sup_bv, sup_endian, sup_dwo_file) };
|
||||||
|
if let Err(e) = dwarf.load_sup(sup_section_reader) {
|
||||||
|
error!("Failed to load supplementary file: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let eh_frame_endian = get_endian(bv);
|
||||||
|
let mut eh_frame_section_reader =
|
||||||
|
|section_id: SectionId| -> _ { create_section_reader(section_id, bv, eh_frame_endian, dwo_file) };
|
||||||
|
let eh_frame = gimli::EhFrame::load(&mut eh_frame_section_reader).unwrap();
|
||||||
|
|
||||||
|
let range_data_offsets = parse_eh_frame(bv, eh_frame)
|
||||||
|
.map_err(|e| error!("Error parsing .eh_frame: {}", e))?;
|
||||||
|
|
||||||
// Create debug info builder and recover name mapping first
|
// Create debug info builder and recover name mapping first
|
||||||
// Since DWARF is stored as a tree with arbitrary implicit edges among leaves,
|
// Since DWARF is stored as a tree with arbitrary implicit edges among leaves,
|
||||||
// it is not possible to correctly track namespaces while you're parsing "in order" without backtracking,
|
// it is not possible to correctly track namespaces while you're parsing "in order" without backtracking,
|
||||||
// so we just do it up front
|
// so we just do it up front
|
||||||
let mut debug_info_builder = DebugInfoBuilder::new();
|
let mut debug_info_builder = DebugInfoBuilder::new();
|
||||||
if let Some(mut debug_info_builder_context) = DebugInfoBuilderContext::new(view, dwarf) {
|
debug_info_builder.set_range_data_offsets(range_data_offsets);
|
||||||
if !recover_names(&mut debug_info_builder_context, &progress)
|
|
||||||
|
if let Some(mut debug_info_builder_context) = DebugInfoBuilderContext::new(view, &dwarf) {
|
||||||
|
if !recover_names(&dwarf, &mut debug_info_builder_context, &progress)
|
||||||
|| debug_info_builder_context.total_die_count == 0
|
|| debug_info_builder_context.total_die_count == 0
|
||||||
{
|
{
|
||||||
return debug_info_builder;
|
return Ok(debug_info_builder);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse all the compilation units
|
// Parse all the compilation units
|
||||||
let mut current_die_number = 0;
|
let mut current_die_number = 0;
|
||||||
|
|
||||||
|
for unit in debug_info_builder_context.sup_units() {
|
||||||
|
parse_unit(
|
||||||
|
dwarf.sup().unwrap(),
|
||||||
|
&unit,
|
||||||
|
&debug_info_builder_context,
|
||||||
|
&mut debug_info_builder,
|
||||||
|
&progress,
|
||||||
|
&mut current_die_number,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
for unit in debug_info_builder_context.units() {
|
for unit in debug_info_builder_context.units() {
|
||||||
parse_unit(
|
parse_unit(
|
||||||
unit,
|
&dwarf,
|
||||||
|
&unit,
|
||||||
&debug_info_builder_context,
|
&debug_info_builder_context,
|
||||||
&mut debug_info_builder,
|
&mut debug_info_builder,
|
||||||
&progress,
|
&progress,
|
||||||
@@ -262,14 +446,28 @@ fn parse_dwarf(
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
debug_info_builder
|
|
||||||
|
Ok(debug_info_builder)
|
||||||
}
|
}
|
||||||
|
|
||||||
struct DWARFParser;
|
struct DWARFParser;
|
||||||
|
|
||||||
impl CustomDebugInfoParser for DWARFParser {
|
impl CustomDebugInfoParser for DWARFParser {
|
||||||
fn is_valid(&self, view: &BinaryView) -> bool {
|
fn is_valid(&self, view: &BinaryView) -> bool {
|
||||||
dwarfreader::is_valid(view)
|
if dwarfreader::is_valid(view) || dwarfreader::can_use_debuginfod(view) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
if dwarfreader::has_build_id_section(view) {
|
||||||
|
if let Ok(build_id) = get_build_id(view) {
|
||||||
|
if helpers::find_local_debug_file_for_build_id(&build_id, view).is_some() {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if helpers::find_sibling_debug_file(view).is_some() {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
false
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_info(
|
fn parse_info(
|
||||||
@@ -279,17 +477,117 @@ impl CustomDebugInfoParser for DWARFParser {
|
|||||||
debug_file: &BinaryView,
|
debug_file: &BinaryView,
|
||||||
progress: Box<dyn Fn(usize, usize) -> Result<(), ()>>,
|
progress: Box<dyn Fn(usize, usize) -> Result<(), ()>>,
|
||||||
) -> bool {
|
) -> bool {
|
||||||
parse_dwarf(debug_file, progress)
|
let (external_file, close_external) = if !dwarfreader::is_valid(bv) {
|
||||||
.post_process(bv, debug_info)
|
if let (Some(debug_view), x) = helpers::load_sibling_debug_file(bv) {
|
||||||
.commit_info(debug_info);
|
(Some(debug_view), x)
|
||||||
|
}
|
||||||
|
else if let Ok(build_id) = get_build_id(bv) {
|
||||||
|
helpers::load_debug_info_for_build_id(&build_id, bv)
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
(None, false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
(None, false)
|
||||||
|
};
|
||||||
|
|
||||||
|
let sup_bv = get_supplementary_build_id(
|
||||||
|
external_file
|
||||||
|
.as_deref()
|
||||||
|
.unwrap_or(debug_file)
|
||||||
|
)
|
||||||
|
.and_then(|build_id| {
|
||||||
|
load_debug_info_for_build_id(&build_id, bv)
|
||||||
|
.0
|
||||||
|
.map(|x| x.raw_view().unwrap())
|
||||||
|
});
|
||||||
|
|
||||||
|
let result = match parse_dwarf(
|
||||||
|
bv,
|
||||||
|
external_file.as_deref().unwrap_or(debug_file),
|
||||||
|
sup_bv.as_deref(),
|
||||||
|
progress
|
||||||
|
)
|
||||||
|
{
|
||||||
|
Ok(mut builder) => {
|
||||||
|
builder.post_process(bv, debug_info).commit_info(debug_info);
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
|
Err(_) => false,
|
||||||
|
};
|
||||||
|
|
||||||
|
if let (Some(ext), true) = (external_file, close_external) {
|
||||||
|
ext.file().close();
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[no_mangle]
|
#[no_mangle]
|
||||||
pub extern "C" fn CorePluginInit() -> bool {
|
pub extern "C" fn CorePluginInit() -> bool {
|
||||||
logger::init(LevelFilter::Debug).unwrap();
|
logger::init(LevelFilter::Debug).unwrap();
|
||||||
|
|
||||||
|
let settings = Settings::new("");
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"network.enableDebuginfod",
|
||||||
|
r#"{
|
||||||
|
"title" : "Enable Debuginfod Support",
|
||||||
|
"type" : "boolean",
|
||||||
|
"default" : false,
|
||||||
|
"description" : "Enable using Debuginfod servers to fetch DWARF debug info for files with a .note.gnu.build-id section.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"network.debuginfodServers",
|
||||||
|
r#"{
|
||||||
|
"title" : "Debuginfod Server URLs",
|
||||||
|
"type" : "array",
|
||||||
|
"elementType" : "string",
|
||||||
|
"default" : [],
|
||||||
|
"description" : "Servers to use for fetching DWARF debug info for files with a .note.gnu.build-id section.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"analysis.debugInfo.enableDebugDirectories",
|
||||||
|
r#"{
|
||||||
|
"title" : "Enable Debug File Directories",
|
||||||
|
"type" : "boolean",
|
||||||
|
"default" : true,
|
||||||
|
"description" : "Enable searching local debug directories for DWARF debug info.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"analysis.debugInfo.debugDirectories",
|
||||||
|
r#"{
|
||||||
|
"title" : "Debug File Directories",
|
||||||
|
"type" : "array",
|
||||||
|
"elementType" : "string",
|
||||||
|
"default" : [],
|
||||||
|
"description" : "Paths to folder containing DWARF debug info stored by build id.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"analysis.debugInfo.loadSiblingDebugFiles",
|
||||||
|
r#"{
|
||||||
|
"title" : "Enable Loading of Sibling Debug Files",
|
||||||
|
"type" : "boolean",
|
||||||
|
"default" : true,
|
||||||
|
"description" : "Enable automatic loading of X.debug and X.dSYM files next to a file named X.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
DebugInfoParser::register("DWARF", DWARFParser {});
|
DebugInfoParser::register("DWARF", DWARFParser {});
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,7 +12,7 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
use crate::die_handlers::*;
|
use crate::{die_handlers::*, ReaderType};
|
||||||
use crate::dwarfdebuginfo::{DebugInfoBuilder, DebugInfoBuilderContext, TypeUID};
|
use crate::dwarfdebuginfo::{DebugInfoBuilder, DebugInfoBuilderContext, TypeUID};
|
||||||
use crate::helpers::*;
|
use crate::helpers::*;
|
||||||
|
|
||||||
@@ -23,31 +23,52 @@ use binaryninja::{
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
use gimli::{constants, DebuggingInformationEntry, Reader, Unit};
|
use gimli::{constants, AttributeValue, DebuggingInformationEntry, Dwarf, Operation, Unit};
|
||||||
|
|
||||||
use log::warn;
|
use log::{debug, error, warn};
|
||||||
|
|
||||||
pub(crate) fn parse_data_variable<R: Reader<Offset = usize>>(
|
pub(crate) fn parse_variable<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
||||||
debug_info_builder: &mut DebugInfoBuilder,
|
debug_info_builder: &mut DebugInfoBuilder,
|
||||||
|
function_index: Option<usize>,
|
||||||
) {
|
) {
|
||||||
let full_name = debug_info_builder_context.get_name(unit, entry);
|
let full_name = debug_info_builder_context.get_name(dwarf, unit, entry);
|
||||||
let type_uid = get_type(unit, entry, debug_info_builder_context, debug_info_builder);
|
let type_uid = get_type(dwarf, unit, entry, debug_info_builder_context, debug_info_builder);
|
||||||
|
|
||||||
let address = if let Ok(Some(attr)) = entry.attr(constants::DW_AT_location) {
|
let Ok(Some(attr)) = entry.attr(constants::DW_AT_location) else {
|
||||||
get_expr_value(unit, attr)
|
return
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
};
|
||||||
|
|
||||||
if let (Some(address), Some(type_uid)) = (address, type_uid) {
|
let AttributeValue::Exprloc(mut expression) = attr.value() else {
|
||||||
debug_info_builder.add_data_variable(address, full_name, type_uid);
|
return
|
||||||
|
};
|
||||||
|
|
||||||
|
match Operation::parse(&mut expression.0, unit.encoding()) {
|
||||||
|
Ok(Operation::FrameOffset { offset }) => {
|
||||||
|
debug_info_builder.add_stack_variable(function_index, offset, full_name, type_uid);
|
||||||
|
},
|
||||||
|
//Ok(Operation::RegisterOffset { register: _, offset: _, base_type: _ }) => {
|
||||||
|
// //TODO: look up register by index (binja register indexes don't match processor indexes?)
|
||||||
|
// //TODO: calculate absolute stack offset
|
||||||
|
// //TODO: add by absolute offset
|
||||||
|
//},
|
||||||
|
Ok(Operation::Address { address }) => {
|
||||||
|
if let Some(uid) = type_uid {
|
||||||
|
debug_info_builder.add_data_variable(address, full_name, uid)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Ok(op) => {
|
||||||
|
debug!("Unhandled operation type for variable: {:?}", op);
|
||||||
|
},
|
||||||
|
Err(e) => error!("Error parsing operation type for variable {:?}: {}", full_name, e)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn do_structure_parse<R: Reader<Offset = usize>>(
|
fn do_structure_parse<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
structure_type: StructureType,
|
structure_type: StructureType,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
@@ -91,8 +112,8 @@ fn do_structure_parse<R: Reader<Offset = usize>>(
|
|||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
let full_name = if get_name(unit, entry, debug_info_builder_context).is_some() {
|
let full_name = if get_name(dwarf, unit, entry, debug_info_builder_context).is_some() {
|
||||||
debug_info_builder_context.get_name(unit, entry)
|
debug_info_builder_context.get_name(dwarf, unit, entry)
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
};
|
};
|
||||||
@@ -109,8 +130,8 @@ fn do_structure_parse<R: Reader<Offset = usize>>(
|
|||||||
// it will also be how any other types refer to this struct
|
// it will also be how any other types refer to this struct
|
||||||
if let Some(full_name) = &full_name {
|
if let Some(full_name) = &full_name {
|
||||||
debug_info_builder.add_type(
|
debug_info_builder.add_type(
|
||||||
get_uid(unit, entry),
|
get_uid(dwarf, unit, entry),
|
||||||
full_name.clone(),
|
&full_name,
|
||||||
Type::named_type_from_type(
|
Type::named_type_from_type(
|
||||||
full_name.clone(),
|
full_name.clone(),
|
||||||
&Type::structure(&structure_builder.finalize()),
|
&Type::structure(&structure_builder.finalize()),
|
||||||
@@ -121,11 +142,11 @@ fn do_structure_parse<R: Reader<Offset = usize>>(
|
|||||||
// We _need_ to have initial typedefs or else we can enter infinite parsing loops
|
// We _need_ to have initial typedefs or else we can enter infinite parsing loops
|
||||||
// These get overwritten in the last step with the actual type, however, so this
|
// These get overwritten in the last step with the actual type, however, so this
|
||||||
// is either perfectly fine or breaking a bunch of NTRs
|
// is either perfectly fine or breaking a bunch of NTRs
|
||||||
let full_name = format!("anonymous_structure_{:x}", get_uid(unit, entry));
|
let full_name = format!("anonymous_structure_{:x}", get_uid(dwarf, unit, entry));
|
||||||
debug_info_builder.add_type(
|
debug_info_builder.add_type(
|
||||||
get_uid(unit, entry),
|
get_uid(dwarf, unit, entry),
|
||||||
full_name.clone(),
|
&full_name,
|
||||||
Type::named_type_from_type(full_name, &Type::structure(&structure_builder.finalize())),
|
Type::named_type_from_type(&full_name, &Type::structure(&structure_builder.finalize())),
|
||||||
false,
|
false,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -136,14 +157,16 @@ fn do_structure_parse<R: Reader<Offset = usize>>(
|
|||||||
while let Ok(Some(child)) = children.next() {
|
while let Ok(Some(child)) = children.next() {
|
||||||
if child.entry().tag() == constants::DW_TAG_member {
|
if child.entry().tag() == constants::DW_TAG_member {
|
||||||
if let Some(child_type_id) = get_type(
|
if let Some(child_type_id) = get_type(
|
||||||
|
dwarf,
|
||||||
unit,
|
unit,
|
||||||
child.entry(),
|
child.entry(),
|
||||||
debug_info_builder_context,
|
debug_info_builder_context,
|
||||||
debug_info_builder,
|
debug_info_builder,
|
||||||
) {
|
) {
|
||||||
if let Some((_, child_type)) = debug_info_builder.get_type(child_type_id) {
|
if let Some(t) = debug_info_builder.get_type(child_type_id) {
|
||||||
|
let child_type = t.get_type();
|
||||||
if let Some(child_name) = debug_info_builder_context
|
if let Some(child_name) = debug_info_builder_context
|
||||||
.get_name(unit, child.entry())
|
.get_name(dwarf, unit, child.entry())
|
||||||
.map_or(
|
.map_or(
|
||||||
if child_type.type_class() == TypeClass::StructureTypeClass {
|
if child_type.type_class() == TypeClass::StructureTypeClass {
|
||||||
Some("".to_string())
|
Some("".to_string())
|
||||||
@@ -188,32 +211,34 @@ fn do_structure_parse<R: Reader<Offset = usize>>(
|
|||||||
let finalized_structure = Type::structure(&structure_builder.finalize());
|
let finalized_structure = Type::structure(&structure_builder.finalize());
|
||||||
if let Some(full_name) = full_name {
|
if let Some(full_name) = full_name {
|
||||||
debug_info_builder.add_type(
|
debug_info_builder.add_type(
|
||||||
get_uid(unit, entry) + 1, // TODO : This is super broke (uid + 1 is not guaranteed to be unique)
|
get_uid(dwarf, unit, entry) + 1, // TODO : This is super broke (uid + 1 is not guaranteed to be unique)
|
||||||
full_name,
|
&full_name,
|
||||||
finalized_structure,
|
finalized_structure,
|
||||||
true,
|
true,
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
debug_info_builder.add_type(
|
debug_info_builder.add_type(
|
||||||
get_uid(unit, entry),
|
get_uid(dwarf, unit, entry),
|
||||||
format!("{}", finalized_structure),
|
&format!("{}", finalized_structure),
|
||||||
finalized_structure,
|
finalized_structure,
|
||||||
false, // Don't commit anonymous unions (because I think it'll break things)
|
false, // Don't commit anonymous unions (because I think it'll break things)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
Some(get_uid(unit, entry))
|
Some(get_uid(dwarf, unit, entry))
|
||||||
}
|
}
|
||||||
|
|
||||||
// This function iterates up through the dependency references, adding all the types along the way until there are no more or stopping at the first one already tracked, then returns the UID of the type of the given DIE
|
// This function iterates up through the dependency references, adding all the types along the way until there are no more or stopping at the first one already tracked, then returns the UID of the type of the given DIE
|
||||||
pub(crate) fn get_type<R: Reader<Offset = usize>>(
|
pub(crate) fn get_type<R: ReaderType>(
|
||||||
|
dwarf: &Dwarf<R>,
|
||||||
unit: &Unit<R>,
|
unit: &Unit<R>,
|
||||||
entry: &DebuggingInformationEntry<R>,
|
entry: &DebuggingInformationEntry<R>,
|
||||||
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
debug_info_builder_context: &DebugInfoBuilderContext<R>,
|
||||||
debug_info_builder: &mut DebugInfoBuilder,
|
debug_info_builder: &mut DebugInfoBuilder,
|
||||||
) -> Option<TypeUID> {
|
) -> Option<TypeUID> {
|
||||||
// If this node (and thus all its referenced nodes) has already been processed, just return the offset
|
// If this node (and thus all its referenced nodes) has already been processed, just return the offset
|
||||||
if debug_info_builder.contains_type(get_uid(unit, entry)) {
|
let entry_uid = get_uid(dwarf, unit, entry);
|
||||||
return Some(get_uid(unit, entry));
|
if debug_info_builder.contains_type(entry_uid) {
|
||||||
|
return Some(entry_uid);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Don't parse types that are just declarations and not definitions
|
// Don't parse types that are just declarations and not definitions
|
||||||
@@ -222,6 +247,7 @@ pub(crate) fn get_type<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
let entry_type = if let Some(die_reference) = get_attr_die(
|
let entry_type = if let Some(die_reference) = get_attr_die(
|
||||||
|
dwarf,
|
||||||
unit,
|
unit,
|
||||||
entry,
|
entry,
|
||||||
debug_info_builder_context,
|
debug_info_builder_context,
|
||||||
@@ -229,25 +255,29 @@ pub(crate) fn get_type<R: Reader<Offset = usize>>(
|
|||||||
) {
|
) {
|
||||||
// This needs to recurse first (before the early return below) to ensure all sub-types have been parsed
|
// This needs to recurse first (before the early return below) to ensure all sub-types have been parsed
|
||||||
match die_reference {
|
match die_reference {
|
||||||
DieReference::UnitAndOffset((entry_unit, entry_offset)) => get_type(
|
DieReference::UnitAndOffset((dwarf, entry_unit, entry_offset)) => {
|
||||||
|
get_type(
|
||||||
|
dwarf,
|
||||||
entry_unit,
|
entry_unit,
|
||||||
&entry_unit.entry(entry_offset).unwrap(),
|
&entry_unit.entry(entry_offset).unwrap(),
|
||||||
debug_info_builder_context,
|
debug_info_builder_context,
|
||||||
debug_info_builder,
|
debug_info_builder,
|
||||||
),
|
)
|
||||||
|
}
|
||||||
DieReference::Err => {
|
DieReference::Err => {
|
||||||
warn!("Failed to fetch DIE. Debug information may be incomplete.");
|
warn!("Failed to fetch DIE when getting type through DW_AT_type. Debug information may be incomplete.");
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// This needs to recurse first (before the early return below) to ensure all sub-types have been parsed
|
// This needs to recurse first (before the early return below) to ensure all sub-types have been parsed
|
||||||
match resolve_specification(unit, entry, debug_info_builder_context) {
|
match resolve_specification(dwarf, unit, entry, debug_info_builder_context) {
|
||||||
DieReference::UnitAndOffset((entry_unit, entry_offset))
|
DieReference::UnitAndOffset((dwarf, entry_unit, entry_offset))
|
||||||
if entry_unit.header.offset() != unit.header.offset()
|
if entry_unit.header.offset() != unit.header.offset()
|
||||||
&& entry_offset != entry.offset() =>
|
&& entry_offset != entry.offset() =>
|
||||||
{
|
{
|
||||||
get_type(
|
get_type(
|
||||||
|
dwarf,
|
||||||
entry_unit,
|
entry_unit,
|
||||||
&entry_unit.entry(entry_offset).unwrap(),
|
&entry_unit.entry(entry_offset).unwrap(),
|
||||||
debug_info_builder_context,
|
debug_info_builder_context,
|
||||||
@@ -256,7 +286,7 @@ pub(crate) fn get_type<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
DieReference::UnitAndOffset(_) => None,
|
DieReference::UnitAndOffset(_) => None,
|
||||||
DieReference::Err => {
|
DieReference::Err => {
|
||||||
warn!("Failed to fetch DIE. Debug information may be incomplete.");
|
warn!("Failed to fetch DIE when getting type. Debug information may be incomplete.");
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -264,20 +294,21 @@ pub(crate) fn get_type<R: Reader<Offset = usize>>(
|
|||||||
|
|
||||||
// If this node (and thus all its referenced nodes) has already been processed, just return the offset
|
// If this node (and thus all its referenced nodes) has already been processed, just return the offset
|
||||||
// This check is not redundant because this type might have been processes in the recursive calls above
|
// This check is not redundant because this type might have been processes in the recursive calls above
|
||||||
if debug_info_builder.contains_type(get_uid(unit, entry)) {
|
if debug_info_builder.contains_type(entry_uid) {
|
||||||
return Some(get_uid(unit, entry));
|
return Some(entry_uid);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Collect the required information to create a type and add it to the type map. Also, add the dependencies of this type to the type's typeinfo
|
// Collect the required information to create a type and add it to the type map. Also, add the dependencies of this type to the type's typeinfo
|
||||||
// Create the type, make a TypeInfo for it, and add it to the debug info
|
// Create the type, make a TypeInfo for it, and add it to the debug info
|
||||||
let (type_def, mut commit): (Option<Ref<Type>>, bool) = match entry.tag() {
|
let (type_def, mut commit): (Option<Ref<Type>>, bool) = match entry.tag() {
|
||||||
constants::DW_TAG_base_type => (
|
constants::DW_TAG_base_type => (
|
||||||
handle_base_type(unit, entry, debug_info_builder_context),
|
handle_base_type(dwarf, unit, entry, debug_info_builder_context),
|
||||||
false,
|
false,
|
||||||
),
|
),
|
||||||
|
|
||||||
constants::DW_TAG_structure_type => {
|
constants::DW_TAG_structure_type => {
|
||||||
return do_structure_parse(
|
return do_structure_parse(
|
||||||
|
dwarf,
|
||||||
StructureType::StructStructureType,
|
StructureType::StructStructureType,
|
||||||
unit,
|
unit,
|
||||||
entry,
|
entry,
|
||||||
@@ -287,6 +318,7 @@ pub(crate) fn get_type<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
constants::DW_TAG_class_type => {
|
constants::DW_TAG_class_type => {
|
||||||
return do_structure_parse(
|
return do_structure_parse(
|
||||||
|
dwarf,
|
||||||
StructureType::ClassStructureType,
|
StructureType::ClassStructureType,
|
||||||
unit,
|
unit,
|
||||||
entry,
|
entry,
|
||||||
@@ -296,6 +328,7 @@ pub(crate) fn get_type<R: Reader<Offset = usize>>(
|
|||||||
}
|
}
|
||||||
constants::DW_TAG_union_type => {
|
constants::DW_TAG_union_type => {
|
||||||
return do_structure_parse(
|
return do_structure_parse(
|
||||||
|
dwarf,
|
||||||
StructureType::UnionStructureType,
|
StructureType::UnionStructureType,
|
||||||
unit,
|
unit,
|
||||||
entry,
|
entry,
|
||||||
@@ -306,13 +339,13 @@ pub(crate) fn get_type<R: Reader<Offset = usize>>(
|
|||||||
|
|
||||||
// Enum
|
// Enum
|
||||||
constants::DW_TAG_enumeration_type => {
|
constants::DW_TAG_enumeration_type => {
|
||||||
(handle_enum(unit, entry, debug_info_builder_context), true)
|
(handle_enum(dwarf, unit, entry, debug_info_builder_context), true)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Basic types
|
// Basic types
|
||||||
constants::DW_TAG_typedef => {
|
constants::DW_TAG_typedef => {
|
||||||
if let Some(name) = debug_info_builder_context.get_name(unit, entry) {
|
if let Some(name) = debug_info_builder_context.get_name(dwarf, unit, entry) {
|
||||||
handle_typedef(debug_info_builder, entry_type, name)
|
handle_typedef(debug_info_builder, entry_type, &name)
|
||||||
} else {
|
} else {
|
||||||
(None, false)
|
(None, false)
|
||||||
}
|
}
|
||||||
@@ -356,6 +389,7 @@ pub(crate) fn get_type<R: Reader<Offset = usize>>(
|
|||||||
constants::DW_TAG_unspecified_type => (Some(Type::void()), false),
|
constants::DW_TAG_unspecified_type => (Some(Type::void()), false),
|
||||||
constants::DW_TAG_subroutine_type => (
|
constants::DW_TAG_subroutine_type => (
|
||||||
handle_function(
|
handle_function(
|
||||||
|
dwarf,
|
||||||
unit,
|
unit,
|
||||||
entry,
|
entry,
|
||||||
debug_info_builder_context,
|
debug_info_builder_context,
|
||||||
@@ -375,8 +409,8 @@ pub(crate) fn get_type<R: Reader<Offset = usize>>(
|
|||||||
|
|
||||||
// Wrap our resultant type in a TypeInfo so that the internal DebugInfo class can manage it
|
// Wrap our resultant type in a TypeInfo so that the internal DebugInfo class can manage it
|
||||||
if let Some(type_def) = type_def {
|
if let Some(type_def) = type_def {
|
||||||
let name = if get_name(unit, entry, debug_info_builder_context).is_some() {
|
let name = if get_name(dwarf, unit, entry, debug_info_builder_context).is_some() {
|
||||||
debug_info_builder_context.get_name(unit, entry)
|
debug_info_builder_context.get_name(dwarf, unit, entry)
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
@@ -385,8 +419,8 @@ pub(crate) fn get_type<R: Reader<Offset = usize>>(
|
|||||||
format!("{}", type_def)
|
format!("{}", type_def)
|
||||||
});
|
});
|
||||||
|
|
||||||
debug_info_builder.add_type(get_uid(unit, entry), name, type_def, commit);
|
debug_info_builder.add_type(entry_uid, &name, type_def, commit);
|
||||||
Some(get_uid(unit, entry))
|
Some(entry_uid)
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -10,4 +10,4 @@ crate-type = ["cdylib"]
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
dwarfreader = { path = "../shared/" }
|
dwarfreader = { path = "../shared/" }
|
||||||
binaryninja = {path="../../../"}
|
binaryninja = {path="../../../"}
|
||||||
gimli = "0.28"
|
gimli = "0.31"
|
||||||
|
|||||||
@@ -17,7 +17,6 @@ use binaryninja::{
|
|||||||
command::{register, Command},
|
command::{register, Command},
|
||||||
disassembly::{DisassemblyTextLine, InstructionTextToken, InstructionTextTokenContents},
|
disassembly::{DisassemblyTextLine, InstructionTextToken, InstructionTextTokenContents},
|
||||||
flowgraph::{BranchType, EdgeStyle, FlowGraph, FlowGraphNode, FlowGraphOption},
|
flowgraph::{BranchType, EdgeStyle, FlowGraph, FlowGraphNode, FlowGraphOption},
|
||||||
string::BnString,
|
|
||||||
};
|
};
|
||||||
use dwarfreader::is_valid;
|
use dwarfreader::is_valid;
|
||||||
|
|
||||||
@@ -34,7 +33,7 @@ use gimli::{
|
|||||||
UnitSectionOffset,
|
UnitSectionOffset,
|
||||||
};
|
};
|
||||||
|
|
||||||
static PADDING: [&'static str; 23] = [
|
static PADDING: [&str; 23] = [
|
||||||
"",
|
"",
|
||||||
" ",
|
" ",
|
||||||
" ",
|
" ",
|
||||||
@@ -77,14 +76,14 @@ fn get_info_string<R: Reader>(
|
|||||||
let label_string = format!("#0x{:08x}", label_value);
|
let label_string = format!("#0x{:08x}", label_value);
|
||||||
disassembly_lines.push(DisassemblyTextLine::from(vec![
|
disassembly_lines.push(DisassemblyTextLine::from(vec![
|
||||||
InstructionTextToken::new(
|
InstructionTextToken::new(
|
||||||
BnString::new(label_string),
|
&label_string,
|
||||||
InstructionTextTokenContents::GotoLabel(label_value),
|
InstructionTextTokenContents::GotoLabel(label_value),
|
||||||
),
|
),
|
||||||
InstructionTextToken::new(BnString::new(":"), InstructionTextTokenContents::Text),
|
InstructionTextToken::new(":", InstructionTextTokenContents::Text),
|
||||||
]));
|
]));
|
||||||
|
|
||||||
disassembly_lines.push(DisassemblyTextLine::from(vec![InstructionTextToken::new(
|
disassembly_lines.push(DisassemblyTextLine::from(vec![InstructionTextToken::new(
|
||||||
BnString::new(die_node.tag().static_string().unwrap()),
|
die_node.tag().static_string().unwrap(),
|
||||||
InstructionTextTokenContents::TypeName, // TODO : KeywordToken?
|
InstructionTextTokenContents::TypeName, // TODO : KeywordToken?
|
||||||
)]));
|
)]));
|
||||||
|
|
||||||
@@ -92,7 +91,7 @@ fn get_info_string<R: Reader>(
|
|||||||
while let Some(attr) = attrs.next().unwrap() {
|
while let Some(attr) = attrs.next().unwrap() {
|
||||||
let mut attr_line: Vec<InstructionTextToken> = Vec::with_capacity(5);
|
let mut attr_line: Vec<InstructionTextToken> = Vec::with_capacity(5);
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(" "),
|
" ",
|
||||||
InstructionTextTokenContents::Indentation,
|
InstructionTextTokenContents::Indentation,
|
||||||
));
|
));
|
||||||
|
|
||||||
@@ -100,14 +99,14 @@ fn get_info_string<R: Reader>(
|
|||||||
if let Some(n) = attr.name().static_string() {
|
if let Some(n) = attr.name().static_string() {
|
||||||
len = n.len();
|
len = n.len();
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(n),
|
n,
|
||||||
InstructionTextTokenContents::FieldName,
|
InstructionTextTokenContents::FieldName,
|
||||||
));
|
));
|
||||||
} else {
|
} else {
|
||||||
// This is rather unlikely, I think
|
// This is rather unlikely, I think
|
||||||
len = 1;
|
len = 1;
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new("?"),
|
"?",
|
||||||
InstructionTextTokenContents::FieldName,
|
InstructionTextTokenContents::FieldName,
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
@@ -115,25 +114,25 @@ fn get_info_string<R: Reader>(
|
|||||||
// On command line the magic number that looks good is 22, but that's too much whitespace in a basic block, so I chose 18 (22 is the max with the current padding provided)
|
// On command line the magic number that looks good is 22, but that's too much whitespace in a basic block, so I chose 18 (22 is the max with the current padding provided)
|
||||||
if len < 18 {
|
if len < 18 {
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(PADDING[18 - len]),
|
PADDING[18 - len],
|
||||||
InstructionTextTokenContents::Text,
|
InstructionTextTokenContents::Text,
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(" = "),
|
" = ",
|
||||||
InstructionTextTokenContents::Text,
|
InstructionTextTokenContents::Text,
|
||||||
));
|
));
|
||||||
|
|
||||||
if let Ok(Some(addr)) = dwarf.attr_address(unit, attr.value()) {
|
if let Ok(Some(addr)) = dwarf.attr_address(unit, attr.value()) {
|
||||||
let addr_string = format!("0x{:08x}", addr);
|
let addr_string = format!("0x{:08x}", addr);
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(addr_string),
|
&addr_string,
|
||||||
InstructionTextTokenContents::Integer(addr),
|
InstructionTextTokenContents::Integer(addr),
|
||||||
));
|
));
|
||||||
} else if let Ok(attr_reader) = dwarf.attr_string(unit, attr.value()) {
|
} else if let Ok(attr_reader) = dwarf.attr_string(unit, attr.value()) {
|
||||||
if let Ok(attr_string) = attr_reader.to_string() {
|
if let Ok(attr_string) = attr_reader.to_string() {
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(attr_string.as_ref()),
|
attr_string.as_ref(),
|
||||||
InstructionTextTokenContents::String({
|
InstructionTextTokenContents::String({
|
||||||
let (_, id, offset) =
|
let (_, id, offset) =
|
||||||
dwarf.lookup_offset_id(attr_reader.offset_id()).unwrap();
|
dwarf.lookup_offset_id(attr_reader.offset_id()).unwrap();
|
||||||
@@ -142,13 +141,13 @@ fn get_info_string<R: Reader>(
|
|||||||
));
|
));
|
||||||
} else {
|
} else {
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new("??"),
|
"??",
|
||||||
InstructionTextTokenContents::Text,
|
InstructionTextTokenContents::Text,
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
} else if let Encoding(type_class) = attr.value() {
|
} else if let Encoding(type_class) = attr.value() {
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(type_class.static_string().unwrap()),
|
type_class.static_string().unwrap(),
|
||||||
InstructionTextTokenContents::TypeName,
|
InstructionTextTokenContents::TypeName,
|
||||||
));
|
));
|
||||||
} else if let UnitRef(offset) = attr.value() {
|
} else if let UnitRef(offset) = attr.value() {
|
||||||
@@ -159,17 +158,17 @@ fn get_info_string<R: Reader>(
|
|||||||
.into_u64();
|
.into_u64();
|
||||||
let addr_string = format!("#0x{:08x}", addr);
|
let addr_string = format!("#0x{:08x}", addr);
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(addr_string),
|
&addr_string,
|
||||||
InstructionTextTokenContents::GotoLabel(addr),
|
InstructionTextTokenContents::GotoLabel(addr),
|
||||||
));
|
));
|
||||||
} else if let Flag(true) = attr.value() {
|
} else if let Flag(true) = attr.value() {
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new("true"),
|
"true",
|
||||||
InstructionTextTokenContents::Integer(1),
|
InstructionTextTokenContents::Integer(1),
|
||||||
));
|
));
|
||||||
} else if let Flag(false) = attr.value() {
|
} else if let Flag(false) = attr.value() {
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new("false"),
|
"false",
|
||||||
InstructionTextTokenContents::Integer(1),
|
InstructionTextTokenContents::Integer(1),
|
||||||
));
|
));
|
||||||
|
|
||||||
@@ -177,31 +176,31 @@ fn get_info_string<R: Reader>(
|
|||||||
} else if let Some(value) = attr.u8_value() {
|
} else if let Some(value) = attr.u8_value() {
|
||||||
let value_string = format!("{}", value);
|
let value_string = format!("{}", value);
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(value_string),
|
&value_string,
|
||||||
InstructionTextTokenContents::Integer(value.into()),
|
InstructionTextTokenContents::Integer(value.into()),
|
||||||
));
|
));
|
||||||
} else if let Some(value) = attr.u16_value() {
|
} else if let Some(value) = attr.u16_value() {
|
||||||
let value_string = format!("{}", value);
|
let value_string = format!("{}", value);
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(value_string),
|
&value_string,
|
||||||
InstructionTextTokenContents::Integer(value.into()),
|
InstructionTextTokenContents::Integer(value.into()),
|
||||||
));
|
));
|
||||||
} else if let Some(value) = attr.udata_value() {
|
} else if let Some(value) = attr.udata_value() {
|
||||||
let value_string = format!("{}", value);
|
let value_string = format!("{}", value);
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(value_string),
|
&value_string,
|
||||||
InstructionTextTokenContents::Integer(value.into()),
|
InstructionTextTokenContents::Integer(value),
|
||||||
));
|
));
|
||||||
} else if let Some(value) = attr.sdata_value() {
|
} else if let Some(value) = attr.sdata_value() {
|
||||||
let value_string = format!("{}", value);
|
let value_string = format!("{}", value);
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(value_string),
|
&value_string,
|
||||||
InstructionTextTokenContents::Integer(value as u64),
|
InstructionTextTokenContents::Integer(value as u64),
|
||||||
));
|
));
|
||||||
} else {
|
} else {
|
||||||
let attr_string = format!("{:?}", attr.value());
|
let attr_string = format!("{:?}", attr.value());
|
||||||
attr_line.push(InstructionTextToken::new(
|
attr_line.push(InstructionTextToken::new(
|
||||||
BnString::new(attr_string),
|
&attr_string,
|
||||||
InstructionTextTokenContents::Text,
|
InstructionTextTokenContents::Text,
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
@@ -282,7 +281,7 @@ fn dump_dwarf(bv: &BinaryView) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
view.show_graph_report("DWARF", graph);
|
view.show_graph_report("DWARF", &graph);
|
||||||
}
|
}
|
||||||
|
|
||||||
struct DWARFDump;
|
struct DWARFDump;
|
||||||
|
|||||||
@@ -6,4 +6,4 @@ edition = "2021"
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
binaryninja = {path="../../../"}
|
binaryninja = {path="../../../"}
|
||||||
gimli = "0.28"
|
gimli = "0.31"
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ use binaryninja::{
|
|||||||
binaryview::{BinaryView, BinaryViewBase, BinaryViewExt},
|
binaryview::{BinaryView, BinaryViewBase, BinaryViewExt},
|
||||||
databuffer::DataBuffer,
|
databuffer::DataBuffer,
|
||||||
Endianness,
|
Endianness,
|
||||||
|
settings::Settings,
|
||||||
};
|
};
|
||||||
|
|
||||||
use std::{ffi::CString, rc::Rc};
|
use std::{ffi::CString, rc::Rc};
|
||||||
@@ -52,6 +53,19 @@ pub fn is_raw_dwo_dwarf(view: &BinaryView) -> bool {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn can_use_debuginfod(view: &BinaryView) -> bool {
|
||||||
|
has_build_id_section(view) &&
|
||||||
|
Settings::new("")
|
||||||
|
.get_bool("network.enableDebuginfod", Some(view), None)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn has_build_id_section(view: &BinaryView) -> bool {
|
||||||
|
if let Ok(raw_view) = view.raw_view() {
|
||||||
|
return raw_view.section_by_name(".note.gnu.build-id").is_ok()
|
||||||
|
}
|
||||||
|
false
|
||||||
|
}
|
||||||
|
|
||||||
pub fn is_valid(view: &BinaryView) -> bool {
|
pub fn is_valid(view: &BinaryView) -> bool {
|
||||||
is_non_dwo_dwarf(view)
|
is_non_dwo_dwarf(view)
|
||||||
|| is_raw_non_dwo_dwarf(view)
|
|| is_raw_non_dwo_dwarf(view)
|
||||||
@@ -88,23 +102,37 @@ pub fn create_section_reader<'a, Endian: 'a + Endianity>(
|
|||||||
if let Some(data_var) = view
|
if let Some(data_var) = view
|
||||||
.data_variables()
|
.data_variables()
|
||||||
.iter()
|
.iter()
|
||||||
.find(|var| var.address == symbol.address())
|
.find(|var| var.address() == symbol.address())
|
||||||
{
|
{
|
||||||
// TODO : This should eventually be wrapped by some DataView sorta thingy thing, like how python does it
|
// TODO : This should eventually be wrapped by some DataView sorta thingy thing, like how python does it
|
||||||
let data_type = data_var.type_with_confidence().contents;
|
let data_type = data_var.t();
|
||||||
let data = view.read_vec(data_var.address, data_type.width() as usize);
|
let data = view.read_vec(data_var.address(), data_type.width() as usize);
|
||||||
let element_type = data_type.element_type().unwrap().contents;
|
let element_type = data_type.element_type().unwrap().contents;
|
||||||
|
|
||||||
if let Some(current_section_header) = data
|
if let Some(current_section_header) = data
|
||||||
.chunks(element_type.width() as usize)
|
.chunks(element_type.width() as usize)
|
||||||
.find(|section_header| {
|
.find(|section_header| {
|
||||||
|
if view.address_size() == 4 {
|
||||||
|
endian.read_u32(§ion_header[16..20]) as u64 == section.start()
|
||||||
|
}
|
||||||
|
else {
|
||||||
endian.read_u64(§ion_header[24..32]) == section.start()
|
endian.read_u64(§ion_header[24..32]) == section.start()
|
||||||
|
}
|
||||||
})
|
})
|
||||||
{
|
{
|
||||||
if (endian.read_u64(¤t_section_header[8..16]) & 2048) != 0 {
|
let section_flags = if view.address_size() == 4 {
|
||||||
|
endian.read_u32(¤t_section_header[8..12]) as u64
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
endian.read_u64(¤t_section_header[8..16])
|
||||||
|
};
|
||||||
|
// If the section has the compressed bit set
|
||||||
|
if (section_flags & 2048) != 0 {
|
||||||
// Get section, trim header, decompress, return
|
// Get section, trim header, decompress, return
|
||||||
let offset = section.start() + 24;
|
let compressed_header_size = view.address_size()*3;
|
||||||
let len = section.len() - 24;
|
|
||||||
|
let offset = section.start() + compressed_header_size as u64;
|
||||||
|
let len = section.len() - compressed_header_size;
|
||||||
|
|
||||||
if let Ok(buffer) = view.read_buffer(offset, len) {
|
if let Ok(buffer) = view.read_buffer(offset, len) {
|
||||||
use std::ptr;
|
use std::ptr;
|
||||||
|
|||||||
@@ -3,16 +3,15 @@ use binaryninja::{
|
|||||||
command::register,
|
command::register,
|
||||||
disassembly::{DisassemblyTextLine, InstructionTextToken, InstructionTextTokenContents},
|
disassembly::{DisassemblyTextLine, InstructionTextToken, InstructionTextTokenContents},
|
||||||
flowgraph::{BranchType, EdgePenStyle, EdgeStyle, FlowGraph, FlowGraphNode, ThemeColor},
|
flowgraph::{BranchType, EdgePenStyle, EdgeStyle, FlowGraph, FlowGraphNode, ThemeColor},
|
||||||
string::BnString,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
fn test_graph(view: &BinaryView) {
|
fn test_graph(view: &BinaryView) {
|
||||||
let graph = FlowGraph::new();
|
let graph = FlowGraph::new();
|
||||||
|
|
||||||
let disassembly_lines_a = vec![DisassemblyTextLine::from(vec![
|
let disassembly_lines_a = vec![DisassemblyTextLine::from(vec![
|
||||||
InstructionTextToken::new(BnString::new("Li"), InstructionTextTokenContents::Text),
|
InstructionTextToken::new("Li", InstructionTextTokenContents::Text),
|
||||||
InstructionTextToken::new(BnString::new("ne"), InstructionTextTokenContents::Text),
|
InstructionTextToken::new("ne", InstructionTextTokenContents::Text),
|
||||||
InstructionTextToken::new(BnString::new(" 1"), InstructionTextTokenContents::Text),
|
InstructionTextToken::new(" 1", InstructionTextTokenContents::Text),
|
||||||
])];
|
])];
|
||||||
|
|
||||||
let node_a = FlowGraphNode::new(&graph);
|
let node_a = FlowGraphNode::new(&graph);
|
||||||
@@ -37,7 +36,7 @@ fn test_graph(view: &BinaryView) {
|
|||||||
&EdgeStyle::default(),
|
&EdgeStyle::default(),
|
||||||
);
|
);
|
||||||
|
|
||||||
view.show_graph_report("Rust Graph Title", graph);
|
view.show_graph_report("Rust Graph Title", &graph);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[no_mangle]
|
#[no_mangle]
|
||||||
|
|||||||
@@ -2,9 +2,7 @@ use std::env;
|
|||||||
|
|
||||||
use binaryninja::binaryview::BinaryViewExt;
|
use binaryninja::binaryview::BinaryViewExt;
|
||||||
use binaryninja::hlil::HighLevelILLiftedOperand;
|
use binaryninja::hlil::HighLevelILLiftedOperand;
|
||||||
use binaryninja::hlil::{
|
use binaryninja::hlil::{HighLevelILFunction, HighLevelILLiftedInstruction};
|
||||||
HighLevelILFunction, HighLevelILLiftedInstruction, HighLevelILLiftedInstructionKind,
|
|
||||||
};
|
|
||||||
use binaryninja::types::Variable;
|
use binaryninja::types::Variable;
|
||||||
|
|
||||||
fn print_indent(indent: usize) {
|
fn print_indent(indent: usize) {
|
||||||
@@ -12,131 +10,7 @@ fn print_indent(indent: usize) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn print_operation(operation: &HighLevelILLiftedInstruction) {
|
fn print_operation(operation: &HighLevelILLiftedInstruction) {
|
||||||
use HighLevelILLiftedInstructionKind::*;
|
print!("{}", operation.name());
|
||||||
match &operation.kind {
|
|
||||||
Adc(_) => print!("Adc"),
|
|
||||||
Sbb(_) => print!("Sbb"),
|
|
||||||
Rlc(_) => print!("Rlc"),
|
|
||||||
Rrc(_) => print!("Rrc"),
|
|
||||||
Add(_) => print!("Add"),
|
|
||||||
Sub(_) => print!("Sub"),
|
|
||||||
And(_) => print!("And"),
|
|
||||||
Or(_) => print!("Or"),
|
|
||||||
Xor(_) => print!("Xor"),
|
|
||||||
Lsl(_) => print!("Lsl"),
|
|
||||||
Lsr(_) => print!("Lsr"),
|
|
||||||
Asr(_) => print!("Asr"),
|
|
||||||
Rol(_) => print!("Rol"),
|
|
||||||
Ror(_) => print!("Ror"),
|
|
||||||
Mul(_) => print!("Mul"),
|
|
||||||
MuluDp(_) => print!("MuluDp"),
|
|
||||||
MulsDp(_) => print!("MulsDp"),
|
|
||||||
Divu(_) => print!("Divu"),
|
|
||||||
DivuDp(_) => print!("DivuDp"),
|
|
||||||
Divs(_) => print!("Divs"),
|
|
||||||
DivsDp(_) => print!("DivsDp"),
|
|
||||||
Modu(_) => print!("Modu"),
|
|
||||||
ModuDp(_) => print!("ModuDp"),
|
|
||||||
Mods(_) => print!("Mods"),
|
|
||||||
ModsDp(_) => print!("ModsDp"),
|
|
||||||
CmpE(_) => print!("CmpE"),
|
|
||||||
CmpNe(_) => print!("CmpNe"),
|
|
||||||
CmpSlt(_) => print!("CmpSlt"),
|
|
||||||
CmpUlt(_) => print!("CmpUlt"),
|
|
||||||
CmpSle(_) => print!("CmpSle"),
|
|
||||||
CmpUle(_) => print!("CmpUle"),
|
|
||||||
CmpSge(_) => print!("CmpSge"),
|
|
||||||
CmpUge(_) => print!("CmpUge"),
|
|
||||||
CmpSgt(_) => print!("CmpSgt"),
|
|
||||||
CmpUgt(_) => print!("CmpUgt"),
|
|
||||||
TestBit(_) => print!("TestBit"),
|
|
||||||
AddOverflow(_) => print!("AddOverflow"),
|
|
||||||
Fadd(_) => print!("Fadd"),
|
|
||||||
Fsub(_) => print!("Fsub"),
|
|
||||||
Fmul(_) => print!("Fmul"),
|
|
||||||
Fdiv(_) => print!("Fdiv"),
|
|
||||||
FcmpE(_) => print!("FcmpE"),
|
|
||||||
FcmpNe(_) => print!("FcmpNe"),
|
|
||||||
FcmpLt(_) => print!("FcmpLt"),
|
|
||||||
FcmpLe(_) => print!("FcmpLe"),
|
|
||||||
FcmpGe(_) => print!("FcmpGe"),
|
|
||||||
FcmpGt(_) => print!("FcmpGt"),
|
|
||||||
FcmpO(_) => print!("FcmpO"),
|
|
||||||
FcmpUo(_) => print!("FcmpUo"),
|
|
||||||
ArrayIndex(_) => print!("ArrayIndex"),
|
|
||||||
ArrayIndexSsa(_) => print!("ArrayIndexSsa"),
|
|
||||||
Assign(_) => print!("Assign"),
|
|
||||||
AssignMemSsa(_) => print!("AssignMemSsa"),
|
|
||||||
AssignUnpack(_) => print!("AssignUnpack"),
|
|
||||||
AssignUnpackMemSsa(_) => print!("AssignUnpackMemSsa"),
|
|
||||||
Block(_) => print!("Block"),
|
|
||||||
Call(_) => print!("Call"),
|
|
||||||
Tailcall(_) => print!("Tailcall"),
|
|
||||||
CallSsa(_) => print!("CallSsa"),
|
|
||||||
Case(_) => print!("Case"),
|
|
||||||
Const(_) => print!("Const"),
|
|
||||||
ConstPtr(_) => print!("ConstPtr"),
|
|
||||||
Import(_) => print!("Import"),
|
|
||||||
ConstData(_) => print!("ConstData"),
|
|
||||||
Deref(_) => print!("Deref"),
|
|
||||||
AddressOf(_) => print!("AddressOf"),
|
|
||||||
Neg(_) => print!("Neg"),
|
|
||||||
Not(_) => print!("Not"),
|
|
||||||
Sx(_) => print!("Sx"),
|
|
||||||
Zx(_) => print!("Zx"),
|
|
||||||
LowPart(_) => print!("LowPart"),
|
|
||||||
BoolToInt(_) => print!("BoolToInt"),
|
|
||||||
UnimplMem(_) => print!("UnimplMem"),
|
|
||||||
Fsqrt(_) => print!("Fsqrt"),
|
|
||||||
Fneg(_) => print!("Fneg"),
|
|
||||||
Fabs(_) => print!("Fabs"),
|
|
||||||
FloatToInt(_) => print!("FloatToInt"),
|
|
||||||
IntToFloat(_) => print!("IntToFloat"),
|
|
||||||
FloatConv(_) => print!("FloatConv"),
|
|
||||||
RoundToInt(_) => print!("RoundToInt"),
|
|
||||||
Floor(_) => print!("Floor"),
|
|
||||||
Ceil(_) => print!("Ceil"),
|
|
||||||
Ftrunc(_) => print!("Ftrunc"),
|
|
||||||
DerefFieldSsa(_) => print!("DerefFieldSsa"),
|
|
||||||
DerefSsa(_) => print!("DerefSsa"),
|
|
||||||
ExternPtr(_) => print!("ExternPtr"),
|
|
||||||
FloatConst(_) => print!("FloatConst"),
|
|
||||||
For(_) => print!("For"),
|
|
||||||
ForSsa(_) => print!("ForSsa"),
|
|
||||||
Goto(_) => print!("Goto"),
|
|
||||||
Label(_) => print!("Label"),
|
|
||||||
If(_) => print!("If"),
|
|
||||||
Intrinsic(_) => print!("Intrinsic"),
|
|
||||||
IntrinsicSsa(_) => print!("IntrinsicSsa"),
|
|
||||||
Jump(_) => print!("Jump"),
|
|
||||||
MemPhi(_) => print!("MemPhi"),
|
|
||||||
Nop => print!("Nop"),
|
|
||||||
Break => print!("Break"),
|
|
||||||
Continue => print!("Continue"),
|
|
||||||
Noret => print!("Noret"),
|
|
||||||
Unreachable => print!("Unreachable"),
|
|
||||||
Bp => print!("Bp"),
|
|
||||||
Undef => print!("Undef"),
|
|
||||||
Unimpl => print!("Unimpl"),
|
|
||||||
Ret(_) => print!("Ret"),
|
|
||||||
Split(_) => print!("Split"),
|
|
||||||
StructField(_) => print!("StructField"),
|
|
||||||
DerefField(_) => print!("DerefField"),
|
|
||||||
Switch(_) => print!("Switch"),
|
|
||||||
Syscall(_) => print!("Syscall"),
|
|
||||||
SyscallSsa(_) => print!("SyscallSsa"),
|
|
||||||
Trap(_) => print!("Trap"),
|
|
||||||
VarDeclare(_) => print!("VarDeclare"),
|
|
||||||
Var(_) => print!("Var"),
|
|
||||||
VarInit(_) => print!("VarInit"),
|
|
||||||
VarInitSsa(_) => print!("VarInitSsa"),
|
|
||||||
VarPhi(_) => print!("VarPhi"),
|
|
||||||
VarSsa(_) => print!("VarSsa"),
|
|
||||||
While(_) => print!("While"),
|
|
||||||
DoWhile(_) => print!("DoWhile"),
|
|
||||||
WhileSsa(_) => print!("WhileSsa"),
|
|
||||||
DoWhileSsa(_) => print!("DoWhileSsa"),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn print_variable(func: &HighLevelILFunction, var: &Variable) {
|
fn print_variable(func: &HighLevelILFunction, var: &Variable) {
|
||||||
@@ -146,7 +20,7 @@ fn print_variable(func: &HighLevelILFunction, var: &Variable) {
|
|||||||
fn print_il_expr(instr: &HighLevelILLiftedInstruction, mut indent: usize) {
|
fn print_il_expr(instr: &HighLevelILLiftedInstruction, mut indent: usize) {
|
||||||
print_indent(indent);
|
print_indent(indent);
|
||||||
print_operation(instr);
|
print_operation(instr);
|
||||||
println!("");
|
println!();
|
||||||
|
|
||||||
indent += 1;
|
indent += 1;
|
||||||
|
|
||||||
|
|||||||
@@ -5,14 +5,11 @@ use minidump::{Minidump, MinidumpMemoryInfoList};
|
|||||||
|
|
||||||
use binaryninja::binaryview::{BinaryView, BinaryViewBase, BinaryViewExt};
|
use binaryninja::binaryview::{BinaryView, BinaryViewBase, BinaryViewExt};
|
||||||
|
|
||||||
use crate::view::DataBufferWrapper;
|
|
||||||
|
|
||||||
pub fn print_memory_information(bv: &BinaryView) {
|
pub fn print_memory_information(bv: &BinaryView) {
|
||||||
debug!("Printing memory information");
|
debug!("Printing memory information");
|
||||||
if let Ok(minidump_bv) = bv.parent_view() {
|
if let Ok(minidump_bv) = bv.parent_view() {
|
||||||
if let Ok(read_buffer) = minidump_bv.read_buffer(0, minidump_bv.len()) {
|
if let Ok(read_buffer) = minidump_bv.read_buffer(0, minidump_bv.len()) {
|
||||||
let read_buffer = DataBufferWrapper::new(read_buffer);
|
if let Ok(minidump_obj) = Minidump::read(read_buffer.get_data()) {
|
||||||
if let Ok(minidump_obj) = Minidump::read(read_buffer) {
|
|
||||||
if let Ok(memory_info_list) = minidump_obj.get_stream::<MinidumpMemoryInfoList>() {
|
if let Ok(memory_info_list) = minidump_obj.get_stream::<MinidumpMemoryInfoList>() {
|
||||||
let mut memory_info_list_writer = Vec::new();
|
let mut memory_info_list_writer = Vec::new();
|
||||||
match memory_info_list.print(&mut memory_info_list_writer) {
|
match memory_info_list.print(&mut memory_info_list_writer) {
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::ops::{Deref, Range};
|
use std::ops::Range;
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use binaryninja::section::Section;
|
use binaryninja::section::Section;
|
||||||
use binaryninja::segment::Segment;
|
use binaryninja::segment::Segment;
|
||||||
@@ -16,37 +15,11 @@ use binaryninja::custombinaryview::{
|
|||||||
BinaryViewType, BinaryViewTypeBase, CustomBinaryView, CustomBinaryViewType, CustomView,
|
BinaryViewType, BinaryViewTypeBase, CustomBinaryView, CustomBinaryViewType, CustomView,
|
||||||
CustomViewBuilder,
|
CustomViewBuilder,
|
||||||
};
|
};
|
||||||
use binaryninja::databuffer::DataBuffer;
|
|
||||||
use binaryninja::platform::Platform;
|
use binaryninja::platform::Platform;
|
||||||
use binaryninja::Endianness;
|
use binaryninja::Endianness;
|
||||||
|
|
||||||
type BinaryViewResult<R> = binaryninja::binaryview::Result<R>;
|
type BinaryViewResult<R> = binaryninja::binaryview::Result<R>;
|
||||||
|
|
||||||
/// A wrapper around a `binaryninja::databuffer::DataBuffer`, from which a `[u8]` buffer can be obtained
|
|
||||||
/// to pass to `minidump::Minidump::read`.
|
|
||||||
///
|
|
||||||
/// This code is taken from [`dwarfdump`](https://github.com/Vector35/binaryninja-api/blob/9d8bc846bd213407fb1a7a19af2a96f17501ac3b/rust/examples/dwarfdump/src/lib.rs#L81)
|
|
||||||
/// in the Rust API examples.
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct DataBufferWrapper {
|
|
||||||
inner: Arc<DataBuffer>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DataBufferWrapper {
|
|
||||||
pub fn new(buf: DataBuffer) -> Self {
|
|
||||||
DataBufferWrapper {
|
|
||||||
inner: Arc::new(buf),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for DataBufferWrapper {
|
|
||||||
type Target = [u8];
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
self.inner.get_data()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The _Minidump_ binary view type, which the Rust plugin registers with the Binary Ninja core
|
/// The _Minidump_ binary view type, which the Rust plugin registers with the Binary Ninja core
|
||||||
/// (via `binaryninja::custombinaryview::register_view_type`) as a possible binary view
|
/// (via `binaryninja::custombinaryview::register_view_type`) as a possible binary view
|
||||||
/// that can be applied to opened binaries.
|
/// that can be applied to opened binaries.
|
||||||
@@ -141,9 +114,8 @@ impl MinidumpBinaryView {
|
|||||||
fn init(&self) -> BinaryViewResult<()> {
|
fn init(&self) -> BinaryViewResult<()> {
|
||||||
let parent_view = self.parent_view()?;
|
let parent_view = self.parent_view()?;
|
||||||
let read_buffer = parent_view.read_buffer(0, parent_view.len())?;
|
let read_buffer = parent_view.read_buffer(0, parent_view.len())?;
|
||||||
let read_buffer = DataBufferWrapper::new(read_buffer);
|
|
||||||
|
|
||||||
if let Ok(minidump_obj) = Minidump::read(read_buffer) {
|
if let Ok(minidump_obj) = Minidump::read(read_buffer.get_data()) {
|
||||||
// Architecture, platform information
|
// Architecture, platform information
|
||||||
if let Ok(minidump_system_info) = minidump_obj.get_stream::<MinidumpSystemInfo>() {
|
if let Ok(minidump_system_info) = minidump_obj.get_stream::<MinidumpSystemInfo>() {
|
||||||
if let Some(platform) = MinidumpBinaryView::translate_minidump_platform(
|
if let Some(platform) = MinidumpBinaryView::translate_minidump_platform(
|
||||||
|
|||||||
@@ -1,10 +1,9 @@
|
|||||||
use std::env;
|
use std::env;
|
||||||
|
|
||||||
|
use binaryninja::architecture::Intrinsic;
|
||||||
use binaryninja::binaryview::BinaryViewExt;
|
use binaryninja::binaryview::BinaryViewExt;
|
||||||
use binaryninja::mlil::MediumLevelILLiftedOperand;
|
use binaryninja::mlil::MediumLevelILLiftedOperand;
|
||||||
use binaryninja::mlil::{
|
use binaryninja::mlil::{MediumLevelILFunction, MediumLevelILLiftedInstruction};
|
||||||
MediumLevelILFunction, MediumLevelILLiftedInstruction, MediumLevelILLiftedInstructionKind,
|
|
||||||
};
|
|
||||||
use binaryninja::types::Variable;
|
use binaryninja::types::Variable;
|
||||||
|
|
||||||
fn print_indent(indent: usize) {
|
fn print_indent(indent: usize) {
|
||||||
@@ -12,139 +11,7 @@ fn print_indent(indent: usize) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn print_operation(operation: &MediumLevelILLiftedInstruction) {
|
fn print_operation(operation: &MediumLevelILLiftedInstruction) {
|
||||||
use MediumLevelILLiftedInstructionKind::*;
|
print!("{}", operation.name());
|
||||||
match operation.kind {
|
|
||||||
Nop => print!("Nop"),
|
|
||||||
Noret => print!("Noret"),
|
|
||||||
Bp => print!("Bp"),
|
|
||||||
Undef => print!("Undef"),
|
|
||||||
Unimpl => print!("Unimpl"),
|
|
||||||
If(_) => print!("If"),
|
|
||||||
FloatConst(_) => print!("FloatConst"),
|
|
||||||
Const(_) => print!("Const"),
|
|
||||||
ConstPtr(_) => print!("ConstPtr"),
|
|
||||||
Import(_) => print!("Import"),
|
|
||||||
ExternPtr(_) => print!("ExternPtr"),
|
|
||||||
ConstData(_) => print!("ConstData"),
|
|
||||||
Jump(_) => print!("Jump"),
|
|
||||||
RetHint(_) => print!("RetHint"),
|
|
||||||
StoreSsa(_) => print!("StoreSsa"),
|
|
||||||
StoreStructSsa(_) => print!("StoreStructSsa"),
|
|
||||||
StoreStruct(_) => print!("StoreStruct"),
|
|
||||||
Store(_) => print!("Store"),
|
|
||||||
JumpTo(_) => print!("JumpTo"),
|
|
||||||
Goto(_) => print!("Goto"),
|
|
||||||
FreeVarSlot(_) => print!("FreeVarSlot"),
|
|
||||||
SetVarField(_) => print!("SetVarField"),
|
|
||||||
SetVar(_) => print!("SetVar"),
|
|
||||||
FreeVarSlotSsa(_) => print!("FreeVarSlotSsa"),
|
|
||||||
SetVarSsaField(_) => print!("SetVarSsaField"),
|
|
||||||
SetVarAliasedField(_) => print!("SetVarAliasedField"),
|
|
||||||
SetVarAliased(_) => print!("SetVarAliased"),
|
|
||||||
SetVarSsa(_) => print!("SetVarSsa"),
|
|
||||||
VarPhi(_) => print!("VarPhi"),
|
|
||||||
MemPhi(_) => print!("MemPhi"),
|
|
||||||
VarSplit(_) => print!("VarSplit"),
|
|
||||||
SetVarSplit(_) => print!("SetVarSplit"),
|
|
||||||
VarSplitSsa(_) => print!("VarSplitSsa"),
|
|
||||||
SetVarSplitSsa(_) => print!("SetVarSplitSsa"),
|
|
||||||
Add(_) => print!("Add"),
|
|
||||||
Sub(_) => print!("Sub"),
|
|
||||||
And(_) => print!("And"),
|
|
||||||
Or(_) => print!("Or"),
|
|
||||||
Xor(_) => print!("Xor"),
|
|
||||||
Lsl(_) => print!("Lsl"),
|
|
||||||
Lsr(_) => print!("Lsr"),
|
|
||||||
Asr(_) => print!("Asr"),
|
|
||||||
Rol(_) => print!("Rol"),
|
|
||||||
Ror(_) => print!("Ror"),
|
|
||||||
Mul(_) => print!("Mul"),
|
|
||||||
MuluDp(_) => print!("MuluDp"),
|
|
||||||
MulsDp(_) => print!("MulsDp"),
|
|
||||||
Divu(_) => print!("Divu"),
|
|
||||||
DivuDp(_) => print!("DivuDp"),
|
|
||||||
Divs(_) => print!("Divs"),
|
|
||||||
DivsDp(_) => print!("DivsDp"),
|
|
||||||
Modu(_) => print!("Modu"),
|
|
||||||
ModuDp(_) => print!("ModuDp"),
|
|
||||||
Mods(_) => print!("Mods"),
|
|
||||||
ModsDp(_) => print!("ModsDp"),
|
|
||||||
CmpE(_) => print!("CmpE"),
|
|
||||||
CmpNe(_) => print!("CmpNe"),
|
|
||||||
CmpSlt(_) => print!("CmpSlt"),
|
|
||||||
CmpUlt(_) => print!("CmpUlt"),
|
|
||||||
CmpSle(_) => print!("CmpSle"),
|
|
||||||
CmpUle(_) => print!("CmpUle"),
|
|
||||||
CmpSge(_) => print!("CmpSge"),
|
|
||||||
CmpUge(_) => print!("CmpUge"),
|
|
||||||
CmpSgt(_) => print!("CmpSgt"),
|
|
||||||
CmpUgt(_) => print!("CmpUgt"),
|
|
||||||
TestBit(_) => print!("TestBit"),
|
|
||||||
AddOverflow(_) => print!("AddOverflow"),
|
|
||||||
FcmpE(_) => print!("FcmpE"),
|
|
||||||
FcmpNe(_) => print!("FcmpNe"),
|
|
||||||
FcmpLt(_) => print!("FcmpLt"),
|
|
||||||
FcmpLe(_) => print!("FcmpLe"),
|
|
||||||
FcmpGe(_) => print!("FcmpGe"),
|
|
||||||
FcmpGt(_) => print!("FcmpGt"),
|
|
||||||
FcmpO(_) => print!("FcmpO"),
|
|
||||||
FcmpUo(_) => print!("FcmpUo"),
|
|
||||||
Fadd(_) => print!("Fadd"),
|
|
||||||
Fsub(_) => print!("Fsub"),
|
|
||||||
Fmul(_) => print!("Fmul"),
|
|
||||||
Fdiv(_) => print!("Fdiv"),
|
|
||||||
Adc(_) => print!("Adc"),
|
|
||||||
Sbb(_) => print!("Sbb"),
|
|
||||||
Rlc(_) => print!("Rlc"),
|
|
||||||
Rrc(_) => print!("Rrc"),
|
|
||||||
Call(_) => print!("Call"),
|
|
||||||
Tailcall(_) => print!("Tailcall"),
|
|
||||||
Syscall(_) => print!("Syscall"),
|
|
||||||
Intrinsic(_) => print!("Intrinsic"),
|
|
||||||
IntrinsicSsa(_) => print!("IntrinsicSsa"),
|
|
||||||
CallSsa(_) => print!("CallSsa"),
|
|
||||||
TailcallSsa(_) => print!("TailcallSsa"),
|
|
||||||
CallUntypedSsa(_) => print!("CallUntypedSsa"),
|
|
||||||
TailcallUntypedSsa(_) => print!("TailcallUntypedSsa"),
|
|
||||||
SyscallSsa(_) => print!("SyscallSsa"),
|
|
||||||
SyscallUntypedSsa(_) => print!("SyscallUntypedSsa"),
|
|
||||||
CallUntyped(_) => print!("CallUntyped"),
|
|
||||||
TailcallUntyped(_) => print!("TailcallUntyped"),
|
|
||||||
SyscallUntyped(_) => print!("SyscallUntyped"),
|
|
||||||
SeparateParamList(_) => print!("SeparateParamList"),
|
|
||||||
SharedParamSlot(_) => print!("SharedParamSlot"),
|
|
||||||
Neg(_) => print!("Neg"),
|
|
||||||
Not(_) => print!("Not"),
|
|
||||||
Sx(_) => print!("Sx"),
|
|
||||||
Zx(_) => print!("Zx"),
|
|
||||||
LowPart(_) => print!("LowPart"),
|
|
||||||
BoolToInt(_) => print!("BoolToInt"),
|
|
||||||
UnimplMem(_) => print!("UnimplMem"),
|
|
||||||
Fsqrt(_) => print!("Fsqrt"),
|
|
||||||
Fneg(_) => print!("Fneg"),
|
|
||||||
Fabs(_) => print!("Fabs"),
|
|
||||||
FloatToInt(_) => print!("FloatToInt"),
|
|
||||||
IntToFloat(_) => print!("IntToFloat"),
|
|
||||||
FloatConv(_) => print!("FloatConv"),
|
|
||||||
RoundToInt(_) => print!("RoundToInt"),
|
|
||||||
Floor(_) => print!("Floor"),
|
|
||||||
Ceil(_) => print!("Ceil"),
|
|
||||||
Ftrunc(_) => print!("Ftrunc"),
|
|
||||||
Load(_) => print!("Load"),
|
|
||||||
LoadStruct(_) => print!("LoadStruct"),
|
|
||||||
LoadStructSsa(_) => print!("LoadStructSsa"),
|
|
||||||
LoadSsa(_) => print!("LoadSsa"),
|
|
||||||
Ret(_) => print!("Ret"),
|
|
||||||
Var(_) => print!("Var"),
|
|
||||||
AddressOf(_) => print!("AddressOf"),
|
|
||||||
VarField(_) => print!("VarField"),
|
|
||||||
AddressOfField(_) => print!("AddressOfField"),
|
|
||||||
VarSsa(_) => print!("VarSsa"),
|
|
||||||
VarAliased(_) => print!("VarAliased"),
|
|
||||||
VarSsaField(_) => print!("VarSsaField"),
|
|
||||||
VarAliasedField(_) => print!("VarAliasedField"),
|
|
||||||
Trap(_) => print!("Trap"),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn print_variable(func: &MediumLevelILFunction, var: &Variable) {
|
fn print_variable(func: &MediumLevelILFunction, var: &Variable) {
|
||||||
@@ -154,7 +21,7 @@ fn print_variable(func: &MediumLevelILFunction, var: &Variable) {
|
|||||||
fn print_il_expr(instr: &MediumLevelILLiftedInstruction, mut indent: usize) {
|
fn print_il_expr(instr: &MediumLevelILLiftedInstruction, mut indent: usize) {
|
||||||
print_indent(indent);
|
print_indent(indent);
|
||||||
print_operation(instr);
|
print_operation(instr);
|
||||||
println!("");
|
println!();
|
||||||
|
|
||||||
indent += 1;
|
indent += 1;
|
||||||
|
|
||||||
|
|||||||
1
examples/pdb-ng/.gitignore
vendored
Normal file
1
examples/pdb-ng/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
target
|
||||||
138
examples/pdb-ng/CMakeLists.txt
Normal file
138
examples/pdb-ng/CMakeLists.txt
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
cmake_minimum_required(VERSION 3.9 FATAL_ERROR)
|
||||||
|
|
||||||
|
project(pdb_import_plugin)
|
||||||
|
|
||||||
|
file(GLOB PLUGIN_SOURCES CONFIGURE_DEPENDS
|
||||||
|
${PROJECT_SOURCE_DIR}/Cargo.toml
|
||||||
|
${PROJECT_SOURCE_DIR}/src/*.rs)
|
||||||
|
|
||||||
|
file(GLOB_RECURSE API_SOURCES CONFIGURE_DEPENDS
|
||||||
|
${PROJECT_SOURCE_DIR}/../../../binaryninjacore.h
|
||||||
|
${PROJECT_SOURCE_DIR}/../../binaryninjacore-sys/build.rs
|
||||||
|
${PROJECT_SOURCE_DIR}/../../binaryninjacore-sys/Cargo.toml
|
||||||
|
${PROJECT_SOURCE_DIR}/../../binaryninjacore-sys/src/*
|
||||||
|
${PROJECT_SOURCE_DIR}/../../Cargo.toml
|
||||||
|
${PROJECT_SOURCE_DIR}/../../src/*.rs)
|
||||||
|
|
||||||
|
if(CMAKE_BUILD_TYPE MATCHES Debug)
|
||||||
|
set(TARGET_DIR ${PROJECT_BINARY_DIR}/target/debug)
|
||||||
|
set(CARGO_OPTS --target-dir=${PROJECT_BINARY_DIR}/target)
|
||||||
|
else()
|
||||||
|
set(TARGET_DIR ${PROJECT_BINARY_DIR}/target/release)
|
||||||
|
set(CARGO_OPTS --target-dir=${PROJECT_BINARY_DIR}/target --release)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(FORCE_COLORED_OUTPUT)
|
||||||
|
set(CARGO_OPTS ${CARGO_OPTS} --color always)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
if(DEMO)
|
||||||
|
set(CARGO_FEATURES --features demo --manifest-path ${PROJECT_SOURCE_DIR}/demo/Cargo.toml)
|
||||||
|
|
||||||
|
set(OUTPUT_FILE_NAME ${CMAKE_STATIC_LIBRARY_PREFIX}${PROJECT_NAME}_static${CMAKE_STATIC_LIBRARY_SUFFIX})
|
||||||
|
set(OUTPUT_PDB_NAME ${CMAKE_STATIC_LIBRARY_PREFIX}${PROJECT_NAME}.pdb)
|
||||||
|
set(OUTPUT_FILE_PATH ${CMAKE_BINARY_DIR}/${OUTPUT_FILE_NAME})
|
||||||
|
set(OUTPUT_PDB_PATH ${CMAKE_BINARY_DIR}/${OUTPUT_PDB_NAME})
|
||||||
|
|
||||||
|
set(BINJA_LIB_DIR $<TARGET_FILE_DIR:binaryninjacore>)
|
||||||
|
else()
|
||||||
|
set(CARGO_FEATURES "")
|
||||||
|
|
||||||
|
set(OUTPUT_FILE_NAME ${CMAKE_SHARED_LIBRARY_PREFIX}${PROJECT_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
|
||||||
|
set(OUTPUT_PDB_NAME ${CMAKE_SHARED_LIBRARY_PREFIX}${PROJECT_NAME}.pdb)
|
||||||
|
set(OUTPUT_FILE_PATH ${BN_CORE_PLUGIN_DIR}/${OUTPUT_FILE_NAME})
|
||||||
|
set(OUTPUT_PDB_PATH ${BN_CORE_PLUGIN_DIR}/${OUTPUT_PDB_NAME})
|
||||||
|
|
||||||
|
set(BINJA_LIB_DIR ${BN_INSTALL_BIN_DIR})
|
||||||
|
endif()
|
||||||
|
|
||||||
|
add_custom_target(${PROJECT_NAME} ALL DEPENDS ${OUTPUT_FILE_PATH})
|
||||||
|
add_dependencies(${PROJECT_NAME} binaryninjaapi)
|
||||||
|
get_target_property(BN_API_SOURCE_DIR binaryninjaapi SOURCE_DIR)
|
||||||
|
list(APPEND CMAKE_MODULE_PATH "${BN_API_SOURCE_DIR}/cmake")
|
||||||
|
find_package(BinaryNinjaCore REQUIRED)
|
||||||
|
|
||||||
|
set_property(TARGET ${PROJECT_NAME} PROPERTY OUTPUT_FILE_PATH ${OUTPUT_FILE_PATH})
|
||||||
|
|
||||||
|
find_program(RUSTUP_PATH rustup REQUIRED HINTS ~/.cargo/bin)
|
||||||
|
set(RUSTUP_COMMAND ${RUSTUP_PATH} run ${CARGO_STABLE_VERSION} cargo)
|
||||||
|
|
||||||
|
if(APPLE)
|
||||||
|
if(UNIVERSAL)
|
||||||
|
if(CMAKE_BUILD_TYPE MATCHES Debug)
|
||||||
|
set(AARCH64_LIB_PATH ${PROJECT_BINARY_DIR}/target/aarch64-apple-darwin/debug/${OUTPUT_FILE_NAME})
|
||||||
|
set(X86_64_LIB_PATH ${PROJECT_BINARY_DIR}/target/x86_64-apple-darwin/debug/${OUTPUT_FILE_NAME})
|
||||||
|
else()
|
||||||
|
set(AARCH64_LIB_PATH ${PROJECT_BINARY_DIR}/target/aarch64-apple-darwin/release/${OUTPUT_FILE_NAME})
|
||||||
|
set(X86_64_LIB_PATH ${PROJECT_BINARY_DIR}/target/x86_64-apple-darwin/release/${OUTPUT_FILE_NAME})
|
||||||
|
endif()
|
||||||
|
|
||||||
|
add_custom_command(
|
||||||
|
OUTPUT ${OUTPUT_FILE_PATH}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E env
|
||||||
|
MACOSX_DEPLOYMENT_TARGET=10.14 BINARYNINJADIR=${BINJA_LIB_DIR}
|
||||||
|
${RUSTUP_COMMAND} clean --target=aarch64-apple-darwin ${CARGO_OPTS}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E env
|
||||||
|
MACOSX_DEPLOYMENT_TARGET=10.14 BINARYNINJADIR=${BINJA_LIB_DIR}
|
||||||
|
${RUSTUP_COMMAND} clean --target=x86_64-apple-darwin ${CARGO_OPTS}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E env
|
||||||
|
MACOSX_DEPLOYMENT_TARGET=10.14 BINARYNINJADIR=${BINJA_LIB_DIR}
|
||||||
|
${RUSTUP_COMMAND} build --target=aarch64-apple-darwin ${CARGO_OPTS} ${CARGO_FEATURES}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E env
|
||||||
|
MACOSX_DEPLOYMENT_TARGET=10.14 BINARYNINJADIR=${BINJA_LIB_DIR}
|
||||||
|
${RUSTUP_COMMAND} build --target=x86_64-apple-darwin ${CARGO_OPTS} ${CARGO_FEATURES}
|
||||||
|
COMMAND lipo -create ${AARCH64_LIB_PATH} ${X86_64_LIB_PATH} -output ${OUTPUT_FILE_PATH}
|
||||||
|
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
|
||||||
|
DEPENDS ${PLUGIN_SOURCES} ${API_SOURCES}
|
||||||
|
)
|
||||||
|
else()
|
||||||
|
if(CMAKE_BUILD_TYPE MATCHES Debug)
|
||||||
|
set(LIB_PATH ${PROJECT_BINARY_DIR}/target/debug/${OUTPUT_FILE_NAME})
|
||||||
|
else()
|
||||||
|
set(LIB_PATH ${PROJECT_BINARY_DIR}/target/release/${OUTPUT_FILE_NAME})
|
||||||
|
endif()
|
||||||
|
|
||||||
|
add_custom_command(
|
||||||
|
OUTPUT ${OUTPUT_FILE_PATH}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E env
|
||||||
|
MACOSX_DEPLOYMENT_TARGET=10.14 BINARYNINJADIR=${BINJA_LIB_DIR}
|
||||||
|
${RUSTUP_COMMAND} clean ${CARGO_OPTS}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E env
|
||||||
|
MACOSX_DEPLOYMENT_TARGET=10.14 BINARYNINJADIR=${BINJA_LIB_DIR}
|
||||||
|
${RUSTUP_COMMAND} build ${CARGO_OPTS} ${CARGO_FEATURES}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E copy ${LIB_PATH} ${OUTPUT_FILE_PATH}
|
||||||
|
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
|
||||||
|
DEPENDS ${PLUGIN_SOURCES} ${API_SOURCES}
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
elseif(WIN32)
|
||||||
|
if(DEMO)
|
||||||
|
add_custom_command(
|
||||||
|
OUTPUT ${OUTPUT_FILE_PATH}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E env BINARYNINJADIR=${BINJA_LIB_DIR} ${RUSTUP_COMMAND} clean ${CARGO_OPTS}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E env BINARYNINJADIR=${BINJA_LIB_DIR} ${RUSTUP_COMMAND} build ${CARGO_OPTS} ${CARGO_FEATURES}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E copy ${TARGET_DIR}/${OUTPUT_FILE_NAME} ${OUTPUT_FILE_PATH}
|
||||||
|
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
|
||||||
|
DEPENDS ${PLUGIN_SOURCES} ${API_SOURCES}
|
||||||
|
)
|
||||||
|
else()
|
||||||
|
add_custom_command(
|
||||||
|
OUTPUT ${OUTPUT_FILE_PATH}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E env BINARYNINJADIR=${BINJA_LIB_DIR} ${RUSTUP_COMMAND} clean ${CARGO_OPTS}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E env BINARYNINJADIR=${BINJA_LIB_DIR} ${RUSTUP_COMMAND} build ${CARGO_OPTS} ${CARGO_FEATURES}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E copy ${TARGET_DIR}/${OUTPUT_FILE_NAME} ${OUTPUT_FILE_PATH}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E copy ${TARGET_DIR}/${OUTPUT_PDB_NAME} ${OUTPUT_PDB_PATH}
|
||||||
|
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
|
||||||
|
DEPENDS ${PLUGIN_SOURCES} ${API_SOURCES}
|
||||||
|
)
|
||||||
|
endif()
|
||||||
|
else()
|
||||||
|
add_custom_command(
|
||||||
|
OUTPUT ${OUTPUT_FILE_PATH}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E env BINARYNINJADIR=${BINJA_LIB_DIR} ${RUSTUP_COMMAND} clean ${CARGO_OPTS}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E env BINARYNINJADIR=${BINJA_LIB_DIR} ${RUSTUP_COMMAND} build ${CARGO_OPTS} ${CARGO_FEATURES}
|
||||||
|
COMMAND ${CMAKE_COMMAND} -E copy ${TARGET_DIR}/${OUTPUT_FILE_NAME} ${OUTPUT_FILE_PATH}
|
||||||
|
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
|
||||||
|
DEPENDS ${PLUGIN_SOURCES} ${API_SOURCES}
|
||||||
|
)
|
||||||
|
endif()
|
||||||
540
examples/pdb-ng/Cargo.lock
generated
Normal file
540
examples/pdb-ng/Cargo.lock
generated
Normal file
@@ -0,0 +1,540 @@
|
|||||||
|
# This file is automatically @generated by Cargo.
|
||||||
|
# It is not intended for manual editing.
|
||||||
|
version = 3
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "adler"
|
||||||
|
version = "1.0.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "aho-corasick"
|
||||||
|
version = "1.1.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
|
||||||
|
dependencies = [
|
||||||
|
"memchr",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "anyhow"
|
||||||
|
version = "1.0.81"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "binaryninja"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"binaryninjacore-sys",
|
||||||
|
"lazy_static",
|
||||||
|
"libc",
|
||||||
|
"log",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "binaryninjacore-sys"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"bindgen",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bindgen"
|
||||||
|
version = "0.68.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "726e4313eb6ec35d2730258ad4e15b547ee75d6afaa1361a922e78e59b7d8078"
|
||||||
|
dependencies = [
|
||||||
|
"bitflags",
|
||||||
|
"cexpr",
|
||||||
|
"clang-sys",
|
||||||
|
"lazy_static",
|
||||||
|
"lazycell",
|
||||||
|
"log",
|
||||||
|
"peeking_take_while",
|
||||||
|
"prettyplease",
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"regex",
|
||||||
|
"rustc-hash",
|
||||||
|
"shlex",
|
||||||
|
"syn",
|
||||||
|
"which",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bitflags"
|
||||||
|
version = "2.4.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "byteorder"
|
||||||
|
version = "1.5.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cab"
|
||||||
|
version = "0.4.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ae6b4de23c7d39c0631fd3cc952d87951c86c75a13812d7247cb7a896e7b3551"
|
||||||
|
dependencies = [
|
||||||
|
"byteorder",
|
||||||
|
"flate2",
|
||||||
|
"lzxd",
|
||||||
|
"time",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cexpr"
|
||||||
|
version = "0.6.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
|
||||||
|
dependencies = [
|
||||||
|
"nom",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cfg-if"
|
||||||
|
version = "1.0.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "clang-sys"
|
||||||
|
version = "1.7.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "67523a3b4be3ce1989d607a828d036249522dd9c1c8de7f4dd2dae43a37369d1"
|
||||||
|
dependencies = [
|
||||||
|
"glob",
|
||||||
|
"libc",
|
||||||
|
"libloading",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "crc32fast"
|
||||||
|
version = "1.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "deranged"
|
||||||
|
version = "0.3.11"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4"
|
||||||
|
dependencies = [
|
||||||
|
"powerfmt",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "either"
|
||||||
|
version = "1.10.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "errno"
|
||||||
|
version = "0.3.8"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
|
||||||
|
dependencies = [
|
||||||
|
"libc",
|
||||||
|
"windows-sys",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fallible-iterator"
|
||||||
|
version = "0.2.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "flate2"
|
||||||
|
version = "1.0.28"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e"
|
||||||
|
dependencies = [
|
||||||
|
"crc32fast",
|
||||||
|
"miniz_oxide",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "glob"
|
||||||
|
version = "0.3.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "home"
|
||||||
|
version = "0.5.9"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
|
||||||
|
dependencies = [
|
||||||
|
"windows-sys",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "itertools"
|
||||||
|
version = "0.11.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
|
||||||
|
dependencies = [
|
||||||
|
"either",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "lazy_static"
|
||||||
|
version = "1.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "lazycell"
|
||||||
|
version = "1.3.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "libc"
|
||||||
|
version = "0.2.153"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "libloading"
|
||||||
|
version = "0.8.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"windows-targets",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "linux-raw-sys"
|
||||||
|
version = "0.4.13"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "log"
|
||||||
|
version = "0.4.21"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "lzxd"
|
||||||
|
version = "0.1.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "784462f20dddd9dfdb45de963fa4ad4a288cb10a7889ac5d2c34fb6481c6b213"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "memchr"
|
||||||
|
version = "2.7.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "minimal-lexical"
|
||||||
|
version = "0.2.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "miniz_oxide"
|
||||||
|
version = "0.7.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7"
|
||||||
|
dependencies = [
|
||||||
|
"adler",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nom"
|
||||||
|
version = "7.1.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
|
||||||
|
dependencies = [
|
||||||
|
"memchr",
|
||||||
|
"minimal-lexical",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "num-conv"
|
||||||
|
version = "0.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "once_cell"
|
||||||
|
version = "1.19.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pdb"
|
||||||
|
version = "0.8.0"
|
||||||
|
dependencies = [
|
||||||
|
"fallible-iterator",
|
||||||
|
"scroll",
|
||||||
|
"uuid",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pdb-import-plugin"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"binaryninja",
|
||||||
|
"cab",
|
||||||
|
"home",
|
||||||
|
"itertools",
|
||||||
|
"log",
|
||||||
|
"pdb",
|
||||||
|
"regex",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "peeking_take_while"
|
||||||
|
version = "0.1.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "powerfmt"
|
||||||
|
version = "0.2.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "prettyplease"
|
||||||
|
version = "0.2.16"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a41cf62165e97c7f814d2221421dbb9afcbcdb0a88068e5ea206e19951c2cbb5"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"syn",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "proc-macro2"
|
||||||
|
version = "1.0.79"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e"
|
||||||
|
dependencies = [
|
||||||
|
"unicode-ident",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "quote"
|
||||||
|
version = "1.0.35"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "regex"
|
||||||
|
version = "1.10.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15"
|
||||||
|
dependencies = [
|
||||||
|
"aho-corasick",
|
||||||
|
"memchr",
|
||||||
|
"regex-automata",
|
||||||
|
"regex-syntax",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "regex-automata"
|
||||||
|
version = "0.4.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea"
|
||||||
|
dependencies = [
|
||||||
|
"aho-corasick",
|
||||||
|
"memchr",
|
||||||
|
"regex-syntax",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "regex-syntax"
|
||||||
|
version = "0.8.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rustc-hash"
|
||||||
|
version = "1.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rustix"
|
||||||
|
version = "0.38.31"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949"
|
||||||
|
dependencies = [
|
||||||
|
"bitflags",
|
||||||
|
"errno",
|
||||||
|
"libc",
|
||||||
|
"linux-raw-sys",
|
||||||
|
"windows-sys",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "scroll"
|
||||||
|
version = "0.11.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "04c565b551bafbef4157586fa379538366e4385d42082f255bfd96e4fe8519da"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde"
|
||||||
|
version = "1.0.197"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
|
||||||
|
dependencies = [
|
||||||
|
"serde_derive",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde_derive"
|
||||||
|
version = "1.0.197"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "shlex"
|
||||||
|
version = "1.3.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "syn"
|
||||||
|
version = "2.0.52"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"unicode-ident",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "time"
|
||||||
|
version = "0.3.34"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749"
|
||||||
|
dependencies = [
|
||||||
|
"deranged",
|
||||||
|
"num-conv",
|
||||||
|
"powerfmt",
|
||||||
|
"serde",
|
||||||
|
"time-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "time-core"
|
||||||
|
version = "0.1.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-ident"
|
||||||
|
version = "1.0.12"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "uuid"
|
||||||
|
version = "1.7.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "which"
|
||||||
|
version = "4.4.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7"
|
||||||
|
dependencies = [
|
||||||
|
"either",
|
||||||
|
"home",
|
||||||
|
"once_cell",
|
||||||
|
"rustix",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows-sys"
|
||||||
|
version = "0.52.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||||
|
dependencies = [
|
||||||
|
"windows-targets",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows-targets"
|
||||||
|
version = "0.52.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b"
|
||||||
|
dependencies = [
|
||||||
|
"windows_aarch64_gnullvm",
|
||||||
|
"windows_aarch64_msvc",
|
||||||
|
"windows_i686_gnu",
|
||||||
|
"windows_i686_msvc",
|
||||||
|
"windows_x86_64_gnu",
|
||||||
|
"windows_x86_64_gnullvm",
|
||||||
|
"windows_x86_64_msvc",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_aarch64_gnullvm"
|
||||||
|
version = "0.52.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_aarch64_msvc"
|
||||||
|
version = "0.52.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_gnu"
|
||||||
|
version = "0.52.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_msvc"
|
||||||
|
version = "0.52.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_gnu"
|
||||||
|
version = "0.52.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_gnullvm"
|
||||||
|
version = "0.52.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_msvc"
|
||||||
|
version = "0.52.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
|
||||||
20
examples/pdb-ng/Cargo.toml
Normal file
20
examples/pdb-ng/Cargo.toml
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
[package]
|
||||||
|
name = "pdb-import-plugin"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
[lib]
|
||||||
|
crate-type = ["cdylib"]
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
anyhow = "^1.0"
|
||||||
|
binaryninja = {path = "../../"}
|
||||||
|
home = "^0.5.5"
|
||||||
|
itertools = "^0.11"
|
||||||
|
log = "^0.4"
|
||||||
|
pdb = "^0.8"
|
||||||
|
cab = "^0.4"
|
||||||
|
regex = "1"
|
||||||
|
|
||||||
|
[features]
|
||||||
|
demo = []
|
||||||
555
examples/pdb-ng/demo/Cargo.lock
generated
Normal file
555
examples/pdb-ng/demo/Cargo.lock
generated
Normal file
@@ -0,0 +1,555 @@
|
|||||||
|
# This file is automatically @generated by Cargo.
|
||||||
|
# It is not intended for manual editing.
|
||||||
|
version = 3
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "adler"
|
||||||
|
version = "1.0.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "aho-corasick"
|
||||||
|
version = "1.1.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
|
||||||
|
dependencies = [
|
||||||
|
"memchr",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "anyhow"
|
||||||
|
version = "1.0.75"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "binaryninja"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"binaryninjacore-sys",
|
||||||
|
"lazy_static",
|
||||||
|
"libc",
|
||||||
|
"log",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "binaryninjacore-sys"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"bindgen",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bindgen"
|
||||||
|
version = "0.68.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "726e4313eb6ec35d2730258ad4e15b547ee75d6afaa1361a922e78e59b7d8078"
|
||||||
|
dependencies = [
|
||||||
|
"bitflags",
|
||||||
|
"cexpr",
|
||||||
|
"clang-sys",
|
||||||
|
"lazy_static",
|
||||||
|
"lazycell",
|
||||||
|
"log",
|
||||||
|
"peeking_take_while",
|
||||||
|
"prettyplease",
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"regex",
|
||||||
|
"rustc-hash",
|
||||||
|
"shlex",
|
||||||
|
"syn",
|
||||||
|
"which",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bitflags"
|
||||||
|
version = "2.4.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "byteorder"
|
||||||
|
version = "1.5.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cab"
|
||||||
|
version = "0.4.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ae6b4de23c7d39c0631fd3cc952d87951c86c75a13812d7247cb7a896e7b3551"
|
||||||
|
dependencies = [
|
||||||
|
"byteorder",
|
||||||
|
"flate2",
|
||||||
|
"lzxd",
|
||||||
|
"time",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cexpr"
|
||||||
|
version = "0.6.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
|
||||||
|
dependencies = [
|
||||||
|
"nom",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cfg-if"
|
||||||
|
version = "1.0.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "clang-sys"
|
||||||
|
version = "1.6.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c688fc74432808e3eb684cae8830a86be1d66a2bd58e1f248ed0960a590baf6f"
|
||||||
|
dependencies = [
|
||||||
|
"glob",
|
||||||
|
"libc",
|
||||||
|
"libloading",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "crc32fast"
|
||||||
|
version = "1.3.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "deranged"
|
||||||
|
version = "0.3.9"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3"
|
||||||
|
dependencies = [
|
||||||
|
"powerfmt",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "either"
|
||||||
|
version = "1.9.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "errno"
|
||||||
|
version = "0.3.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860"
|
||||||
|
dependencies = [
|
||||||
|
"libc",
|
||||||
|
"windows-sys",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fallible-iterator"
|
||||||
|
version = "0.2.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "flate2"
|
||||||
|
version = "1.0.28"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e"
|
||||||
|
dependencies = [
|
||||||
|
"crc32fast",
|
||||||
|
"miniz_oxide",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "glob"
|
||||||
|
version = "0.3.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "home"
|
||||||
|
version = "0.5.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb"
|
||||||
|
dependencies = [
|
||||||
|
"windows-sys",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "itertools"
|
||||||
|
version = "0.11.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
|
||||||
|
dependencies = [
|
||||||
|
"either",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "lazy_static"
|
||||||
|
version = "1.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "lazycell"
|
||||||
|
version = "1.3.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "libc"
|
||||||
|
version = "0.2.149"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "libloading"
|
||||||
|
version = "0.7.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"winapi",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "linux-raw-sys"
|
||||||
|
version = "0.4.10"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "log"
|
||||||
|
version = "0.4.20"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "lzxd"
|
||||||
|
version = "0.1.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "784462f20dddd9dfdb45de963fa4ad4a288cb10a7889ac5d2c34fb6481c6b213"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "memchr"
|
||||||
|
version = "2.6.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "minimal-lexical"
|
||||||
|
version = "0.2.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "miniz_oxide"
|
||||||
|
version = "0.7.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
|
||||||
|
dependencies = [
|
||||||
|
"adler",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nom"
|
||||||
|
version = "7.1.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
|
||||||
|
dependencies = [
|
||||||
|
"memchr",
|
||||||
|
"minimal-lexical",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "once_cell"
|
||||||
|
version = "1.18.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pdb"
|
||||||
|
version = "0.8.0"
|
||||||
|
dependencies = [
|
||||||
|
"fallible-iterator",
|
||||||
|
"scroll",
|
||||||
|
"uuid",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pdb-import-plugin"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"binaryninja",
|
||||||
|
"cab",
|
||||||
|
"home",
|
||||||
|
"itertools",
|
||||||
|
"log",
|
||||||
|
"pdb",
|
||||||
|
"regex",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "peeking_take_while"
|
||||||
|
version = "0.1.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "powerfmt"
|
||||||
|
version = "0.2.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "prettyplease"
|
||||||
|
version = "0.2.15"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"syn",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "proc-macro2"
|
||||||
|
version = "1.0.69"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
|
||||||
|
dependencies = [
|
||||||
|
"unicode-ident",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "quote"
|
||||||
|
version = "1.0.33"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "regex"
|
||||||
|
version = "1.10.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"
|
||||||
|
dependencies = [
|
||||||
|
"aho-corasick",
|
||||||
|
"memchr",
|
||||||
|
"regex-automata",
|
||||||
|
"regex-syntax",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "regex-automata"
|
||||||
|
version = "0.4.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"
|
||||||
|
dependencies = [
|
||||||
|
"aho-corasick",
|
||||||
|
"memchr",
|
||||||
|
"regex-syntax",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "regex-syntax"
|
||||||
|
version = "0.8.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rustc-hash"
|
||||||
|
version = "1.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rustix"
|
||||||
|
version = "0.38.19"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed"
|
||||||
|
dependencies = [
|
||||||
|
"bitflags",
|
||||||
|
"errno",
|
||||||
|
"libc",
|
||||||
|
"linux-raw-sys",
|
||||||
|
"windows-sys",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "scroll"
|
||||||
|
version = "0.11.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "04c565b551bafbef4157586fa379538366e4385d42082f255bfd96e4fe8519da"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde"
|
||||||
|
version = "1.0.189"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8e422a44e74ad4001bdc8eede9a4570ab52f71190e9c076d14369f38b9200537"
|
||||||
|
dependencies = [
|
||||||
|
"serde_derive",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde_derive"
|
||||||
|
version = "1.0.189"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1e48d1f918009ce3145511378cf68d613e3b3d9137d67272562080d68a2b32d5"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "shlex"
|
||||||
|
version = "1.2.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a7cee0529a6d40f580e7a5e6c495c8fbfe21b7b52795ed4bb5e62cdf92bc6380"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "syn"
|
||||||
|
version = "2.0.38"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"unicode-ident",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "time"
|
||||||
|
version = "0.3.30"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5"
|
||||||
|
dependencies = [
|
||||||
|
"deranged",
|
||||||
|
"powerfmt",
|
||||||
|
"serde",
|
||||||
|
"time-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "time-core"
|
||||||
|
version = "0.1.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-ident"
|
||||||
|
version = "1.0.12"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "uuid"
|
||||||
|
version = "1.4.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "which"
|
||||||
|
version = "4.4.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7"
|
||||||
|
dependencies = [
|
||||||
|
"either",
|
||||||
|
"home",
|
||||||
|
"once_cell",
|
||||||
|
"rustix",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "winapi"
|
||||||
|
version = "0.3.9"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
|
||||||
|
dependencies = [
|
||||||
|
"winapi-i686-pc-windows-gnu",
|
||||||
|
"winapi-x86_64-pc-windows-gnu",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "winapi-i686-pc-windows-gnu"
|
||||||
|
version = "0.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "winapi-x86_64-pc-windows-gnu"
|
||||||
|
version = "0.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows-sys"
|
||||||
|
version = "0.48.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
|
||||||
|
dependencies = [
|
||||||
|
"windows-targets",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows-targets"
|
||||||
|
version = "0.48.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
|
||||||
|
dependencies = [
|
||||||
|
"windows_aarch64_gnullvm",
|
||||||
|
"windows_aarch64_msvc",
|
||||||
|
"windows_i686_gnu",
|
||||||
|
"windows_i686_msvc",
|
||||||
|
"windows_x86_64_gnu",
|
||||||
|
"windows_x86_64_gnullvm",
|
||||||
|
"windows_x86_64_msvc",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_aarch64_gnullvm"
|
||||||
|
version = "0.48.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_aarch64_msvc"
|
||||||
|
version = "0.48.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_gnu"
|
||||||
|
version = "0.48.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_msvc"
|
||||||
|
version = "0.48.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_gnu"
|
||||||
|
version = "0.48.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_gnullvm"
|
||||||
|
version = "0.48.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_msvc"
|
||||||
|
version = "0.48.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
|
||||||
21
examples/pdb-ng/demo/Cargo.toml
Normal file
21
examples/pdb-ng/demo/Cargo.toml
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
[package]
|
||||||
|
name = "pdb-import-plugin-static"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
[lib]
|
||||||
|
crate-type = ["staticlib"]
|
||||||
|
path = "../src/lib.rs"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
anyhow = "^1.0"
|
||||||
|
binaryninja = {path = "../../../"}
|
||||||
|
home = "^0.5.5"
|
||||||
|
itertools = "^0.11"
|
||||||
|
log = "^0.4"
|
||||||
|
pdb = "^0.8"
|
||||||
|
cab = "^0.4"
|
||||||
|
regex = "1"
|
||||||
|
|
||||||
|
[features]
|
||||||
|
demo = []
|
||||||
937
examples/pdb-ng/src/lib.rs
Normal file
937
examples/pdb-ng/src/lib.rs
Normal file
@@ -0,0 +1,937 @@
|
|||||||
|
// Copyright 2022-2024 Vector 35 Inc.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::env::{current_dir, current_exe, temp_dir};
|
||||||
|
use std::io::Cursor;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::str::FromStr;
|
||||||
|
use std::sync::mpsc;
|
||||||
|
use std::{env, fs};
|
||||||
|
|
||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use log::{debug, error, info, LevelFilter};
|
||||||
|
use pdb::PDB;
|
||||||
|
|
||||||
|
use binaryninja::binaryview::{BinaryView, BinaryViewBase, BinaryViewExt};
|
||||||
|
use binaryninja::debuginfo::{CustomDebugInfoParser, DebugInfo, DebugInfoParser};
|
||||||
|
use binaryninja::downloadprovider::{DownloadInstanceInputOutputCallbacks, DownloadProvider};
|
||||||
|
use binaryninja::interaction::{MessageBoxButtonResult, MessageBoxButtonSet};
|
||||||
|
use binaryninja::settings::Settings;
|
||||||
|
use binaryninja::string::BnString;
|
||||||
|
use binaryninja::{add_optional_plugin_dependency, interaction, logger, user_directory};
|
||||||
|
use parser::PDBParserInstance;
|
||||||
|
|
||||||
|
/// PDB Parser!!
|
||||||
|
///
|
||||||
|
/// General project structure:
|
||||||
|
/// - lib.rs: Interaction with DebugInfoParser and plugin actions
|
||||||
|
/// - parser.rs: PDB Parser base functionality, puts the internal structures into the DebugInfo
|
||||||
|
/// - type_parser.rs: Parses all the TPI type stream information into both named and indexed types
|
||||||
|
/// - symbol_parser.rs: Parses, one module at a time, symbol information into named symbols
|
||||||
|
/// - struct_grouper.rs: Ugly algorithm for handling union and structure members
|
||||||
|
mod parser;
|
||||||
|
mod struct_grouper;
|
||||||
|
mod symbol_parser;
|
||||||
|
mod type_parser;
|
||||||
|
|
||||||
|
// struct PDBLoad;
|
||||||
|
// struct PDBLoadFile;
|
||||||
|
// struct PDBSetSymbolPath;
|
||||||
|
|
||||||
|
/// Identity of the PDB a PE binary claims it was linked against, as recovered
/// from the metadata the PE loader stashes in the view (see `parse_pdb_info`).
#[allow(dead_code)]
struct PDBInfo {
    // Full path to the PDB as recorded in the binary's debug directory
    // (separators normalized to '/' on non-Windows hosts).
    path: String,
    // File name component of `path`.
    file_name: String,
    // PDB age field (incremented each time the PDB is rewritten).
    age: u32,
    // 16-byte GUID, already endian-swapped into display order.
    guid: Vec<u8>,
    // Uppercase hex GUID immediately followed by the hex age; used as the
    // lookup directory name in symbol stores.
    guid_age_string: String,
}
|
||||||
|
|
||||||
|
fn is_pdb(view: &BinaryView) -> bool {
|
||||||
|
let pdb_magic_bytes = "Microsoft C/C++ MSF 7.00\r\n\x1A\x44\x53\x00\x00\x00";
|
||||||
|
if let Ok(raw_view) = view.raw_view() {
|
||||||
|
raw_view.read_vec(0, pdb_magic_bytes.len()) == pdb_magic_bytes.as_bytes()
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_local_cache() -> Result<String> {
|
||||||
|
// The default value is a directory named "sym" immediately below the program directory
|
||||||
|
// of the calling application. This is sometimes referred to as the default local cache.
|
||||||
|
let current_path = current_exe()?;
|
||||||
|
let parent_path = current_path
|
||||||
|
.parent()
|
||||||
|
.ok_or_else(|| anyhow!("No parent to current exe"))?;
|
||||||
|
let mut cache_path = PathBuf::from(parent_path);
|
||||||
|
cache_path.push("sym");
|
||||||
|
return Ok(cache_path
|
||||||
|
.to_str()
|
||||||
|
.ok_or_else(|| anyhow!("Could not convert cache path to string"))?
|
||||||
|
.to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
fn active_local_cache(view: Option<&BinaryView>) -> Result<String> {
|
||||||
|
// Check the local symbol store
|
||||||
|
let mut local_store_path = Settings::new("")
|
||||||
|
.get_string("pdb.files.localStoreAbsolute", view, None)
|
||||||
|
.to_string();
|
||||||
|
if local_store_path.is_empty() {
|
||||||
|
local_store_path = match user_directory() {
|
||||||
|
Ok(mut dir) => {
|
||||||
|
dir.push(
|
||||||
|
Settings::new("")
|
||||||
|
.get_string("pdb.files.localStoreRelative", view, None)
|
||||||
|
.to_string(),
|
||||||
|
);
|
||||||
|
match dir.to_str() {
|
||||||
|
Some(s) => s.to_string(),
|
||||||
|
_ => "".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => "".to_string(),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if !local_store_path.is_empty() {
|
||||||
|
Ok(local_store_path)
|
||||||
|
} else if let Ok(default_cache) = default_local_cache() {
|
||||||
|
Ok(default_cache)
|
||||||
|
} else if let Ok(current) = current_dir().map(|d| {
|
||||||
|
d.to_str()
|
||||||
|
.expect("Expected current dir to be a valid string")
|
||||||
|
.to_string()
|
||||||
|
}) {
|
||||||
|
Ok(current)
|
||||||
|
} else {
|
||||||
|
Ok(temp_dir()
|
||||||
|
.to_str()
|
||||||
|
.expect("Expected temp dir to be a valid string")
|
||||||
|
.to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_sym_srv(
|
||||||
|
symbol_path: &String,
|
||||||
|
default_store: &String,
|
||||||
|
) -> Result<Box<dyn Iterator<Item = String>>> {
|
||||||
|
// https://docs.microsoft.com/en-us/windows/win32/debug/using-symsrv
|
||||||
|
// Why
|
||||||
|
|
||||||
|
// ... the symbol path (_NT_SYMBOL_PATH environment variable) can be made up of several path
|
||||||
|
// elements separated by semicolons. If any one or more of these path elements begins with
|
||||||
|
// the text "srv*", then the element is a symbol server and will use SymSrv to locate
|
||||||
|
// symbol files.
|
||||||
|
|
||||||
|
// If the "srv*" text is not specified but the actual path element is a symbol server store,
|
||||||
|
// then the symbol handler will act as if "srv*" were specified. The symbol handler makes
|
||||||
|
// this determination by searching for the existence of a file called "pingme.txt" in
|
||||||
|
// the root directory of the specified path.
|
||||||
|
|
||||||
|
// ... symbol servers are made up of symbol store elements separated by asterisks. There can
|
||||||
|
// be up to 10 symbol stores after the "srv*" prefix.
|
||||||
|
|
||||||
|
let mut sym_srv_results = vec![];
|
||||||
|
|
||||||
|
// 'path elements separated by semicolons'
|
||||||
|
for path_element in symbol_path.split(';') {
|
||||||
|
// 'begins with the text "srv*"'
|
||||||
|
if path_element.to_lowercase().starts_with("srv*") {
|
||||||
|
// 'symbol store elements separated by asterisks'
|
||||||
|
for store_element in path_element[4..].split('*') {
|
||||||
|
if store_element.is_empty() {
|
||||||
|
sym_srv_results.push(default_store.clone());
|
||||||
|
} else {
|
||||||
|
sym_srv_results.push(store_element.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if PathBuf::from(path_element).exists() {
|
||||||
|
// 'searching for the existence of a file called "pingme.txt" in the root directory'
|
||||||
|
let pingme_txt = path_element.to_string() + "/" + "pingme.txt";
|
||||||
|
if PathBuf::from(pingme_txt).exists() {
|
||||||
|
sym_srv_results.push(path_element.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Box::new(sym_srv_results.into_iter()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_from_sym_store(path: &String) -> Result<(bool, Vec<u8>)> {
|
||||||
|
info!("Read file: {}", path);
|
||||||
|
if !path.contains("://") {
|
||||||
|
// Local file
|
||||||
|
let conts = fs::read(path)?;
|
||||||
|
return Ok((false, conts));
|
||||||
|
}
|
||||||
|
|
||||||
|
if !Settings::new("").get_bool("network.pdbAutoDownload", None, None) {
|
||||||
|
return Err(anyhow!("Auto download disabled"));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Download from remote
|
||||||
|
let (tx, rx) = mpsc::channel();
|
||||||
|
let write = move |data: &[u8]| -> usize {
|
||||||
|
if let Ok(_) = tx.send(Vec::from(data)) {
|
||||||
|
data.len()
|
||||||
|
} else {
|
||||||
|
0
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
info!("GET: {}", path);
|
||||||
|
|
||||||
|
let dp =
|
||||||
|
DownloadProvider::try_default().map_err(|_| anyhow!("No default download provider"))?;
|
||||||
|
let mut inst = dp
|
||||||
|
.create_instance()
|
||||||
|
.map_err(|_| anyhow!("Couldn't create download instance"))?;
|
||||||
|
let result = inst
|
||||||
|
.perform_custom_request(
|
||||||
|
"GET",
|
||||||
|
path.clone(),
|
||||||
|
HashMap::<BnString, BnString>::new(),
|
||||||
|
DownloadInstanceInputOutputCallbacks {
|
||||||
|
read: None,
|
||||||
|
write: Some(Box::new(write)),
|
||||||
|
progress: None,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.map_err(|e| anyhow!(e.to_string()))?;
|
||||||
|
if result.status_code != 200 {
|
||||||
|
return Err(anyhow!("Path does not exist"));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut expected_length = None;
|
||||||
|
for (k, v) in result.headers.iter() {
|
||||||
|
if k.to_lowercase() == "content-length" {
|
||||||
|
expected_length = Some(usize::from_str(v)?);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut data = vec![];
|
||||||
|
while let Ok(packet) = rx.try_recv() {
|
||||||
|
data.extend(packet.into_iter());
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(length) = expected_length {
|
||||||
|
if data.len() != length {
|
||||||
|
return Err(anyhow!(format!(
|
||||||
|
"Bad length: expected {} got {}",
|
||||||
|
length,
|
||||||
|
data.len()
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok((true, data))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search_sym_store(store_path: &String, pdb_info: &PDBInfo) -> Result<Option<Vec<u8>>> {
|
||||||
|
// https://www.technlg.net/windows/symbol-server-path-windbg-debugging/
|
||||||
|
// For symbol servers, to identify the files path easily, Windbg uses the format
|
||||||
|
// binaryname.pdb/GUID
|
||||||
|
|
||||||
|
// Doesn't actually say what the format is, just gives an example:
|
||||||
|
// https://docs.microsoft.com/en-us/windows/win32/debug/using-symstore
|
||||||
|
// In this example, the lookup path for the acpi.dbg symbol file might look something
|
||||||
|
// like this: \\mybuilds\symsrv\acpi.dbg\37cdb03962040.
|
||||||
|
let base_path =
|
||||||
|
store_path.clone() + "/" + &pdb_info.file_name + "/" + &pdb_info.guid_age_string;
|
||||||
|
|
||||||
|
// Three files may exist inside the lookup directory:
|
||||||
|
// 1. If the file was stored, then acpi.dbg will exist there.
|
||||||
|
// 2. If a pointer was stored, then a file called file.ptr will exist and contain the path
|
||||||
|
// to the actual symbol file.
|
||||||
|
// 3. A file called refs.ptr, which contains a list of all the current locations for
|
||||||
|
// acpi.dbg with this timestamp and image size that are currently added to the
|
||||||
|
// symbol store.
|
||||||
|
|
||||||
|
// We don't care about #3 because it says we don't
|
||||||
|
|
||||||
|
let direct_path = base_path.clone() + "/" + &pdb_info.file_name;
|
||||||
|
if let Ok((_remote, conts)) = read_from_sym_store(&direct_path) {
|
||||||
|
return Ok(Some(conts));
|
||||||
|
}
|
||||||
|
|
||||||
|
let file_ptr = base_path.clone() + "/" + "file.ptr";
|
||||||
|
if let Ok((_remote, conts)) = read_from_sym_store(&file_ptr) {
|
||||||
|
let path = String::from_utf8(conts)?;
|
||||||
|
// PATH:https://full/path
|
||||||
|
if path.starts_with("PATH:") {
|
||||||
|
if let Ok((_remote, conts)) = read_from_sym_store(&path[5..].to_string()) {
|
||||||
|
return Ok(Some(conts));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_pdb_info(view: &BinaryView) -> Option<PDBInfo> {
|
||||||
|
match view.get_metadata::<u64, _>("DEBUG_INFO_TYPE") {
|
||||||
|
Some(Ok(0x53445352 /* 'SDSR' */)) => {}
|
||||||
|
_ => return None,
|
||||||
|
}
|
||||||
|
|
||||||
|
// This is stored in the BV by the PE loader
|
||||||
|
let file_path = match view.get_metadata::<String, _>("PDB_FILENAME") {
|
||||||
|
Some(Ok(md)) => md,
|
||||||
|
_ => return None,
|
||||||
|
};
|
||||||
|
let mut guid = match view.get_metadata::<Vec<u8>, _>("PDB_GUID") {
|
||||||
|
Some(Ok(md)) => md,
|
||||||
|
_ => return None,
|
||||||
|
};
|
||||||
|
let age = match view.get_metadata::<u64, _>("PDB_AGE") {
|
||||||
|
Some(Ok(md)) => md as u32,
|
||||||
|
_ => return None,
|
||||||
|
};
|
||||||
|
|
||||||
|
if guid.len() != 16 {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
// struct _GUID {
|
||||||
|
// uint32_t Data1;
|
||||||
|
// uint16_t Data2;
|
||||||
|
// uint16_t Data3;
|
||||||
|
// uint8_t Data4[8];
|
||||||
|
// };
|
||||||
|
|
||||||
|
// Endian swap
|
||||||
|
// Data1
|
||||||
|
guid.swap(0, 3);
|
||||||
|
guid.swap(1, 2);
|
||||||
|
// Data2
|
||||||
|
guid.swap(4, 5);
|
||||||
|
// Data3
|
||||||
|
guid.swap(6, 7);
|
||||||
|
|
||||||
|
let guid_age_string = guid
|
||||||
|
.iter()
|
||||||
|
.take(16)
|
||||||
|
.map(|ch| format!("{:02X}", ch))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join("")
|
||||||
|
+ &format!("{:X}", age);
|
||||||
|
|
||||||
|
// Just assume all the paths are /
|
||||||
|
let file_path = if cfg!(windows) {
|
||||||
|
file_path
|
||||||
|
} else {
|
||||||
|
file_path.replace("\\", "/")
|
||||||
|
};
|
||||||
|
let path = file_path;
|
||||||
|
let file_name = if let Some(idx) = path.rfind("\\") {
|
||||||
|
path[(idx + 1)..].to_string()
|
||||||
|
} else if let Some(idx) = path.rfind("/") {
|
||||||
|
path[(idx + 1)..].to_string()
|
||||||
|
} else {
|
||||||
|
path.clone()
|
||||||
|
};
|
||||||
|
|
||||||
|
Some(PDBInfo {
|
||||||
|
path,
|
||||||
|
file_name,
|
||||||
|
age,
|
||||||
|
guid,
|
||||||
|
guid_age_string,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
struct PDBParser;
|
||||||
|
impl PDBParser {
|
||||||
|
fn load_from_file(
|
||||||
|
&self,
|
||||||
|
conts: &Vec<u8>,
|
||||||
|
debug_info: &mut DebugInfo,
|
||||||
|
view: &BinaryView,
|
||||||
|
progress: &Box<dyn Fn(usize, usize) -> Result<(), ()>>,
|
||||||
|
check_guid: bool,
|
||||||
|
did_download: bool,
|
||||||
|
) -> Result<()> {
|
||||||
|
let mut pdb = PDB::open(Cursor::new(&conts))?;
|
||||||
|
|
||||||
|
let settings = Settings::new("");
|
||||||
|
|
||||||
|
if let Some(info) = parse_pdb_info(view) {
|
||||||
|
let pdb_info = &pdb.pdb_information()?;
|
||||||
|
if info.guid.as_slice() != pdb_info.guid.as_ref() {
|
||||||
|
if check_guid {
|
||||||
|
return Err(anyhow!("PDB GUID does not match"));
|
||||||
|
} else {
|
||||||
|
let ask = settings.get_string(
|
||||||
|
"pdb.features.loadMismatchedPDB",
|
||||||
|
Some(view),
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
match ask.as_str() {
|
||||||
|
"true" => {},
|
||||||
|
"ask" => {
|
||||||
|
if interaction::show_message_box(
|
||||||
|
"Mismatched PDB",
|
||||||
|
"This PDB does not look like it matches your binary. Do you want to load it anyway?",
|
||||||
|
MessageBoxButtonSet::YesNoButtonSet,
|
||||||
|
binaryninja::interaction::MessageBoxIcon::QuestionIcon
|
||||||
|
) == MessageBoxButtonResult::NoButton {
|
||||||
|
return Err(anyhow!("User cancelled mismatched load"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
return Err(anyhow!("PDB GUID does not match"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Microsoft's symbol server sometimes gives us a different version of the PDB
|
||||||
|
// than what we ask for. It's weird, but if they're doing it, I trust it will work.
|
||||||
|
if info.age != pdb_info.age {
|
||||||
|
if info.age > pdb_info.age {
|
||||||
|
// Have not seen this case, so I'm not sure if this is fatal
|
||||||
|
info!("PDB age is older than our binary! Loading it anyway, but there may be missing information.");
|
||||||
|
} else {
|
||||||
|
info!("PDB age is newer than our binary! Loading it anyway, there probably shouldn't be any issues.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if did_download && settings.get_bool("pdb.files.localStoreCache", None, None) {
|
||||||
|
match active_local_cache(Some(view)) {
|
||||||
|
Ok(cache) => {
|
||||||
|
let mut cab_path = PathBuf::from(&cache);
|
||||||
|
cab_path.push(&info.file_name);
|
||||||
|
cab_path.push(
|
||||||
|
pdb_info
|
||||||
|
.guid
|
||||||
|
.as_ref()
|
||||||
|
.iter()
|
||||||
|
.map(|ch| format!("{:02X}", ch))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join("")
|
||||||
|
+ &format!("{:X}", pdb_info.age),
|
||||||
|
);
|
||||||
|
let has_dir = if cab_path.is_dir() {
|
||||||
|
true
|
||||||
|
} else {
|
||||||
|
match fs::create_dir_all(&cab_path) {
|
||||||
|
Ok(_) => true,
|
||||||
|
Err(e) => {
|
||||||
|
error!("Could not create PDB cache dir: {}", e);
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
if has_dir {
|
||||||
|
cab_path.push(&info.file_name);
|
||||||
|
match fs::write(&cab_path, &conts) {
|
||||||
|
Ok(_) => {
|
||||||
|
info!("Downloaded to: {}", cab_path.to_string_lossy());
|
||||||
|
}
|
||||||
|
Err(e) => error!("Could not write PDB to cache: {}", e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Also write with the age we expect in our binary view
|
||||||
|
if info.age < pdb_info.age {
|
||||||
|
let mut cab_path = PathBuf::from(&cache);
|
||||||
|
cab_path.push(&info.file_name);
|
||||||
|
cab_path.push(
|
||||||
|
pdb_info
|
||||||
|
.guid
|
||||||
|
.as_ref()
|
||||||
|
.iter()
|
||||||
|
.map(|ch| format!("{:02X}", ch))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join("")
|
||||||
|
+ &format!("{:X}", info.age), // XXX: BV's pdb age
|
||||||
|
);
|
||||||
|
let has_dir = if cab_path.is_dir() {
|
||||||
|
true
|
||||||
|
} else {
|
||||||
|
match fs::create_dir_all(&cab_path) {
|
||||||
|
Ok(_) => true,
|
||||||
|
Err(e) => {
|
||||||
|
error!("Could not create PDB cache dir: {}", e);
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
if has_dir {
|
||||||
|
cab_path.push(&info.file_name);
|
||||||
|
match fs::write(&cab_path, &conts) {
|
||||||
|
Ok(_) => {
|
||||||
|
info!("Downloaded to: {}", cab_path.to_string_lossy());
|
||||||
|
}
|
||||||
|
Err(e) => error!("Could not write PDB to cache: {}", e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => error!("Could not get local cache for writing: {}", e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if check_guid {
|
||||||
|
return Err(anyhow!("File not compiled with PDB information"));
|
||||||
|
} else {
|
||||||
|
let ask = settings.get_string(
|
||||||
|
"pdb.features.loadMismatchedPDB",
|
||||||
|
Some(view),
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
match ask.as_str() {
|
||||||
|
"true" => {},
|
||||||
|
"ask" => {
|
||||||
|
if interaction::show_message_box(
|
||||||
|
"No PDB Information",
|
||||||
|
"This file does not look like it was compiled with a PDB, so your PDB might not correctly apply to the analysis. Do you want to load it anyway?",
|
||||||
|
MessageBoxButtonSet::YesNoButtonSet,
|
||||||
|
binaryninja::interaction::MessageBoxIcon::QuestionIcon
|
||||||
|
) == MessageBoxButtonResult::NoButton {
|
||||||
|
return Err(anyhow!("User cancelled missing info load"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
return Err(anyhow!("File not compiled with PDB information"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut inst = match PDBParserInstance::new(debug_info, view, pdb) {
|
||||||
|
Ok(inst) => {
|
||||||
|
info!("Loaded PDB, parsing...");
|
||||||
|
inst
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
error!("Could not open PDB: {}", e);
|
||||||
|
return Err(e);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
match inst.try_parse_info(Box::new(|cur, max| {
|
||||||
|
(*progress)(cur, max).map_err(|_| anyhow!("Cancelled"))
|
||||||
|
})) {
|
||||||
|
Ok(()) => {
|
||||||
|
info!("Parsed pdb");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
error!("Could not parse PDB: {}", e);
|
||||||
|
if e.to_string() == "Todo" {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CustomDebugInfoParser for PDBParser {
|
||||||
|
fn is_valid(&self, view: &BinaryView) -> bool {
|
||||||
|
view.type_name().to_string() == "PE" || is_pdb(view)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_info(
|
||||||
|
&self,
|
||||||
|
debug_info: &mut DebugInfo,
|
||||||
|
view: &BinaryView,
|
||||||
|
debug_file: &BinaryView,
|
||||||
|
progress: Box<dyn Fn(usize, usize) -> Result<(), ()>>,
|
||||||
|
) -> bool {
|
||||||
|
if is_pdb(debug_file) {
|
||||||
|
match self.load_from_file(
|
||||||
|
&debug_file.read_vec(0, debug_file.len()),
|
||||||
|
debug_info,
|
||||||
|
view,
|
||||||
|
&progress,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
) {
|
||||||
|
Ok(_) => return true,
|
||||||
|
Err(e) if e.to_string() == "Cancelled" => return false,
|
||||||
|
Err(_) => {
|
||||||
|
error!("Chosen PDB file failed to load");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// See if we can get pdb info from the view
|
||||||
|
if let Some(info) = parse_pdb_info(view) {
|
||||||
|
// First, check _NT_SYMBOL_PATH
|
||||||
|
if let Ok(sym_path) = env::var("_NT_SYMBOL_PATH") {
|
||||||
|
let stores = if let Ok(default_cache) = active_local_cache(Some(view)) {
|
||||||
|
parse_sym_srv(&sym_path, &default_cache)
|
||||||
|
} else {
|
||||||
|
Err(anyhow!("No local cache found"))
|
||||||
|
};
|
||||||
|
if let Ok(stores) = stores {
|
||||||
|
for store in stores {
|
||||||
|
match search_sym_store(&store, &info) {
|
||||||
|
Ok(Some(conts)) => {
|
||||||
|
match self
|
||||||
|
.load_from_file(&conts, debug_info, view, &progress, true, true)
|
||||||
|
{
|
||||||
|
Ok(_) => return true,
|
||||||
|
Err(e) if e.to_string() == "Cancelled" => return false,
|
||||||
|
Err(e) => debug!("Skipping, {}", e.to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(None) => {}
|
||||||
|
e => error!("Error searching symbol store {}: {:?}", store, e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Does the raw path just exist?
|
||||||
|
if PathBuf::from(&info.path).exists() {
|
||||||
|
match fs::read(&info.path) {
|
||||||
|
Ok(conts) => match self
|
||||||
|
.load_from_file(&conts, debug_info, view, &progress, true, false)
|
||||||
|
{
|
||||||
|
Ok(_) => return true,
|
||||||
|
Err(e) if e.to_string() == "Cancelled" => return false,
|
||||||
|
Err(e) => debug!("Skipping, {}", e.to_string()),
|
||||||
|
},
|
||||||
|
Err(e) if e.to_string() == "Cancelled" => return false,
|
||||||
|
Err(e) => debug!("Could not read pdb: {}", e.to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try in the same directory as the file
|
||||||
|
let mut potential_path = PathBuf::from(view.file().filename().to_string());
|
||||||
|
potential_path.pop();
|
||||||
|
potential_path.push(&info.file_name);
|
||||||
|
if potential_path.exists() {
|
||||||
|
match fs::read(
|
||||||
|
&potential_path
|
||||||
|
.to_str()
|
||||||
|
.expect("Potential path is a real string")
|
||||||
|
.to_string(),
|
||||||
|
) {
|
||||||
|
Ok(conts) => match self
|
||||||
|
.load_from_file(&conts, debug_info, view, &progress, true, false)
|
||||||
|
{
|
||||||
|
Ok(_) => return true,
|
||||||
|
Err(e) if e.to_string() == "Cancelled" => return false,
|
||||||
|
Err(e) => debug!("Skipping, {}", e.to_string()),
|
||||||
|
},
|
||||||
|
Err(e) if e.to_string() == "Cancelled" => return false,
|
||||||
|
Err(e) => debug!("Could not read pdb: {}", e.to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check the local symbol store
|
||||||
|
if let Ok(local_store_path) = active_local_cache(Some(view)) {
|
||||||
|
match search_sym_store(&local_store_path, &info) {
|
||||||
|
Ok(Some(conts)) => {
|
||||||
|
match self.load_from_file(&conts, debug_info, view, &progress, true, false)
|
||||||
|
{
|
||||||
|
Ok(_) => return true,
|
||||||
|
Err(e) if e.to_string() == "Cancelled" => return false,
|
||||||
|
Err(e) => debug!("Skipping, {}", e.to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(None) => {}
|
||||||
|
e => error!(
|
||||||
|
"Error searching local symbol store {}: {:?}",
|
||||||
|
local_store_path, e
|
||||||
|
),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next, try downloading from all symbol servers in the server list
|
||||||
|
let server_list =
|
||||||
|
Settings::new("").get_string_list("pdb.files.symbolServerList", Some(view), None);
|
||||||
|
|
||||||
|
for server in server_list.iter() {
|
||||||
|
match search_sym_store(&server.to_string(), &info) {
|
||||||
|
Ok(Some(conts)) => {
|
||||||
|
match self.load_from_file(&conts, debug_info, view, &progress, true, true) {
|
||||||
|
Ok(_) => return true,
|
||||||
|
Err(e) if e.to_string() == "Cancelled" => return false,
|
||||||
|
Err(e) => debug!("Skipping, {}", e.to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(None) => {}
|
||||||
|
e => error!("Error searching remote symbol server {}: {:?}", server, e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(feature = "demo"))]
|
||||||
|
#[no_mangle]
|
||||||
|
pub extern "C" fn CorePluginDependencies() {
|
||||||
|
add_optional_plugin_dependency("view_pe");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(feature = "demo"))]
|
||||||
|
#[no_mangle]
|
||||||
|
pub extern "C" fn CorePluginInit() -> bool {
|
||||||
|
init_plugin()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "demo")]
|
||||||
|
#[no_mangle]
|
||||||
|
pub extern "C" fn PDBPluginInit() -> bool {
|
||||||
|
init_plugin()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn init_plugin() -> bool {
|
||||||
|
let _ = logger::init(LevelFilter::Debug);
|
||||||
|
DebugInfoParser::register("PDB", PDBParser {});
|
||||||
|
|
||||||
|
let settings = Settings::new("");
|
||||||
|
settings.register_group("pdb", "PDB Loader");
|
||||||
|
settings.register_setting_json(
|
||||||
|
"pdb.files.localStoreAbsolute",
|
||||||
|
r#"{
|
||||||
|
"title" : "Local Symbol Store Absolute Path",
|
||||||
|
"type" : "string",
|
||||||
|
"default" : "",
|
||||||
|
"aliases" : ["pdb.local-store-absolute", "pdb.localStoreAbsolute"],
|
||||||
|
"description" : "Absolute path specifying where the PDB symbol store exists on this machine, overrides relative path.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"pdb.files.localStoreRelative",
|
||||||
|
r#"{
|
||||||
|
"title" : "Local Symbol Store Relative Path",
|
||||||
|
"type" : "string",
|
||||||
|
"default" : "symbols",
|
||||||
|
"aliases" : ["pdb.local-store-relative", "pdb.localStoreRelative"],
|
||||||
|
"description" : "Path *relative* to the binaryninja _user_ directory, specifying the pdb symbol store. If the Local Symbol Store Absolute Path is specified, this is ignored.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"pdb.files.localStoreCache",
|
||||||
|
r#"{
|
||||||
|
"title" : "Cache Downloaded PDBs in Local Store",
|
||||||
|
"type" : "boolean",
|
||||||
|
"default" : true,
|
||||||
|
"aliases" : ["pdb.localStoreCache"],
|
||||||
|
"description" : "Store PDBs downloaded from Symbol Servers in the local Symbol Store Path.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"network.pdbAutoDownload",
|
||||||
|
r#"{
|
||||||
|
"title" : "Enable Auto Downloading PDBs",
|
||||||
|
"type" : "boolean",
|
||||||
|
"default" : true,
|
||||||
|
"aliases" : ["pdb.autoDownload", "pdb.auto-download-pdb"],
|
||||||
|
"description" : "Automatically search for and download pdb files from specified symbol servers.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"pdb.files.symbolServerList",
|
||||||
|
r#"{
|
||||||
|
"title" : "Symbol Server List",
|
||||||
|
"type" : "array",
|
||||||
|
"elementType" : "string",
|
||||||
|
"sorted" : false,
|
||||||
|
"default" : ["https://msdl.microsoft.com/download/symbols"],
|
||||||
|
"aliases" : ["pdb.symbol-server-list", "pdb.symbolServerList"],
|
||||||
|
"description" : "List of servers to query for pdb symbols.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"pdb.features.expandRTTIStructures",
|
||||||
|
r#"{
|
||||||
|
"title" : "Expand RTTI Structures",
|
||||||
|
"type" : "boolean",
|
||||||
|
"default" : true,
|
||||||
|
"aliases" : ["pdb.expandRTTIStructures"],
|
||||||
|
"description" : "Create structures for RTTI symbols with variable-sized names and arrays.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"pdb.features.generateVTables",
|
||||||
|
r#"{
|
||||||
|
"title" : "Generate Virtual Table Structures",
|
||||||
|
"type" : "boolean",
|
||||||
|
"default" : true,
|
||||||
|
"aliases" : ["pdb.generateVTables"],
|
||||||
|
"description" : "Create Virtual Table (VTable) structures for C++ classes found when parsing.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"pdb.features.loadGlobalSymbols",
|
||||||
|
r#"{
|
||||||
|
"title" : "Load Global Module Symbols",
|
||||||
|
"type" : "boolean",
|
||||||
|
"default" : true,
|
||||||
|
"aliases" : ["pdb.loadGlobalSymbols"],
|
||||||
|
"description" : "Load symbols in the Global module of the PDB. These symbols have generally lower quality types due to relying on the demangler.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"pdb.features.allowUnnamedVoidSymbols",
|
||||||
|
r#"{
|
||||||
|
"title" : "Allow Unnamed Untyped Symbols",
|
||||||
|
"type" : "boolean",
|
||||||
|
"default" : false,
|
||||||
|
"aliases" : ["pdb.allowUnnamedVoidSymbols"],
|
||||||
|
"description" : "Allow creation of symbols with no name and void types, often used as static local variables. Generally, these are just noisy and not relevant.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"pdb.features.allowVoidGlobals",
|
||||||
|
r#"{
|
||||||
|
"title" : "Allow Untyped Symbols",
|
||||||
|
"type" : "boolean",
|
||||||
|
"default" : true,
|
||||||
|
"aliases" : ["pdb.allowVoidGlobals"],
|
||||||
|
"description" : "Allow creation of symbols that have no type, and will be created as void-typed symbols. Generally, this happens in a stripped PDB when a Global symbol's mangled name does not contain type information.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"pdb.features.createMissingNamedTypes",
|
||||||
|
r#"{
|
||||||
|
"title" : "Create Missing Named Types",
|
||||||
|
"type" : "boolean",
|
||||||
|
"default" : true,
|
||||||
|
"aliases" : ["pdb.createMissingNamedTypes"],
|
||||||
|
"description" : "Allow creation of types named by function signatures which are not found in the PDB's types list or the Binary View. These types are usually found in stripped PDBs that have no type information but function signatures reference the stripped types.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"pdb.features.loadMismatchedPDB",
|
||||||
|
r#"{
|
||||||
|
"title" : "Load Mismatched PDB",
|
||||||
|
"type" : "string",
|
||||||
|
"default" : "ask",
|
||||||
|
"enum" : ["true", "ask", "false"],
|
||||||
|
"enumDescriptions" : [
|
||||||
|
"Always load the PDB",
|
||||||
|
"Use the Interaction system to ask if the PDB should be loaded",
|
||||||
|
"Never load the PDB"
|
||||||
|
],
|
||||||
|
"aliases" : [],
|
||||||
|
"description" : "If a manually loaded PDB has a mismatched GUID, should it be loaded?",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
settings.register_setting_json(
|
||||||
|
"pdb.features.parseSymbols",
|
||||||
|
r#"{
|
||||||
|
"title" : "Parse PDB Symbols",
|
||||||
|
"type" : "boolean",
|
||||||
|
"default" : true,
|
||||||
|
"aliases" : [],
|
||||||
|
"description" : "Parse Symbol names and types. If you turn this off, you will only load Types.",
|
||||||
|
"ignore" : []
|
||||||
|
}"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_default_cache_path() {
|
||||||
|
println!("{:?}", default_local_cache());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_sym_srv() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_sym_srv(
|
||||||
|
&r"srv*\\mybuilds\mysymbols".to_string(),
|
||||||
|
&r"DEFAULT_STORE".to_string()
|
||||||
|
)
|
||||||
|
.expect("parse success")
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
vec![r"\\mybuilds\mysymbols".to_string()]
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_sym_srv(
|
||||||
|
&r"srv*c:\localsymbols*\\mybuilds\mysymbols".to_string(),
|
||||||
|
&r"DEFAULT_STORE".to_string()
|
||||||
|
)
|
||||||
|
.expect("parse success")
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
vec![
|
||||||
|
r"c:\localsymbols".to_string(),
|
||||||
|
r"\\mybuilds\mysymbols".to_string()
|
||||||
|
]
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_sym_srv(
|
||||||
|
&r"srv**\\mybuilds\mysymbols".to_string(),
|
||||||
|
&r"DEFAULT_STORE".to_string()
|
||||||
|
)
|
||||||
|
.expect("parse success")
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
vec![
|
||||||
|
r"DEFAULT_STORE".to_string(),
|
||||||
|
r"\\mybuilds\mysymbols".to_string()
|
||||||
|
]
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_sym_srv(
|
||||||
|
&r"srv*c:\localsymbols*\\NearbyServer\store*https://DistantServer".to_string(),
|
||||||
|
&r"DEFAULT_STORE".to_string()
|
||||||
|
)
|
||||||
|
.expect("parse success")
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
vec![
|
||||||
|
r"c:\localsymbols".to_string(),
|
||||||
|
r"\\NearbyServer\store".to_string(),
|
||||||
|
r"https://DistantServer".to_string()
|
||||||
|
]
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_sym_srv(
|
||||||
|
&r"srv*c:\DownstreamStore*https://msdl.microsoft.com/download/symbols".to_string(),
|
||||||
|
&r"DEFAULT_STORE".to_string()
|
||||||
|
)
|
||||||
|
.expect("parse success")
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
vec![
|
||||||
|
r"c:\DownstreamStore".to_string(),
|
||||||
|
r"https://msdl.microsoft.com/download/symbols".to_string()
|
||||||
|
]
|
||||||
|
);
|
||||||
|
}
|
||||||
508
examples/pdb-ng/src/parser.rs
Normal file
508
examples/pdb-ng/src/parser.rs
Normal file
@@ -0,0 +1,508 @@
|
|||||||
|
// Copyright 2022-2024 Vector 35 Inc.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
use std::collections::{BTreeMap, HashMap, HashSet};
|
||||||
|
use std::env;
|
||||||
|
use std::fmt::Display;
|
||||||
|
use std::sync::OnceLock;
|
||||||
|
|
||||||
|
use anyhow::{anyhow, Result};
|
||||||
|
use log::{debug, info};
|
||||||
|
use pdb::*;
|
||||||
|
|
||||||
|
use binaryninja::architecture::{Architecture, CoreArchitecture};
|
||||||
|
use binaryninja::binaryview::{BinaryView, BinaryViewExt};
|
||||||
|
use binaryninja::callingconvention::CallingConvention;
|
||||||
|
use binaryninja::debuginfo::{DebugFunctionInfo, DebugInfo};
|
||||||
|
use binaryninja::platform::Platform;
|
||||||
|
use binaryninja::rc::Ref;
|
||||||
|
use binaryninja::settings::Settings;
|
||||||
|
use binaryninja::types::{
|
||||||
|
min_confidence, Conf, DataVariableAndName, EnumerationBuilder, NamedTypeReference,
|
||||||
|
NamedTypeReferenceClass, StructureBuilder, StructureType, Type, TypeClass,
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::symbol_parser::{ParsedDataSymbol, ParsedProcedure, ParsedSymbol};
|
||||||
|
use crate::type_parser::ParsedType;
|
||||||
|
|
||||||
|
/// Megastruct for all the parsing
|
||||||
|
/// Certain fields are only used by specific files, as marked below.
|
||||||
|
/// Why not make new structs for them? Because vvvv this garbage
|
||||||
|
pub struct PDBParserInstance<'a, S: Source<'a> + 'a> {
|
||||||
|
/// DebugInfo where types/functions will be stored eventually
|
||||||
|
pub(crate) debug_info: &'a mut DebugInfo,
|
||||||
|
/// Parent binary view (usually during BinaryView::Finalize)
|
||||||
|
pub(crate) bv: &'a BinaryView,
|
||||||
|
/// Default arch of self.bv
|
||||||
|
pub(crate) arch: CoreArchitecture,
|
||||||
|
/// Default calling convention for self.arch
|
||||||
|
pub(crate) default_cc: Ref<CallingConvention<CoreArchitecture>>,
|
||||||
|
/// Thiscall calling convention for self.bv, or default_cc if we can't find one
|
||||||
|
pub(crate) thiscall_cc: Ref<CallingConvention<CoreArchitecture>>,
|
||||||
|
/// Cdecl calling convention for self.bv, or default_cc if we can't find one
|
||||||
|
pub(crate) cdecl_cc: Ref<CallingConvention<CoreArchitecture>>,
|
||||||
|
/// Default platform of self.bv
|
||||||
|
pub(crate) platform: Ref<Platform>,
|
||||||
|
/// pdb-rs structure for making lifetime hell a real place
|
||||||
|
pub(crate) pdb: PDB<'a, S>,
|
||||||
|
/// pdb-rs Mapping of modules to addresses for resolving RVAs
|
||||||
|
pub(crate) address_map: AddressMap<'a>,
|
||||||
|
/// Binja Settings instance (for optimization)
|
||||||
|
pub(crate) settings: Ref<Settings>,
|
||||||
|
|
||||||
|
/// type_parser.rs
|
||||||
|
|
||||||
|
/// TypeIndex -> ParsedType enum used during parsing
|
||||||
|
pub(crate) indexed_types: BTreeMap<TypeIndex, ParsedType>,
|
||||||
|
/// QName -> Binja Type for finished types
|
||||||
|
pub(crate) named_types: BTreeMap<String, Ref<Type>>,
|
||||||
|
/// Raw (mangled) name -> TypeIndex for resolving forward references
|
||||||
|
pub(crate) full_type_indices: BTreeMap<String, TypeIndex>,
|
||||||
|
/// Stack of types we're currently parsing
|
||||||
|
pub(crate) type_stack: Vec<TypeIndex>,
|
||||||
|
/// Stack of parent types we're parsing nested types inside of
|
||||||
|
pub(crate) namespace_stack: Vec<String>,
|
||||||
|
/// Type Index -> Does it return on the stack
|
||||||
|
pub(crate) type_default_returnable: BTreeMap<TypeIndex, bool>,
|
||||||
|
|
||||||
|
/// symbol_parser.rs
|
||||||
|
|
||||||
|
/// List of fully parsed symbols from all modules
|
||||||
|
pub(crate) parsed_symbols: Vec<ParsedSymbol>,
|
||||||
|
/// Raw name -> index in parsed_symbols
|
||||||
|
pub(crate) parsed_symbols_by_name: BTreeMap<String, usize>,
|
||||||
|
/// Raw name -> Symbol index for looking up symbols for the currently parsing module (mostly for thunks)
|
||||||
|
pub(crate) named_symbols: BTreeMap<String, SymbolIndex>,
|
||||||
|
/// Parent -> Children symbol index tree for the currently parsing module
|
||||||
|
pub(crate) symbol_tree: BTreeMap<SymbolIndex, Vec<SymbolIndex>>,
|
||||||
|
/// Child -> Parent symbol index mapping, inverse of symbol_tree
|
||||||
|
pub(crate) symbol_parents: BTreeMap<SymbolIndex, SymbolIndex>,
|
||||||
|
/// Stack of (start, end) indices for the current symbols being parsed while constructing the tree
|
||||||
|
pub(crate) symbol_stack: Vec<(SymbolIndex, SymbolIndex)>,
|
||||||
|
/// Index -> parsed symbol for the currently parsing module
|
||||||
|
pub(crate) indexed_symbols: BTreeMap<SymbolIndex, ParsedSymbol>,
|
||||||
|
/// Symbol address -> Symbol for looking up by address
|
||||||
|
pub(crate) addressed_symbols: BTreeMap<u64, Vec<ParsedSymbol>>,
|
||||||
|
/// CPU type of the currently parsing module
|
||||||
|
pub(crate) module_cpu_type: Option<CPUType>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, S: Source<'a> + 'a> PDBParserInstance<'a, S> {
|
||||||
|
/// Try to create a new parser instance from a given bv/pdb
|
||||||
|
pub fn new(
|
||||||
|
debug_info: &'a mut DebugInfo,
|
||||||
|
bv: &'a BinaryView,
|
||||||
|
mut pdb: PDB<'a, S>,
|
||||||
|
) -> Result<Self> {
|
||||||
|
let arch = if let Some(arch) = bv.default_arch() {
|
||||||
|
arch
|
||||||
|
} else {
|
||||||
|
return Err(anyhow!("Cannot parse to view with no architecture"));
|
||||||
|
};
|
||||||
|
|
||||||
|
let platform = bv
|
||||||
|
.default_platform()
|
||||||
|
.expect("Expected bv to have a platform");
|
||||||
|
|
||||||
|
let address_map = pdb.address_map()?;
|
||||||
|
|
||||||
|
let default_cc = platform
|
||||||
|
.get_default_calling_convention()
|
||||||
|
.expect("Expected default calling convention");
|
||||||
|
|
||||||
|
let thiscall_cc = Self::find_calling_convention(platform.as_ref(), "thiscall")
|
||||||
|
.unwrap_or(default_cc.clone());
|
||||||
|
|
||||||
|
let cdecl_cc = platform
|
||||||
|
.get_cdecl_calling_convention()
|
||||||
|
.unwrap_or(default_cc.clone());
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
debug_info,
|
||||||
|
bv,
|
||||||
|
arch,
|
||||||
|
default_cc,
|
||||||
|
thiscall_cc,
|
||||||
|
cdecl_cc,
|
||||||
|
platform,
|
||||||
|
pdb,
|
||||||
|
address_map,
|
||||||
|
settings: Settings::new(""),
|
||||||
|
indexed_types: Default::default(),
|
||||||
|
named_types: Default::default(),
|
||||||
|
full_type_indices: Default::default(),
|
||||||
|
type_stack: Default::default(),
|
||||||
|
namespace_stack: Default::default(),
|
||||||
|
type_default_returnable: Default::default(),
|
||||||
|
parsed_symbols: Default::default(),
|
||||||
|
parsed_symbols_by_name: Default::default(),
|
||||||
|
named_symbols: Default::default(),
|
||||||
|
symbol_tree: Default::default(),
|
||||||
|
symbol_parents: Default::default(),
|
||||||
|
symbol_stack: Default::default(),
|
||||||
|
indexed_symbols: Default::default(),
|
||||||
|
addressed_symbols: Default::default(),
|
||||||
|
module_cpu_type: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Try to parse the pdb into the DebugInfo
|
||||||
|
pub fn try_parse_info(
|
||||||
|
&mut self,
|
||||||
|
progress: Box<dyn Fn(usize, usize) -> Result<()> + 'a>,
|
||||||
|
) -> Result<()> {
|
||||||
|
self.parse_types(Self::split_progress(&progress, 0, &[1.0, 3.0, 0.5, 0.5]))?;
|
||||||
|
for (name, ty) in self.named_types.iter() {
|
||||||
|
self.debug_info.add_type(name, ty.as_ref(), &[]); // TODO : Components
|
||||||
|
}
|
||||||
|
|
||||||
|
info!(
|
||||||
|
"PDB found {} types (before resolving NTRs)",
|
||||||
|
self.named_types.len()
|
||||||
|
);
|
||||||
|
|
||||||
|
if self
|
||||||
|
.settings
|
||||||
|
.get_bool("pdb.features.parseSymbols", Some(self.bv), None)
|
||||||
|
{
|
||||||
|
let (symbols, functions) =
|
||||||
|
self.parse_symbols(Self::split_progress(&progress, 1, &[1.0, 3.0, 0.5, 0.5]))?;
|
||||||
|
|
||||||
|
if self
|
||||||
|
.settings
|
||||||
|
.get_bool("pdb.features.createMissingNamedTypes", Some(self.bv), None)
|
||||||
|
{
|
||||||
|
self.resolve_missing_ntrs(
|
||||||
|
&symbols,
|
||||||
|
Self::split_progress(&progress, 2, &[1.0, 3.0, 0.5, 0.5]),
|
||||||
|
)?;
|
||||||
|
self.resolve_missing_ntrs(
|
||||||
|
&functions,
|
||||||
|
Self::split_progress(&progress, 3, &[1.0, 3.0, 0.5, 0.5]),
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
info!("PDB found {} types", self.named_types.len());
|
||||||
|
info!("PDB found {} data variables", symbols.len());
|
||||||
|
info!("PDB found {} functions", functions.len());
|
||||||
|
|
||||||
|
let allow_void =
|
||||||
|
self.settings
|
||||||
|
.get_bool("pdb.features.allowVoidGlobals", Some(self.bv), None);
|
||||||
|
|
||||||
|
let min_confidence_type = Conf::new(Type::void(), min_confidence());
|
||||||
|
for sym in symbols.iter() {
|
||||||
|
match sym {
|
||||||
|
ParsedSymbol::Data(ParsedDataSymbol {
|
||||||
|
address,
|
||||||
|
name,
|
||||||
|
type_,
|
||||||
|
..
|
||||||
|
}) => {
|
||||||
|
let real_type =
|
||||||
|
type_.as_ref().unwrap_or(&min_confidence_type);
|
||||||
|
|
||||||
|
if real_type.contents.type_class() == TypeClass::VoidTypeClass {
|
||||||
|
if !allow_void {
|
||||||
|
self.log(|| {
|
||||||
|
format!("Not adding void-typed symbol {:?}@{:x}", name, address)
|
||||||
|
});
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
self.log(|| {
|
||||||
|
format!(
|
||||||
|
"Adding data variable: 0x{:x}: {} {:?}",
|
||||||
|
address, &name.raw_name, real_type
|
||||||
|
)
|
||||||
|
});
|
||||||
|
self.debug_info
|
||||||
|
.add_data_variable_info(DataVariableAndName::new(
|
||||||
|
*address,
|
||||||
|
real_type.clone(),
|
||||||
|
true,
|
||||||
|
name.full_name.as_ref().unwrap_or(&name.raw_name),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
s => {
|
||||||
|
self.log(|| format!("Not adding non-data symbol {:?}", s));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for sym in functions {
|
||||||
|
match sym {
|
||||||
|
ParsedSymbol::Procedure(ParsedProcedure {
|
||||||
|
address,
|
||||||
|
name,
|
||||||
|
type_,
|
||||||
|
locals: _,
|
||||||
|
..
|
||||||
|
}) => {
|
||||||
|
self.log(|| {
|
||||||
|
format!(
|
||||||
|
"Adding function: 0x{:x}: {} {:?}",
|
||||||
|
address, &name.raw_name, type_
|
||||||
|
)
|
||||||
|
});
|
||||||
|
self.debug_info.add_function(DebugFunctionInfo::new(
|
||||||
|
Some(name.short_name.unwrap_or(name.raw_name.clone())),
|
||||||
|
Some(name.full_name.unwrap_or(name.raw_name.clone())),
|
||||||
|
Some(name.raw_name),
|
||||||
|
type_.clone().and_then(|conf| {
|
||||||
|
// TODO: When DebugInfo support confidence on function types, remove this
|
||||||
|
if conf.confidence == 0 {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(conf.contents)
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
Some(address),
|
||||||
|
Some(self.platform.clone()),
|
||||||
|
vec![], // TODO : Components
|
||||||
|
vec![], //TODO: local variables
|
||||||
|
));
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn collect_name(
|
||||||
|
&self,
|
||||||
|
name: &NamedTypeReference,
|
||||||
|
unknown_names: &mut HashMap<String, NamedTypeReferenceClass>,
|
||||||
|
) {
|
||||||
|
let used_name = name.name().to_string();
|
||||||
|
if let Some(&found) =
|
||||||
|
unknown_names.get(&used_name)
|
||||||
|
{
|
||||||
|
if found != name.class() {
|
||||||
|
// Interesting case, not sure we care
|
||||||
|
self.log(|| {
|
||||||
|
format!(
|
||||||
|
"Mismatch unknown NTR class for {}: {} ?",
|
||||||
|
&used_name,
|
||||||
|
name.class() as u32
|
||||||
|
)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
self.log(|| format!("Found new unused name: {}", &used_name));
|
||||||
|
unknown_names.insert(used_name, name.class());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn collect_names(
|
||||||
|
&self,
|
||||||
|
ty: &Type,
|
||||||
|
unknown_names: &mut HashMap<String, NamedTypeReferenceClass>,
|
||||||
|
) {
|
||||||
|
match ty.type_class() {
|
||||||
|
TypeClass::StructureTypeClass => {
|
||||||
|
if let Ok(structure) = ty.get_structure() {
|
||||||
|
if let Ok(members) = structure.members() {
|
||||||
|
for member in members {
|
||||||
|
self.collect_names(member.ty.contents.as_ref(), unknown_names);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if let Ok(bases) = structure.base_structures() {
|
||||||
|
for base in bases {
|
||||||
|
self.collect_name(base.ty.as_ref(), unknown_names);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TypeClass::PointerTypeClass => {
|
||||||
|
if let Ok(target) = ty.target() {
|
||||||
|
self.collect_names(target.contents.as_ref(), unknown_names);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TypeClass::ArrayTypeClass => {
|
||||||
|
if let Ok(element_type) = ty.element_type() {
|
||||||
|
self.collect_names(element_type.contents.as_ref(), unknown_names);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TypeClass::FunctionTypeClass => {
|
||||||
|
if let Ok(return_value) = ty.return_value() {
|
||||||
|
self.collect_names(return_value.contents.as_ref(), unknown_names);
|
||||||
|
}
|
||||||
|
if let Ok(params) = ty.parameters() {
|
||||||
|
for param in params {
|
||||||
|
self.collect_names(param.t.contents.as_ref(), unknown_names);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TypeClass::NamedTypeReferenceClass => {
|
||||||
|
if let Ok(ntr) = ty.get_named_type_reference() {
|
||||||
|
self.collect_name(ntr.as_ref(), unknown_names);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn resolve_missing_ntrs(
|
||||||
|
&mut self,
|
||||||
|
symbols: &Vec<ParsedSymbol>,
|
||||||
|
progress: Box<dyn Fn(usize, usize) -> Result<()> + '_>,
|
||||||
|
) -> Result<()> {
|
||||||
|
let mut unknown_names = HashMap::new();
|
||||||
|
let mut known_names = self
|
||||||
|
.bv
|
||||||
|
.types()
|
||||||
|
.iter()
|
||||||
|
.map(|qnat| qnat.name().string())
|
||||||
|
.collect::<HashSet<_>>();
|
||||||
|
|
||||||
|
for ty in &self.named_types {
|
||||||
|
known_names.insert(ty.0.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
let count = symbols.len();
|
||||||
|
for (i, sym) in symbols.into_iter().enumerate() {
|
||||||
|
match sym {
|
||||||
|
ParsedSymbol::Data(ParsedDataSymbol {
|
||||||
|
type_: Some(type_), ..
|
||||||
|
}) => {
|
||||||
|
self.collect_names(type_.contents.as_ref(), &mut unknown_names);
|
||||||
|
}
|
||||||
|
ParsedSymbol::Procedure(ParsedProcedure {
|
||||||
|
type_: Some(type_),
|
||||||
|
locals,
|
||||||
|
..
|
||||||
|
}) => {
|
||||||
|
self.collect_names(type_.contents.as_ref(), &mut unknown_names);
|
||||||
|
for l in locals {
|
||||||
|
if let Some(ltype) = &l.type_ {
|
||||||
|
self.collect_names(ltype.contents.as_ref(), &mut unknown_names);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
(progress)(i, count)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (name, class) in unknown_names.into_iter() {
|
||||||
|
if known_names.contains(&name) {
|
||||||
|
self.log(|| format!("Found referenced name and ignoring: {}", &name));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
self.log(|| format!("Adding referenced but unknown type {} (likely due to demangled name and stripped type)", &name));
|
||||||
|
match class {
|
||||||
|
NamedTypeReferenceClass::UnknownNamedTypeClass
|
||||||
|
| NamedTypeReferenceClass::TypedefNamedTypeClass => {
|
||||||
|
self.debug_info.add_type(name, Type::void().as_ref(), &[]); // TODO : Components
|
||||||
|
}
|
||||||
|
NamedTypeReferenceClass::ClassNamedTypeClass
|
||||||
|
| NamedTypeReferenceClass::StructNamedTypeClass
|
||||||
|
| NamedTypeReferenceClass::UnionNamedTypeClass => {
|
||||||
|
let structure = StructureBuilder::new();
|
||||||
|
match class {
|
||||||
|
NamedTypeReferenceClass::ClassNamedTypeClass => {
|
||||||
|
structure.set_structure_type(StructureType::ClassStructureType);
|
||||||
|
}
|
||||||
|
NamedTypeReferenceClass::StructNamedTypeClass => {
|
||||||
|
structure.set_structure_type(StructureType::StructStructureType);
|
||||||
|
}
|
||||||
|
NamedTypeReferenceClass::UnionNamedTypeClass => {
|
||||||
|
structure.set_structure_type(StructureType::UnionStructureType);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
structure.set_width(1);
|
||||||
|
structure.set_alignment(1);
|
||||||
|
|
||||||
|
self.debug_info.add_type(
|
||||||
|
name,
|
||||||
|
Type::structure(structure.finalize().as_ref()).as_ref(),
|
||||||
|
&[], // TODO : Components
|
||||||
|
);
|
||||||
|
}
|
||||||
|
NamedTypeReferenceClass::EnumNamedTypeClass => {
|
||||||
|
let enumeration = EnumerationBuilder::new();
|
||||||
|
self.debug_info.add_type(
|
||||||
|
name,
|
||||||
|
Type::enumeration(
|
||||||
|
enumeration.finalize().as_ref(),
|
||||||
|
self.arch.default_integer_size(),
|
||||||
|
false,
|
||||||
|
)
|
||||||
|
.as_ref(),
|
||||||
|
&[], // TODO : Components
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Lazy logging function that prints like 20MB of messages
|
||||||
|
pub(crate) fn log<F: FnOnce() -> D, D: Display>(&self, msg: F) {
|
||||||
|
static MEM: OnceLock<bool> = OnceLock::new();
|
||||||
|
let debug_pdb = MEM.get_or_init(|| {
|
||||||
|
env::var("BN_DEBUG_PDB").is_ok()
|
||||||
|
});
|
||||||
|
if *debug_pdb {
|
||||||
|
let space = "\t".repeat(self.type_stack.len()) + &"\t".repeat(self.symbol_stack.len());
|
||||||
|
let msg = format!("{}", msg());
|
||||||
|
debug!(
|
||||||
|
"{}{}",
|
||||||
|
space,
|
||||||
|
msg.replace("\n", &*("\n".to_string() + &space))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn split_progress<'b, F: Fn(usize, usize) -> Result<()> + 'b>(
|
||||||
|
original_fn: F,
|
||||||
|
subpart: usize,
|
||||||
|
subpart_weights: &[f64],
|
||||||
|
) -> Box<dyn Fn(usize, usize) -> Result<()> + 'b> {
|
||||||
|
// Normalize weights
|
||||||
|
let weight_sum: f64 = subpart_weights.iter().sum();
|
||||||
|
if weight_sum < 0.0001 {
|
||||||
|
return Box::new(|_, _| Ok(()));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Keep a running count of weights for the start
|
||||||
|
let mut subpart_starts = vec![];
|
||||||
|
let mut start = 0f64;
|
||||||
|
for w in subpart_weights {
|
||||||
|
subpart_starts.push(start);
|
||||||
|
start += *w;
|
||||||
|
}
|
||||||
|
|
||||||
|
let subpart_start = subpart_starts[subpart] / weight_sum;
|
||||||
|
let weight = subpart_weights[subpart] / weight_sum;
|
||||||
|
|
||||||
|
Box::new(move |cur: usize, max: usize| {
|
||||||
|
// Just use a large number for easy divisibility
|
||||||
|
let steps = 1000000f64;
|
||||||
|
let subpart_size = steps * weight;
|
||||||
|
let subpart_progress = ((cur as f64) / (max as f64)) * subpart_size;
|
||||||
|
|
||||||
|
original_fn(
|
||||||
|
(subpart_start * steps + subpart_progress) as usize,
|
||||||
|
steps as usize,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
1164
examples/pdb-ng/src/struct_grouper.rs
Normal file
1164
examples/pdb-ng/src/struct_grouper.rs
Normal file
File diff suppressed because it is too large
Load Diff
2061
examples/pdb-ng/src/symbol_parser.rs
Normal file
2061
examples/pdb-ng/src/symbol_parser.rs
Normal file
File diff suppressed because it is too large
Load Diff
2477
examples/pdb-ng/src/type_parser.rs
Normal file
2477
examples/pdb-ng/src/type_parser.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -31,9 +31,7 @@ fn main() {
|
|||||||
.get_data(),
|
.get_data(),
|
||||||
addr,
|
addr,
|
||||||
) {
|
) {
|
||||||
tokens
|
tokens.iter().for_each(|token| print!("{}", token.text()));
|
||||||
.iter()
|
|
||||||
.for_each(|token| print!("{}", token.text().as_str()));
|
|
||||||
println!();
|
println!();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ use std::{
|
|||||||
collections::HashMap,
|
collections::HashMap,
|
||||||
ffi::{c_char, c_int, CStr, CString},
|
ffi::{c_char, c_int, CStr, CString},
|
||||||
hash::Hash,
|
hash::Hash,
|
||||||
mem::zeroed,
|
mem::{zeroed, MaybeUninit},
|
||||||
ops, ptr, slice,
|
ops, ptr, slice,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -97,11 +97,11 @@ impl<'a> Iterator for BranchIter<'a> {
|
|||||||
#[repr(C)]
|
#[repr(C)]
|
||||||
pub struct InstructionInfo(BNInstructionInfo);
|
pub struct InstructionInfo(BNInstructionInfo);
|
||||||
impl InstructionInfo {
|
impl InstructionInfo {
|
||||||
pub fn new(len: usize, branch_delay: bool) -> Self {
|
pub fn new(len: usize, delay_slots: u8) -> Self {
|
||||||
InstructionInfo(BNInstructionInfo {
|
InstructionInfo(BNInstructionInfo {
|
||||||
length: len,
|
length: len,
|
||||||
archTransitionByTargetAddr: false,
|
archTransitionByTargetAddr: false,
|
||||||
branchDelay: branch_delay,
|
delaySlots: delay_slots,
|
||||||
branchCount: 0usize,
|
branchCount: 0usize,
|
||||||
branchType: [BranchType::UnresolvedBranch; 3],
|
branchType: [BranchType::UnresolvedBranch; 3],
|
||||||
branchTarget: [0u64; 3],
|
branchTarget: [0u64; 3],
|
||||||
@@ -121,8 +121,8 @@ impl InstructionInfo {
|
|||||||
self.0.branchCount
|
self.0.branchCount
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn branch_delay(&self) -> bool {
|
pub fn delay_slots(&self) -> u8 {
|
||||||
self.0.branchDelay
|
self.0.delaySlots
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn branches(&self) -> BranchIter {
|
pub fn branches(&self) -> BranchIter {
|
||||||
@@ -296,7 +296,7 @@ pub trait FlagGroup: Sized + Clone + Copy {
|
|||||||
/// Types to represent the different comparisons, so for `cr1_lt` we
|
/// Types to represent the different comparisons, so for `cr1_lt` we
|
||||||
/// would return a mapping along the lines of:
|
/// would return a mapping along the lines of:
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```text
|
||||||
/// cr1_signed -> LLFC_SLT,
|
/// cr1_signed -> LLFC_SLT,
|
||||||
/// cr1_unsigned -> LLFC_ULT,
|
/// cr1_unsigned -> LLFC_ULT,
|
||||||
/// ```
|
/// ```
|
||||||
@@ -313,7 +313,7 @@ pub trait Intrinsic: Sized + Clone + Copy {
|
|||||||
fn id(&self) -> u32;
|
fn id(&self) -> u32;
|
||||||
|
|
||||||
/// Reeturns the list of the input names and types for this intrinsic.
|
/// Reeturns the list of the input names and types for this intrinsic.
|
||||||
fn inputs(&self) -> Vec<NameAndType<String>>;
|
fn inputs(&self) -> Vec<Ref<NameAndType>>;
|
||||||
|
|
||||||
/// Returns the list of the output types for this intrinsic.
|
/// Returns the list of the output types for this intrinsic.
|
||||||
fn outputs(&self) -> Vec<Conf<Ref<Type>>>;
|
fn outputs(&self) -> Vec<Conf<Ref<Type>>>;
|
||||||
@@ -650,7 +650,7 @@ impl Intrinsic for UnusedIntrinsic {
|
|||||||
fn id(&self) -> u32 {
|
fn id(&self) -> u32 {
|
||||||
unreachable!()
|
unreachable!()
|
||||||
}
|
}
|
||||||
fn inputs(&self) -> Vec<NameAndType<String>> {
|
fn inputs(&self) -> Vec<Ref<NameAndType>> {
|
||||||
unreachable!()
|
unreachable!()
|
||||||
}
|
}
|
||||||
fn outputs(&self) -> Vec<Conf<Ref<Type>>> {
|
fn outputs(&self) -> Vec<Conf<Ref<Type>>> {
|
||||||
@@ -715,6 +715,21 @@ impl Register for CoreRegister {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for CoreRegister {
|
||||||
|
type Raw = u32;
|
||||||
|
type Context = CoreArchitecture;
|
||||||
|
type Wrapped<'a> = Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for CoreRegister {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, _count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeRegisterList(raw)
|
||||||
|
}
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Self(context.0, *raw)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub struct CoreRegisterStackInfo(*mut BNArchitecture, BNRegisterStackInfo);
|
pub struct CoreRegisterStackInfo(*mut BNArchitecture, BNRegisterStackInfo);
|
||||||
|
|
||||||
impl RegisterStackInfo for CoreRegisterStackInfo {
|
impl RegisterStackInfo for CoreRegisterStackInfo {
|
||||||
@@ -968,8 +983,8 @@ impl FlagGroup for CoreFlagGroup {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Copy, Clone, Eq, PartialEq)]
|
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
|
||||||
pub struct CoreIntrinsic(*mut BNArchitecture, u32);
|
pub struct CoreIntrinsic(pub(crate) *mut BNArchitecture, pub(crate) u32);
|
||||||
|
|
||||||
impl Intrinsic for crate::architecture::CoreIntrinsic {
|
impl Intrinsic for crate::architecture::CoreIntrinsic {
|
||||||
fn name(&self) -> Cow<str> {
|
fn name(&self) -> Cow<str> {
|
||||||
@@ -992,7 +1007,7 @@ impl Intrinsic for crate::architecture::CoreIntrinsic {
|
|||||||
self.1
|
self.1
|
||||||
}
|
}
|
||||||
|
|
||||||
fn inputs(&self) -> Vec<NameAndType<String>> {
|
fn inputs(&self) -> Vec<Ref<NameAndType>> {
|
||||||
let mut count: usize = 0;
|
let mut count: usize = 0;
|
||||||
|
|
||||||
unsafe {
|
unsafe {
|
||||||
@@ -1000,7 +1015,7 @@ impl Intrinsic for crate::architecture::CoreIntrinsic {
|
|||||||
|
|
||||||
let ret = slice::from_raw_parts_mut(inputs, count)
|
let ret = slice::from_raw_parts_mut(inputs, count)
|
||||||
.iter()
|
.iter()
|
||||||
.map(NameAndType::from_raw)
|
.map(|x| NameAndType::from_raw(x).to_owned())
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
BNFreeNameAndTypeList(inputs, count);
|
BNFreeNameAndTypeList(inputs, count);
|
||||||
@@ -1162,10 +1177,11 @@ impl Architecture for CoreArchitecture {
|
|||||||
&mut result as *mut _,
|
&mut result as *mut _,
|
||||||
&mut count as *mut _,
|
&mut count as *mut _,
|
||||||
) {
|
) {
|
||||||
let vec = Vec::<BNInstructionTextToken>::from_raw_parts(result, count, count)
|
let vec = slice::from_raw_parts(result, count)
|
||||||
.iter()
|
.iter()
|
||||||
.map(|x| InstructionTextToken::from_raw(x))
|
.map(|x| InstructionTextToken::from_raw(x).to_owned())
|
||||||
.collect();
|
.collect();
|
||||||
|
BNFreeInstructionText(result, count);
|
||||||
Some((consumed, vec))
|
Some((consumed, vec))
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
@@ -1689,8 +1705,8 @@ where
|
|||||||
A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
|
A: 'static + Architecture<Handle = CustomArchitectureHandle<A>> + Send + Sync,
|
||||||
F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
|
F: FnOnce(CustomArchitectureHandle<A>, CoreArchitecture) -> A,
|
||||||
{
|
{
|
||||||
arch: A,
|
arch: MaybeUninit<A>,
|
||||||
func: F,
|
func: Option<F>,
|
||||||
}
|
}
|
||||||
|
|
||||||
extern "C" fn cb_init<A, F>(ctxt: *mut c_void, obj: *mut BNArchitecture)
|
extern "C" fn cb_init<A, F>(ctxt: *mut c_void, obj: *mut BNArchitecture)
|
||||||
@@ -1704,11 +1720,10 @@ where
|
|||||||
handle: ctxt as *mut A,
|
handle: ctxt as *mut A,
|
||||||
};
|
};
|
||||||
|
|
||||||
let create = ptr::read(&custom_arch.func);
|
let create = custom_arch.func.take().unwrap();
|
||||||
ptr::write(
|
custom_arch
|
||||||
&mut custom_arch.arch,
|
.arch
|
||||||
create(custom_arch_handle, CoreArchitecture(obj)),
|
.write(create(custom_arch_handle, CoreArchitecture(obj)));
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1811,13 +1826,15 @@ where
|
|||||||
let data = unsafe { slice::from_raw_parts(data, *len) };
|
let data = unsafe { slice::from_raw_parts(data, *len) };
|
||||||
let result = unsafe { &mut *result };
|
let result = unsafe { &mut *result };
|
||||||
|
|
||||||
match custom_arch.instruction_text(data, addr) {
|
let Some((res_size, res_tokens)) = custom_arch.instruction_text(data, addr) else {
|
||||||
Some((res_size, mut res_tokens)) => {
|
return false;
|
||||||
|
};
|
||||||
|
|
||||||
|
let res_tokens: Box<[_]> = res_tokens.into_boxed_slice();
|
||||||
unsafe {
|
unsafe {
|
||||||
// TODO: Can't use into_raw_parts as it's unstable so we do this instead...
|
let res_tokens = Box::leak(res_tokens);
|
||||||
let r_ptr = res_tokens.as_mut_ptr();
|
let r_ptr = res_tokens.as_mut_ptr();
|
||||||
let r_count = res_tokens.len();
|
let r_count = res_tokens.len();
|
||||||
mem::forget(res_tokens);
|
|
||||||
|
|
||||||
*result = &mut (*r_ptr).0;
|
*result = &mut (*r_ptr).0;
|
||||||
*count = r_count;
|
*count = r_count;
|
||||||
@@ -1825,13 +1842,9 @@ where
|
|||||||
}
|
}
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
None => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
extern "C" fn cb_free_instruction_text(tokens: *mut BNInstructionTextToken, count: usize) {
|
extern "C" fn cb_free_instruction_text(tokens: *mut BNInstructionTextToken, count: usize) {
|
||||||
let _tokens =
|
let _tokens = unsafe { Box::from_raw(ptr::slice_from_raw_parts_mut(tokens, count)) };
|
||||||
unsafe { Vec::from_raw_parts(tokens as *mut InstructionTextToken, count, count) };
|
|
||||||
}
|
}
|
||||||
|
|
||||||
extern "C" fn cb_instruction_llil<A>(
|
extern "C" fn cb_instruction_llil<A>(
|
||||||
@@ -1931,15 +1944,7 @@ where
|
|||||||
if len == 0 {
|
if len == 0 {
|
||||||
ptr::null_mut()
|
ptr::null_mut()
|
||||||
} else {
|
} else {
|
||||||
let mut res = Vec::with_capacity(len + 1);
|
let mut res: Box<[_]> = [len as u32].into_iter().chain(items).collect();
|
||||||
|
|
||||||
res.push(len as u32);
|
|
||||||
|
|
||||||
for i in items {
|
|
||||||
res.push(i);
|
|
||||||
}
|
|
||||||
|
|
||||||
assert!(res.len() == len + 1);
|
|
||||||
|
|
||||||
let raw = res.as_mut_ptr();
|
let raw = res.as_mut_ptr();
|
||||||
mem::forget(res);
|
mem::forget(res);
|
||||||
@@ -2280,7 +2285,8 @@ where
|
|||||||
unsafe {
|
unsafe {
|
||||||
let actual_start = regs.offset(-1);
|
let actual_start = regs.offset(-1);
|
||||||
let len = *actual_start + 1;
|
let len = *actual_start + 1;
|
||||||
let _regs = Vec::from_raw_parts(actual_start, len as usize, len as usize);
|
let regs_ptr = ptr::slice_from_raw_parts_mut(actual_start, len.try_into().unwrap());
|
||||||
|
let _regs = Box::from_raw(regs_ptr);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -2420,12 +2426,18 @@ where
|
|||||||
{
|
{
|
||||||
let custom_arch = unsafe { &*(ctxt as *mut A) };
|
let custom_arch = unsafe { &*(ctxt as *mut A) };
|
||||||
|
|
||||||
if let Some(intrinsic) = custom_arch.intrinsic_from_id(intrinsic) {
|
let Some(intrinsic) = custom_arch.intrinsic_from_id(intrinsic) else {
|
||||||
let inputs = intrinsic.inputs();
|
unsafe {
|
||||||
let mut res = Vec::with_capacity(inputs.len());
|
*count = 0;
|
||||||
for input in inputs {
|
|
||||||
res.push(input.into_raw());
|
|
||||||
}
|
}
|
||||||
|
return ptr::null_mut();
|
||||||
|
};
|
||||||
|
|
||||||
|
let inputs = intrinsic.inputs();
|
||||||
|
let mut res: Box<[_]> = inputs
|
||||||
|
.into_iter()
|
||||||
|
.map(|input| unsafe { Ref::into_raw(input) }.0)
|
||||||
|
.collect();
|
||||||
|
|
||||||
unsafe {
|
unsafe {
|
||||||
*count = res.len();
|
*count = res.len();
|
||||||
@@ -2437,12 +2449,6 @@ where
|
|||||||
raw
|
raw
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
unsafe {
|
|
||||||
*count = 0;
|
|
||||||
}
|
|
||||||
ptr::null_mut()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
extern "C" fn cb_free_name_and_types<A>(ctxt: *mut c_void, nt: *mut BNNameAndType, count: usize)
|
extern "C" fn cb_free_name_and_types<A>(ctxt: *mut c_void, nt: *mut BNNameAndType, count: usize)
|
||||||
@@ -2453,9 +2459,9 @@ where
|
|||||||
|
|
||||||
if !nt.is_null() {
|
if !nt.is_null() {
|
||||||
unsafe {
|
unsafe {
|
||||||
let list = Vec::from_raw_parts(nt, count, count);
|
let name_and_types = Box::from_raw(ptr::slice_from_raw_parts_mut(nt, count));
|
||||||
for nt in list {
|
for nt in name_and_types.iter() {
|
||||||
BnString::from_raw(nt.name);
|
Ref::new(NameAndType::from_raw(nt));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -2473,10 +2479,7 @@ where
|
|||||||
|
|
||||||
if let Some(intrinsic) = custom_arch.intrinsic_from_id(intrinsic) {
|
if let Some(intrinsic) = custom_arch.intrinsic_from_id(intrinsic) {
|
||||||
let inputs = intrinsic.outputs();
|
let inputs = intrinsic.outputs();
|
||||||
let mut res = Vec::with_capacity(inputs.len());
|
let mut res: Box<[_]> = inputs.iter().map(|input| input.as_ref().into()).collect();
|
||||||
for input in inputs {
|
|
||||||
res.push(input.into());
|
|
||||||
}
|
|
||||||
|
|
||||||
unsafe {
|
unsafe {
|
||||||
*count = res.len();
|
*count = res.len();
|
||||||
@@ -2505,9 +2508,7 @@ where
|
|||||||
{
|
{
|
||||||
let _custom_arch = unsafe { &*(ctxt as *mut A) };
|
let _custom_arch = unsafe { &*(ctxt as *mut A) };
|
||||||
if !tl.is_null() {
|
if !tl.is_null() {
|
||||||
unsafe {
|
let _type_list = unsafe { Box::from_raw(ptr::slice_from_raw_parts_mut(tl, count)) };
|
||||||
let _list = Vec::from_raw_parts(tl, count, count);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -2685,8 +2686,8 @@ where
|
|||||||
let name = name.into_bytes_with_nul();
|
let name = name.into_bytes_with_nul();
|
||||||
|
|
||||||
let uninit_arch = ArchitectureBuilder {
|
let uninit_arch = ArchitectureBuilder {
|
||||||
arch: unsafe { zeroed() },
|
arch: MaybeUninit::zeroed(),
|
||||||
func,
|
func: Some(func),
|
||||||
};
|
};
|
||||||
|
|
||||||
let raw = Box::into_raw(Box::new(uninit_arch));
|
let raw = Box::into_raw(Box::new(uninit_arch));
|
||||||
@@ -2776,7 +2777,7 @@ where
|
|||||||
|
|
||||||
assert!(!res.is_null());
|
assert!(!res.is_null());
|
||||||
|
|
||||||
&(*raw).arch
|
(*raw).arch.assume_init_mut()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -104,21 +104,14 @@ unsafe impl RefCountable for BackgroundTask {
|
|||||||
impl CoreArrayProvider for BackgroundTask {
|
impl CoreArrayProvider for BackgroundTask {
|
||||||
type Raw = *mut BNBackgroundTask;
|
type Raw = *mut BNBackgroundTask;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, BackgroundTask>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for BackgroundTask {
|
unsafe impl CoreArrayProviderInner for BackgroundTask {
|
||||||
unsafe fn free(raw: *mut *mut BNBackgroundTask, count: usize, _context: &()) {
|
unsafe fn free(raw: *mut *mut BNBackgroundTask, count: usize, _context: &()) {
|
||||||
BNFreeBackgroundTaskList(raw, count);
|
BNFreeBackgroundTaskList(raw, count);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a *mut BNBackgroundTask, context: &'a ()) -> Self::Wrapped<'a> {
|
||||||
|
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for BackgroundTask {
|
|
||||||
type Wrapped = Guard<'a, BackgroundTask>;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(
|
|
||||||
raw: &'a *mut BNBackgroundTask,
|
|
||||||
context: &'a (),
|
|
||||||
) -> Guard<'a, BackgroundTask> {
|
|
||||||
Guard::new(BackgroundTask::from_raw(*raw), context)
|
Guard::new(BackgroundTask::from_raw(*raw), context)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -68,18 +68,14 @@ pub struct EdgeContext<'a, C: 'a + BlockContext> {
|
|||||||
impl<'a, C: 'a + BlockContext> CoreArrayProvider for Edge<'a, C> {
|
impl<'a, C: 'a + BlockContext> CoreArrayProvider for Edge<'a, C> {
|
||||||
type Raw = BNBasicBlockEdge;
|
type Raw = BNBasicBlockEdge;
|
||||||
type Context = EdgeContext<'a, C>;
|
type Context = EdgeContext<'a, C>;
|
||||||
|
type Wrapped<'b> = Edge<'b, C> where 'a: 'b;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl<'a, C: 'a + BlockContext> CoreOwnedArrayProvider for Edge<'a, C> {
|
unsafe impl<'a, C: 'a + BlockContext> CoreArrayProviderInner for Edge<'a, C> {
|
||||||
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
BNFreeBasicBlockEdgeList(raw, count);
|
BNFreeBasicBlockEdgeList(raw, count);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'b>(raw: &'b Self::Raw, context: &'b Self::Context) -> Self::Wrapped<'b> {
|
||||||
|
|
||||||
unsafe impl<'a, C: 'a + BlockContext> CoreArrayWrapper<'a> for Edge<'a, C> {
|
|
||||||
type Wrapped = Edge<'a, C>;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, context: &'a Self::Context) -> Edge<'a, C> {
|
|
||||||
let edge_target = Guard::new(
|
let edge_target = Guard::new(
|
||||||
BasicBlock::from_raw(raw.target, context.orig_block.context.clone()),
|
BasicBlock::from_raw(raw.target, context.orig_block.context.clone()),
|
||||||
raw,
|
raw,
|
||||||
@@ -301,18 +297,14 @@ unsafe impl<C: BlockContext> RefCountable for BasicBlock<C> {
|
|||||||
impl<C: BlockContext> CoreArrayProvider for BasicBlock<C> {
|
impl<C: BlockContext> CoreArrayProvider for BasicBlock<C> {
|
||||||
type Raw = *mut BNBasicBlock;
|
type Raw = *mut BNBasicBlock;
|
||||||
type Context = C;
|
type Context = C;
|
||||||
|
type Wrapped<'a> = Guard<'a, BasicBlock<C>> where C: 'a;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl<C: BlockContext> CoreOwnedArrayProvider for BasicBlock<C> {
|
unsafe impl<C: BlockContext> CoreArrayProviderInner for BasicBlock<C> {
|
||||||
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
BNFreeBasicBlockList(raw, count);
|
BNFreeBasicBlockList(raw, count);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
|
||||||
unsafe impl<'a, C: 'a + BlockContext> CoreArrayWrapper<'a> for BasicBlock<C> {
|
|
||||||
type Wrapped = Guard<'a, BasicBlock<C>>;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped {
|
|
||||||
Guard::new(BasicBlock::from_raw(*raw, context.clone()), context)
|
Guard::new(BasicBlock::from_raw(*raw, context.clone()), context)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -23,24 +23,20 @@ pub use binaryninjacore_sys::BNAnalysisState as AnalysisState;
|
|||||||
pub use binaryninjacore_sys::BNModificationStatus as ModificationStatus;
|
pub use binaryninjacore_sys::BNModificationStatus as ModificationStatus;
|
||||||
|
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::ffi::c_void;
|
use std::ffi::{c_char, c_void};
|
||||||
use std::ops::Range;
|
use std::ops::Range;
|
||||||
use std::os::raw::c_char;
|
use std::{ops, ptr, result, slice};
|
||||||
use std::ptr;
|
|
||||||
use std::result;
|
|
||||||
use std::{ops, slice};
|
|
||||||
|
|
||||||
use crate::architecture::Architecture;
|
use crate::architecture::{Architecture, CoreArchitecture};
|
||||||
use crate::architecture::CoreArchitecture;
|
|
||||||
use crate::basicblock::BasicBlock;
|
use crate::basicblock::BasicBlock;
|
||||||
|
use crate::component::{Component, ComponentBuilder, IntoComponentGuid};
|
||||||
use crate::databuffer::DataBuffer;
|
use crate::databuffer::DataBuffer;
|
||||||
use crate::debuginfo::DebugInfo;
|
use crate::debuginfo::DebugInfo;
|
||||||
use crate::fileaccessor::FileAccessor;
|
use crate::fileaccessor::FileAccessor;
|
||||||
use crate::filemetadata::FileMetadata;
|
use crate::filemetadata::FileMetadata;
|
||||||
use crate::flowgraph::FlowGraph;
|
use crate::flowgraph::FlowGraph;
|
||||||
use crate::function::{Function, NativeBlock};
|
use crate::function::{Function, NativeBlock};
|
||||||
use crate::linearview::LinearDisassemblyLine;
|
use crate::linearview::{LinearDisassemblyLine, LinearViewCursor};
|
||||||
use crate::linearview::LinearViewCursor;
|
|
||||||
use crate::metadata::Metadata;
|
use crate::metadata::Metadata;
|
||||||
use crate::platform::Platform;
|
use crate::platform::Platform;
|
||||||
use crate::relocation::Relocation;
|
use crate::relocation::Relocation;
|
||||||
@@ -49,7 +45,10 @@ use crate::segment::{Segment, SegmentBuilder};
|
|||||||
use crate::settings::Settings;
|
use crate::settings::Settings;
|
||||||
use crate::symbol::{Symbol, SymbolType};
|
use crate::symbol::{Symbol, SymbolType};
|
||||||
use crate::tags::{Tag, TagType};
|
use crate::tags::{Tag, TagType};
|
||||||
use crate::types::{DataVariable, NamedTypeReference, QualifiedName, QualifiedNameAndType, Type};
|
use crate::typelibrary::TypeLibrary;
|
||||||
|
use crate::types::{
|
||||||
|
Conf, DataVariable, NamedTypeReference, QualifiedName, QualifiedNameAndType, Type,
|
||||||
|
};
|
||||||
use crate::Endianness;
|
use crate::Endianness;
|
||||||
|
|
||||||
use crate::rc::*;
|
use crate::rc::*;
|
||||||
@@ -225,18 +224,10 @@ pub trait BinaryViewExt: BinaryViewBase {
|
|||||||
|
|
||||||
/// Reads up to `len` bytes from address `offset`
|
/// Reads up to `len` bytes from address `offset`
|
||||||
fn read_vec(&self, offset: u64, len: usize) -> Vec<u8> {
|
fn read_vec(&self, offset: u64, len: usize) -> Vec<u8> {
|
||||||
let mut ret = Vec::with_capacity(len);
|
let mut ret = vec![0; len];
|
||||||
|
|
||||||
unsafe {
|
let size = self.read(&mut ret, offset);
|
||||||
let res;
|
ret.truncate(size);
|
||||||
|
|
||||||
{
|
|
||||||
let dest_slice = ret.get_unchecked_mut(0..len);
|
|
||||||
res = self.read(dest_slice, offset);
|
|
||||||
}
|
|
||||||
|
|
||||||
ret.set_len(res);
|
|
||||||
}
|
|
||||||
|
|
||||||
ret
|
ret
|
||||||
}
|
}
|
||||||
@@ -244,26 +235,10 @@ pub trait BinaryViewExt: BinaryViewBase {
|
|||||||
/// Appends up to `len` bytes from address `offset` into `dest`
|
/// Appends up to `len` bytes from address `offset` into `dest`
|
||||||
fn read_into_vec(&self, dest: &mut Vec<u8>, offset: u64, len: usize) -> usize {
|
fn read_into_vec(&self, dest: &mut Vec<u8>, offset: u64, len: usize) -> usize {
|
||||||
let starting_len = dest.len();
|
let starting_len = dest.len();
|
||||||
let space = dest.capacity() - starting_len;
|
dest.resize(starting_len + len, 0);
|
||||||
|
let read_size = self.read(&mut dest[starting_len..], offset);
|
||||||
if space < len {
|
dest.truncate(starting_len + read_size);
|
||||||
dest.reserve(len - space);
|
read_size
|
||||||
}
|
|
||||||
|
|
||||||
unsafe {
|
|
||||||
let res;
|
|
||||||
|
|
||||||
{
|
|
||||||
let dest_slice = dest.get_unchecked_mut(starting_len..starting_len + len);
|
|
||||||
res = self.read(dest_slice, offset);
|
|
||||||
}
|
|
||||||
|
|
||||||
if res > 0 {
|
|
||||||
dest.set_len(starting_len + res);
|
|
||||||
}
|
|
||||||
|
|
||||||
res
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn notify_data_written(&self, offset: u64, len: usize) {
|
fn notify_data_written(&self, offset: u64, len: usize) {
|
||||||
@@ -292,6 +267,18 @@ pub trait BinaryViewExt: BinaryViewBase {
|
|||||||
unsafe { BNIsOffsetWritableSemantics(self.as_ref().handle, offset) }
|
unsafe { BNIsOffsetWritableSemantics(self.as_ref().handle, offset) }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn original_image_base(&self) -> u64 {
|
||||||
|
unsafe {
|
||||||
|
BNGetOriginalImageBase(self.as_ref().handle)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_original_image_base(&self, image_base: u64) {
|
||||||
|
unsafe {
|
||||||
|
BNSetOriginalImageBase(self.as_ref().handle, image_base)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn end(&self) -> u64 {
|
fn end(&self) -> u64 {
|
||||||
unsafe { BNGetEndOffset(self.as_ref().handle) }
|
unsafe { BNGetEndOffset(self.as_ref().handle) }
|
||||||
}
|
}
|
||||||
@@ -574,16 +561,27 @@ pub trait BinaryViewExt: BinaryViewBase {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn define_auto_data_var(&self, dv: DataVariable) {
|
fn data_variable_at_address(&self, addr: u64) -> Option<Ref<DataVariable>> {
|
||||||
|
let dv = BNDataVariable::default();
|
||||||
unsafe {
|
unsafe {
|
||||||
BNDefineDataVariable(self.as_ref().handle, dv.address, &mut dv.t.into());
|
if BNGetDataVariableAtAddress(self.as_ref().handle, addr, std::mem::transmute(&dv)) {
|
||||||
|
Some(DataVariable(dv).to_owned())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn define_auto_data_var<'a, T: Into<Conf<&'a Type>>>(&self, addr: u64, ty: T) {
|
||||||
|
unsafe {
|
||||||
|
BNDefineDataVariable(self.as_ref().handle, addr, &mut ty.into().into());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// You likely would also like to call [`Self::define_user_symbol`] to bind this data variable with a name
|
/// You likely would also like to call [`Self::define_user_symbol`] to bind this data variable with a name
|
||||||
fn define_user_data_var(&self, dv: DataVariable) {
|
fn define_user_data_var<'a, T: Into<Conf<&'a Type>>>(&self, addr: u64, ty: T) {
|
||||||
unsafe {
|
unsafe {
|
||||||
BNDefineUserDataVariable(self.as_ref().handle, dv.address, &mut dv.t.into());
|
BNDefineUserDataVariable(self.as_ref().handle, addr, &mut ty.into().into());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -674,7 +672,7 @@ pub trait BinaryViewExt: BinaryViewBase {
|
|||||||
let name_array = unsafe { Array::<QualifiedName>::new(result_names, result_count, ()) };
|
let name_array = unsafe { Array::<QualifiedName>::new(result_names, result_count, ()) };
|
||||||
|
|
||||||
for (id, name) in id_array.iter().zip(name_array.iter()) {
|
for (id, name) in id_array.iter().zip(name_array.iter()) {
|
||||||
result.insert(id.as_str().to_owned(), name.clone());
|
result.insert(id.to_owned(), name.clone());
|
||||||
}
|
}
|
||||||
|
|
||||||
result
|
result
|
||||||
@@ -964,6 +962,15 @@ pub trait BinaryViewExt: BinaryViewBase {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn entry_point_functions(&self) -> Array<Function> {
|
||||||
|
unsafe {
|
||||||
|
let mut count = 0;
|
||||||
|
let functions = BNGetAllEntryFunctions(self.as_ref().handle, &mut count);
|
||||||
|
|
||||||
|
Array::new(functions, count, ())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn functions(&self) -> Array<Function> {
|
fn functions(&self) -> Array<Function> {
|
||||||
unsafe {
|
unsafe {
|
||||||
let mut count = 0;
|
let mut count = 0;
|
||||||
@@ -1056,7 +1063,7 @@ pub trait BinaryViewExt: BinaryViewBase {
|
|||||||
unsafe { BNApplyDebugInfo(self.as_ref().handle, debug_info.handle) }
|
unsafe { BNApplyDebugInfo(self.as_ref().handle, debug_info.handle) }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn show_graph_report<S: BnStrCompatible>(&self, raw_name: S, graph: FlowGraph) {
|
fn show_graph_report<S: BnStrCompatible>(&self, raw_name: S, graph: &FlowGraph) {
|
||||||
let raw_name = raw_name.into_bytes_with_nul();
|
let raw_name = raw_name.into_bytes_with_nul();
|
||||||
unsafe {
|
unsafe {
|
||||||
BNShowGraphReport(
|
BNShowGraphReport(
|
||||||
@@ -1363,6 +1370,261 @@ pub trait BinaryViewExt: BinaryViewBase {
|
|||||||
Array::new(handle, count, ())
|
Array::new(handle, count, ())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn component_by_guid<S: BnStrCompatible>(&self, guid: S) -> Option<Component> {
|
||||||
|
let name = guid.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetComponentByGuid(
|
||||||
|
self.as_ref().handle,
|
||||||
|
name.as_ref().as_ptr() as *const core::ffi::c_char,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
core::ptr::NonNull::new(result).map(|h| unsafe { Component::from_raw(h) })
|
||||||
|
}
|
||||||
|
|
||||||
|
fn root_component(&self) -> Option<Component> {
|
||||||
|
let result = unsafe { BNGetRootComponent(self.as_ref().handle) };
|
||||||
|
core::ptr::NonNull::new(result).map(|h| unsafe { Component::from_raw(h) })
|
||||||
|
}
|
||||||
|
|
||||||
|
fn component_builder(&self) -> ComponentBuilder {
|
||||||
|
ComponentBuilder::new_from_raw(self.as_ref().handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn component_by_path<P: BnStrCompatible>(&self, path: P) -> Option<Component> {
|
||||||
|
let path = path.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetComponentByPath(
|
||||||
|
self.as_ref().handle,
|
||||||
|
path.as_ref().as_ptr() as *const core::ffi::c_char,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
core::ptr::NonNull::new(result).map(|h| unsafe { Component::from_raw(h) })
|
||||||
|
}
|
||||||
|
|
||||||
|
fn remove_component(&self, component: &Component) -> bool {
|
||||||
|
unsafe { BNRemoveComponent(self.as_ref().handle, component.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn remove_component_by_guid<P: IntoComponentGuid>(&self, guid: P) -> bool {
|
||||||
|
let path = guid.component_guid();
|
||||||
|
unsafe { BNRemoveComponentByGuid(self.as_ref().handle, path.as_ptr()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn data_variable_parent_components(
|
||||||
|
&self,
|
||||||
|
data_variable: &DataVariable,
|
||||||
|
) -> Array<Component> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetDataVariableParentComponents(
|
||||||
|
self.as_ref().handle,
|
||||||
|
data_variable.address(),
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Make the contents of a type library available for type/import resolution
|
||||||
|
fn add_type_library(&self, library: &TypeLibrary) {
|
||||||
|
unsafe { BNAddBinaryViewTypeLibrary(self.as_ref().handle, library.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn type_library_by_name<S: BnStrCompatible>(&self, name: S) -> Option<TypeLibrary> {
|
||||||
|
let name = name.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetBinaryViewTypeLibrary(
|
||||||
|
self.as_ref().handle,
|
||||||
|
name.as_ref().as_ptr() as *const core::ffi::c_char,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
core::ptr::NonNull::new(result).map(|h| unsafe { TypeLibrary::from_raw(h) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Should be called by custom py:py:class:`BinaryView` implementations
|
||||||
|
/// when they have successfully imported an object from a type library (eg a symbol's type).
|
||||||
|
/// Values recorded with this function will then be queryable via [BinaryViewExt::lookup_imported_object_library].
|
||||||
|
///
|
||||||
|
/// * `lib` - Type Library containing the imported type
|
||||||
|
/// * `name` - Name of the object in the type library
|
||||||
|
/// * `addr` - address of symbol at import site
|
||||||
|
/// * `platform` - Platform of symbol at import site
|
||||||
|
fn record_imported_object_library(
|
||||||
|
&self,
|
||||||
|
lib: &TypeLibrary,
|
||||||
|
name: &QualifiedName,
|
||||||
|
addr: u64,
|
||||||
|
platform: &Platform,
|
||||||
|
) {
|
||||||
|
unsafe {
|
||||||
|
BNBinaryViewRecordImportedObjectLibrary(
|
||||||
|
self.as_ref().handle,
|
||||||
|
platform.handle,
|
||||||
|
addr,
|
||||||
|
lib.as_raw(),
|
||||||
|
&name.0 as *const _ as *mut _,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Recursively imports a type from the specified type library, or, if
|
||||||
|
/// no library was explicitly provided, the first type library associated with the current [BinaryView]
|
||||||
|
/// that provides the name requested.
|
||||||
|
///
|
||||||
|
/// This may have the impact of loading other type libraries as dependencies on other type libraries are lazily resolved
|
||||||
|
/// when references to types provided by them are first encountered.
|
||||||
|
///
|
||||||
|
/// Note that the name actually inserted into the view may not match the name as it exists in the type library in
|
||||||
|
/// the event of a name conflict. To aid in this, the [Type] object returned is a `NamedTypeReference` to
|
||||||
|
/// the deconflicted name used.
|
||||||
|
fn import_type_library(
|
||||||
|
&self,
|
||||||
|
name: &QualifiedName,
|
||||||
|
mut lib: Option<TypeLibrary>,
|
||||||
|
) -> Option<Ref<Type>> {
|
||||||
|
let mut lib_ref = lib
|
||||||
|
.as_mut()
|
||||||
|
.map(|l| unsafe { l.as_raw() } as *mut _)
|
||||||
|
.unwrap_or(ptr::null_mut());
|
||||||
|
let result = unsafe {
|
||||||
|
BNBinaryViewImportTypeLibraryType(
|
||||||
|
self.as_ref().handle,
|
||||||
|
&mut lib_ref,
|
||||||
|
&name.0 as *const _ as *mut _,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
(!result.is_null()).then(|| unsafe { Type::ref_from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Recursively imports an object from the specified type library, or, if
|
||||||
|
/// no library was explicitly provided, the first type library associated with the current [BinaryView]
|
||||||
|
/// that provides the name requested.
|
||||||
|
///
|
||||||
|
/// This may have the impact of loading other type libraries as dependencies on other type libraries are lazily resolved
|
||||||
|
/// when references to types provided by them are first encountered.
|
||||||
|
///
|
||||||
|
/// .. note:: If you are implementing a custom BinaryView and use this method to import object types,
|
||||||
|
/// you should then call [BinaryViewExt::record_imported_object_library] with the details of where the object is located.
|
||||||
|
fn import_type_object(
|
||||||
|
&self,
|
||||||
|
name: &QualifiedName,
|
||||||
|
mut lib: Option<TypeLibrary>,
|
||||||
|
) -> Option<Ref<Type>> {
|
||||||
|
let mut lib_ref = lib
|
||||||
|
.as_mut()
|
||||||
|
.map(|l| unsafe { l.as_raw() } as *mut _)
|
||||||
|
.unwrap_or(ptr::null_mut());
|
||||||
|
let result = unsafe {
|
||||||
|
BNBinaryViewImportTypeLibraryObject(
|
||||||
|
self.as_ref().handle,
|
||||||
|
&mut lib_ref,
|
||||||
|
&name.0 as *const _ as *mut _,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
(!result.is_null()).then(|| unsafe { Type::ref_from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Recursively imports a type interface given its GUID.
|
||||||
|
///
|
||||||
|
/// .. note:: To support this type of lookup a type library must have
|
||||||
|
/// contain a metadata key called "type_guids" which is a map
|
||||||
|
/// Dict[string_guid, string_type_name] or
|
||||||
|
/// Dict[string_guid, Tuple[string_type_name, type_library_name]]
|
||||||
|
fn import_type_by_guid<S: BnStrCompatible>(&self, guid: S) -> Option<Ref<Type>> {
|
||||||
|
let guid = guid.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNBinaryViewImportTypeLibraryTypeByGuid(
|
||||||
|
self.as_ref().handle,
|
||||||
|
guid.as_ref().as_ptr() as *const c_char,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
(!result.is_null()).then(|| unsafe { Type::ref_from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Recursively exports `type_obj` into `lib` as a type with name `name`
|
||||||
|
///
|
||||||
|
/// As other referenced types are encountered, they are either copied into the destination type library or
|
||||||
|
/// else the type library that provided the referenced type is added as a dependency for the destination library.
|
||||||
|
fn export_type_to_library(&self, lib: &TypeLibrary, name: &QualifiedName, type_obj: &Type) {
|
||||||
|
unsafe {
|
||||||
|
BNBinaryViewExportTypeToTypeLibrary(
|
||||||
|
self.as_ref().handle,
|
||||||
|
lib.as_raw(),
|
||||||
|
&name.0 as *const _ as *mut _,
|
||||||
|
type_obj.handle,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Recursively exports `type_obj` into `lib` as a type with name `name`
|
||||||
|
///
|
||||||
|
/// As other referenced types are encountered, they are either copied into the destination type library or
|
||||||
|
/// else the type library that provided the referenced type is added as a dependency for the destination library.
|
||||||
|
fn export_object_to_library(&self, lib: &TypeLibrary, name: &QualifiedName, type_obj: &Type) {
|
||||||
|
unsafe {
|
||||||
|
BNBinaryViewExportObjectToTypeLibrary(
|
||||||
|
self.as_ref().handle,
|
||||||
|
lib.as_raw(),
|
||||||
|
&name.0 as *const _ as *mut _,
|
||||||
|
type_obj.handle,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Gives you details of which type library and name was used to determine
|
||||||
|
/// the type of a symbol at a given address
|
||||||
|
///
|
||||||
|
/// * `addr` - address of symbol at import site
|
||||||
|
/// * `platform` - Platform of symbol at import site
|
||||||
|
fn lookup_imported_object_library(
|
||||||
|
&self,
|
||||||
|
addr: u64,
|
||||||
|
platform: &Platform,
|
||||||
|
) -> Option<(TypeLibrary, QualifiedName)> {
|
||||||
|
let mut result_lib = ptr::null_mut();
|
||||||
|
let mut result_name = Default::default();
|
||||||
|
let success = unsafe {
|
||||||
|
BNBinaryViewLookupImportedObjectLibrary(
|
||||||
|
self.as_ref().handle,
|
||||||
|
platform.handle,
|
||||||
|
addr,
|
||||||
|
&mut result_lib,
|
||||||
|
&mut result_name,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
if !success {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
let lib = unsafe { TypeLibrary::from_raw(ptr::NonNull::new(result_lib)?) };
|
||||||
|
let name = QualifiedName(result_name);
|
||||||
|
Some((lib, name))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Gives you details of from which type library and name a given type in the analysis was imported.
|
||||||
|
///
|
||||||
|
/// * `name` - Name of type in analysis
|
||||||
|
fn lookup_imported_type_library(
|
||||||
|
&self,
|
||||||
|
name: &QualifiedNameAndType,
|
||||||
|
) -> Option<(TypeLibrary, QualifiedName)> {
|
||||||
|
let mut result_lib = ptr::null_mut();
|
||||||
|
let mut result_name = Default::default();
|
||||||
|
let success = unsafe {
|
||||||
|
BNBinaryViewLookupImportedTypeLibrary(
|
||||||
|
self.as_ref().handle,
|
||||||
|
&name.0 as *const _ as *mut _,
|
||||||
|
&mut result_lib,
|
||||||
|
&mut result_name,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
if !success {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
let lib = unsafe { TypeLibrary::from_raw(ptr::NonNull::new(result_lib)?) };
|
||||||
|
let name = QualifiedName(result_name);
|
||||||
|
Some((lib, name))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: BinaryViewBase> BinaryViewExt for T {}
|
impl<T: BinaryViewBase> BinaryViewExt for T {}
|
||||||
@@ -1545,7 +1807,7 @@ pub type BinaryViewEventType = BNBinaryViewEventType;
|
|||||||
///
|
///
|
||||||
/// # Example
|
/// # Example
|
||||||
///
|
///
|
||||||
/// ```rust
|
/// ```no_run
|
||||||
/// use binaryninja::binaryview::{BinaryView, BinaryViewEventHandler, BinaryViewEventType, register_binary_view_event};
|
/// use binaryninja::binaryview::{BinaryView, BinaryViewEventHandler, BinaryViewEventType, register_binary_view_event};
|
||||||
///
|
///
|
||||||
/// struct EventHandlerContext {
|
/// struct EventHandlerContext {
|
||||||
@@ -1553,7 +1815,7 @@ pub type BinaryViewEventType = BNBinaryViewEventType;
|
|||||||
/// }
|
/// }
|
||||||
///
|
///
|
||||||
/// impl BinaryViewEventHandler for EventHandlerContext {
|
/// impl BinaryViewEventHandler for EventHandlerContext {
|
||||||
/// fn on_event(&mut self, binary_view: &BinaryView) {
|
/// fn on_event(&self, binary_view: &BinaryView) {
|
||||||
/// // handle event
|
/// // handle event
|
||||||
/// }
|
/// }
|
||||||
/// }
|
/// }
|
||||||
@@ -1576,11 +1838,9 @@ where
|
|||||||
ctx: *mut ::std::os::raw::c_void,
|
ctx: *mut ::std::os::raw::c_void,
|
||||||
view: *mut BNBinaryView,
|
view: *mut BNBinaryView,
|
||||||
) {
|
) {
|
||||||
ffi_wrap!("EventHandler::on_event", unsafe {
|
ffi_wrap!("EventHandler::on_event", {
|
||||||
let mut context = &mut *(ctx as *mut Handler);
|
let context = unsafe { &*(ctx as *const Handler) };
|
||||||
|
context.on_event(&BinaryView::from_raw(BNNewViewReference(view)));
|
||||||
let handle = BinaryView::from_raw(BNNewViewReference(view));
|
|
||||||
Handler::on_event(&mut context, handle.as_ref());
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ use binaryninjacore_sys::*;
|
|||||||
|
|
||||||
use crate::architecture::{Architecture, ArchitectureExt, CoreArchitecture, Register};
|
use crate::architecture::{Architecture, ArchitectureExt, CoreArchitecture, Register};
|
||||||
use crate::rc::{
|
use crate::rc::{
|
||||||
CoreArrayProvider, CoreArrayWrapper, CoreOwnedArrayProvider, Guard, Ref, RefCountable,
|
CoreArrayProvider, CoreArrayProviderInner, Guard, Ref, RefCountable,
|
||||||
};
|
};
|
||||||
use crate::string::*;
|
use crate::string::*;
|
||||||
|
|
||||||
@@ -89,23 +89,15 @@ where
|
|||||||
*count = len;
|
*count = len;
|
||||||
|
|
||||||
if len == 0 {
|
if len == 0 {
|
||||||
ptr::null_mut()
|
return ptr::null_mut();
|
||||||
} else {
|
|
||||||
let mut res = Vec::with_capacity(len + 1);
|
|
||||||
|
|
||||||
res.push(len as u32);
|
|
||||||
|
|
||||||
for i in items {
|
|
||||||
res.push(i);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
assert!(res.len() == len + 1);
|
let res: Box<[_]> = [len as u32].into_iter().chain(items).collect();
|
||||||
|
debug_assert!(res.len() == len + 1);
|
||||||
|
|
||||||
let raw = res.as_mut_ptr();
|
// it's free on the function below: `cb_free_register_list`
|
||||||
mem::forget(res);
|
let raw = Box::leak(res);
|
||||||
|
&mut raw[1]
|
||||||
unsafe { raw.offset(1) }
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
extern "C" fn cb_free_register_list(_ctxt: *mut c_void, regs: *mut u32) {
|
extern "C" fn cb_free_register_list(_ctxt: *mut c_void, regs: *mut u32) {
|
||||||
@@ -115,8 +107,8 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
let actual_start = regs.offset(-1);
|
let actual_start = regs.offset(-1);
|
||||||
let len = *actual_start + 1;
|
let len = (*actual_start) + 1;
|
||||||
let _regs = Vec::from_raw_parts(actual_start, len as usize, len as usize);
|
let _regs = Box::from_raw(ptr::slice_from_raw_parts_mut(actual_start, len as usize));
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -136,7 +128,7 @@ where
|
|||||||
where
|
where
|
||||||
C: CallingConventionBase,
|
C: CallingConventionBase,
|
||||||
{
|
{
|
||||||
ffi_wrap!("CallingConvention::_callee_saved_registers", unsafe {
|
ffi_wrap!("CallingConvention::callee_saved_registers", unsafe {
|
||||||
let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
|
let ctxt = &*(ctxt as *mut CustomCallingConventionContext<C>);
|
||||||
let regs = ctxt.cc.callee_saved_registers();
|
let regs = ctxt.cc.callee_saved_registers();
|
||||||
|
|
||||||
@@ -448,24 +440,21 @@ impl<A: Architecture> CallingConvention<A> {
|
|||||||
unsafe { BnString::from_raw(BNGetCallingConventionName(self.handle)) }
|
unsafe { BnString::from_raw(BNGetCallingConventionName(self.handle)) }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn variables_for_parameters<S: Clone + BnStrCompatible>(
|
pub fn variables_for_parameters(
|
||||||
&self,
|
&self,
|
||||||
params: &[FunctionParameter<S>],
|
params: &[FunctionParameter],
|
||||||
permitted_registers: Option<&[A::Register]>,
|
permitted_registers: Option<&[A::Register]>,
|
||||||
) -> Vec<Variable> {
|
) -> Vec<Variable> {
|
||||||
let mut bn_params: Vec<BNFunctionParameter> = vec![];
|
let mut bn_params: Vec<BNFunctionParameter> = vec![];
|
||||||
let mut name_strings = vec![];
|
let name_strings = params.iter().map(|parameter| ¶meter.name);
|
||||||
|
|
||||||
for parameter in params.iter() {
|
for (parameter, raw_name) in params.iter().zip(name_strings) {
|
||||||
name_strings.push(parameter.name.clone().into_bytes_with_nul());
|
|
||||||
}
|
|
||||||
for (parameter, raw_name) in params.iter().zip(name_strings.iter_mut()) {
|
|
||||||
let location = match ¶meter.location {
|
let location = match ¶meter.location {
|
||||||
Some(location) => location.raw(),
|
Some(location) => location.raw(),
|
||||||
None => unsafe { mem::zeroed() },
|
None => unsafe { mem::zeroed() },
|
||||||
};
|
};
|
||||||
bn_params.push(BNFunctionParameter {
|
bn_params.push(BNFunctionParameter {
|
||||||
name: raw_name.as_ref().as_ptr() as *mut _,
|
name: BnString::new(raw_name).into_raw(),
|
||||||
type_: parameter.t.contents.handle,
|
type_: parameter.t.contents.handle,
|
||||||
typeConfidence: parameter.t.confidence,
|
typeConfidence: parameter.t.confidence,
|
||||||
defaultLocation: parameter.location.is_none(),
|
defaultLocation: parameter.location.is_none(),
|
||||||
@@ -501,9 +490,6 @@ impl<A: Architecture> CallingConvention<A> {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Gotta keep this around so the pointers are valid during the call
|
|
||||||
drop(name_strings);
|
|
||||||
|
|
||||||
let vars_slice = unsafe { slice::from_raw_parts(vars, count) };
|
let vars_slice = unsafe { slice::from_raw_parts(vars, count) };
|
||||||
let mut result = vec![];
|
let mut result = vec![];
|
||||||
for var in vars_slice {
|
for var in vars_slice {
|
||||||
@@ -575,11 +561,43 @@ impl<A: Architecture> CallingConventionBase for CallingConvention<A> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn int_arg_registers(&self) -> Vec<A::Register> {
|
fn int_arg_registers(&self) -> Vec<A::Register> {
|
||||||
Vec::new()
|
unsafe {
|
||||||
|
let mut count = 0;
|
||||||
|
let regs = BNGetIntegerArgumentRegisters(self.handle, &mut count);
|
||||||
|
let arch = self.arch_handle.borrow();
|
||||||
|
|
||||||
|
let res = slice::from_raw_parts(regs, count)
|
||||||
|
.iter()
|
||||||
|
.map(|&r| {
|
||||||
|
arch.register_from_id(r)
|
||||||
|
.expect("bad reg id from CallingConvention")
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
BNFreeRegisterList(regs);
|
||||||
|
|
||||||
|
res
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn float_arg_registers(&self) -> Vec<A::Register> {
|
fn float_arg_registers(&self) -> Vec<A::Register> {
|
||||||
Vec::new()
|
unsafe {
|
||||||
|
let mut count = 0;
|
||||||
|
let regs = BNGetFloatArgumentRegisters(self.handle, &mut count);
|
||||||
|
let arch = self.arch_handle.borrow();
|
||||||
|
|
||||||
|
let res = slice::from_raw_parts(regs, count)
|
||||||
|
.iter()
|
||||||
|
.map(|&r| {
|
||||||
|
arch.register_from_id(r)
|
||||||
|
.expect("bad reg id from CallingConvention")
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
BNFreeRegisterList(regs);
|
||||||
|
|
||||||
|
res
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn arg_registers_shared_index(&self) -> bool {
|
fn arg_registers_shared_index(&self) -> bool {
|
||||||
@@ -660,18 +678,14 @@ unsafe impl<A: Architecture> RefCountable for CallingConvention<A> {
|
|||||||
impl<A: Architecture> CoreArrayProvider for CallingConvention<A> {
|
impl<A: Architecture> CoreArrayProvider for CallingConvention<A> {
|
||||||
type Raw = *mut BNCallingConvention;
|
type Raw = *mut BNCallingConvention;
|
||||||
type Context = A::Handle;
|
type Context = A::Handle;
|
||||||
|
type Wrapped<'a> = Guard<'a, CallingConvention<A>>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl<A: Architecture> CoreOwnedArrayProvider for CallingConvention<A> {
|
unsafe impl<A: Architecture> CoreArrayProviderInner for CallingConvention<A> {
|
||||||
unsafe fn free(raw: *mut *mut BNCallingConvention, count: usize, _content: &Self::Context) {
|
unsafe fn free(raw: *mut *mut BNCallingConvention, count: usize, _content: &Self::Context) {
|
||||||
BNFreeCallingConventionList(raw, count);
|
BNFreeCallingConventionList(raw, count);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
|
||||||
unsafe impl<'a, A: Architecture> CoreArrayWrapper<'a> for CallingConvention<A> {
|
|
||||||
type Wrapped = Guard<'a, CallingConvention<A>>;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped {
|
|
||||||
Guard::new(
|
Guard::new(
|
||||||
CallingConvention {
|
CallingConvention {
|
||||||
handle: *raw,
|
handle: *raw,
|
||||||
@@ -691,7 +705,7 @@ impl Debug for CallingConvention<CoreArchitecture> {
|
|||||||
|
|
||||||
pub struct ConventionBuilder<A: Architecture> {
|
pub struct ConventionBuilder<A: Architecture> {
|
||||||
caller_saved_registers: Vec<A::Register>,
|
caller_saved_registers: Vec<A::Register>,
|
||||||
_callee_saved_registers: Vec<A::Register>,
|
callee_saved_registers: Vec<A::Register>,
|
||||||
int_arg_registers: Vec<A::Register>,
|
int_arg_registers: Vec<A::Register>,
|
||||||
float_arg_registers: Vec<A::Register>,
|
float_arg_registers: Vec<A::Register>,
|
||||||
|
|
||||||
@@ -760,7 +774,7 @@ impl<A: Architecture> ConventionBuilder<A> {
|
|||||||
pub fn new(arch: &A) -> Self {
|
pub fn new(arch: &A) -> Self {
|
||||||
Self {
|
Self {
|
||||||
caller_saved_registers: Vec::new(),
|
caller_saved_registers: Vec::new(),
|
||||||
_callee_saved_registers: Vec::new(),
|
callee_saved_registers: Vec::new(),
|
||||||
int_arg_registers: Vec::new(),
|
int_arg_registers: Vec::new(),
|
||||||
float_arg_registers: Vec::new(),
|
float_arg_registers: Vec::new(),
|
||||||
|
|
||||||
@@ -785,7 +799,7 @@ impl<A: Architecture> ConventionBuilder<A> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
reg_list!(caller_saved_registers);
|
reg_list!(caller_saved_registers);
|
||||||
reg_list!(_callee_saved_registers);
|
reg_list!(callee_saved_registers);
|
||||||
reg_list!(int_arg_registers);
|
reg_list!(int_arg_registers);
|
||||||
reg_list!(float_arg_registers);
|
reg_list!(float_arg_registers);
|
||||||
|
|
||||||
@@ -819,7 +833,7 @@ impl<A: Architecture> CallingConventionBase for ConventionBuilder<A> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn callee_saved_registers(&self) -> Vec<A::Register> {
|
fn callee_saved_registers(&self) -> Vec<A::Register> {
|
||||||
self.caller_saved_registers.clone()
|
self.callee_saved_registers.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn int_arg_registers(&self) -> Vec<A::Register> {
|
fn int_arg_registers(&self) -> Vec<A::Register> {
|
||||||
|
|||||||
@@ -16,12 +16,16 @@
|
|||||||
//!
|
//!
|
||||||
//! All plugins need to provide one of the following functions for Binary Ninja to call:
|
//! All plugins need to provide one of the following functions for Binary Ninja to call:
|
||||||
//!
|
//!
|
||||||
//! ```rust
|
//! ```no_run
|
||||||
//! pub extern "C" fn CorePluginInit() -> bool {}
|
//! pub extern "C" fn CorePluginInit() -> bool {
|
||||||
|
//! todo!();
|
||||||
|
//! }
|
||||||
//! ```
|
//! ```
|
||||||
//!
|
//!
|
||||||
//! ```rust
|
//! ```no_run
|
||||||
//! pub extern "C" fn UIPluginInit() -> bool {}
|
//! pub extern "C" fn UIPluginInit() -> bool {
|
||||||
|
//! todo!();
|
||||||
|
//! }
|
||||||
//! ```
|
//! ```
|
||||||
//!
|
//!
|
||||||
//! Both of these functions can call any of the following registration functions, though `CorePluginInit` is called during Binary Ninja core initialization, and `UIPluginInit` is called during Binary Ninja UI initialization.
|
//! Both of these functions can call any of the following registration functions, though `CorePluginInit` is called during Binary Ninja core initialization, and `UIPluginInit` is called during Binary Ninja UI initialization.
|
||||||
@@ -62,7 +66,9 @@ where
|
|||||||
/// The function call required for generic commands; commands added in this way will be in the `Plugins` submenu of the menu bar.
|
/// The function call required for generic commands; commands added in this way will be in the `Plugins` submenu of the menu bar.
|
||||||
///
|
///
|
||||||
/// # Example
|
/// # Example
|
||||||
/// ```rust
|
/// ```no_run
|
||||||
|
/// # use binaryninja::command::Command;
|
||||||
|
/// # use binaryninja::binaryview::BinaryView;
|
||||||
/// struct MyCommand;
|
/// struct MyCommand;
|
||||||
///
|
///
|
||||||
/// impl Command for MyCommand {
|
/// impl Command for MyCommand {
|
||||||
@@ -76,6 +82,7 @@ where
|
|||||||
/// }
|
/// }
|
||||||
/// }
|
/// }
|
||||||
///
|
///
|
||||||
|
/// # use binaryninja::command::register;
|
||||||
/// #[no_mangle]
|
/// #[no_mangle]
|
||||||
/// pub extern "C" fn CorePluginInit() -> bool {
|
/// pub extern "C" fn CorePluginInit() -> bool {
|
||||||
/// register(
|
/// register(
|
||||||
@@ -160,7 +167,9 @@ where
|
|||||||
/// The function call required for generic commands; commands added in this way will be in the `Plugins` submenu of the menu bar.
|
/// The function call required for generic commands; commands added in this way will be in the `Plugins` submenu of the menu bar.
|
||||||
///
|
///
|
||||||
/// # Example
|
/// # Example
|
||||||
/// ```rust
|
/// ```no_run
|
||||||
|
/// # use binaryninja::command::AddressCommand;
|
||||||
|
/// # use binaryninja::binaryview::BinaryView;
|
||||||
/// struct MyCommand;
|
/// struct MyCommand;
|
||||||
///
|
///
|
||||||
/// impl AddressCommand for MyCommand {
|
/// impl AddressCommand for MyCommand {
|
||||||
@@ -174,6 +183,7 @@ where
|
|||||||
/// }
|
/// }
|
||||||
/// }
|
/// }
|
||||||
///
|
///
|
||||||
|
/// # use binaryninja::command::register_for_address;
|
||||||
/// #[no_mangle]
|
/// #[no_mangle]
|
||||||
/// pub extern "C" fn CorePluginInit() -> bool {
|
/// pub extern "C" fn CorePluginInit() -> bool {
|
||||||
/// register_for_address(
|
/// register_for_address(
|
||||||
@@ -258,10 +268,13 @@ where
|
|||||||
/// The function call required for generic commands; commands added in this way will be in the `Plugins` submenu of the menu bar.
|
/// The function call required for generic commands; commands added in this way will be in the `Plugins` submenu of the menu bar.
|
||||||
///
|
///
|
||||||
/// # Example
|
/// # Example
|
||||||
/// ```rust
|
/// ```no_run
|
||||||
|
/// # use std::ops::Range;
|
||||||
|
/// # use binaryninja::command::RangeCommand;
|
||||||
|
/// # use binaryninja::binaryview::BinaryView;
|
||||||
/// struct MyCommand;
|
/// struct MyCommand;
|
||||||
///
|
///
|
||||||
/// impl AddressCommand for MyCommand {
|
/// impl RangeCommand for MyCommand {
|
||||||
/// fn action(&self, view: &BinaryView, range: Range<u64>) {
|
/// fn action(&self, view: &BinaryView, range: Range<u64>) {
|
||||||
/// // Your code here
|
/// // Your code here
|
||||||
/// }
|
/// }
|
||||||
@@ -272,6 +285,7 @@ where
|
|||||||
/// }
|
/// }
|
||||||
/// }
|
/// }
|
||||||
///
|
///
|
||||||
|
/// # use binaryninja::command::register_for_range;
|
||||||
/// #[no_mangle]
|
/// #[no_mangle]
|
||||||
/// pub extern "C" fn CorePluginInit() -> bool {
|
/// pub extern "C" fn CorePluginInit() -> bool {
|
||||||
/// register_for_range(
|
/// register_for_range(
|
||||||
@@ -361,10 +375,14 @@ where
|
|||||||
/// The function call required for generic commands; commands added in this way will be in the `Plugins` submenu of the menu bar.
|
/// The function call required for generic commands; commands added in this way will be in the `Plugins` submenu of the menu bar.
|
||||||
///
|
///
|
||||||
/// # Example
|
/// # Example
|
||||||
/// ```rust
|
/// ```no_run
|
||||||
|
/// # use binaryninja::command::FunctionCommand;
|
||||||
|
/// # use binaryninja::binaryview::BinaryView;
|
||||||
|
/// # use binaryninja::function::Function;
|
||||||
|
/// # use binaryninja::command::register_for_function;
|
||||||
/// struct MyCommand;
|
/// struct MyCommand;
|
||||||
///
|
///
|
||||||
/// impl AddressCommand for MyCommand {
|
/// impl FunctionCommand for MyCommand {
|
||||||
/// fn action(&self, view: &BinaryView, func: &Function) {
|
/// fn action(&self, view: &BinaryView, func: &Function) {
|
||||||
/// // Your code here
|
/// // Your code here
|
||||||
/// }
|
/// }
|
||||||
|
|||||||
295
src/component.rs
Normal file
295
src/component.rs
Normal file
@@ -0,0 +1,295 @@
|
|||||||
|
use core::{ffi, mem, ptr};
|
||||||
|
|
||||||
|
use crate::binaryview::{BinaryView, BinaryViewBase, BinaryViewExt};
|
||||||
|
use crate::function::Function;
|
||||||
|
use crate::rc::{Array, CoreArrayProvider, CoreArrayProviderInner, Ref};
|
||||||
|
use crate::string::{BnStrCompatible, BnString};
|
||||||
|
use crate::types::{ComponentReferencedTypes, DataVariable};
|
||||||
|
|
||||||
|
use binaryninjacore_sys::*;
|
||||||
|
|
||||||
|
pub struct ComponentBuilder {
|
||||||
|
bv: *mut BNBinaryView,
|
||||||
|
parent: Option<BnString>,
|
||||||
|
name: Option<BnString>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ComponentBuilder {
|
||||||
|
pub(crate) fn new_from_raw(bv: *mut BNBinaryView) -> Self {
|
||||||
|
Self {
|
||||||
|
bv,
|
||||||
|
parent: None,
|
||||||
|
name: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn new<I: BinaryViewBase>(bv: &I) -> Self {
|
||||||
|
Self {
|
||||||
|
bv: bv.as_ref().handle,
|
||||||
|
parent: None,
|
||||||
|
name: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parent<G: IntoComponentGuid>(mut self, parent: G) -> Self {
|
||||||
|
self.parent = Some(parent.component_guid());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn name<S: BnStrCompatible>(mut self, name: S) -> Self {
|
||||||
|
self.name = Some(BnString::new(name));
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn finalize(self) -> Component {
|
||||||
|
let result = match (&self.parent, &self.name) {
|
||||||
|
(None, None) => unsafe { BNCreateComponent(self.bv) },
|
||||||
|
(None, Some(name)) => unsafe { BNCreateComponentWithName(self.bv, name.as_ptr()) },
|
||||||
|
(Some(guid), None) => unsafe { BNCreateComponentWithParent(self.bv, guid.as_ptr()) },
|
||||||
|
(Some(guid), Some(name)) => unsafe {
|
||||||
|
BNCreateComponentWithParentAndName(self.bv, guid.as_ptr(), name.as_ptr())
|
||||||
|
},
|
||||||
|
};
|
||||||
|
unsafe { Component::from_raw(ptr::NonNull::new(result).unwrap()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Components are objects that can contain Functions, Data Variables, and other Components.
|
||||||
|
///
|
||||||
|
/// They can be queried for information about the items contained within them.
|
||||||
|
///
|
||||||
|
/// Components have a Guid, which persistent across saves and loads of the database, and should be
|
||||||
|
/// used for retrieving components when such is required and a reference to the Component cannot be held.
|
||||||
|
#[repr(transparent)]
|
||||||
|
pub struct Component {
|
||||||
|
handle: ptr::NonNull<BNComponent>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Component {
|
||||||
|
#[allow(clippy::mut_from_ref)]
|
||||||
|
pub(crate) unsafe fn as_raw(&self) -> &mut BNComponent {
|
||||||
|
&mut *self.handle.as_ptr()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) unsafe fn from_raw(handle: ptr::NonNull<BNComponent>) -> Self {
|
||||||
|
Self { handle }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) unsafe fn ref_from_raw(handle: &*mut BNComponent) -> &Self {
|
||||||
|
assert!(!handle.is_null());
|
||||||
|
mem::transmute(handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn guid(&self) -> BnString {
|
||||||
|
let result = unsafe { BNComponentGetGuid(self.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { BnString::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add function to this component.
|
||||||
|
pub fn add_function(&self, func: &Function) -> bool {
|
||||||
|
unsafe { BNComponentAddFunctionReference(self.as_raw(), func.handle) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check whether this component contains a function.
|
||||||
|
pub fn contains_function(&self, func: &Function) -> bool {
|
||||||
|
unsafe { BNComponentContainsFunction(self.as_raw(), func.handle) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove function from this component.
|
||||||
|
pub fn remove_function(&self, func: &Function) -> bool {
|
||||||
|
unsafe { BNComponentRemoveFunctionReference(self.as_raw(), func.handle) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Move component to this component. This will remove it from the old parent.
|
||||||
|
pub fn add_component(&self, component: &Component) -> bool {
|
||||||
|
unsafe { BNComponentAddComponent(self.as_raw(), component.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check whether this component contains a component.
|
||||||
|
pub fn contains_component(&self, component: &Component) -> bool {
|
||||||
|
unsafe { BNComponentContainsComponent(self.as_raw(), component.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove a component from the current component, moving it to the root.
|
||||||
|
///
|
||||||
|
/// This function has no effect when used from the root component.
|
||||||
|
/// Use `BinaryView.remove_component` to Remove a component from the tree entirely.
|
||||||
|
pub fn remove_component(&self, component: &Component) -> bool {
|
||||||
|
self.view()
|
||||||
|
.unwrap()
|
||||||
|
.root_component()
|
||||||
|
.unwrap()
|
||||||
|
.add_component(component)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add data variable to this component.
|
||||||
|
pub fn add_data_variable(&self, data_variable: &DataVariable) -> bool {
|
||||||
|
unsafe { BNComponentAddDataVariable(self.as_raw(), data_variable.address()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check whether this component contains a data variable.
|
||||||
|
pub fn contains_data_variable(&self, data_variable: &DataVariable) -> bool {
|
||||||
|
unsafe { BNComponentContainsDataVariable(self.as_raw(), data_variable.address()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove data variable from this component.
|
||||||
|
pub fn remove_data_variable(&self, data_variable: &DataVariable) -> bool {
|
||||||
|
unsafe { BNComponentRemoveDataVariable(self.as_raw(), data_variable.address()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Original name of the component
|
||||||
|
pub fn display_name(&self) -> BnString {
|
||||||
|
let result = unsafe { BNComponentGetDisplayName(self.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { BnString::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Original name set for this component
|
||||||
|
|
||||||
|
/// :note: The `.display_name` property should be used for `bv.get_component_by_path()` lookups.
|
||||||
|
|
||||||
|
/// This can differ from the .display_name property if one of its sibling components has the same .original_name; In that
|
||||||
|
/// case, .name will be an automatically generated unique name (e.g. "MyComponentName (1)") while .original_name will
|
||||||
|
/// remain what was originally set (e.g. "MyComponentName")
|
||||||
|
|
||||||
|
/// If this component has a duplicate name and is moved to a component where none of its siblings share its name,
|
||||||
|
/// .name will return the original "MyComponentName"
|
||||||
|
pub fn name(&self) -> BnString {
|
||||||
|
let result = unsafe { BNComponentGetOriginalName(self.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { BnString::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_name<S: BnStrCompatible>(&self, name: S) {
|
||||||
|
let name = name.into_bytes_with_nul();
|
||||||
|
unsafe { BNComponentSetName(self.as_raw(), name.as_ref().as_ptr() as *const ffi::c_char) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The component that contains this component, if it exists.
|
||||||
|
pub fn parent(&self) -> Option<Component> {
|
||||||
|
let result = unsafe { BNComponentGetParent(self.as_raw()) };
|
||||||
|
ptr::NonNull::new(result).map(|h| unsafe { Self::from_raw(h) })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn view(&self) -> Option<Ref<BinaryView>> {
|
||||||
|
let result = unsafe { BNComponentGetView(self.as_raw()) };
|
||||||
|
(!result.is_null()).then(|| unsafe { BinaryView::from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Is an iterator for all Components contained within this Component
|
||||||
|
pub fn components(&self) -> Array<Component> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNComponentGetContainedComponents(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List of all Functions contained within this Component
|
||||||
|
pub fn functions(&self) -> Array<Function> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNComponentGetContainedFunctions(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List of all Data Variables contained within this Component
|
||||||
|
pub fn data_variables(&self) -> Array<DataVariable> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNComponentGetContainedDataVariables(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get data variables referenced by this component
|
||||||
|
///
|
||||||
|
/// * `recursive` - Get all DataVariables referenced by this component and subcomponents.
|
||||||
|
pub fn get_referenced_data_variables(&self, recursive: bool) -> Array<DataVariable> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = if recursive {
|
||||||
|
unsafe { BNComponentGetReferencedDataVariablesRecursive(self.as_raw(), &mut count) }
|
||||||
|
} else {
|
||||||
|
unsafe { BNComponentGetReferencedDataVariables(self.as_raw(), &mut count) }
|
||||||
|
};
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get Types referenced by this component
|
||||||
|
///
|
||||||
|
/// * `recursive` - Get all Types referenced by this component and subcomponents.
|
||||||
|
pub fn get_referenced_types(&self, recursive: bool) -> Array<ComponentReferencedTypes> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = if recursive {
|
||||||
|
unsafe { BNComponentGetReferencedTypesRecursive(self.as_raw(), &mut count) }
|
||||||
|
} else {
|
||||||
|
unsafe { BNComponentGetReferencedTypes(self.as_raw(), &mut count) }
|
||||||
|
};
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn remove_all_functions(&self) {
|
||||||
|
unsafe { BNComponentRemoveAllFunctions(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_all_members_from(&self, component: &Component) {
|
||||||
|
unsafe { BNComponentAddAllMembersFromComponent(self.as_raw(), component.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialEq for Component {
|
||||||
|
fn eq(&self, other: &Self) -> bool {
|
||||||
|
unsafe { BNComponentsEqual(self.as_raw(), other.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::partialeq_ne_impl)]
|
||||||
|
fn ne(&self, other: &Self) -> bool {
|
||||||
|
unsafe { BNComponentsNotEqual(self.as_raw(), other.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Eq for Component {}
|
||||||
|
|
||||||
|
impl Drop for Component {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { BNFreeComponent(self.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Clone for Component {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
unsafe {
|
||||||
|
Self::from_raw(ptr::NonNull::new(BNNewComponentReference(self.as_raw())).unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for Component {
|
||||||
|
type Raw = *mut BNComponent;
|
||||||
|
type Context = ();
|
||||||
|
type Wrapped<'a> = &'a Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for Component {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeComponents(raw, count)
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Self::ref_from_raw(raw)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait IntoComponentGuid {
|
||||||
|
fn component_guid(self) -> BnString;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IntoComponentGuid for &Component {
|
||||||
|
fn component_guid(self) -> BnString {
|
||||||
|
self.guid()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<S: BnStrCompatible> IntoComponentGuid for S {
|
||||||
|
fn component_guid(self) -> BnString {
|
||||||
|
BnString::new(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -20,6 +20,7 @@ pub use binaryninjacore_sys::BNModificationStatus as ModificationStatus;
|
|||||||
|
|
||||||
use std::marker::PhantomData;
|
use std::marker::PhantomData;
|
||||||
use std::mem;
|
use std::mem;
|
||||||
|
use std::mem::MaybeUninit;
|
||||||
use std::os::raw::c_void;
|
use std::os::raw::c_void;
|
||||||
use std::ptr;
|
use std::ptr;
|
||||||
use std::slice;
|
use std::slice;
|
||||||
@@ -122,11 +123,10 @@ where
|
|||||||
let long_name = long_name.into_bytes_with_nul();
|
let long_name = long_name.into_bytes_with_nul();
|
||||||
let long_name_ptr = long_name.as_ref().as_ptr() as *mut _;
|
let long_name_ptr = long_name.as_ref().as_ptr() as *mut _;
|
||||||
|
|
||||||
let ctxt = Box::new(unsafe { mem::zeroed() });
|
let ctxt = Box::leak(Box::new(MaybeUninit::zeroed()));
|
||||||
let ctxt = Box::into_raw(ctxt);
|
|
||||||
|
|
||||||
let mut bn_obj = BNCustomBinaryViewType {
|
let mut bn_obj = BNCustomBinaryViewType {
|
||||||
context: ctxt as *mut _,
|
context: ctxt.as_mut_ptr() as *mut _,
|
||||||
create: Some(cb_create::<T>),
|
create: Some(cb_create::<T>),
|
||||||
parse: Some(cb_parse::<T>),
|
parse: Some(cb_parse::<T>),
|
||||||
isValidForData: Some(cb_valid::<T>),
|
isValidForData: Some(cb_valid::<T>),
|
||||||
@@ -140,15 +140,16 @@ where
|
|||||||
if res.is_null() {
|
if res.is_null() {
|
||||||
// avoid leaking the space allocated for the type, but also
|
// avoid leaking the space allocated for the type, but also
|
||||||
// avoid running its Drop impl (if any -- not that there should
|
// avoid running its Drop impl (if any -- not that there should
|
||||||
// be one since view types live for the life of the process)
|
// be one since view types live for the life of the process) as
|
||||||
mem::forget(*Box::from_raw(ctxt));
|
// MaybeUninit suppress the Drop implementation of it's inner type
|
||||||
|
drop(Box::from_raw(ctxt));
|
||||||
|
|
||||||
panic!("bvt registration failed");
|
panic!("bvt registration failed");
|
||||||
}
|
}
|
||||||
|
|
||||||
ptr::write(ctxt, constructor(BinaryViewType(res)));
|
ctxt.write(constructor(BinaryViewType(res)));
|
||||||
|
|
||||||
&*ctxt
|
ctxt.assume_init_mut()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -289,19 +290,15 @@ impl BinaryViewTypeBase for BinaryViewType {
|
|||||||
impl CoreArrayProvider for BinaryViewType {
|
impl CoreArrayProvider for BinaryViewType {
|
||||||
type Raw = *mut BNBinaryViewType;
|
type Raw = *mut BNBinaryViewType;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, BinaryViewType>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for BinaryViewType {
|
unsafe impl CoreArrayProviderInner for BinaryViewType {
|
||||||
unsafe fn free(raw: *mut Self::Raw, _count: usize, _context: &Self::Context) {
|
unsafe fn free(raw: *mut Self::Raw, _count: usize, _context: &Self::Context) {
|
||||||
BNFreeBinaryViewTypeList(raw);
|
BNFreeBinaryViewTypeList(raw);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Guard::new(BinaryViewType(*raw), &())
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for BinaryViewType {
|
|
||||||
type Wrapped = BinaryViewType;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped {
|
|
||||||
BinaryViewType(*raw)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -388,7 +385,7 @@ impl<'a, T: CustomBinaryViewType> CustomViewBuilder<'a, T> {
|
|||||||
|
|
||||||
let view_name = view_type.name();
|
let view_name = view_type.name();
|
||||||
|
|
||||||
if let Ok(bv) = file.get_view_of_type(view_name.as_cstr()) {
|
if let Ok(bv) = file.get_view_of_type(view_name.as_str()) {
|
||||||
// while it seems to work most of the time, you can get really unlucky
|
// while it seems to work most of the time, you can get really unlucky
|
||||||
// if the a free of the existing view of the same type kicks off while
|
// if the a free of the existing view of the same type kicks off while
|
||||||
// BNCreateBinaryViewOfType is still running. the freeObject callback
|
// BNCreateBinaryViewOfType is still running. the freeObject callback
|
||||||
@@ -772,7 +769,7 @@ impl<'a, T: CustomBinaryViewType> CustomViewBuilder<'a, T> {
|
|||||||
|
|
||||||
unsafe {
|
unsafe {
|
||||||
let res = BNCreateCustomBinaryView(
|
let res = BNCreateCustomBinaryView(
|
||||||
view_name.as_cstr().as_ptr(),
|
view_name.as_ptr(),
|
||||||
file.handle,
|
file.handle,
|
||||||
parent.handle,
|
parent.handle,
|
||||||
&mut bn_obj,
|
&mut bn_obj,
|
||||||
|
|||||||
654
src/database.rs
Normal file
654
src/database.rs
Normal file
@@ -0,0 +1,654 @@
|
|||||||
|
use std::collections::HashMap;
|
||||||
|
use std::time::{Duration, SystemTime, UNIX_EPOCH};
|
||||||
|
use std::{ffi, mem, ptr};
|
||||||
|
|
||||||
|
use binaryninjacore_sys::*;
|
||||||
|
|
||||||
|
use crate::binaryview::BinaryView;
|
||||||
|
use crate::databuffer::DataBuffer;
|
||||||
|
use crate::disassembly::InstructionTextToken;
|
||||||
|
use crate::filemetadata::FileMetadata;
|
||||||
|
use crate::rc::{Array, CoreArrayProvider, CoreArrayProviderInner, Ref};
|
||||||
|
use crate::string::{BnStrCompatible, BnString};
|
||||||
|
|
||||||
|
#[repr(transparent)]
|
||||||
|
pub struct Database {
|
||||||
|
handle: ptr::NonNull<BNDatabase>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Database {
|
||||||
|
pub(crate) unsafe fn from_raw(handle: ptr::NonNull<BNDatabase>) -> Self {
|
||||||
|
Self { handle }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::mut_from_ref)]
|
||||||
|
pub(crate) unsafe fn as_raw(&self) -> &mut BNDatabase {
|
||||||
|
&mut *self.handle.as_ptr()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a snapshot by its id, or None if no snapshot with that id exists
|
||||||
|
pub fn snapshot(&self, id: i64) -> Option<Snapshot> {
|
||||||
|
let result = unsafe { BNGetDatabaseSnapshot(self.as_raw(), id) };
|
||||||
|
ptr::NonNull::new(result).map(|handle| unsafe { Snapshot::from_raw(handle) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a list of all snapshots in the database
|
||||||
|
pub fn snapshots(&self) -> Array<Snapshot> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNGetDatabaseSnapshots(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the current snapshot
|
||||||
|
pub fn current_snapshot(&self) -> Option<Snapshot> {
|
||||||
|
let result = unsafe { BNGetDatabaseCurrentSnapshot(self.as_raw()) };
|
||||||
|
ptr::NonNull::new(result).map(|handle| unsafe { Snapshot::from_raw(handle) })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_current_snapshot(&self, value: &Snapshot) {
|
||||||
|
unsafe { BNSetDatabaseCurrentSnapshot(self.as_raw(), value.id()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write_snapshot_data<N: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
parents: &[i64],
|
||||||
|
file: &BinaryView,
|
||||||
|
name: N,
|
||||||
|
data: &KeyValueStore,
|
||||||
|
auto_save: bool,
|
||||||
|
) -> i64 {
|
||||||
|
let name_raw = name.into_bytes_with_nul();
|
||||||
|
let name_ptr = name_raw.as_ref().as_ptr() as *const ffi::c_char;
|
||||||
|
unsafe {
|
||||||
|
BNWriteDatabaseSnapshotData(
|
||||||
|
self.as_raw(),
|
||||||
|
parents.as_ptr() as *mut _,
|
||||||
|
parents.len(),
|
||||||
|
file.handle,
|
||||||
|
name_ptr,
|
||||||
|
data.as_raw(),
|
||||||
|
auto_save,
|
||||||
|
ptr::null_mut(),
|
||||||
|
Some(cb_progress_nop),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write_snapshot_data_with_progress<N, F>(
|
||||||
|
&self,
|
||||||
|
parents: &[i64],
|
||||||
|
file: &BinaryView,
|
||||||
|
name: N,
|
||||||
|
data: &KeyValueStore,
|
||||||
|
auto_save: bool,
|
||||||
|
mut progress: F,
|
||||||
|
) -> i64
|
||||||
|
where
|
||||||
|
N: BnStrCompatible,
|
||||||
|
F: FnMut(usize, usize) -> bool,
|
||||||
|
{
|
||||||
|
let name_raw = name.into_bytes_with_nul();
|
||||||
|
let name_ptr = name_raw.as_ref().as_ptr() as *const ffi::c_char;
|
||||||
|
let ctxt = &mut progress as *mut _ as *mut ffi::c_void;
|
||||||
|
unsafe {
|
||||||
|
BNWriteDatabaseSnapshotData(
|
||||||
|
self.as_raw(),
|
||||||
|
parents.as_ptr() as *mut _,
|
||||||
|
parents.len(),
|
||||||
|
file.handle,
|
||||||
|
name_ptr,
|
||||||
|
data.as_raw(),
|
||||||
|
auto_save,
|
||||||
|
ctxt,
|
||||||
|
Some(cb_progress::<F>),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Trim a snapshot's contents in the database by id, but leave the parent/child
|
||||||
|
/// hierarchy intact. Future references to this snapshot will return False for has_contents
|
||||||
|
pub fn trim_snapshot(&self, id: i64) -> Result<(), ()> {
|
||||||
|
if unsafe { BNTrimDatabaseSnapshot(self.as_raw(), id) } {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove a snapshot in the database by id, deleting its contents and references.
|
||||||
|
/// Attempting to remove a snapshot with children will raise an exception.
|
||||||
|
pub fn remove_snapshot(&self, id: i64) -> Result<(), ()> {
|
||||||
|
if unsafe { BNRemoveDatabaseSnapshot(self.as_raw(), id) } {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn has_global<S: BnStrCompatible>(&self, key: S) -> bool {
|
||||||
|
let key_raw = key.into_bytes_with_nul();
|
||||||
|
let key_ptr = key_raw.as_ref().as_ptr() as *const ffi::c_char;
|
||||||
|
unsafe { BNDatabaseHasGlobal(self.as_raw(), key_ptr) != 0 }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a list of keys for all globals in the database
|
||||||
|
pub fn global_keys(&self) -> Array<BnString> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNGetDatabaseGlobalKeys(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a dictionary of all globals
|
||||||
|
pub fn globals(&self) -> HashMap<String, String> {
|
||||||
|
self.global_keys()
|
||||||
|
.iter()
|
||||||
|
.filter_map(|key| Some((key.to_string(), self.read_global(key)?.to_string())))
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a specific global by key
|
||||||
|
pub fn read_global<S: BnStrCompatible>(&self, key: S) -> Option<BnString> {
|
||||||
|
let key_raw = key.into_bytes_with_nul();
|
||||||
|
let key_ptr = key_raw.as_ref().as_ptr() as *const ffi::c_char;
|
||||||
|
let result = unsafe { BNReadDatabaseGlobal(self.as_raw(), key_ptr) };
|
||||||
|
unsafe { ptr::NonNull::new(result).map(|_| BnString::from_raw(result)) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write a global into the database
|
||||||
|
pub fn write_global<K: BnStrCompatible, V: BnStrCompatible>(&self, key: K, value: V) -> bool {
|
||||||
|
let key_raw = key.into_bytes_with_nul();
|
||||||
|
let key_ptr = key_raw.as_ref().as_ptr() as *const ffi::c_char;
|
||||||
|
let value_raw = value.into_bytes_with_nul();
|
||||||
|
let value_ptr = value_raw.as_ref().as_ptr() as *const ffi::c_char;
|
||||||
|
unsafe { BNWriteDatabaseGlobal(self.as_raw(), key_ptr, value_ptr) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a specific global by key, as a binary buffer
|
||||||
|
pub fn read_global_data<S: BnStrCompatible>(&self, key: S) -> Option<DataBuffer> {
|
||||||
|
let key_raw = key.into_bytes_with_nul();
|
||||||
|
let key_ptr = key_raw.as_ref().as_ptr() as *const ffi::c_char;
|
||||||
|
let result = unsafe { BNReadDatabaseGlobalData(self.as_raw(), key_ptr) };
|
||||||
|
ptr::NonNull::new(result).map(|_| DataBuffer::from_raw(result))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write a binary buffer into a global in the database
|
||||||
|
pub fn write_global_data<K: BnStrCompatible>(&self, key: K, value: &DataBuffer) -> bool {
|
||||||
|
let key_raw = key.into_bytes_with_nul();
|
||||||
|
let key_ptr = key_raw.as_ref().as_ptr() as *const ffi::c_char;
|
||||||
|
unsafe { BNWriteDatabaseGlobalData(self.as_raw(), key_ptr, value.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the owning FileMetadata
|
||||||
|
pub fn file(&self) -> Ref<FileMetadata> {
|
||||||
|
let result = unsafe { BNGetDatabaseFile(self.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Ref::new(FileMetadata::from_raw(result)) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the backing analysis cache kvs
|
||||||
|
pub fn analysis_cache(&self) -> KeyValueStore {
|
||||||
|
let result = unsafe { BNReadDatabaseAnalysisCache(self.as_raw()) };
|
||||||
|
unsafe { KeyValueStore::from_raw(ptr::NonNull::new(result).unwrap()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn reload_connection(&self) {
|
||||||
|
unsafe { BNDatabaseReloadConnection(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write_analysis_cache(&self, val: &KeyValueStore) -> Result<(), ()> {
|
||||||
|
if unsafe { BNWriteDatabaseAnalysisCache(self.as_raw(), val.as_raw()) } {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn snapshot_has_data(&self, id: i64) -> bool {
|
||||||
|
unsafe { BNSnapshotHasData(self.as_raw(), id) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Clone for Database {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
unsafe { Self::from_raw(ptr::NonNull::new(BNNewDatabaseReference(self.as_raw())).unwrap()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for Database {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { BNFreeDatabase(self.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[repr(transparent)]
|
||||||
|
pub struct Snapshot {
|
||||||
|
handle: ptr::NonNull<BNSnapshot>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Snapshot {
|
||||||
|
pub(crate) unsafe fn from_raw(handle: ptr::NonNull<BNSnapshot>) -> Self {
|
||||||
|
Self { handle }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) unsafe fn ref_from_raw(handle: &*mut BNSnapshot) -> &Self {
|
||||||
|
mem::transmute(handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::mut_from_ref)]
|
||||||
|
pub(crate) unsafe fn as_raw(&self) -> &mut BNSnapshot {
|
||||||
|
&mut *self.handle.as_ptr()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the owning database
|
||||||
|
pub fn database(&self) -> Database {
|
||||||
|
unsafe {
|
||||||
|
Database::from_raw(ptr::NonNull::new(BNGetSnapshotDatabase(self.as_raw())).unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the numerical id (read-only)
|
||||||
|
pub fn id(&self) -> i64 {
|
||||||
|
unsafe { BNGetSnapshotId(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the displayed snapshot name
|
||||||
|
pub fn name(&self) -> BnString {
|
||||||
|
unsafe { BnString::from_raw(BNGetSnapshotName(self.as_raw())) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the displayed snapshot name
|
||||||
|
pub fn set_name<S: BnStrCompatible>(&self, value: S) {
|
||||||
|
let value_raw = value.into_bytes_with_nul();
|
||||||
|
let value_ptr = value_raw.as_ref().as_ptr() as *const ffi::c_char;
|
||||||
|
unsafe { BNSetSnapshotName(self.as_raw(), value_ptr) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If the snapshot was the result of an auto-save
|
||||||
|
pub fn is_auto_save(&self) -> bool {
|
||||||
|
unsafe { BNIsSnapshotAutoSave(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If the snapshot has contents, and has not been trimmed
|
||||||
|
pub fn has_contents(&self) -> bool {
|
||||||
|
unsafe { BNSnapshotHasContents(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If the snapshot has undo data
|
||||||
|
pub fn has_undo(&self) -> bool {
|
||||||
|
unsafe { BNSnapshotHasUndo(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the first parent of the snapshot, or None if it has no parents
|
||||||
|
pub fn first_parent(&self) -> Option<Snapshot> {
|
||||||
|
let result = unsafe { BNGetSnapshotFirstParent(self.as_raw()) };
|
||||||
|
ptr::NonNull::new(result).map(|s| unsafe { Snapshot::from_raw(s) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a list of all parent snapshots of the snapshot
|
||||||
|
pub fn parents(&self) -> Array<Snapshot> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNGetSnapshotParents(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a list of all child snapshots of the snapshot
|
||||||
|
pub fn children(&self) -> Array<Snapshot> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNGetSnapshotChildren(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a buffer of the raw data at the time of the snapshot
|
||||||
|
pub fn file_contents(&self) -> Option<DataBuffer> {
|
||||||
|
self.has_contents().then(|| unsafe {
|
||||||
|
let result = BNGetSnapshotFileContents(self.as_raw());
|
||||||
|
assert!(!result.is_null());
|
||||||
|
DataBuffer::from_raw(result)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a hash of the data at the time of the snapshot
|
||||||
|
pub fn file_contents_hash(&self) -> Option<DataBuffer> {
|
||||||
|
self.has_contents().then(|| unsafe {
|
||||||
|
let result = BNGetSnapshotFileContentsHash(self.as_raw());
|
||||||
|
assert!(!result.is_null());
|
||||||
|
DataBuffer::from_raw(result)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a list of undo entries at the time of the snapshot
|
||||||
|
pub fn undo_entries(&self) -> Array<UndoEntry> {
|
||||||
|
assert!(self.has_undo());
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNGetSnapshotUndoEntries(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn undo_entries_with_progress<F: FnMut(usize, usize) -> bool>(
|
||||||
|
&self,
|
||||||
|
mut progress: F,
|
||||||
|
) -> Array<UndoEntry> {
|
||||||
|
assert!(self.has_undo());
|
||||||
|
let ctxt = &mut progress as *mut _ as *mut ffi::c_void;
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetSnapshotUndoEntriesWithProgress(
|
||||||
|
self.as_raw(),
|
||||||
|
ctxt,
|
||||||
|
Some(cb_progress::<F>),
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the backing kvs data with snapshot fields
|
||||||
|
pub fn read_data(&self) -> KeyValueStore {
|
||||||
|
let result = unsafe { BNReadSnapshotData(self.as_raw()) };
|
||||||
|
unsafe { KeyValueStore::from_raw(ptr::NonNull::new(result).unwrap()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read_data_with_progress<F: FnMut(usize, usize) -> bool>(
|
||||||
|
&self,
|
||||||
|
mut progress: F,
|
||||||
|
) -> KeyValueStore {
|
||||||
|
let ctxt = &mut progress as *mut _ as *mut ffi::c_void;
|
||||||
|
let result =
|
||||||
|
unsafe { BNReadSnapshotDataWithProgress(self.as_raw(), ctxt, Some(cb_progress::<F>)) };
|
||||||
|
unsafe { KeyValueStore::from_raw(ptr::NonNull::new(result).unwrap()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn undo_data(&self) -> DataBuffer {
|
||||||
|
let result = unsafe { BNGetSnapshotUndoData(self.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
DataBuffer::from_raw(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn store_data<F: FnMut(usize, usize) -> bool>(
|
||||||
|
&self,
|
||||||
|
data: KeyValueStore,
|
||||||
|
mut progress: F,
|
||||||
|
) -> bool {
|
||||||
|
let ctxt = &mut progress as *mut _ as *mut ffi::c_void;
|
||||||
|
unsafe { BNSnapshotStoreData(self.as_raw(), data.as_raw(), ctxt, Some(cb_progress::<F>)) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Determine if this snapshot has another as an ancestor
|
||||||
|
pub fn has_ancestor(self, other: &Snapshot) -> bool {
|
||||||
|
unsafe { BNSnapshotHasAncestor(self.as_raw(), other.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Clone for Snapshot {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
unsafe { Self::from_raw(ptr::NonNull::new(BNNewSnapshotReference(self.as_raw())).unwrap()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for Snapshot {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { BNFreeSnapshot(self.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for Snapshot {
|
||||||
|
type Raw = *mut BNSnapshot;
|
||||||
|
type Context = ();
|
||||||
|
type Wrapped<'a> = &'a Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for Snapshot {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeSnapshotList(raw, count);
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Self::ref_from_raw(raw)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[repr(transparent)]
|
||||||
|
pub struct KeyValueStore {
|
||||||
|
handle: ptr::NonNull<BNKeyValueStore>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl KeyValueStore {
|
||||||
|
pub(crate) unsafe fn from_raw(handle: ptr::NonNull<BNKeyValueStore>) -> Self {
|
||||||
|
Self { handle }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::mut_from_ref)]
|
||||||
|
pub(crate) unsafe fn as_raw(&self) -> &mut BNKeyValueStore {
|
||||||
|
&mut *self.handle.as_ptr()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a list of all keys stored in the kvs
|
||||||
|
pub fn keys(&self) -> Array<BnString> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNGetKeyValueStoreKeys(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the value for a single key
|
||||||
|
pub fn value<S: BnStrCompatible>(&self, key: S) -> Option<DataBuffer> {
|
||||||
|
let key_raw = key.into_bytes_with_nul();
|
||||||
|
let key_ptr = key_raw.as_ref().as_ptr() as *const ffi::c_char;
|
||||||
|
let result = unsafe { BNGetKeyValueStoreBuffer(self.as_raw(), key_ptr) };
|
||||||
|
ptr::NonNull::new(result).map(|_| DataBuffer::from_raw(result))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the value for a single key
|
||||||
|
pub fn set_value<S: BnStrCompatible>(&self, key: S, value: &DataBuffer) -> bool {
|
||||||
|
let key_raw = key.into_bytes_with_nul();
|
||||||
|
let key_ptr = key_raw.as_ref().as_ptr() as *const ffi::c_char;
|
||||||
|
unsafe { BNSetKeyValueStoreBuffer(self.as_raw(), key_ptr, value.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the stored representation of the kvs
|
||||||
|
pub fn serialized_data(&self) -> DataBuffer {
|
||||||
|
let result = unsafe { BNGetKeyValueStoreSerializedData(self.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
DataBuffer::from_raw(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Begin storing new keys into a namespace
|
||||||
|
pub fn begin_namespace<S: BnStrCompatible>(&self, name: S) {
|
||||||
|
let name_raw = name.into_bytes_with_nul();
|
||||||
|
let name_ptr = name_raw.as_ref().as_ptr() as *const ffi::c_char;
|
||||||
|
unsafe { BNBeginKeyValueStoreNamespace(self.as_raw(), name_ptr) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// End storing new keys into a namespace
|
||||||
|
pub fn end_namespace(&self) {
|
||||||
|
unsafe { BNEndKeyValueStoreNamespace(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If the kvs is empty
|
||||||
|
pub fn empty(&self) -> bool {
|
||||||
|
unsafe { BNIsKeyValueStoreEmpty(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Number of values in the kvs
|
||||||
|
pub fn value_size(&self) -> usize {
|
||||||
|
unsafe { BNGetKeyValueStoreValueSize(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Length of serialized data
|
||||||
|
pub fn data_size(&self) -> usize {
|
||||||
|
unsafe { BNGetKeyValueStoreDataSize(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Size of all data in storage
|
||||||
|
pub fn value_storage_size(self) -> usize {
|
||||||
|
unsafe { BNGetKeyValueStoreValueStorageSize(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Number of namespaces pushed with begin_namespace
|
||||||
|
pub fn namespace_size(self) -> usize {
|
||||||
|
unsafe { BNGetKeyValueStoreNamespaceSize(self.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Clone for KeyValueStore {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
unsafe {
|
||||||
|
Self::from_raw(ptr::NonNull::new(BNNewKeyValueStoreReference(self.as_raw())).unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for KeyValueStore {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { BNFreeKeyValueStore(self.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[repr(transparent)]
|
||||||
|
pub struct UndoEntry {
|
||||||
|
handle: ptr::NonNull<BNUndoEntry>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UndoEntry {
|
||||||
|
pub(crate) unsafe fn from_raw(handle: ptr::NonNull<BNUndoEntry>) -> Self {
|
||||||
|
Self { handle }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) unsafe fn ref_from_raw(handle: &*mut BNUndoEntry) -> &Self {
|
||||||
|
mem::transmute(handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::mut_from_ref)]
|
||||||
|
pub(crate) unsafe fn as_raw(&self) -> &mut BNUndoEntry {
|
||||||
|
&mut *self.handle.as_ptr()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn id(&self) -> BnString {
|
||||||
|
let result = unsafe { BNUndoEntryGetId(self.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { BnString::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn actions(&self) -> Array<UndoAction> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNUndoEntryGetActions(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn time(&self) -> SystemTime {
|
||||||
|
let m = Duration::from_secs(unsafe { BNUndoEntryGetTimestamp(self.as_raw()) });
|
||||||
|
UNIX_EPOCH + m
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Clone for UndoEntry {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
unsafe {
|
||||||
|
Self::from_raw(ptr::NonNull::new(BNNewUndoEntryReference(self.as_raw())).unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for UndoEntry {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { BNFreeUndoEntry(self.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for UndoEntry {
|
||||||
|
type Raw = *mut BNUndoEntry;
|
||||||
|
type Context = ();
|
||||||
|
type Wrapped<'a> = &'a Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for UndoEntry {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeUndoEntryList(raw, count);
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Self::ref_from_raw(raw)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[repr(transparent)]
|
||||||
|
pub struct UndoAction {
|
||||||
|
handle: ptr::NonNull<BNUndoAction>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UndoAction {
|
||||||
|
pub(crate) unsafe fn from_raw(handle: ptr::NonNull<BNUndoAction>) -> Self {
|
||||||
|
Self { handle }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) unsafe fn ref_from_raw(handle: &*mut BNUndoAction) -> &Self {
|
||||||
|
mem::transmute(handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::mut_from_ref)]
|
||||||
|
pub(crate) unsafe fn as_raw(&self) -> &mut BNUndoAction {
|
||||||
|
&mut *self.handle.as_ptr()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn summary_text(&self) -> BnString {
|
||||||
|
let result = unsafe { BNUndoActionGetSummaryText(self.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { BnString::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn summary(&self) -> Array<InstructionTextToken> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNUndoActionGetSummary(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Clone for UndoAction {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
unsafe {
|
||||||
|
Self::from_raw(ptr::NonNull::new(BNNewUndoActionReference(self.as_raw())).unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for UndoAction {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { BNFreeUndoAction(self.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for UndoAction {
|
||||||
|
type Raw = *mut BNUndoAction;
|
||||||
|
type Context = ();
|
||||||
|
type Wrapped<'a> = &'a Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for UndoAction {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeUndoActionList(raw, count);
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Self::ref_from_raw(raw)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe extern "C" fn cb_progress<F: FnMut(usize, usize) -> bool>(
|
||||||
|
ctxt: *mut ffi::c_void,
|
||||||
|
arg1: usize,
|
||||||
|
arg2: usize,
|
||||||
|
) -> bool {
|
||||||
|
let ctxt: &mut F = &mut *(ctxt as *mut F);
|
||||||
|
ctxt(arg1, arg2)
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe extern "C" fn cb_progress_nop(_ctxt: *mut ffi::c_void, _arg1: usize, _arg2: usize) -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
@@ -17,9 +17,10 @@
|
|||||||
use binaryninjacore_sys::*;
|
use binaryninjacore_sys::*;
|
||||||
|
|
||||||
use std::ffi::c_void;
|
use std::ffi::c_void;
|
||||||
use std::ptr;
|
|
||||||
use std::slice;
|
use std::slice;
|
||||||
|
|
||||||
|
use crate::string::BnString;
|
||||||
|
|
||||||
pub struct DataBuffer(*mut BNDataBuffer);
|
pub struct DataBuffer(*mut BNDataBuffer);
|
||||||
|
|
||||||
impl DataBuffer {
|
impl DataBuffer {
|
||||||
@@ -31,10 +32,6 @@ impl DataBuffer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_data(&self) -> &[u8] {
|
pub fn get_data(&self) -> &[u8] {
|
||||||
if self.0.is_null() {
|
|
||||||
// TODO : Change the default value and remove this
|
|
||||||
return &[];
|
|
||||||
}
|
|
||||||
let buffer = unsafe { BNGetDataBufferContents(self.0) };
|
let buffer = unsafe { BNGetDataBufferContents(self.0) };
|
||||||
if buffer.is_null() {
|
if buffer.is_null() {
|
||||||
&[]
|
&[]
|
||||||
@@ -43,6 +40,65 @@ impl DataBuffer {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_data_at(&self, offset: usize) -> &[u8] {
|
||||||
|
let len = self.len();
|
||||||
|
if offset > len {
|
||||||
|
panic!();
|
||||||
|
}
|
||||||
|
let slice_len = len - offset;
|
||||||
|
let buffer = unsafe { BNGetDataBufferContentsAt(self.0, offset) };
|
||||||
|
if buffer.is_null() {
|
||||||
|
&[]
|
||||||
|
} else {
|
||||||
|
unsafe { slice::from_raw_parts(buffer as *const _, slice_len) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a copy of a especified part of the data
|
||||||
|
pub fn get_slice(&self, start: usize, len: usize) -> Option<Self> {
|
||||||
|
if start + len > self.len() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
let ptr = unsafe { BNGetDataBufferSlice(self.0, start, len) };
|
||||||
|
(!ptr.is_null()).then(|| Self(ptr))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// change the size of the allocated data, if new size is bigger data is
|
||||||
|
/// need to be initialized
|
||||||
|
pub unsafe fn set_len(&mut self, len: usize) {
|
||||||
|
unsafe { BNSetDataBufferLength(self.0, len) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// set the size to 0
|
||||||
|
pub fn clear(&self) {
|
||||||
|
unsafe { BNClearDataBuffer(self.0) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Copy the contents of `src` into `dst`
|
||||||
|
pub fn assign(dst: &mut Self, src: &Self) {
|
||||||
|
unsafe { BNAssignDataBuffer(dst.0, src.0) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Concat the contents of `src` into `dst`
|
||||||
|
pub fn append(dst: &mut Self, src: &Self) {
|
||||||
|
unsafe { BNAppendDataBuffer(dst.0, src.0) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// concat the contents of `data` into self
|
||||||
|
pub fn append_data(&self, data: &[u8]) {
|
||||||
|
unsafe { BNAppendDataBufferContents(self.0, data.as_ptr() as *const c_void, data.len()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the byte at `offset`
|
||||||
|
pub unsafe fn byte_at(&self, offset: usize) -> u8 {
|
||||||
|
unsafe { BNGetDataBufferByte(self.0, offset) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the value of the byte at `offset`
|
||||||
|
pub unsafe fn set_byte_at(&mut self, offset: usize, byte: u8) {
|
||||||
|
unsafe { BNSetDataBufferByte(self.0, offset, byte) }
|
||||||
|
}
|
||||||
|
|
||||||
pub fn set_data(&mut self, data: &[u8]) {
|
pub fn set_data(&mut self, data: &[u8]) {
|
||||||
unsafe {
|
unsafe {
|
||||||
BNSetDataBufferContents(
|
BNSetDataBufferContents(
|
||||||
@@ -53,12 +109,48 @@ impl DataBuffer {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn to_escaped_string(&self, null_terminates: bool) -> BnString {
|
||||||
|
unsafe { BnString::from_raw(BNDataBufferToEscapedString(self.0, null_terminates)) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_escaped_string(value: &BnString) -> Self {
|
||||||
|
Self(unsafe { BNDecodeEscapedString(value.as_raw()) })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn to_base64(&self) -> BnString {
|
||||||
|
unsafe { BnString::from_raw(BNDataBufferToBase64(self.0)) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_base64(value: &BnString) -> Self {
|
||||||
|
Self(unsafe { BNDecodeBase64(value.as_raw()) })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn zlib_compress(&self) -> Self {
|
||||||
|
Self(unsafe { BNZlibCompress(self.0) })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn zlib_decompress(&self) -> Self {
|
||||||
|
Self(unsafe { BNZlibDecompress(self.0) })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn lzma_decompress(&self) -> Self {
|
||||||
|
Self(unsafe { BNLzmaDecompress(self.0) })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn lzma2_decompress(&self) -> Self {
|
||||||
|
Self(unsafe { BNLzma2Decompress(self.0) })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn xz_decompress(&self) -> Self {
|
||||||
|
Self(unsafe { BNXzDecompress(self.0) })
|
||||||
|
}
|
||||||
|
|
||||||
pub fn len(&self) -> usize {
|
pub fn len(&self) -> usize {
|
||||||
unsafe { BNGetDataBufferLength(self.0) }
|
unsafe { BNGetDataBufferLength(self.0) }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_empty(&self) -> bool {
|
pub fn is_empty(&self) -> bool {
|
||||||
unsafe { BNGetDataBufferLength(self.0) == 0 }
|
self.len() == 0
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new(data: &[u8]) -> Result<Self, ()> {
|
pub fn new(data: &[u8]) -> Result<Self, ()> {
|
||||||
@@ -71,25 +163,171 @@ impl DataBuffer {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO : delete this
|
|
||||||
impl Default for DataBuffer {
|
impl Default for DataBuffer {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
DataBuffer::from_raw(ptr::null_mut())
|
Self(unsafe { BNCreateDataBuffer([].as_ptr() as *const c_void, 0) })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Drop for DataBuffer {
|
impl Drop for DataBuffer {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
if !self.0.is_null() {
|
|
||||||
unsafe {
|
unsafe {
|
||||||
BNFreeDataBuffer(self.0);
|
BNFreeDataBuffer(self.0);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
impl Clone for DataBuffer {
|
impl Clone for DataBuffer {
|
||||||
fn clone(&self) -> Self {
|
fn clone(&self) -> Self {
|
||||||
Self::from_raw(unsafe { BNDuplicateDataBuffer(self.0) })
|
Self::from_raw(unsafe { BNDuplicateDataBuffer(self.0) })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl TryFrom<&[u8]> for DataBuffer {
|
||||||
|
type Error = ();
|
||||||
|
|
||||||
|
fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
|
||||||
|
DataBuffer::new(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AsRef<[u8]> for DataBuffer {
|
||||||
|
fn as_ref(&self) -> &[u8] {
|
||||||
|
self.get_data()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::borrow::Borrow<[u8]> for DataBuffer {
|
||||||
|
fn borrow(&self) -> &[u8] {
|
||||||
|
self.as_ref()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! data_buffer_index {
|
||||||
|
($range:ty, $output:ty) => {
|
||||||
|
impl std::ops::Index<$range> for DataBuffer {
|
||||||
|
type Output = $output;
|
||||||
|
|
||||||
|
fn index(&self, index: $range) -> &Self::Output {
|
||||||
|
&self.get_data()[index]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
data_buffer_index!(usize, u8);
|
||||||
|
data_buffer_index!(std::ops::Range<usize>, [u8]);
|
||||||
|
data_buffer_index!(std::ops::RangeInclusive<usize>, [u8]);
|
||||||
|
data_buffer_index!(std::ops::RangeTo<usize>, [u8]);
|
||||||
|
data_buffer_index!(std::ops::RangeFull, [u8]);
|
||||||
|
|
||||||
|
impl PartialEq for DataBuffer {
|
||||||
|
fn eq(&self, other: &Self) -> bool {
|
||||||
|
self.as_ref() == other.as_ref()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Eq for DataBuffer {}
|
||||||
|
|
||||||
|
impl PartialOrd for DataBuffer {
|
||||||
|
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||||
|
Some(self.as_ref().cmp(other.as_ref()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ord for DataBuffer {
|
||||||
|
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||||
|
self.as_ref().cmp(other.as_ref())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::DataBuffer;
|
||||||
|
|
||||||
|
const DUMMY_DATA_0: &[u8] = b"0123456789\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x09\xFF";
|
||||||
|
const DUMMY_DATA_1: &[u8] = b"qwertyuiopasdfghjkl\xE7zxcvbnm\x00\x01\x00";
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn get_slice() {
|
||||||
|
let data = DataBuffer::new(DUMMY_DATA_0).unwrap();
|
||||||
|
let slice = data.get_slice(9, 10).unwrap();
|
||||||
|
assert_eq!(slice.get_data(), &DUMMY_DATA_0[9..19]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn set_len_write() {
|
||||||
|
let mut data = DataBuffer::default();
|
||||||
|
assert_eq!(data.get_data(), &[]);
|
||||||
|
unsafe { data.set_len(DUMMY_DATA_0.len()) };
|
||||||
|
assert_eq!(data.len(), DUMMY_DATA_0.len());
|
||||||
|
let mut contents = DUMMY_DATA_0.to_vec();
|
||||||
|
data.set_data(&contents);
|
||||||
|
// modify the orinal contents, to make sure DataBuffer copied the data
|
||||||
|
// and is not using the original pointer
|
||||||
|
contents.as_mut_slice().fill(0x55);
|
||||||
|
drop(contents);
|
||||||
|
assert_eq!(data.get_data(), &DUMMY_DATA_0[..]);
|
||||||
|
|
||||||
|
// make sure the new len truncate the original data
|
||||||
|
unsafe { data.set_len(13) };
|
||||||
|
assert_eq!(data.get_data(), &DUMMY_DATA_0[..13]);
|
||||||
|
|
||||||
|
data.clear();
|
||||||
|
assert_eq!(data.get_data(), &[]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn assign_append() {
|
||||||
|
let mut dst = DataBuffer::new(DUMMY_DATA_0).unwrap();
|
||||||
|
let mut src = DataBuffer::new(DUMMY_DATA_1).unwrap();
|
||||||
|
DataBuffer::assign(&mut dst, &src);
|
||||||
|
|
||||||
|
assert_eq!(dst.get_data(), DUMMY_DATA_1);
|
||||||
|
assert_eq!(src.get_data(), DUMMY_DATA_1);
|
||||||
|
// overwrite the src, to make sure that src is copied to dst, and not
|
||||||
|
// moved into it
|
||||||
|
src.set_data(DUMMY_DATA_0);
|
||||||
|
assert_eq!(dst.get_data(), DUMMY_DATA_1);
|
||||||
|
assert_eq!(src.get_data(), DUMMY_DATA_0);
|
||||||
|
|
||||||
|
DataBuffer::append(&mut dst, &src);
|
||||||
|
let result: Vec<_> = DUMMY_DATA_1.iter().chain(DUMMY_DATA_0).copied().collect();
|
||||||
|
assert_eq!(dst.get_data(), &result);
|
||||||
|
|
||||||
|
assert_eq!(src.get_data(), DUMMY_DATA_0);
|
||||||
|
src.set_data(DUMMY_DATA_1);
|
||||||
|
assert_eq!(src.get_data(), DUMMY_DATA_1);
|
||||||
|
assert_eq!(dst.get_data(), &result);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn to_from_formats() {
|
||||||
|
let data = DataBuffer::new(DUMMY_DATA_0).unwrap();
|
||||||
|
let escaped = data.to_escaped_string(false);
|
||||||
|
let unescaped = DataBuffer::from_escaped_string(&escaped);
|
||||||
|
drop(escaped);
|
||||||
|
let escaped_part = data.to_escaped_string(true);
|
||||||
|
let unescaped_part = DataBuffer::from_escaped_string(&escaped_part);
|
||||||
|
drop(escaped_part);
|
||||||
|
|
||||||
|
let part = &DUMMY_DATA_0[0..DUMMY_DATA_0
|
||||||
|
.iter()
|
||||||
|
.position(|x| *x == 0)
|
||||||
|
.unwrap_or(DUMMY_DATA_0.len())];
|
||||||
|
assert_eq!(data.get_data(), DUMMY_DATA_0);
|
||||||
|
assert_eq!(unescaped.get_data(), DUMMY_DATA_0);
|
||||||
|
assert_eq!(unescaped_part.get_data(), part);
|
||||||
|
|
||||||
|
let escaped = data.to_base64();
|
||||||
|
let unescaped = DataBuffer::from_base64(&escaped);
|
||||||
|
drop(escaped);
|
||||||
|
assert_eq!(data.get_data(), DUMMY_DATA_0);
|
||||||
|
assert_eq!(unescaped.get_data(), DUMMY_DATA_0);
|
||||||
|
|
||||||
|
let compressed = data.zlib_compress();
|
||||||
|
let decompressed = compressed.zlib_decompress();
|
||||||
|
drop(compressed);
|
||||||
|
assert_eq!(data.get_data(), DUMMY_DATA_0);
|
||||||
|
assert_eq!(decompressed.get_data(), DUMMY_DATA_0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
117
src/debuginfo.rs
117
src/debuginfo.rs
@@ -27,7 +27,7 @@
|
|||||||
//! And finally calling `binaryninja::debuginfo::DebugInfoParser::register` to register it with the core.
|
//! And finally calling `binaryninja::debuginfo::DebugInfoParser::register` to register it with the core.
|
||||||
//!
|
//!
|
||||||
//! Here's a minimal, complete example boilerplate-plugin:
|
//! Here's a minimal, complete example boilerplate-plugin:
|
||||||
//! ```
|
//! ```no_run
|
||||||
//! use binaryninja::{
|
//! use binaryninja::{
|
||||||
//! binaryview::BinaryView,
|
//! binaryview::BinaryView,
|
||||||
//! debuginfo::{CustomDebugInfoParser, DebugInfo, DebugInfoParser},
|
//! debuginfo::{CustomDebugInfoParser, DebugInfo, DebugInfoParser},
|
||||||
@@ -40,8 +40,9 @@
|
|||||||
//! true
|
//! true
|
||||||
//! }
|
//! }
|
||||||
//!
|
//!
|
||||||
//! fn parse_info(&self, _debug_info: &mut DebugInfo, _view: &BinaryView, _debug_file: &BinaryView, _progress: Box<dyn Fn(usize, usize) -> bool>) {
|
//! fn parse_info(&self, _debug_info: &mut DebugInfo, _view: &BinaryView, _debug_file: &BinaryView, _progress: Box<dyn Fn(usize, usize) -> Result<(), ()>>) -> bool {
|
||||||
//! println!("Parsing info");
|
//! println!("Parsing info");
|
||||||
|
//! true
|
||||||
//! }
|
//! }
|
||||||
//! }
|
//! }
|
||||||
//!
|
//!
|
||||||
@@ -53,11 +54,14 @@
|
|||||||
//! ```
|
//! ```
|
||||||
//!
|
//!
|
||||||
//! `DebugInfo` will then be automatically applied to binary views that contain debug information (via the setting `analysis.debugInfo.internal`), binary views that provide valid external debug info files (`analysis.debugInfo.external`), or manually fetched/applied as below:
|
//! `DebugInfo` will then be automatically applied to binary views that contain debug information (via the setting `analysis.debugInfo.internal`), binary views that provide valid external debug info files (`analysis.debugInfo.external`), or manually fetched/applied as below:
|
||||||
//! ```
|
//! ```no_run
|
||||||
//! let valid_parsers = DebugInfoParser::parsers_for_view(bv);
|
//! # use binaryninja::debuginfo::DebugInfoParser;
|
||||||
//! let parser = valid_parsers[0];
|
//! # use binaryninja::binaryview::BinaryViewExt;
|
||||||
//! let debug_info = parser.parse_debug_info(bv);
|
//! let bv = binaryninja::load("example").unwrap();
|
||||||
//! bv.apply_debug_info(debug_info);
|
//! let valid_parsers = DebugInfoParser::parsers_for_view(&bv);
|
||||||
|
//! let parser = valid_parsers.get(0);
|
||||||
|
//! let debug_info = parser.parse_debug_info(&bv, &bv, None, None).unwrap();
|
||||||
|
//! bv.apply_debug_info(&debug_info);
|
||||||
//! ```
|
//! ```
|
||||||
//!
|
//!
|
||||||
//! Multiple debug-info parsers can manually contribute debug info for a binary view by simply calling `parse_debug_info` with the
|
//! Multiple debug-info parsers can manually contribute debug info for a binary view by simply calling `parse_debug_info` with the
|
||||||
@@ -71,10 +75,10 @@ use crate::{
|
|||||||
platform::Platform,
|
platform::Platform,
|
||||||
rc::*,
|
rc::*,
|
||||||
string::{raw_to_string, BnStrCompatible, BnString},
|
string::{raw_to_string, BnStrCompatible, BnString},
|
||||||
types::{DataVariableAndName, NameAndType, Type},
|
types::{DataVariableAndName, NameAndType, NamedTypedVariable, Type},
|
||||||
};
|
};
|
||||||
|
|
||||||
use std::{hash::Hash, mem, os::raw::c_void, ptr, slice};
|
use std::{hash::Hash, os::raw::c_void, ptr, slice};
|
||||||
|
|
||||||
struct ProgressContext(Option<Box<dyn Fn(usize, usize) -> Result<(), ()>>>);
|
struct ProgressContext(Option<Box<dyn Fn(usize, usize) -> Result<(), ()>>>);
|
||||||
|
|
||||||
@@ -109,14 +113,14 @@ impl DebugInfoParser {
|
|||||||
|
|
||||||
/// List all debug-info parsers
|
/// List all debug-info parsers
|
||||||
pub fn list() -> Array<DebugInfoParser> {
|
pub fn list() -> Array<DebugInfoParser> {
|
||||||
let mut count: usize = unsafe { mem::zeroed() };
|
let mut count = 0;
|
||||||
let raw_parsers = unsafe { BNGetDebugInfoParsers(&mut count as *mut _) };
|
let raw_parsers = unsafe { BNGetDebugInfoParsers(&mut count as *mut _) };
|
||||||
unsafe { Array::new(raw_parsers, count, ()) }
|
unsafe { Array::new(raw_parsers, count, ()) }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns a list of debug-info parsers that are valid for the provided binary view
|
/// Returns a list of debug-info parsers that are valid for the provided binary view
|
||||||
pub fn parsers_for_view(bv: &BinaryView) -> Array<DebugInfoParser> {
|
pub fn parsers_for_view(bv: &BinaryView) -> Array<DebugInfoParser> {
|
||||||
let mut count: usize = unsafe { mem::zeroed() };
|
let mut count = 0;
|
||||||
let raw_parsers = unsafe { BNGetDebugInfoParsersForView(bv.handle, &mut count as *mut _) };
|
let raw_parsers = unsafe { BNGetDebugInfoParsersForView(bv.handle, &mut count as *mut _) };
|
||||||
unsafe { Array::new(raw_parsers, count, ()) }
|
unsafe { Array::new(raw_parsers, count, ()) }
|
||||||
}
|
}
|
||||||
@@ -269,12 +273,16 @@ impl ToOwned for DebugInfoParser {
|
|||||||
impl CoreArrayProvider for DebugInfoParser {
|
impl CoreArrayProvider for DebugInfoParser {
|
||||||
type Raw = *mut BNDebugInfoParser;
|
type Raw = *mut BNDebugInfoParser;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, DebugInfoParser>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for DebugInfoParser {
|
unsafe impl CoreArrayProviderInner for DebugInfoParser {
|
||||||
unsafe fn free(raw: *mut Self::Raw, count: usize, _: &Self::Context) {
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _: &Self::Context) {
|
||||||
BNFreeDebugInfoParserList(raw, count);
|
BNFreeDebugInfoParserList(raw, count);
|
||||||
}
|
}
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Guard::new(Self { handle: *raw }, context)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
///////////////////////
|
///////////////////////
|
||||||
@@ -293,6 +301,7 @@ pub struct DebugFunctionInfo {
|
|||||||
address: u64,
|
address: u64,
|
||||||
platform: Option<Ref<Platform>>,
|
platform: Option<Ref<Platform>>,
|
||||||
components: Vec<String>,
|
components: Vec<String>,
|
||||||
|
local_variables: Vec<NamedTypedVariable>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<&BNDebugFunctionInfo> for DebugFunctionInfo {
|
impl From<&BNDebugFunctionInfo> for DebugFunctionInfo {
|
||||||
@@ -302,6 +311,15 @@ impl From<&BNDebugFunctionInfo> for DebugFunctionInfo {
|
|||||||
.map(|component| raw_to_string(*component as *const _).unwrap())
|
.map(|component| raw_to_string(*component as *const _).unwrap())
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
|
let local_variables: Vec<NamedTypedVariable> = unsafe { slice::from_raw_parts(raw.localVariables, raw.localVariableN) }
|
||||||
|
.iter()
|
||||||
|
.map(|local_variable| {
|
||||||
|
unsafe {
|
||||||
|
NamedTypedVariable::from_raw(local_variable)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
Self {
|
Self {
|
||||||
short_name: raw_to_string(raw.shortName),
|
short_name: raw_to_string(raw.shortName),
|
||||||
full_name: raw_to_string(raw.fullName),
|
full_name: raw_to_string(raw.fullName),
|
||||||
@@ -318,11 +336,13 @@ impl From<&BNDebugFunctionInfo> for DebugFunctionInfo {
|
|||||||
Some(unsafe { Platform::ref_from_raw(raw.platform) })
|
Some(unsafe { Platform::ref_from_raw(raw.platform) })
|
||||||
},
|
},
|
||||||
components,
|
components,
|
||||||
|
local_variables,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DebugFunctionInfo {
|
impl DebugFunctionInfo {
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
pub fn new(
|
pub fn new(
|
||||||
short_name: Option<String>,
|
short_name: Option<String>,
|
||||||
full_name: Option<String>,
|
full_name: Option<String>,
|
||||||
@@ -331,18 +351,17 @@ impl DebugFunctionInfo {
|
|||||||
address: Option<u64>,
|
address: Option<u64>,
|
||||||
platform: Option<Ref<Platform>>,
|
platform: Option<Ref<Platform>>,
|
||||||
components: Vec<String>,
|
components: Vec<String>,
|
||||||
|
local_variables: Vec<NamedTypedVariable>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
short_name,
|
short_name,
|
||||||
full_name,
|
full_name,
|
||||||
raw_name,
|
raw_name,
|
||||||
type_,
|
type_,
|
||||||
address: match address {
|
address: address.unwrap_or(0),
|
||||||
Some(address) => address,
|
|
||||||
_ => 0,
|
|
||||||
},
|
|
||||||
platform,
|
platform,
|
||||||
components,
|
components,
|
||||||
|
local_variables,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -376,7 +395,7 @@ impl DebugInfo {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Returns a generator of all types provided by a named DebugInfoParser
|
/// Returns a generator of all types provided by a named DebugInfoParser
|
||||||
pub fn types_by_name<S: BnStrCompatible>(&self, parser_name: S) -> Vec<NameAndType<String>> {
|
pub fn types_by_name<S: BnStrCompatible>(&self, parser_name: S) -> Vec<Ref<NameAndType>> {
|
||||||
let parser_name = parser_name.into_bytes_with_nul();
|
let parser_name = parser_name.into_bytes_with_nul();
|
||||||
|
|
||||||
let mut count: usize = 0;
|
let mut count: usize = 0;
|
||||||
@@ -387,10 +406,10 @@ impl DebugInfo {
|
|||||||
&mut count,
|
&mut count,
|
||||||
)
|
)
|
||||||
};
|
};
|
||||||
let result: Vec<NameAndType<String>> = unsafe {
|
let result: Vec<Ref<NameAndType>> = unsafe {
|
||||||
slice::from_raw_parts_mut(debug_types_ptr, count)
|
slice::from_raw_parts_mut(debug_types_ptr, count)
|
||||||
.iter()
|
.iter()
|
||||||
.map(NameAndType::<String>::from_raw)
|
.map(|x| NameAndType::from_raw(x).to_owned())
|
||||||
.collect()
|
.collect()
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -399,13 +418,13 @@ impl DebugInfo {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// A generator of all types provided by DebugInfoParsers
|
/// A generator of all types provided by DebugInfoParsers
|
||||||
pub fn types(&self) -> Vec<NameAndType<String>> {
|
pub fn types(&self) -> Vec<Ref<NameAndType>> {
|
||||||
let mut count: usize = 0;
|
let mut count: usize = 0;
|
||||||
let debug_types_ptr = unsafe { BNGetDebugTypes(self.handle, ptr::null_mut(), &mut count) };
|
let debug_types_ptr = unsafe { BNGetDebugTypes(self.handle, ptr::null_mut(), &mut count) };
|
||||||
let result: Vec<NameAndType<String>> = unsafe {
|
let result: Vec<Ref<NameAndType>> = unsafe {
|
||||||
slice::from_raw_parts_mut(debug_types_ptr, count)
|
slice::from_raw_parts_mut(debug_types_ptr, count)
|
||||||
.iter()
|
.iter()
|
||||||
.map(NameAndType::<String>::from_raw)
|
.map(|x| NameAndType::from_raw(x).to_owned())
|
||||||
.collect()
|
.collect()
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -416,7 +435,7 @@ impl DebugInfo {
|
|||||||
/// Returns a generator of all functions provided by a named DebugInfoParser
|
/// Returns a generator of all functions provided by a named DebugInfoParser
|
||||||
pub fn functions_by_name<S: BnStrCompatible>(
|
pub fn functions_by_name<S: BnStrCompatible>(
|
||||||
&self,
|
&self,
|
||||||
parser_name: S,
|
parser_name: S
|
||||||
) -> Vec<DebugFunctionInfo> {
|
) -> Vec<DebugFunctionInfo> {
|
||||||
let parser_name = parser_name.into_bytes_with_nul();
|
let parser_name = parser_name.into_bytes_with_nul();
|
||||||
|
|
||||||
@@ -758,30 +777,41 @@ impl DebugInfo {
|
|||||||
let short_name_bytes = new_func.short_name.map(|name| name.into_bytes_with_nul());
|
let short_name_bytes = new_func.short_name.map(|name| name.into_bytes_with_nul());
|
||||||
let short_name = short_name_bytes
|
let short_name = short_name_bytes
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.map_or(ptr::null_mut() as *mut _, |name| {
|
.map_or(ptr::null_mut() as *mut _, |name| name.as_ptr() as _);
|
||||||
name.as_ptr() as _
|
|
||||||
});
|
|
||||||
let full_name_bytes = new_func.full_name.map(|name| name.into_bytes_with_nul());
|
let full_name_bytes = new_func.full_name.map(|name| name.into_bytes_with_nul());
|
||||||
let full_name = full_name_bytes
|
let full_name = full_name_bytes
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.map_or(ptr::null_mut() as *mut _, |name| {
|
.map_or(ptr::null_mut() as *mut _, |name| name.as_ptr() as _);
|
||||||
name.as_ptr() as _
|
|
||||||
});
|
|
||||||
let raw_name_bytes = new_func.raw_name.map(|name| name.into_bytes_with_nul());
|
let raw_name_bytes = new_func.raw_name.map(|name| name.into_bytes_with_nul());
|
||||||
let raw_name = raw_name_bytes
|
let raw_name = raw_name_bytes
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.map_or(ptr::null_mut() as *mut _, |name| {
|
.map_or(ptr::null_mut() as *mut _, |name| name.as_ptr() as _);
|
||||||
name.as_ptr() as _
|
|
||||||
});
|
|
||||||
|
|
||||||
let mut components_array: Vec<*const ::std::os::raw::c_char> =
|
let mut components_array: Vec<*mut ::std::os::raw::c_char> =
|
||||||
Vec::with_capacity(new_func.components.len());
|
Vec::with_capacity(new_func.components.len());
|
||||||
for component in &new_func.components {
|
|
||||||
components_array.push(component.as_ptr() as _);
|
|
||||||
}
|
let mut local_variables_array: Vec<BNVariableNameAndType> =
|
||||||
|
Vec::with_capacity(new_func.local_variables.len());
|
||||||
|
|
||||||
unsafe {
|
unsafe {
|
||||||
BNAddDebugFunction(
|
for component in &new_func.components {
|
||||||
|
components_array.push(BNAllocString(component.clone().into_bytes_with_nul().as_ptr() as _));
|
||||||
|
}
|
||||||
|
|
||||||
|
for local_variable in &new_func.local_variables {
|
||||||
|
local_variables_array.push(
|
||||||
|
BNVariableNameAndType {
|
||||||
|
var: local_variable.var.raw(),
|
||||||
|
autoDefined: local_variable.auto_defined,
|
||||||
|
typeConfidence: local_variable.ty.confidence,
|
||||||
|
name: BNAllocString(local_variable.name.clone().into_bytes_with_nul().as_ptr() as _),
|
||||||
|
type_: local_variable.ty.contents.handle,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let result = BNAddDebugFunction(
|
||||||
self.handle,
|
self.handle,
|
||||||
&mut BNDebugFunctionInfo {
|
&mut BNDebugFunctionInfo {
|
||||||
shortName: short_name,
|
shortName: short_name,
|
||||||
@@ -798,8 +828,19 @@ impl DebugInfo {
|
|||||||
},
|
},
|
||||||
components: components_array.as_ptr() as _,
|
components: components_array.as_ptr() as _,
|
||||||
componentN: new_func.components.len(),
|
componentN: new_func.components.len(),
|
||||||
|
localVariables: local_variables_array.as_ptr() as _,
|
||||||
|
localVariableN: local_variables_array.len(),
|
||||||
},
|
},
|
||||||
)
|
);
|
||||||
|
|
||||||
|
for i in components_array {
|
||||||
|
BNFreeString(i);
|
||||||
|
}
|
||||||
|
|
||||||
|
for i in &local_variables_array {
|
||||||
|
BNFreeString(i.name);
|
||||||
|
}
|
||||||
|
result
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -26,6 +26,49 @@ use crate::rc::*;
|
|||||||
|
|
||||||
pub type Result<R> = result::Result<R, ()>;
|
pub type Result<R> = result::Result<R, ()>;
|
||||||
|
|
||||||
|
pub fn demangle_llvm<S: BnStrCompatible>(
|
||||||
|
mangled_name: S,
|
||||||
|
simplify: bool,
|
||||||
|
) -> Result<Vec<String>> {
|
||||||
|
let mangled_name_bwn = mangled_name.into_bytes_with_nul();
|
||||||
|
let mangled_name_ptr = mangled_name_bwn.as_ref();
|
||||||
|
let mut out_name: *mut *mut std::os::raw::c_char = unsafe { std::mem::zeroed() };
|
||||||
|
let mut out_size: usize = 0;
|
||||||
|
let res = unsafe {
|
||||||
|
BNDemangleLLVM(
|
||||||
|
mangled_name_ptr.as_ptr() as *const c_char,
|
||||||
|
&mut out_name,
|
||||||
|
&mut out_size,
|
||||||
|
simplify,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
|
||||||
|
if !res || out_size == 0 {
|
||||||
|
let cstr = match CStr::from_bytes_with_nul(mangled_name_ptr) {
|
||||||
|
Ok(cstr) => cstr,
|
||||||
|
Err(_) => {
|
||||||
|
log::error!("demangle_llvm: failed to parse mangled name");
|
||||||
|
return Err(());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
return Ok(vec![cstr.to_string_lossy().into_owned()]);
|
||||||
|
}
|
||||||
|
|
||||||
|
if out_name.is_null() {
|
||||||
|
log::error!("demangle_llvm: out_name is NULL");
|
||||||
|
return Err(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let names = unsafe { ArrayGuard::<BnString>::new(out_name, out_size, ()) }
|
||||||
|
.iter()
|
||||||
|
.map(str::to_string)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
unsafe { BNFreeDemangledName(&mut out_name, out_size) };
|
||||||
|
|
||||||
|
Ok(names)
|
||||||
|
}
|
||||||
|
|
||||||
pub fn demangle_gnu3<S: BnStrCompatible>(
|
pub fn demangle_gnu3<S: BnStrCompatible>(
|
||||||
arch: &CoreArchitecture,
|
arch: &CoreArchitecture,
|
||||||
mangled_name: S,
|
mangled_name: S,
|
||||||
@@ -33,8 +76,8 @@ pub fn demangle_gnu3<S: BnStrCompatible>(
|
|||||||
) -> Result<(Option<Ref<Type>>, Vec<String>)> {
|
) -> Result<(Option<Ref<Type>>, Vec<String>)> {
|
||||||
let mangled_name_bwn = mangled_name.into_bytes_with_nul();
|
let mangled_name_bwn = mangled_name.into_bytes_with_nul();
|
||||||
let mangled_name_ptr = mangled_name_bwn.as_ref();
|
let mangled_name_ptr = mangled_name_bwn.as_ref();
|
||||||
let mut out_type: *mut BNType = unsafe { std::mem::zeroed() };
|
let mut out_type: *mut BNType = std::ptr::null_mut();
|
||||||
let mut out_name: *mut *mut std::os::raw::c_char = unsafe { std::mem::zeroed() };
|
let mut out_name: *mut *mut std::os::raw::c_char = std::ptr::null_mut();
|
||||||
let mut out_size: usize = 0;
|
let mut out_size: usize = 0;
|
||||||
let res = unsafe {
|
let res = unsafe {
|
||||||
BNDemangleGNU3(
|
BNDemangleGNU3(
|
||||||
@@ -73,7 +116,7 @@ pub fn demangle_gnu3<S: BnStrCompatible>(
|
|||||||
|
|
||||||
let names = unsafe { ArrayGuard::<BnString>::new(out_name, out_size, ()) }
|
let names = unsafe { ArrayGuard::<BnString>::new(out_name, out_size, ()) }
|
||||||
.iter()
|
.iter()
|
||||||
.map(|name| name.to_string())
|
.map(str::to_string)
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
unsafe { BNFreeDemangledName(&mut out_name, out_size) };
|
unsafe { BNFreeDemangledName(&mut out_name, out_size) };
|
||||||
@@ -89,8 +132,8 @@ pub fn demangle_ms<S: BnStrCompatible>(
|
|||||||
let mangled_name_bwn = mangled_name.into_bytes_with_nul();
|
let mangled_name_bwn = mangled_name.into_bytes_with_nul();
|
||||||
let mangled_name_ptr = mangled_name_bwn.as_ref();
|
let mangled_name_ptr = mangled_name_bwn.as_ref();
|
||||||
|
|
||||||
let mut out_type: *mut BNType = unsafe { std::mem::zeroed() };
|
let mut out_type: *mut BNType = std::ptr::null_mut();
|
||||||
let mut out_name: *mut *mut std::os::raw::c_char = unsafe { std::mem::zeroed() };
|
let mut out_name: *mut *mut std::os::raw::c_char = std::ptr::null_mut();
|
||||||
let mut out_size: usize = 0;
|
let mut out_size: usize = 0;
|
||||||
let res = unsafe {
|
let res = unsafe {
|
||||||
BNDemangleMS(
|
BNDemangleMS(
|
||||||
|
|||||||
@@ -16,12 +16,13 @@
|
|||||||
|
|
||||||
use binaryninjacore_sys::*;
|
use binaryninjacore_sys::*;
|
||||||
|
|
||||||
use crate::string::{BnStr, BnString};
|
use crate::string::BnString;
|
||||||
use crate::{BN_FULL_CONFIDENCE, BN_INVALID_EXPR};
|
use crate::{BN_FULL_CONFIDENCE, BN_INVALID_EXPR};
|
||||||
|
|
||||||
use crate::rc::*;
|
use crate::rc::*;
|
||||||
|
|
||||||
use std::convert::From;
|
use std::convert::From;
|
||||||
|
use std::ffi::CStr;
|
||||||
use std::mem;
|
use std::mem;
|
||||||
use std::ptr;
|
use std::ptr;
|
||||||
|
|
||||||
@@ -72,7 +73,7 @@ pub type InstructionTextTokenContext = BNInstructionTextTokenContext;
|
|||||||
// IndirectImportToken = 69,
|
// IndirectImportToken = 69,
|
||||||
// ExternalSymbolToken = 70,
|
// ExternalSymbolToken = 70,
|
||||||
|
|
||||||
#[repr(C)]
|
#[repr(transparent)]
|
||||||
pub struct InstructionTextToken(pub(crate) BNInstructionTextToken);
|
pub struct InstructionTextToken(pub(crate) BNInstructionTextToken);
|
||||||
|
|
||||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||||
@@ -98,11 +99,15 @@ pub enum InstructionTextTokenContents {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl InstructionTextToken {
|
impl InstructionTextToken {
|
||||||
pub(crate) unsafe fn from_raw(raw: &BNInstructionTextToken) -> Self {
|
pub(crate) unsafe fn from_raw(raw: &BNInstructionTextToken) -> &Self {
|
||||||
Self(*raw)
|
mem::transmute(raw)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new(text: BnString, contents: InstructionTextTokenContents) -> Self {
|
pub(crate) fn into_raw(self) -> BNInstructionTextToken {
|
||||||
|
mem::ManuallyDrop::new(self).0
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn new(text: &str, contents: InstructionTextTokenContents) -> Self {
|
||||||
let (value, address) = match contents {
|
let (value, address) = match contents {
|
||||||
InstructionTextTokenContents::Integer(v) => (v, 0),
|
InstructionTextTokenContents::Integer(v) => (v, 0),
|
||||||
InstructionTextTokenContents::PossibleAddress(v)
|
InstructionTextTokenContents::PossibleAddress(v)
|
||||||
@@ -149,7 +154,7 @@ impl InstructionTextToken {
|
|||||||
|
|
||||||
InstructionTextToken(BNInstructionTextToken {
|
InstructionTextToken(BNInstructionTextToken {
|
||||||
type_,
|
type_,
|
||||||
text: text.into_raw(),
|
text: BnString::new(text).into_raw(),
|
||||||
value,
|
value,
|
||||||
width,
|
width,
|
||||||
size: 0,
|
size: 0,
|
||||||
@@ -159,7 +164,7 @@ impl InstructionTextToken {
|
|||||||
address,
|
address,
|
||||||
typeNames: ptr::null_mut(),
|
typeNames: ptr::null_mut(),
|
||||||
namesCount: 0,
|
namesCount: 0,
|
||||||
exprIndex: BN_INVALID_EXPR
|
exprIndex: BN_INVALID_EXPR,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -171,8 +176,8 @@ impl InstructionTextToken {
|
|||||||
self.0.context = context;
|
self.0.context = context;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn text(&self) -> &BnStr {
|
pub fn text(&self) -> &str {
|
||||||
unsafe { BnStr::from_raw(self.0.text) }
|
unsafe { CStr::from_ptr(self.0.text) }.to_str().unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn contents(&self) -> InstructionTextTokenContents {
|
pub fn contents(&self) -> InstructionTextTokenContents {
|
||||||
@@ -229,7 +234,7 @@ impl Default for InstructionTextToken {
|
|||||||
address: 0,
|
address: 0,
|
||||||
typeNames: ptr::null_mut(),
|
typeNames: ptr::null_mut(),
|
||||||
namesCount: 0,
|
namesCount: 0,
|
||||||
exprIndex: BN_INVALID_EXPR
|
exprIndex: BN_INVALID_EXPR,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -248,19 +253,51 @@ impl Clone for InstructionTextToken {
|
|||||||
confidence: 0xff,
|
confidence: 0xff,
|
||||||
typeNames: ptr::null_mut(),
|
typeNames: ptr::null_mut(),
|
||||||
namesCount: 0,
|
namesCount: 0,
|
||||||
exprIndex: self.0.exprIndex
|
exprIndex: self.0.exprIndex,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO : There is almost certainly a memory leak here - in the case where
|
impl Drop for InstructionTextToken {
|
||||||
// `impl CoreOwnedArrayProvider for InstructionTextToken` doesn't get triggered
|
fn drop(&mut self) {
|
||||||
// impl Drop for InstructionTextToken {
|
if !self.0.text.is_null() {
|
||||||
// fn drop(&mut self) {
|
let _owned = unsafe { BnString::from_raw(self.0.text) };
|
||||||
// let _owned = unsafe { BnString::from_raw(self.0.text) };
|
}
|
||||||
// }
|
if !self.0.typeNames.is_null() && self.0.namesCount != 0 {
|
||||||
// }
|
unsafe { BNFreeStringList(self.0.typeNames, self.0.namesCount) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for InstructionTextToken {
|
||||||
|
type Raw = BNInstructionTextToken;
|
||||||
|
type Context = ();
|
||||||
|
type Wrapped<'a> = &'a Self;
|
||||||
|
}
|
||||||
|
unsafe impl CoreArrayProviderInner for InstructionTextToken {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeInstructionText(raw, count)
|
||||||
|
}
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
core::mem::transmute(raw)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for Array<InstructionTextToken> {
|
||||||
|
type Raw = BNInstructionTextLine;
|
||||||
|
type Context = ();
|
||||||
|
type Wrapped<'a> = mem::ManuallyDrop<Self>;
|
||||||
|
}
|
||||||
|
unsafe impl CoreArrayProviderInner for Array<InstructionTextToken> {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeInstructionTextLines(raw, count)
|
||||||
|
}
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
mem::ManuallyDrop::new(Self::new(raw.tokens, raw.count, ()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[repr(transparent)]
|
||||||
pub struct DisassemblyTextLine(pub(crate) BNDisassemblyTextLine);
|
pub struct DisassemblyTextLine(pub(crate) BNDisassemblyTextLine);
|
||||||
|
|
||||||
impl DisassemblyTextLine {
|
impl DisassemblyTextLine {
|
||||||
@@ -289,7 +326,7 @@ impl DisassemblyTextLine {
|
|||||||
unsafe {
|
unsafe {
|
||||||
std::slice::from_raw_parts::<BNInstructionTextToken>(self.0.tokens, self.0.count)
|
std::slice::from_raw_parts::<BNInstructionTextToken>(self.0.tokens, self.0.count)
|
||||||
.iter()
|
.iter()
|
||||||
.map(|&x| InstructionTextToken::from_raw(&x))
|
.map(|x| InstructionTextToken::from_raw(x).clone())
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -306,10 +343,9 @@ impl std::fmt::Display for DisassemblyTextLine {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl From<Vec<InstructionTextToken>> for DisassemblyTextLine {
|
impl From<Vec<InstructionTextToken>> for DisassemblyTextLine {
|
||||||
fn from(mut tokens: Vec<InstructionTextToken>) -> Self {
|
fn from(tokens: Vec<InstructionTextToken>) -> Self {
|
||||||
tokens.shrink_to_fit();
|
let mut tokens: Box<[_]> = tokens.into();
|
||||||
|
|
||||||
assert!(tokens.len() == tokens.capacity());
|
|
||||||
// TODO: let (tokens_pointer, tokens_len, _) = unsafe { tokens.into_raw_parts() }; // Can't use for now...still a rust nightly feature
|
// TODO: let (tokens_pointer, tokens_len, _) = unsafe { tokens.into_raw_parts() }; // Can't use for now...still a rust nightly feature
|
||||||
let tokens_pointer = tokens.as_mut_ptr();
|
let tokens_pointer = tokens.as_mut_ptr();
|
||||||
let tokens_len = tokens.len();
|
let tokens_len = tokens.len();
|
||||||
@@ -344,12 +380,11 @@ impl From<Vec<InstructionTextToken>> for DisassemblyTextLine {
|
|||||||
|
|
||||||
impl From<&Vec<&str>> for DisassemblyTextLine {
|
impl From<&Vec<&str>> for DisassemblyTextLine {
|
||||||
fn from(string_tokens: &Vec<&str>) -> Self {
|
fn from(string_tokens: &Vec<&str>) -> Self {
|
||||||
let mut tokens: Vec<BNInstructionTextToken> = Vec::with_capacity(string_tokens.len());
|
let mut tokens: Box<[BNInstructionTextToken]> = string_tokens
|
||||||
tokens.extend(string_tokens.iter().map(|&token| {
|
.iter()
|
||||||
InstructionTextToken::new(BnString::new(token), InstructionTextTokenContents::Text).0
|
.map(|&token| InstructionTextToken::new(token, InstructionTextTokenContents::Text).into_raw())
|
||||||
}));
|
.collect();
|
||||||
|
|
||||||
assert!(tokens.len() == tokens.capacity());
|
|
||||||
// let (tokens_pointer, tokens_len, _) = unsafe { tokens.into_raw_parts() }; // Can't use for now...still a rust nighly feature
|
// let (tokens_pointer, tokens_len, _) = unsafe { tokens.into_raw_parts() }; // Can't use for now...still a rust nighly feature
|
||||||
let tokens_pointer = tokens.as_mut_ptr();
|
let tokens_pointer = tokens.as_mut_ptr();
|
||||||
let tokens_len = tokens.len();
|
let tokens_len = tokens.len();
|
||||||
@@ -413,12 +448,28 @@ impl Default for DisassemblyTextLine {
|
|||||||
|
|
||||||
impl Drop for DisassemblyTextLine {
|
impl Drop for DisassemblyTextLine {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
unsafe {
|
if !self.0.tokens.is_null() {
|
||||||
Vec::from_raw_parts(self.0.tokens, self.0.count, self.0.count);
|
let ptr = core::ptr::slice_from_raw_parts_mut(self.0.tokens, self.0.count);
|
||||||
|
let _ = unsafe { Box::from_raw(ptr) };
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for DisassemblyTextLine {
|
||||||
|
type Raw = BNDisassemblyTextLine;
|
||||||
|
type Context = ();
|
||||||
|
type Wrapped<'a> = &'a Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for DisassemblyTextLine {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeDisassemblyTextLines(raw, count)
|
||||||
|
}
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
core::mem::transmute(raw)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub type DisassemblyOption = BNDisassemblyOption;
|
pub type DisassemblyOption = BNDisassemblyOption;
|
||||||
|
|
||||||
#[derive(PartialEq, Eq, Hash)]
|
#[derive(PartialEq, Eq, Hash)]
|
||||||
|
|||||||
@@ -1,11 +1,9 @@
|
|||||||
use crate::rc::{
|
use crate::rc::{Array, CoreArrayProvider, Guard, CoreArrayProviderInner, Ref, RefCountable};
|
||||||
Array, CoreArrayProvider, CoreArrayWrapper, CoreOwnedArrayProvider, Ref, RefCountable,
|
|
||||||
};
|
|
||||||
use crate::settings::Settings;
|
use crate::settings::Settings;
|
||||||
use crate::string::{BnStr, BnStrCompatible, BnString};
|
use crate::string::{BnStrCompatible, BnString};
|
||||||
use binaryninjacore_sys::*;
|
use binaryninjacore_sys::*;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::ffi::c_void;
|
use std::ffi::{c_void, CStr};
|
||||||
use std::os::raw::c_char;
|
use std::os::raw::c_char;
|
||||||
use std::ptr::null_mut;
|
use std::ptr::null_mut;
|
||||||
use std::slice;
|
use std::slice;
|
||||||
@@ -63,19 +61,15 @@ impl DownloadProvider {
|
|||||||
impl CoreArrayProvider for DownloadProvider {
|
impl CoreArrayProvider for DownloadProvider {
|
||||||
type Raw = *mut BNDownloadProvider;
|
type Raw = *mut BNDownloadProvider;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, DownloadProvider>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for DownloadProvider {
|
unsafe impl CoreArrayProviderInner for DownloadProvider {
|
||||||
unsafe fn free(raw: *mut Self::Raw, _count: usize, _context: &Self::Context) {
|
unsafe fn free(raw: *mut Self::Raw, _count: usize, _context: &Self::Context) {
|
||||||
BNFreeDownloadProviderList(raw);
|
BNFreeDownloadProviderList(raw);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Guard::new(DownloadProvider::from_raw(*raw), &())
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for DownloadProvider {
|
|
||||||
type Wrapped = DownloadProvider;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped {
|
|
||||||
DownloadProvider::from_raw(*raw)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -270,8 +264,8 @@ impl DownloadInstance {
|
|||||||
.zip(response_header_values.iter())
|
.zip(response_header_values.iter())
|
||||||
{
|
{
|
||||||
response_headers.insert(
|
response_headers.insert(
|
||||||
BnStr::from_raw(*key).to_string(),
|
CStr::from_ptr(*key).to_str().unwrap().to_owned(),
|
||||||
BnStr::from_raw(*value).to_string(),
|
CStr::from_ptr(*value).to_str().unwrap().to_owned(),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
196
src/enterprise.rs
Normal file
196
src/enterprise.rs
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
use std::marker::PhantomData;
|
||||||
|
use std::time::{Duration, SystemTime, UNIX_EPOCH};
|
||||||
|
|
||||||
|
use crate::rc::Array;
|
||||||
|
use crate::string::{BnStrCompatible, BnString};
|
||||||
|
|
||||||
|
pub fn server_username() -> BnString {
|
||||||
|
unsafe { BnString::from_raw(binaryninjacore_sys::BNGetEnterpriseServerUsername()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn server_url() -> BnString {
|
||||||
|
unsafe { BnString::from_raw(binaryninjacore_sys::BNGetEnterpriseServerUrl()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_server_url<S: BnStrCompatible>(url: S) -> Result<(), ()> {
|
||||||
|
let url = url.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
binaryninjacore_sys::BNSetEnterpriseServerUrl(url.as_ref().as_ptr() as *const std::os::raw::c_char)
|
||||||
|
};
|
||||||
|
if result {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn server_name() -> BnString {
|
||||||
|
unsafe { BnString::from_raw(binaryninjacore_sys::BNGetEnterpriseServerName()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn server_id() -> BnString {
|
||||||
|
unsafe { BnString::from_raw(binaryninjacore_sys::BNGetEnterpriseServerId()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn server_version() -> u64 {
|
||||||
|
unsafe { binaryninjacore_sys::BNGetEnterpriseServerVersion() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn server_build_id() -> BnString {
|
||||||
|
unsafe { BnString::from_raw(binaryninjacore_sys::BNGetEnterpriseServerBuildId()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn server_token() -> BnString {
|
||||||
|
unsafe { BnString::from_raw(binaryninjacore_sys::BNGetEnterpriseServerToken()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn license_duration() -> Duration {
|
||||||
|
Duration::from_secs(unsafe { binaryninjacore_sys::BNGetEnterpriseServerLicenseDuration() })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn license_expiration_time() -> SystemTime {
|
||||||
|
let m = Duration::from_secs(unsafe {
|
||||||
|
binaryninjacore_sys::BNGetEnterpriseServerLicenseExpirationTime()
|
||||||
|
});
|
||||||
|
UNIX_EPOCH + m
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn server_reservation_time_limit() -> Duration {
|
||||||
|
Duration::from_secs(unsafe { binaryninjacore_sys::BNGetEnterpriseServerReservationTimeLimit() })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_server_floating_license() -> bool {
|
||||||
|
unsafe { binaryninjacore_sys::BNIsEnterpriseServerFloatingLicense() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_server_license_still_activated() -> bool {
|
||||||
|
unsafe { binaryninjacore_sys::BNIsEnterpriseServerLicenseStillActivated() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn authenticate_server_with_credentials<U, P>(username: U, password: P, remember: bool) -> bool
|
||||||
|
where
|
||||||
|
U: BnStrCompatible,
|
||||||
|
P: BnStrCompatible,
|
||||||
|
{
|
||||||
|
let username = username.into_bytes_with_nul();
|
||||||
|
let password = password.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
binaryninjacore_sys::BNAuthenticateEnterpriseServerWithCredentials(
|
||||||
|
username.as_ref().as_ptr() as *const std::os::raw::c_char,
|
||||||
|
password.as_ref().as_ptr() as *const std::os::raw::c_char,
|
||||||
|
remember,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn authenticate_server_with_method<S: BnStrCompatible>(method: S, remember: bool) -> bool {
|
||||||
|
let method = method.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
binaryninjacore_sys::BNAuthenticateEnterpriseServerWithMethod(
|
||||||
|
method.as_ref().as_ptr() as *const std::os::raw::c_char,
|
||||||
|
remember,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn connect_server() -> bool {
|
||||||
|
unsafe { binaryninjacore_sys::BNConnectEnterpriseServer() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn deauthenticate_server() -> bool {
|
||||||
|
unsafe { binaryninjacore_sys::BNDeauthenticateEnterpriseServer() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn cancel_server_authentication() {
|
||||||
|
unsafe { binaryninjacore_sys::BNCancelEnterpriseServerAuthentication() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn update_server_license(timeout: Duration) -> bool {
|
||||||
|
unsafe { binaryninjacore_sys::BNUpdateEnterpriseServerLicense(timeout.as_secs()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn release_server_license() -> bool {
|
||||||
|
unsafe { binaryninjacore_sys::BNReleaseEnterpriseServerLicense() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_server_connected() -> bool {
|
||||||
|
unsafe { binaryninjacore_sys::BNIsEnterpriseServerConnected() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_server_authenticated() -> bool {
|
||||||
|
unsafe { binaryninjacore_sys::BNIsEnterpriseServerAuthenticated() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_server_initialized() -> bool {
|
||||||
|
unsafe { binaryninjacore_sys::BNIsEnterpriseServerInitialized() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn server_last_error() -> BnString {
|
||||||
|
unsafe { BnString::from_raw(binaryninjacore_sys::BNGetEnterpriseServerLastError()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn server_authentication_methods() -> (Array<BnString>, Array<BnString>) {
|
||||||
|
let mut methods = core::ptr::null_mut();
|
||||||
|
let mut names = core::ptr::null_mut();
|
||||||
|
let count = unsafe {
|
||||||
|
binaryninjacore_sys::BNGetEnterpriseServerAuthenticationMethods(&mut methods, &mut names)
|
||||||
|
};
|
||||||
|
unsafe { (Array::new(methods, count, ()), Array::new(names, count, ())) }
|
||||||
|
}
|
||||||
|
|
||||||
|
// NOTE don't implement Clone, Copy, so each callback can only be
|
||||||
|
// register/unregistered only once
|
||||||
|
#[repr(transparent)]
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct EnterpriseServerCallback<'a> {
|
||||||
|
handle: binaryninjacore_sys::BNEnterpriseServerCallbacks,
|
||||||
|
lifetime: PhantomData<&'a ()>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn register_license_changed_callback<'a, F: FnMut(bool) + 'a>(
|
||||||
|
callback: F,
|
||||||
|
) -> EnterpriseServerCallback<'a> {
|
||||||
|
unsafe extern "C" fn cb_license_status_changed<F: FnMut(bool)>(
|
||||||
|
ctxt: *mut ::std::os::raw::c_void,
|
||||||
|
still_valid: bool,
|
||||||
|
) {
|
||||||
|
let ctxt: &mut F = &mut *(ctxt as *mut F);
|
||||||
|
ctxt(still_valid)
|
||||||
|
}
|
||||||
|
let mut handle = binaryninjacore_sys::BNEnterpriseServerCallbacks {
|
||||||
|
context: Box::leak(Box::new(callback)) as *mut F as *mut core::ffi::c_void,
|
||||||
|
licenseStatusChanged: Some(cb_license_status_changed::<F>),
|
||||||
|
};
|
||||||
|
unsafe { binaryninjacore_sys::BNRegisterEnterpriseServerNotification(&mut handle) }
|
||||||
|
EnterpriseServerCallback {
|
||||||
|
handle,
|
||||||
|
lifetime: PhantomData,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn unregister_license_changed_callback(mut callback_handle: EnterpriseServerCallback) {
|
||||||
|
unsafe {
|
||||||
|
binaryninjacore_sys::BNUnregisterEnterpriseServerNotification(&mut callback_handle.handle)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> EnterpriseServerCallback<'a> {
|
||||||
|
/// register the license changed callback
|
||||||
|
pub fn register<F: FnMut(bool) + 'a>(callback: F) -> Self {
|
||||||
|
register_license_changed_callback(callback)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// deregister the license changed callback, equivalent to drop the struct
|
||||||
|
pub fn deregister(self) {
|
||||||
|
// Nothing, just drop self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for EnterpriseServerCallback<'_> {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unregister_license_changed_callback(EnterpriseServerCallback {
|
||||||
|
handle: self.handle,
|
||||||
|
lifetime: PhantomData,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
209
src/externallibrary.rs
Normal file
209
src/externallibrary.rs
Normal file
@@ -0,0 +1,209 @@
|
|||||||
|
use core::{ffi, mem, ptr};
|
||||||
|
|
||||||
|
use binaryninjacore_sys::*;
|
||||||
|
|
||||||
|
use crate::project::ProjectFile;
|
||||||
|
use crate::rc::{CoreArrayProvider, CoreArrayProviderInner};
|
||||||
|
use crate::string::{BnStrCompatible, BnString};
|
||||||
|
use crate::symbol::Symbol;
|
||||||
|
|
||||||
|
/// An ExternalLibrary is an abstraction for a library that is optionally backed
|
||||||
|
/// by a [ProjectFile].
|
||||||
|
#[repr(transparent)]
|
||||||
|
pub struct ExternalLibrary {
|
||||||
|
handle: ptr::NonNull<BNExternalLibrary>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for ExternalLibrary {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { BNFreeExternalLibrary(self.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Clone for ExternalLibrary {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
unsafe {
|
||||||
|
Self::from_raw(ptr::NonNull::new(BNNewExternalLibraryReference(self.as_raw())).unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExternalLibrary {
|
||||||
|
pub(crate) unsafe fn from_raw(handle: ptr::NonNull<BNExternalLibrary>) -> Self {
|
||||||
|
Self { handle }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) unsafe fn ref_from_raw(handle: &*mut BNExternalLibrary) -> &Self {
|
||||||
|
assert!(!handle.is_null());
|
||||||
|
mem::transmute(handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::mut_from_ref)]
|
||||||
|
pub(crate) unsafe fn as_raw(&self) -> &mut BNExternalLibrary {
|
||||||
|
&mut *self.handle.as_ptr()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the name of this external library
|
||||||
|
pub fn name(&self) -> BnString {
|
||||||
|
let result = unsafe { BNExternalLibraryGetName(self.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { BnString::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the file backing this external library
|
||||||
|
pub fn backing_file(&self) -> Option<ProjectFile> {
|
||||||
|
let result = unsafe { BNExternalLibraryGetBackingFile(self.as_raw()) };
|
||||||
|
let handle = ptr::NonNull::new(result)?;
|
||||||
|
Some(unsafe { ProjectFile::from_raw(handle) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the file backing this external library
|
||||||
|
pub fn set_backing_file(&self, file: Option<&ProjectFile>) {
|
||||||
|
let file_handle = file
|
||||||
|
.map(|x| unsafe {x.as_raw() as *mut _})
|
||||||
|
.unwrap_or(ptr::null_mut());
|
||||||
|
unsafe { BNExternalLibrarySetBackingFile(self.as_raw(), file_handle) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for ExternalLibrary {
|
||||||
|
type Raw = *mut BNExternalLibrary;
|
||||||
|
type Context = ();
|
||||||
|
type Wrapped<'a> = &'a Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for ExternalLibrary {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeExternalLibraryList(raw, count)
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Self::ref_from_raw(raw)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An ExternalLocation is an association from a source symbol in a binary view
|
||||||
|
/// to a target symbol and/or address in an [ExternalLibrary].
|
||||||
|
#[repr(transparent)]
|
||||||
|
pub struct ExternalLocation {
|
||||||
|
handle: ptr::NonNull<BNExternalLocation>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for ExternalLocation {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { BNFreeExternalLocation(self.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Clone for ExternalLocation {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
unsafe {
|
||||||
|
Self::from_raw(
|
||||||
|
ptr::NonNull::new(BNNewExternalLocationReference(self.as_raw())).unwrap(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExternalLocation {
|
||||||
|
pub(crate) unsafe fn from_raw(handle: ptr::NonNull<BNExternalLocation>) -> Self {
|
||||||
|
Self { handle }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) unsafe fn ref_from_raw(handle: &*mut BNExternalLocation) -> &Self {
|
||||||
|
assert!(!handle.is_null());
|
||||||
|
mem::transmute(handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::mut_from_ref)]
|
||||||
|
pub(crate) unsafe fn as_raw(&self) -> &mut BNExternalLocation {
|
||||||
|
&mut *self.handle.as_ptr()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the source symbol for this ExternalLocation
|
||||||
|
pub fn source_symbol(&self) -> Symbol {
|
||||||
|
let result = unsafe { BNExternalLocationGetSourceSymbol(self.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Symbol::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the ExternalLibrary that this ExternalLocation targets
|
||||||
|
pub fn library(&self) -> Option<ExternalLibrary> {
|
||||||
|
let result = unsafe { BNExternalLocationGetExternalLibrary(self.as_raw()) };
|
||||||
|
let handle = ptr::NonNull::new(result)?;
|
||||||
|
Some(unsafe { ExternalLibrary::from_raw(handle) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the ExternalLibrary that this ExternalLocation targets
|
||||||
|
pub fn set_external_library(&self, lib: Option<&ExternalLibrary>) {
|
||||||
|
let lib_handle = lib
|
||||||
|
.map(|x| unsafe {x.as_raw() as *mut _})
|
||||||
|
.unwrap_or(ptr::null_mut());
|
||||||
|
unsafe { BNExternalLocationSetExternalLibrary(self.as_raw(), lib_handle) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if this ExternalLocation has a target address
|
||||||
|
pub fn has_target_address(&self) -> bool {
|
||||||
|
unsafe { BNExternalLocationHasTargetAddress(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if this ExternalLocation has a target symbol
|
||||||
|
pub fn has_target_symbol(&self) -> bool {
|
||||||
|
unsafe { BNExternalLocationHasTargetSymbol(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the address pointed to by this ExternalLocation, if any
|
||||||
|
pub fn target_address(&self) -> Option<u64> {
|
||||||
|
self.has_target_address()
|
||||||
|
.then(|| unsafe { BNExternalLocationGetTargetAddress(self.as_raw()) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the address pointed to by this ExternalLocation.
|
||||||
|
/// ExternalLocations must have a valid target address and/or symbol set.
|
||||||
|
pub fn set_target_address(&self, mut address: Option<u64>) -> bool {
|
||||||
|
let address_ptr = address
|
||||||
|
.as_mut()
|
||||||
|
.map(|x| x as *mut u64)
|
||||||
|
.unwrap_or(ptr::null_mut());
|
||||||
|
unsafe { BNExternalLocationSetTargetAddress(self.as_raw(), address_ptr) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the symbol pointed to by this ExternalLocation, if any
|
||||||
|
pub fn target_symbol(&self) -> Option<BnString> {
|
||||||
|
self.has_target_symbol().then(|| unsafe {
|
||||||
|
let result = BNExternalLocationGetTargetSymbol(self.as_raw());
|
||||||
|
assert!(!result.is_null());
|
||||||
|
BnString::from_raw(result)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the symbol pointed to by this ExternalLocation.
|
||||||
|
/// ExternalLocations must have a valid target address and/or symbol set.
|
||||||
|
pub fn set_target_symbol<S: BnStrCompatible>(&self, symbol: Option<S>) -> bool {
|
||||||
|
let symbol = symbol
|
||||||
|
.map(|x| x.into_bytes_with_nul().as_ref().as_ptr() as *const ffi::c_char)
|
||||||
|
.unwrap_or(ptr::null_mut());
|
||||||
|
unsafe {
|
||||||
|
BNExternalLocationSetTargetSymbol(
|
||||||
|
self.as_raw(),
|
||||||
|
symbol,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for ExternalLocation {
|
||||||
|
type Raw = *mut BNExternalLocation;
|
||||||
|
type Context = ();
|
||||||
|
type Wrapped<'a> = &'a Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for ExternalLocation {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeExternalLocationList(raw, count)
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Self::ref_from_raw(raw)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -27,6 +27,7 @@ use binaryninjacore_sys::{
|
|||||||
BNIsAnalysisChanged,
|
BNIsAnalysisChanged,
|
||||||
BNIsBackedByDatabase,
|
BNIsBackedByDatabase,
|
||||||
//BNSetFileMetadataNavigationHandler,
|
//BNSetFileMetadataNavigationHandler,
|
||||||
|
BNGetFileMetadataDatabase,
|
||||||
BNIsFileModified,
|
BNIsFileModified,
|
||||||
BNMarkFileModified,
|
BNMarkFileModified,
|
||||||
BNMarkFileSaved,
|
BNMarkFileSaved,
|
||||||
@@ -40,8 +41,10 @@ use binaryninjacore_sys::{
|
|||||||
BNSetFilename,
|
BNSetFilename,
|
||||||
BNUndo,
|
BNUndo,
|
||||||
};
|
};
|
||||||
|
use binaryninjacore_sys::{BNCreateDatabaseWithProgress, BNOpenExistingDatabaseWithProgress};
|
||||||
|
|
||||||
use crate::binaryview::BinaryView;
|
use crate::binaryview::BinaryView;
|
||||||
|
use crate::database::Database;
|
||||||
|
|
||||||
use crate::rc::*;
|
use crate::rc::*;
|
||||||
use crate::string::*;
|
use crate::string::*;
|
||||||
@@ -204,16 +207,28 @@ impl FileMetadata {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn create_database<S: BnStrCompatible>(&self, filename: S) -> bool {
|
pub fn create_database<S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
filename: S,
|
||||||
|
progress_func: Option<fn(usize, usize) -> bool>,
|
||||||
|
) -> bool {
|
||||||
let filename = filename.into_bytes_with_nul();
|
let filename = filename.into_bytes_with_nul();
|
||||||
|
let filename_ptr = filename.as_ref().as_ptr() as *mut _;
|
||||||
let raw = "Raw".into_bytes_with_nul();
|
let raw = "Raw".into_bytes_with_nul();
|
||||||
|
let raw_ptr = raw.as_ptr() as *mut _;
|
||||||
|
|
||||||
unsafe {
|
let handle = unsafe { BNGetFileViewOfType(self.handle, raw_ptr) };
|
||||||
BNCreateDatabase(
|
match progress_func {
|
||||||
BNGetFileViewOfType(self.handle, raw.as_ptr() as *mut _),
|
None => unsafe { BNCreateDatabase(handle, filename_ptr, ptr::null_mut()) },
|
||||||
filename.as_ref().as_ptr() as *mut _,
|
Some(func) => unsafe {
|
||||||
ptr::null_mut() as *mut _,
|
BNCreateDatabaseWithProgress(
|
||||||
|
handle,
|
||||||
|
filename_ptr,
|
||||||
|
func as *mut libc::c_void,
|
||||||
|
Some(cb_progress_func),
|
||||||
|
ptr::null_mut(),
|
||||||
)
|
)
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -244,17 +259,25 @@ impl FileMetadata {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn open_database<S: BnStrCompatible>(&self, filename: S) -> Result<Ref<BinaryView>, ()> {
|
pub fn open_database<S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
filename: S,
|
||||||
|
progress_func: Option<fn(usize, usize) -> bool>,
|
||||||
|
) -> Result<Ref<BinaryView>, ()> {
|
||||||
let filename = filename.into_bytes_with_nul();
|
let filename = filename.into_bytes_with_nul();
|
||||||
let filename_ptr = filename.as_ref().as_ptr() as *mut _;
|
let filename_ptr = filename.as_ref().as_ptr() as *mut _;
|
||||||
|
|
||||||
let view = unsafe { BNOpenExistingDatabase(self.handle, filename_ptr) };
|
let view = match progress_func {
|
||||||
|
None => unsafe { BNOpenExistingDatabase(self.handle, filename_ptr) },
|
||||||
// TODO : add optional progress function
|
Some(func) => unsafe {
|
||||||
// let view = match progress_func {
|
BNOpenExistingDatabaseWithProgress(
|
||||||
// None => BNOpenExistingDatabase(self.handle, filename_ptr),
|
self.handle,
|
||||||
// _ => BNOpenExistingDatabaseWithProgress(self.handle, str(filename), None, ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_void_p, ctypes.c_ulonglong, ctypes.c_ulonglong)(lambda ctxt, cur, total: progress_func(cur, total)))
|
filename_ptr,
|
||||||
// };
|
func as *mut libc::c_void,
|
||||||
|
Some(cb_progress_func),
|
||||||
|
)
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
if view.is_null() {
|
if view.is_null() {
|
||||||
Err(())
|
Err(())
|
||||||
@@ -262,6 +285,12 @@ impl FileMetadata {
|
|||||||
Ok(unsafe { BinaryView::from_raw(view) })
|
Ok(unsafe { BinaryView::from_raw(view) })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get the current database
|
||||||
|
pub fn database(&self) -> Option<Database> {
|
||||||
|
let result = unsafe { BNGetFileMetadataDatabase(self.handle) };
|
||||||
|
ptr::NonNull::new(result).map(|handle| unsafe { Database::from_raw(handle) })
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToOwned for FileMetadata {
|
impl ToOwned for FileMetadata {
|
||||||
@@ -284,7 +313,11 @@ unsafe impl RefCountable for FileMetadata {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
unsafe extern "C" fn cb_progress_func(
|
||||||
BNCreateDatabase,
|
ctxt: *mut ::std::os::raw::c_void,
|
||||||
BNCreateDatabaseWithProgress,
|
progress: usize,
|
||||||
*/
|
total: usize,
|
||||||
|
) -> bool {
|
||||||
|
let func: fn(usize, usize) -> bool = core::mem::transmute(ctxt);
|
||||||
|
func(progress, total)
|
||||||
|
}
|
||||||
|
|||||||
@@ -68,7 +68,7 @@ impl<'a> FlowGraphNode<'a> {
|
|||||||
unsafe { FlowGraphNode::from_raw(BNCreateFlowGraphNode(graph.handle)) }
|
unsafe { FlowGraphNode::from_raw(BNCreateFlowGraphNode(graph.handle)) }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn set_disassembly_lines(&self, lines: &'a Vec<DisassemblyTextLine>) {
|
pub fn set_disassembly_lines(&self, lines: &'a [DisassemblyTextLine]) {
|
||||||
unsafe {
|
unsafe {
|
||||||
BNSetFlowGraphNodeLines(self.handle, lines.as_ptr() as *mut _, lines.len());
|
BNSetFlowGraphNodeLines(self.handle, lines.as_ptr() as *mut _, lines.len());
|
||||||
// BNFreeDisassemblyTextLines(lines.as_ptr() as *mut _, lines.len()); // Shouldn't need...would be a double free?
|
// BNFreeDisassemblyTextLines(lines.as_ptr() as *mut _, lines.len()); // Shouldn't need...would be a double free?
|
||||||
@@ -79,7 +79,7 @@ impl<'a> FlowGraphNode<'a> {
|
|||||||
let lines = lines
|
let lines = lines
|
||||||
.iter()
|
.iter()
|
||||||
.map(|&line| DisassemblyTextLine::from(&vec![line]))
|
.map(|&line| DisassemblyTextLine::from(&vec![line]))
|
||||||
.collect();
|
.collect::<Vec<_>>();
|
||||||
self.set_disassembly_lines(&lines);
|
self.set_disassembly_lines(&lines);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -114,8 +114,6 @@ impl<'a> ToOwned for FlowGraphNode<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO : FlowGraph are RefCounted objects, this needs to be changed to only return Refs to FlowGraph
|
|
||||||
|
|
||||||
#[derive(PartialEq, Eq, Hash)]
|
#[derive(PartialEq, Eq, Hash)]
|
||||||
pub struct FlowGraph {
|
pub struct FlowGraph {
|
||||||
pub(crate) handle: *mut BNFlowGraph,
|
pub(crate) handle: *mut BNFlowGraph,
|
||||||
@@ -126,8 +124,8 @@ impl FlowGraph {
|
|||||||
Self { handle: raw }
|
Self { handle: raw }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new() -> Self {
|
pub fn new() -> Ref<Self> {
|
||||||
unsafe { FlowGraph::from_raw(BNCreateFlowGraph()) }
|
unsafe { Ref::new(FlowGraph::from_raw(BNCreateFlowGraph())) }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn append(&self, node: &FlowGraphNode) -> usize {
|
pub fn append(&self, node: &FlowGraphNode) -> usize {
|
||||||
|
|||||||
1897
src/function.rs
1897
src/function.rs
File diff suppressed because it is too large
Load Diff
@@ -14,9 +14,8 @@
|
|||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
binaryview,
|
binaryview,
|
||||||
metadata::Metadata,
|
rc,
|
||||||
rc::{self, Ref},
|
string::{BnStrCompatible, IntoJson},
|
||||||
string::BnStrCompatible,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
use std::env;
|
use std::env;
|
||||||
@@ -59,6 +58,11 @@ fn binja_path() -> PathBuf {
|
|||||||
let path = CStr::from_ptr(info.dli_fname);
|
let path = CStr::from_ptr(info.dli_fname);
|
||||||
let path = OsStr::from_bytes(path.to_bytes());
|
let path = OsStr::from_bytes(path.to_bytes());
|
||||||
let mut path = PathBuf::from(path);
|
let mut path = PathBuf::from(path);
|
||||||
|
while path.is_symlink() {
|
||||||
|
path = path
|
||||||
|
.read_link()
|
||||||
|
.expect("Failed to find libbinaryninjacore path!");
|
||||||
|
}
|
||||||
|
|
||||||
path.pop();
|
path.pop();
|
||||||
path
|
path
|
||||||
@@ -95,13 +99,18 @@ pub fn shutdown() {
|
|||||||
unsafe { binaryninjacore_sys::BNShutdown() };
|
unsafe { binaryninjacore_sys::BNShutdown() };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn is_shutdown_requested() -> bool {
|
||||||
|
unsafe { binaryninjacore_sys::BNIsShutdownRequested() }
|
||||||
|
}
|
||||||
|
|
||||||
/// Prelued-postlued helper function (calls [`init`] and [`shutdown`] for you)
|
/// Prelued-postlued helper function (calls [`init`] and [`shutdown`] for you)
|
||||||
/// ```rust
|
/// ```no_run
|
||||||
|
/// # use binaryninja::binaryview::BinaryViewExt;
|
||||||
/// binaryninja::headless::script_helper(|| {
|
/// binaryninja::headless::script_helper(|| {
|
||||||
/// binaryninja::load("/bin/cat")
|
/// let cat = binaryninja::load("/bin/cat").expect("Couldn't open `/bin/cat`");
|
||||||
/// .expect("Couldn't open `/bin/cat`")
|
/// for function in cat.functions().iter() {
|
||||||
/// .iter()
|
/// println!(" `{}`", function.symbol().full_name());
|
||||||
/// .for_each(|func| println!(" `{}`", func.symbol().full_name()));
|
/// }
|
||||||
/// });
|
/// });
|
||||||
/// ```
|
/// ```
|
||||||
pub fn script_helper(func: fn()) {
|
pub fn script_helper(func: fn()) {
|
||||||
@@ -119,7 +128,7 @@ impl Session {
|
|||||||
Self {}
|
Self {}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// ```rust
|
/// ```no_run
|
||||||
/// let headless_session = binaryninja::headless::Session::new();
|
/// let headless_session = binaryninja::headless::Session::new();
|
||||||
///
|
///
|
||||||
/// let bv = headless_session.load("/bin/cat").expect("Couldn't open `/bin/cat`");
|
/// let bv = headless_session.load("/bin/cat").expect("Couldn't open `/bin/cat`");
|
||||||
@@ -128,18 +137,23 @@ impl Session {
|
|||||||
crate::load(filename)
|
crate::load(filename)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// ```rust
|
/// ```no_run
|
||||||
/// let settings = [("analysis.linearSweep.autorun", false)].into();
|
/// use binaryninja::{metadata::Metadata, rc::Ref};
|
||||||
|
/// use std::collections::HashMap;
|
||||||
|
///
|
||||||
|
/// let settings: Ref<Metadata> = HashMap::from([
|
||||||
|
/// ("analysis.linearSweep.autorun", false.into()),
|
||||||
|
/// ]).into();
|
||||||
/// let headless_session = binaryninja::headless::Session::new();
|
/// let headless_session = binaryninja::headless::Session::new();
|
||||||
///
|
///
|
||||||
/// let bv = headless_session.load_with_options("/bin/cat", true, Some(settings))
|
/// let bv = headless_session.load_with_options("/bin/cat", true, Some(settings))
|
||||||
/// .expect("Couldn't open `/bin/cat`");
|
/// .expect("Couldn't open `/bin/cat`");
|
||||||
/// ```
|
/// ```
|
||||||
pub fn load_with_options(
|
pub fn load_with_options<O: IntoJson>(
|
||||||
&self,
|
&self,
|
||||||
filename: &str,
|
filename: &str,
|
||||||
update_analysis_and_wait: bool,
|
update_analysis_and_wait: bool,
|
||||||
options: Option<Ref<Metadata>>,
|
options: Option<O>,
|
||||||
) -> Option<rc::Ref<binaryview::BinaryView>> {
|
) -> Option<rc::Ref<binaryview::BinaryView>> {
|
||||||
crate::load_with_options(filename, update_analysis_and_wait, options)
|
crate::load_with_options(filename, update_analysis_and_wait, options)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,8 +2,10 @@ use std::hash::{Hash, Hasher};
|
|||||||
|
|
||||||
use binaryninjacore_sys::BNFreeHighLevelILFunction;
|
use binaryninjacore_sys::BNFreeHighLevelILFunction;
|
||||||
use binaryninjacore_sys::BNGetHighLevelILBasicBlockList;
|
use binaryninjacore_sys::BNGetHighLevelILBasicBlockList;
|
||||||
|
use binaryninjacore_sys::BNGetHighLevelILIndexForInstruction;
|
||||||
use binaryninjacore_sys::BNGetHighLevelILInstructionCount;
|
use binaryninjacore_sys::BNGetHighLevelILInstructionCount;
|
||||||
use binaryninjacore_sys::BNGetHighLevelILOwnerFunction;
|
use binaryninjacore_sys::BNGetHighLevelILOwnerFunction;
|
||||||
|
use binaryninjacore_sys::BNGetHighLevelILRootExpr;
|
||||||
use binaryninjacore_sys::BNGetHighLevelILSSAForm;
|
use binaryninjacore_sys::BNGetHighLevelILSSAForm;
|
||||||
use binaryninjacore_sys::BNHighLevelILFunction;
|
use binaryninjacore_sys::BNHighLevelILFunction;
|
||||||
use binaryninjacore_sys::BNNewHighLevelILFunctionReference;
|
use binaryninjacore_sys::BNNewHighLevelILFunctionReference;
|
||||||
@@ -52,6 +54,29 @@ impl HighLevelILFunction {
|
|||||||
self.instruction_from_idx(expr_idx).lift()
|
self.instruction_from_idx(expr_idx).lift()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn instruction_from_instruction_idx(&self, instr_idx: usize) -> HighLevelILInstruction {
|
||||||
|
HighLevelILInstruction::new(self.as_non_ast(), unsafe {
|
||||||
|
BNGetHighLevelILIndexForInstruction(self.handle, instr_idx)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn lifted_instruction_from_instruction_idx(
|
||||||
|
&self,
|
||||||
|
instr_idx: usize,
|
||||||
|
) -> HighLevelILLiftedInstruction {
|
||||||
|
self.instruction_from_instruction_idx(instr_idx).lift()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn root(&self) -> HighLevelILInstruction {
|
||||||
|
HighLevelILInstruction::new(self.as_ast(), unsafe {
|
||||||
|
BNGetHighLevelILRootExpr(self.handle)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn lifted_root(&self) -> HighLevelILLiftedInstruction {
|
||||||
|
self.root().lift()
|
||||||
|
}
|
||||||
|
|
||||||
pub fn instruction_count(&self) -> usize {
|
pub fn instruction_count(&self) -> usize {
|
||||||
unsafe { BNGetHighLevelILInstructionCount(self.handle) }
|
unsafe { BNGetHighLevelILInstructionCount(self.handle) }
|
||||||
}
|
}
|
||||||
@@ -81,6 +106,22 @@ impl HighLevelILFunction {
|
|||||||
|
|
||||||
unsafe { Array::new(blocks, count, context) }
|
unsafe { Array::new(blocks, count, context) }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn as_ast(&self) -> Ref<HighLevelILFunction> {
|
||||||
|
Self {
|
||||||
|
handle: self.handle,
|
||||||
|
full_ast: true,
|
||||||
|
}
|
||||||
|
.to_owned()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_non_ast(&self) -> Ref<HighLevelILFunction> {
|
||||||
|
Self {
|
||||||
|
handle: self.handle,
|
||||||
|
full_ast: false,
|
||||||
|
}
|
||||||
|
.to_owned()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToOwned for HighLevelILFunction {
|
impl ToOwned for HighLevelILFunction {
|
||||||
|
|||||||
@@ -1,12 +1,10 @@
|
|||||||
use binaryninjacore_sys::BNFromVariableIdentifier;
|
|
||||||
use binaryninjacore_sys::BNGetHighLevelILByIndex;
|
use binaryninjacore_sys::BNGetHighLevelILByIndex;
|
||||||
use binaryninjacore_sys::BNHighLevelILOperation;
|
use binaryninjacore_sys::BNHighLevelILOperation;
|
||||||
|
|
||||||
|
use crate::architecture::CoreIntrinsic;
|
||||||
use crate::operand_iter::OperandIter;
|
use crate::operand_iter::OperandIter;
|
||||||
use crate::rc::Ref;
|
use crate::rc::Ref;
|
||||||
use crate::types::{
|
use crate::types::{ConstantData, RegisterValue, RegisterValueType, SSAVariable, Variable};
|
||||||
ConstantData, ILIntrinsic, RegisterValue, RegisterValueType, SSAVariable, Variable,
|
|
||||||
};
|
|
||||||
|
|
||||||
use super::operation::*;
|
use super::operation::*;
|
||||||
use super::{HighLevelILFunction, HighLevelILLiftedInstruction, HighLevelILLiftedInstructionKind};
|
use super::{HighLevelILFunction, HighLevelILLiftedInstruction, HighLevelILLiftedInstructionKind};
|
||||||
@@ -15,6 +13,8 @@ use super::{HighLevelILFunction, HighLevelILLiftedInstruction, HighLevelILLifted
|
|||||||
pub struct HighLevelILInstruction {
|
pub struct HighLevelILInstruction {
|
||||||
pub function: Ref<HighLevelILFunction>,
|
pub function: Ref<HighLevelILFunction>,
|
||||||
pub address: u64,
|
pub address: u64,
|
||||||
|
pub index: usize,
|
||||||
|
pub size: usize,
|
||||||
pub kind: HighLevelILInstructionKind,
|
pub kind: HighLevelILInstructionKind,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -144,8 +144,8 @@ pub enum HighLevelILInstructionKind {
|
|||||||
DoWhileSsa(WhileSsa),
|
DoWhileSsa(WhileSsa),
|
||||||
}
|
}
|
||||||
impl HighLevelILInstruction {
|
impl HighLevelILInstruction {
|
||||||
pub(crate) fn new(function: Ref<HighLevelILFunction>, idx: usize) -> Self {
|
pub(crate) fn new(function: Ref<HighLevelILFunction>, index: usize) -> Self {
|
||||||
let op = unsafe { BNGetHighLevelILByIndex(function.handle, idx, function.full_ast) };
|
let op = unsafe { BNGetHighLevelILByIndex(function.handle, index, function.full_ast) };
|
||||||
use BNHighLevelILOperation::*;
|
use BNHighLevelILOperation::*;
|
||||||
use HighLevelILInstructionKind as Op;
|
use HighLevelILInstructionKind as Op;
|
||||||
let kind = match op.operation {
|
let kind = match op.operation {
|
||||||
@@ -610,8 +610,8 @@ impl HighLevelILInstruction {
|
|||||||
body: op.operands[1] as usize,
|
body: op.operands[1] as usize,
|
||||||
}),
|
}),
|
||||||
HLIL_DO_WHILE => Op::DoWhile(While {
|
HLIL_DO_WHILE => Op::DoWhile(While {
|
||||||
condition: op.operands[0] as usize,
|
body: op.operands[0] as usize,
|
||||||
body: op.operands[1] as usize,
|
condition: op.operands[1] as usize,
|
||||||
}),
|
}),
|
||||||
HLIL_WHILE_SSA => Op::WhileSsa(WhileSsa {
|
HLIL_WHILE_SSA => Op::WhileSsa(WhileSsa {
|
||||||
condition_phi: op.operands[0] as usize,
|
condition_phi: op.operands[0] as usize,
|
||||||
@@ -627,6 +627,8 @@ impl HighLevelILInstruction {
|
|||||||
Self {
|
Self {
|
||||||
function,
|
function,
|
||||||
address: op.address,
|
address: op.address,
|
||||||
|
index,
|
||||||
|
size: op.size,
|
||||||
kind,
|
kind,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -809,11 +811,11 @@ impl HighLevelILInstruction {
|
|||||||
cond_false: self.lift_operand(op.cond_false),
|
cond_false: self.lift_operand(op.cond_false),
|
||||||
}),
|
}),
|
||||||
Intrinsic(op) => Lifted::Intrinsic(LiftedIntrinsic {
|
Intrinsic(op) => Lifted::Intrinsic(LiftedIntrinsic {
|
||||||
intrinsic: ILIntrinsic::new(self.function.get_function().arch(), op.intrinsic),
|
intrinsic: CoreIntrinsic(self.function.get_function().arch().0, op.intrinsic),
|
||||||
params: self.lift_instruction_list(op.first_param, op.num_params),
|
params: self.lift_instruction_list(op.first_param, op.num_params),
|
||||||
}),
|
}),
|
||||||
IntrinsicSsa(op) => Lifted::IntrinsicSsa(LiftedIntrinsicSsa {
|
IntrinsicSsa(op) => Lifted::IntrinsicSsa(LiftedIntrinsicSsa {
|
||||||
intrinsic: ILIntrinsic::new(self.function.get_function().arch(), op.intrinsic),
|
intrinsic: CoreIntrinsic(self.function.get_function().arch().0, op.intrinsic),
|
||||||
params: self.lift_instruction_list(op.first_param, op.num_params),
|
params: self.lift_instruction_list(op.first_param, op.num_params),
|
||||||
dest_memory: op.dest_memory,
|
dest_memory: op.dest_memory,
|
||||||
src_memory: op.src_memory,
|
src_memory: op.src_memory,
|
||||||
@@ -875,6 +877,8 @@ impl HighLevelILInstruction {
|
|||||||
HighLevelILLiftedInstruction {
|
HighLevelILLiftedInstruction {
|
||||||
function: self.function.clone(),
|
function: self.function.clone(),
|
||||||
address: self.address,
|
address: self.address,
|
||||||
|
index: self.index,
|
||||||
|
size: self.size,
|
||||||
kind,
|
kind,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -979,7 +983,7 @@ fn get_float(value: u64, size: usize) -> f64 {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn get_var(id: u64) -> Variable {
|
fn get_var(id: u64) -> Variable {
|
||||||
unsafe { Variable::from_raw(BNFromVariableIdentifier(id)) }
|
unsafe { Variable::from_identifier(id) }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_member_index(idx: u64) -> Option<usize> {
|
fn get_member_index(idx: u64) -> Option<usize> {
|
||||||
|
|||||||
138
src/hlil/lift.rs
138
src/hlil/lift.rs
@@ -1,7 +1,9 @@
|
|||||||
use super::{operation::*, HighLevelILFunction};
|
use super::operation::*;
|
||||||
|
use super::HighLevelILFunction;
|
||||||
|
|
||||||
|
use crate::architecture::CoreIntrinsic;
|
||||||
use crate::rc::Ref;
|
use crate::rc::Ref;
|
||||||
use crate::types::{ConstantData, ILIntrinsic, SSAVariable, Variable};
|
use crate::types::{ConstantData, SSAVariable, Variable};
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub enum HighLevelILLiftedOperand {
|
pub enum HighLevelILLiftedOperand {
|
||||||
@@ -11,7 +13,7 @@ pub enum HighLevelILLiftedOperand {
|
|||||||
Float(f64),
|
Float(f64),
|
||||||
Int(u64),
|
Int(u64),
|
||||||
IntList(Vec<u64>),
|
IntList(Vec<u64>),
|
||||||
Intrinsic(ILIntrinsic),
|
Intrinsic(CoreIntrinsic),
|
||||||
Label(GotoLabel),
|
Label(GotoLabel),
|
||||||
MemberIndex(Option<usize>),
|
MemberIndex(Option<usize>),
|
||||||
Var(Variable),
|
Var(Variable),
|
||||||
@@ -23,6 +25,8 @@ pub enum HighLevelILLiftedOperand {
|
|||||||
pub struct HighLevelILLiftedInstruction {
|
pub struct HighLevelILLiftedInstruction {
|
||||||
pub function: Ref<HighLevelILFunction>,
|
pub function: Ref<HighLevelILFunction>,
|
||||||
pub address: u64,
|
pub address: u64,
|
||||||
|
pub index: usize,
|
||||||
|
pub size: usize,
|
||||||
pub kind: HighLevelILLiftedInstructionKind,
|
pub kind: HighLevelILLiftedInstructionKind,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -153,6 +157,134 @@ pub enum HighLevelILLiftedInstructionKind {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl HighLevelILLiftedInstruction {
|
impl HighLevelILLiftedInstruction {
|
||||||
|
pub fn name(&self) -> &'static str {
|
||||||
|
use HighLevelILLiftedInstructionKind::*;
|
||||||
|
match self.kind {
|
||||||
|
Nop => "Nop",
|
||||||
|
Break => "Break",
|
||||||
|
Continue => "Continue",
|
||||||
|
Noret => "Noret",
|
||||||
|
Unreachable => "Unreachable",
|
||||||
|
Bp => "Bp",
|
||||||
|
Undef => "Undef",
|
||||||
|
Unimpl => "Unimpl",
|
||||||
|
Adc(_) => "Adc",
|
||||||
|
Sbb(_) => "Sbb",
|
||||||
|
Rlc(_) => "Rlc",
|
||||||
|
Rrc(_) => "Rrc",
|
||||||
|
Add(_) => "Add",
|
||||||
|
Sub(_) => "Sub",
|
||||||
|
And(_) => "And",
|
||||||
|
Or(_) => "Or",
|
||||||
|
Xor(_) => "Xor",
|
||||||
|
Lsl(_) => "Lsl",
|
||||||
|
Lsr(_) => "Lsr",
|
||||||
|
Asr(_) => "Asr",
|
||||||
|
Rol(_) => "Rol",
|
||||||
|
Ror(_) => "Ror",
|
||||||
|
Mul(_) => "Mul",
|
||||||
|
MuluDp(_) => "MuluDp",
|
||||||
|
MulsDp(_) => "MulsDp",
|
||||||
|
Divu(_) => "Divu",
|
||||||
|
DivuDp(_) => "DivuDp",
|
||||||
|
Divs(_) => "Divs",
|
||||||
|
DivsDp(_) => "DivsDp",
|
||||||
|
Modu(_) => "Modu",
|
||||||
|
ModuDp(_) => "ModuDp",
|
||||||
|
Mods(_) => "Mods",
|
||||||
|
ModsDp(_) => "ModsDp",
|
||||||
|
CmpE(_) => "CmpE",
|
||||||
|
CmpNe(_) => "CmpNe",
|
||||||
|
CmpSlt(_) => "CmpSlt",
|
||||||
|
CmpUlt(_) => "CmpUlt",
|
||||||
|
CmpSle(_) => "CmpSle",
|
||||||
|
CmpUle(_) => "CmpUle",
|
||||||
|
CmpSge(_) => "CmpSge",
|
||||||
|
CmpUge(_) => "CmpUge",
|
||||||
|
CmpSgt(_) => "CmpSgt",
|
||||||
|
CmpUgt(_) => "CmpUgt",
|
||||||
|
TestBit(_) => "TestBit",
|
||||||
|
AddOverflow(_) => "AddOverflow",
|
||||||
|
Fadd(_) => "Fadd",
|
||||||
|
Fsub(_) => "Fsub",
|
||||||
|
Fmul(_) => "Fmul",
|
||||||
|
Fdiv(_) => "Fdiv",
|
||||||
|
FcmpE(_) => "FcmpE",
|
||||||
|
FcmpNe(_) => "FcmpNe",
|
||||||
|
FcmpLt(_) => "FcmpLt",
|
||||||
|
FcmpLe(_) => "FcmpLe",
|
||||||
|
FcmpGe(_) => "FcmpGe",
|
||||||
|
FcmpGt(_) => "FcmpGt",
|
||||||
|
FcmpO(_) => "FcmpO",
|
||||||
|
FcmpUo(_) => "FcmpUo",
|
||||||
|
ArrayIndex(_) => "ArrayIndex",
|
||||||
|
ArrayIndexSsa(_) => "ArrayIndexSsa",
|
||||||
|
Assign(_) => "Assign",
|
||||||
|
AssignMemSsa(_) => "AssignMemSsa",
|
||||||
|
AssignUnpack(_) => "AssignUnpack",
|
||||||
|
AssignUnpackMemSsa(_) => "AssignUnpackMemSsa",
|
||||||
|
Block(_) => "Block",
|
||||||
|
Call(_) => "Call",
|
||||||
|
Tailcall(_) => "Tailcall",
|
||||||
|
CallSsa(_) => "CallSsa",
|
||||||
|
Case(_) => "Case",
|
||||||
|
Const(_) => "Const",
|
||||||
|
ConstPtr(_) => "ConstPtr",
|
||||||
|
Import(_) => "Import",
|
||||||
|
ConstData(_) => "ConstData",
|
||||||
|
Deref(_) => "Deref",
|
||||||
|
AddressOf(_) => "AddressOf",
|
||||||
|
Neg(_) => "Neg",
|
||||||
|
Not(_) => "Not",
|
||||||
|
Sx(_) => "Sx",
|
||||||
|
Zx(_) => "Zx",
|
||||||
|
LowPart(_) => "LowPart",
|
||||||
|
BoolToInt(_) => "BoolToInt",
|
||||||
|
UnimplMem(_) => "UnimplMem",
|
||||||
|
Fsqrt(_) => "Fsqrt",
|
||||||
|
Fneg(_) => "Fneg",
|
||||||
|
Fabs(_) => "Fabs",
|
||||||
|
FloatToInt(_) => "FloatToInt",
|
||||||
|
IntToFloat(_) => "IntToFloat",
|
||||||
|
FloatConv(_) => "FloatConv",
|
||||||
|
RoundToInt(_) => "RoundToInt",
|
||||||
|
Floor(_) => "Floor",
|
||||||
|
Ceil(_) => "Ceil",
|
||||||
|
Ftrunc(_) => "Ftrunc",
|
||||||
|
DerefFieldSsa(_) => "DerefFieldSsa",
|
||||||
|
DerefSsa(_) => "DerefSsa",
|
||||||
|
ExternPtr(_) => "ExternPtr",
|
||||||
|
FloatConst(_) => "FloatConst",
|
||||||
|
For(_) => "For",
|
||||||
|
ForSsa(_) => "ForSsa",
|
||||||
|
Goto(_) => "Goto",
|
||||||
|
Label(_) => "Label",
|
||||||
|
If(_) => "If",
|
||||||
|
Intrinsic(_) => "Intrinsic",
|
||||||
|
IntrinsicSsa(_) => "IntrinsicSsa",
|
||||||
|
Jump(_) => "Jump",
|
||||||
|
MemPhi(_) => "MemPhi",
|
||||||
|
Ret(_) => "Ret",
|
||||||
|
Split(_) => "Split",
|
||||||
|
StructField(_) => "StructField",
|
||||||
|
DerefField(_) => "DerefField",
|
||||||
|
Switch(_) => "Switch",
|
||||||
|
Syscall(_) => "Syscall",
|
||||||
|
SyscallSsa(_) => "SyscallSsa",
|
||||||
|
Trap(_) => "Trap",
|
||||||
|
VarDeclare(_) => "VarDeclare",
|
||||||
|
Var(_) => "Var",
|
||||||
|
VarInit(_) => "VarInit",
|
||||||
|
VarInitSsa(_) => "VarInitSsa",
|
||||||
|
VarPhi(_) => "VarPhi",
|
||||||
|
VarSsa(_) => "VarSsa",
|
||||||
|
While(_) => "While",
|
||||||
|
DoWhile(_) => "DoWhile",
|
||||||
|
WhileSsa(_) => "WhileSsa",
|
||||||
|
DoWhileSsa(_) => "DoWhileSsa",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn operands(&self) -> Vec<(&'static str, HighLevelILLiftedOperand)> {
|
pub fn operands(&self) -> Vec<(&'static str, HighLevelILLiftedOperand)> {
|
||||||
use HighLevelILLiftedInstructionKind::*;
|
use HighLevelILLiftedInstructionKind::*;
|
||||||
use HighLevelILLiftedOperand as Operand;
|
use HighLevelILLiftedOperand as Operand;
|
||||||
|
|||||||
@@ -1,22 +1,22 @@
|
|||||||
use binaryninjacore_sys::BNGetGotoLabelName;
|
use binaryninjacore_sys::BNGetGotoLabelName;
|
||||||
|
|
||||||
|
use crate::architecture::CoreIntrinsic;
|
||||||
use crate::function::Function;
|
use crate::function::Function;
|
||||||
use crate::rc::Ref;
|
use crate::rc::Ref;
|
||||||
use crate::types::{ConstantData, ILIntrinsic, SSAVariable, Variable};
|
use crate::string::BnString;
|
||||||
|
use crate::types::{ConstantData, SSAVariable, Variable};
|
||||||
|
|
||||||
use super::HighLevelILLiftedInstruction;
|
use super::HighLevelILLiftedInstruction;
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||||
pub struct GotoLabel {
|
pub struct GotoLabel {
|
||||||
pub(crate) function: Ref<Function>,
|
pub(crate) function: Ref<Function>,
|
||||||
pub(crate) target: u64,
|
pub target: u64,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl GotoLabel {
|
impl GotoLabel {
|
||||||
pub fn name(&self) -> &str {
|
pub fn name(&self) -> BnString {
|
||||||
let raw_str = unsafe { BNGetGotoLabelName(self.function.handle, self.target) };
|
unsafe { BnString::from_raw(BNGetGotoLabelName(self.function.handle, self.target)) }
|
||||||
let c_str = unsafe { core::ffi::CStr::from_ptr(raw_str) };
|
|
||||||
c_str.to_str().unwrap()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -320,7 +320,7 @@ pub struct Intrinsic {
|
|||||||
}
|
}
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
#[derive(Clone, Debug, PartialEq)]
|
||||||
pub struct LiftedIntrinsic {
|
pub struct LiftedIntrinsic {
|
||||||
pub intrinsic: ILIntrinsic,
|
pub intrinsic: CoreIntrinsic,
|
||||||
pub params: Vec<HighLevelILLiftedInstruction>,
|
pub params: Vec<HighLevelILLiftedInstruction>,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -335,7 +335,7 @@ pub struct IntrinsicSsa {
|
|||||||
}
|
}
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
#[derive(Clone, Debug, PartialEq)]
|
||||||
pub struct LiftedIntrinsicSsa {
|
pub struct LiftedIntrinsicSsa {
|
||||||
pub intrinsic: ILIntrinsic,
|
pub intrinsic: CoreIntrinsic,
|
||||||
pub params: Vec<HighLevelILLiftedInstruction>,
|
pub params: Vec<HighLevelILLiftedInstruction>,
|
||||||
pub dest_memory: u64,
|
pub dest_memory: u64,
|
||||||
pub src_memory: u64,
|
pub src_memory: u64,
|
||||||
|
|||||||
@@ -16,12 +16,13 @@
|
|||||||
|
|
||||||
use binaryninjacore_sys::*;
|
use binaryninjacore_sys::*;
|
||||||
|
|
||||||
|
use std::ffi::CStr;
|
||||||
use std::os::raw::{c_char, c_void};
|
use std::os::raw::{c_char, c_void};
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use crate::binaryview::BinaryView;
|
use crate::binaryview::BinaryView;
|
||||||
use crate::rc::Ref;
|
use crate::rc::Ref;
|
||||||
use crate::string::{BnStr, BnStrCompatible, BnString};
|
use crate::string::{BnStrCompatible, BnString};
|
||||||
|
|
||||||
pub fn get_text_line_input(prompt: &str, title: &str) -> Option<String> {
|
pub fn get_text_line_input(prompt: &str, title: &str) -> Option<String> {
|
||||||
let mut value: *mut libc::c_char = std::ptr::null_mut();
|
let mut value: *mut libc::c_char = std::ptr::null_mut();
|
||||||
@@ -295,7 +296,9 @@ impl FormInputBuilder {
|
|||||||
result.type_ = BNFormInputFieldType::AddressFormField;
|
result.type_ = BNFormInputFieldType::AddressFormField;
|
||||||
result.prompt = prompt.as_ref().as_ptr() as *const c_char;
|
result.prompt = prompt.as_ref().as_ptr() as *const c_char;
|
||||||
if let Some(view) = view {
|
if let Some(view) = view {
|
||||||
result.view = view.handle;
|
// the view is being moved into result, there is no need to clone
|
||||||
|
// and drop is intentionally being avoided with `Ref::into_raw`
|
||||||
|
result.view = unsafe { Ref::into_raw(view) }.handle;
|
||||||
}
|
}
|
||||||
result.currentAddress = current_address.unwrap_or(0);
|
result.currentAddress = current_address.unwrap_or(0);
|
||||||
result.hasDefault = default.is_some();
|
result.hasDefault = default.is_some();
|
||||||
@@ -448,8 +451,10 @@ impl FormInputBuilder {
|
|||||||
///
|
///
|
||||||
/// This API is flexible and works both in the UI via a pop-up dialog and on the command-line.
|
/// This API is flexible and works both in the UI via a pop-up dialog and on the command-line.
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```no_run
|
||||||
/// let responses = interaction::FormInputBuilder::new()
|
/// # use binaryninja::interaction::FormInputBuilder;
|
||||||
|
/// # use binaryninja::interaction::FormResponses;
|
||||||
|
/// let responses = FormInputBuilder::new()
|
||||||
/// .text_field("First Name", None)
|
/// .text_field("First Name", None)
|
||||||
/// .text_field("Last Name", None)
|
/// .text_field("Last Name", None)
|
||||||
/// .choice_field(
|
/// .choice_field(
|
||||||
@@ -466,15 +471,19 @@ impl FormInputBuilder {
|
|||||||
/// .get_form_input("Form Title");
|
/// .get_form_input("Form Title");
|
||||||
///
|
///
|
||||||
/// let food = match responses[2] {
|
/// let food = match responses[2] {
|
||||||
/// Index(0) => "Pizza",
|
/// FormResponses::Index(0) => "Pizza",
|
||||||
/// Index(1) => "Also Pizza",
|
/// FormResponses::Index(1) => "Also Pizza",
|
||||||
/// Index(2) => "Also Pizza",
|
/// FormResponses::Index(2) => "Also Pizza",
|
||||||
/// Index(3) => "Wrong Answer",
|
/// FormResponses::Index(3) => "Wrong Answer",
|
||||||
/// _ => panic!("This person doesn't like pizza?!?"),
|
/// _ => panic!("This person doesn't like pizza?!?"),
|
||||||
/// };
|
/// };
|
||||||
///
|
///
|
||||||
/// let interaction::FormResponses::String(last_name) = responses[0];
|
/// let FormResponses::String(last_name) = &responses[0] else {
|
||||||
/// let interaction::FormResponses::String(first_name) = responses[1];
|
/// unreachable!()
|
||||||
|
/// };
|
||||||
|
/// let FormResponses::String(first_name) = &responses[1] else {
|
||||||
|
/// unreachable!()
|
||||||
|
/// };
|
||||||
///
|
///
|
||||||
/// println!("{} {} likes {}", &first_name, &last_name, food);
|
/// println!("{} {} likes {}", &first_name, &last_name, food);
|
||||||
/// ```
|
/// ```
|
||||||
@@ -499,7 +508,10 @@ impl FormInputBuilder {
|
|||||||
| BNFormInputFieldType::SaveFileNameFormField
|
| BNFormInputFieldType::SaveFileNameFormField
|
||||||
| BNFormInputFieldType::DirectoryNameFormField => {
|
| BNFormInputFieldType::DirectoryNameFormField => {
|
||||||
FormResponses::String(unsafe {
|
FormResponses::String(unsafe {
|
||||||
BnStr::from_raw(form_field.stringResult).to_string()
|
CStr::from_ptr(form_field.stringResult)
|
||||||
|
.to_str()
|
||||||
|
.unwrap()
|
||||||
|
.to_owned()
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
157
src/lib.rs
157
src/lib.rs
@@ -49,7 +49,7 @@
|
|||||||
//!
|
//!
|
||||||
//! Create a new library (`cargo new --lib <plugin-name>`) and include the following in your `Cargo.toml`:
|
//! Create a new library (`cargo new --lib <plugin-name>`) and include the following in your `Cargo.toml`:
|
||||||
//!
|
//!
|
||||||
//! ```
|
//! ```toml
|
||||||
//! [lib]
|
//! [lib]
|
||||||
//! crate-type = ["cdylib"]
|
//! crate-type = ["cdylib"]
|
||||||
//!
|
//!
|
||||||
@@ -73,8 +73,7 @@
|
|||||||
//!
|
//!
|
||||||
//! ### `main.rs`
|
//! ### `main.rs`
|
||||||
//! Standalone binaries need to initialize Binary Ninja before they can work. You can do this through [`headless::Session`], [`headless::script_helper`], or [`headless::init()`] at start and [`headless::shutdown()`] at shutdown.
|
//! Standalone binaries need to initialize Binary Ninja before they can work. You can do this through [`headless::Session`], [`headless::script_helper`], or [`headless::init()`] at start and [`headless::shutdown()`] at shutdown.
|
||||||
//! ```rust
|
//! ```no_run
|
||||||
//! fn main() {
|
|
||||||
//! // This loads all the core architecture, platform, etc plugins
|
//! // This loads all the core architecture, platform, etc plugins
|
||||||
//! // Standalone executables need to call this, but plugins do not
|
//! // Standalone executables need to call this, but plugins do not
|
||||||
//! let headless_session = binaryninja::headless::Session::new();
|
//! let headless_session = binaryninja::headless::Session::new();
|
||||||
@@ -83,11 +82,10 @@
|
|||||||
//! let bv = headless_session.load("/bin/cat").expect("Couldn't open `/bin/cat`");
|
//! let bv = headless_session.load("/bin/cat").expect("Couldn't open `/bin/cat`");
|
||||||
//!
|
//!
|
||||||
//! // Your code here...
|
//! // Your code here...
|
||||||
//! }
|
|
||||||
//! ```
|
//! ```
|
||||||
//!
|
//!
|
||||||
//! ### `Cargo.toml`
|
//! ### `Cargo.toml`
|
||||||
//! ```
|
//! ```toml
|
||||||
//! [dependencies]
|
//! [dependencies]
|
||||||
//! binaryninja = { git = "https://github.com/Vector35/binaryninja-api.git", branch = "dev"}
|
//! binaryninja = { git = "https://github.com/Vector35/binaryninja-api.git", branch = "dev"}
|
||||||
//! ```
|
//! ```
|
||||||
@@ -135,11 +133,15 @@ pub mod binarywriter;
|
|||||||
pub mod callingconvention;
|
pub mod callingconvention;
|
||||||
pub mod command;
|
pub mod command;
|
||||||
pub mod custombinaryview;
|
pub mod custombinaryview;
|
||||||
|
pub mod database;
|
||||||
pub mod databuffer;
|
pub mod databuffer;
|
||||||
pub mod debuginfo;
|
pub mod debuginfo;
|
||||||
pub mod demangle;
|
pub mod demangle;
|
||||||
pub mod disassembly;
|
pub mod disassembly;
|
||||||
|
pub mod enterprise;
|
||||||
|
pub mod component;
|
||||||
pub mod downloadprovider;
|
pub mod downloadprovider;
|
||||||
|
pub mod externallibrary;
|
||||||
pub mod fileaccessor;
|
pub mod fileaccessor;
|
||||||
pub mod filemetadata;
|
pub mod filemetadata;
|
||||||
pub mod flowgraph;
|
pub mod flowgraph;
|
||||||
@@ -154,6 +156,7 @@ pub mod logger;
|
|||||||
pub mod metadata;
|
pub mod metadata;
|
||||||
pub mod mlil;
|
pub mod mlil;
|
||||||
pub mod platform;
|
pub mod platform;
|
||||||
|
pub mod project;
|
||||||
pub mod rc;
|
pub mod rc;
|
||||||
pub mod references;
|
pub mod references;
|
||||||
pub mod relocation;
|
pub mod relocation;
|
||||||
@@ -164,7 +167,10 @@ pub mod string;
|
|||||||
pub mod symbol;
|
pub mod symbol;
|
||||||
pub mod tags;
|
pub mod tags;
|
||||||
pub mod templatesimplifier;
|
pub mod templatesimplifier;
|
||||||
|
pub mod typelibrary;
|
||||||
|
pub mod typearchive;
|
||||||
pub mod types;
|
pub mod types;
|
||||||
|
pub mod update;
|
||||||
|
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
@@ -173,8 +179,8 @@ pub use binaryninjacore_sys::BNEndianness as Endianness;
|
|||||||
use binaryview::BinaryView;
|
use binaryview::BinaryView;
|
||||||
use metadata::Metadata;
|
use metadata::Metadata;
|
||||||
use metadata::MetadataType;
|
use metadata::MetadataType;
|
||||||
use rc::Ref;
|
|
||||||
use string::BnStrCompatible;
|
use string::BnStrCompatible;
|
||||||
|
use string::IntoJson;
|
||||||
|
|
||||||
// Commented out to suppress unused warnings
|
// Commented out to suppress unused warnings
|
||||||
// const BN_MAX_INSTRUCTION_LENGTH: u64 = 256;
|
// const BN_MAX_INSTRUCTION_LENGTH: u64 = 256;
|
||||||
@@ -199,14 +205,14 @@ const BN_INVALID_EXPR: usize = usize::MAX;
|
|||||||
/// The main way to open and load files into Binary Ninja. Make sure you've properly initialized the core before calling this function. See [`crate::headless::init()`]
|
/// The main way to open and load files into Binary Ninja. Make sure you've properly initialized the core before calling this function. See [`crate::headless::init()`]
|
||||||
pub fn load<S: BnStrCompatible>(filename: S) -> Option<rc::Ref<binaryview::BinaryView>> {
|
pub fn load<S: BnStrCompatible>(filename: S) -> Option<rc::Ref<binaryview::BinaryView>> {
|
||||||
let filename = filename.into_bytes_with_nul();
|
let filename = filename.into_bytes_with_nul();
|
||||||
let metadata = Metadata::new_of_type(MetadataType::KeyValueDataType);
|
let options = "\x00";
|
||||||
|
|
||||||
let handle = unsafe {
|
let handle = unsafe {
|
||||||
binaryninjacore_sys::BNLoadFilename(
|
binaryninjacore_sys::BNLoadFilename(
|
||||||
filename.as_ref().as_ptr() as *mut _,
|
filename.as_ref().as_ptr() as *mut _,
|
||||||
true,
|
true,
|
||||||
|
options.as_ptr() as *mut core::ffi::c_char,
|
||||||
None,
|
None,
|
||||||
metadata.handle,
|
|
||||||
)
|
)
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -219,31 +225,83 @@ pub fn load<S: BnStrCompatible>(filename: S) -> Option<rc::Ref<binaryview::Binar
|
|||||||
|
|
||||||
/// The main way to open and load files (with options) into Binary Ninja. Make sure you've properly initialized the core before calling this function. See [`crate::headless::init()`]
|
/// The main way to open and load files (with options) into Binary Ninja. Make sure you've properly initialized the core before calling this function. See [`crate::headless::init()`]
|
||||||
///
|
///
|
||||||
/// ```rust
|
/// <div class="warning">Strict JSON doesn't support single quotes for strings, so you'll need to either use a raw strings (<code>f#"{"setting": "value"}"#</code>) or escape double quotes (<code>"{\"setting\": \"value\"}"</code>). Or use <code>serde_json::json</code>.</div>
|
||||||
/// let settings = [("analysis.linearSweep.autorun", false)].into();
|
|
||||||
///
|
///
|
||||||
/// let bv = binaryninja::load_with_options("/bin/cat", true, Some(settings))
|
/// ```no_run
|
||||||
|
/// # // Mock implementation of json! macro for documentation purposes
|
||||||
|
/// # macro_rules! json {
|
||||||
|
/// # ($($arg:tt)*) => {
|
||||||
|
/// # stringify!($($arg)*)
|
||||||
|
/// # };
|
||||||
|
/// # }
|
||||||
|
/// use binaryninja::{metadata::Metadata, rc::Ref};
|
||||||
|
/// use std::collections::HashMap;
|
||||||
|
///
|
||||||
|
/// let bv = binaryninja::load_with_options("/bin/cat", true, Some(json!("analysis.linearSweep.autorun": false).to_string()))
|
||||||
/// .expect("Couldn't open `/bin/cat`");
|
/// .expect("Couldn't open `/bin/cat`");
|
||||||
/// ```
|
/// ```
|
||||||
pub fn load_with_options<S: BnStrCompatible>(
|
pub fn load_with_options<S: BnStrCompatible, O: IntoJson>(
|
||||||
filename: S,
|
filename: S,
|
||||||
update_analysis_and_wait: bool,
|
update_analysis_and_wait: bool,
|
||||||
options: Option<Ref<Metadata>>,
|
options: Option<O>,
|
||||||
) -> Option<rc::Ref<binaryview::BinaryView>> {
|
) -> Option<rc::Ref<binaryview::BinaryView>> {
|
||||||
let filename = filename.into_bytes_with_nul();
|
let filename = filename.into_bytes_with_nul();
|
||||||
|
|
||||||
let options_or_default = if let Some(opt) = options {
|
let options_or_default = if let Some(opt) = options {
|
||||||
opt
|
opt.get_json_string()
|
||||||
|
.ok()?
|
||||||
|
.into_bytes_with_nul()
|
||||||
|
.as_ref()
|
||||||
|
.to_vec()
|
||||||
} else {
|
} else {
|
||||||
Metadata::new_of_type(MetadataType::KeyValueDataType)
|
Metadata::new_of_type(MetadataType::KeyValueDataType)
|
||||||
|
.get_json_string()
|
||||||
|
.ok()?
|
||||||
|
.as_ref()
|
||||||
|
.to_vec()
|
||||||
};
|
};
|
||||||
|
|
||||||
let handle = unsafe {
|
let handle = unsafe {
|
||||||
binaryninjacore_sys::BNLoadFilename(
|
binaryninjacore_sys::BNLoadFilename(
|
||||||
filename.as_ref().as_ptr() as *mut _,
|
filename.as_ref().as_ptr() as *mut _,
|
||||||
update_analysis_and_wait,
|
update_analysis_and_wait,
|
||||||
|
options_or_default.as_ptr() as *mut core::ffi::c_char,
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
|
||||||
|
if handle.is_null() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(unsafe { BinaryView::from_raw(handle) })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn load_view<O: IntoJson>(
|
||||||
|
bv: &BinaryView,
|
||||||
|
update_analysis_and_wait: bool,
|
||||||
|
options: Option<O>,
|
||||||
|
) -> Option<rc::Ref<binaryview::BinaryView>> {
|
||||||
|
let options_or_default = if let Some(opt) = options {
|
||||||
|
opt.get_json_string()
|
||||||
|
.ok()?
|
||||||
|
.into_bytes_with_nul()
|
||||||
|
.as_ref()
|
||||||
|
.to_vec()
|
||||||
|
} else {
|
||||||
|
Metadata::new_of_type(MetadataType::KeyValueDataType)
|
||||||
|
.get_json_string()
|
||||||
|
.ok()?
|
||||||
|
.as_ref()
|
||||||
|
.to_vec()
|
||||||
|
};
|
||||||
|
|
||||||
|
let handle = unsafe {
|
||||||
|
binaryninjacore_sys::BNLoadBinaryView(
|
||||||
|
bv.handle as *mut _,
|
||||||
|
update_analysis_and_wait,
|
||||||
|
options_or_default.as_ptr() as *mut core::ffi::c_char,
|
||||||
None,
|
None,
|
||||||
options_or_default.as_ref().handle,
|
|
||||||
)
|
)
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -377,6 +435,75 @@ pub fn version() -> string::BnString {
|
|||||||
unsafe { string::BnString::from_raw(binaryninjacore_sys::BNGetVersionString()) }
|
unsafe { string::BnString::from_raw(binaryninjacore_sys::BNGetVersionString()) }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn build_id() -> u32 {
|
||||||
|
unsafe { binaryninjacore_sys::BNGetBuildId() }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||||
|
pub struct VersionInfo {
|
||||||
|
pub major: u32,
|
||||||
|
pub minor: u32,
|
||||||
|
pub build: u32,
|
||||||
|
pub channel: string::BnString,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn version_info() -> VersionInfo {
|
||||||
|
let info_raw = unsafe { binaryninjacore_sys::BNGetVersionInfo() };
|
||||||
|
VersionInfo {
|
||||||
|
major: info_raw.major,
|
||||||
|
minor: info_raw.minor,
|
||||||
|
build: info_raw.build,
|
||||||
|
channel: unsafe { string::BnString::from_raw(info_raw.channel) },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serial_number() -> string::BnString {
|
||||||
|
unsafe { string::BnString::from_raw(binaryninjacore_sys::BNGetSerialNumber()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_license_validated() -> bool {
|
||||||
|
unsafe { binaryninjacore_sys::BNIsLicenseValidated() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn licensed_user_email() -> string::BnString {
|
||||||
|
unsafe { string::BnString::from_raw(binaryninjacore_sys::BNGetLicensedUserEmail()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn license_count() -> i32 {
|
||||||
|
unsafe { binaryninjacore_sys::BNGetLicenseCount() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_license<S: string::BnStrCompatible>(license: S) {
|
||||||
|
let license = license.into_bytes_with_nul();
|
||||||
|
let license_slice = license.as_ref();
|
||||||
|
unsafe { binaryninjacore_sys::BNSetLicense(license_slice.as_ptr() as *const std::os::raw::c_char) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn product() -> string::BnString {
|
||||||
|
unsafe { string::BnString::from_raw(binaryninjacore_sys::BNGetProduct()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn product_type() -> string::BnString {
|
||||||
|
unsafe { string::BnString::from_raw(binaryninjacore_sys::BNGetProductType()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn license_expiration_time() -> std::time::SystemTime {
|
||||||
|
let m = std::time::Duration::from_secs(unsafe {
|
||||||
|
binaryninjacore_sys::BNGetLicenseExpirationTime()
|
||||||
|
});
|
||||||
|
std::time::UNIX_EPOCH + m
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_ui_enabled() -> bool {
|
||||||
|
unsafe { binaryninjacore_sys::BNIsUIEnabled() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_database<S: string::BnStrCompatible>(filename: S) -> bool {
|
||||||
|
let filename = filename.into_bytes_with_nul();
|
||||||
|
let filename_slice = filename.as_ref();
|
||||||
|
unsafe { binaryninjacore_sys::BNIsDatabase(filename_slice.as_ptr() as *const std::os::raw::c_char) }
|
||||||
|
}
|
||||||
|
|
||||||
pub fn plugin_abi_version() -> u32 {
|
pub fn plugin_abi_version() -> u32 {
|
||||||
binaryninjacore_sys::BN_CURRENT_CORE_ABI_VERSION
|
binaryninjacore_sys::BN_CURRENT_CORE_ABI_VERSION
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -415,18 +415,14 @@ impl std::fmt::Display for LinearDisassemblyLine {
|
|||||||
impl CoreArrayProvider for LinearDisassemblyLine {
|
impl CoreArrayProvider for LinearDisassemblyLine {
|
||||||
type Raw = BNLinearDisassemblyLine;
|
type Raw = BNLinearDisassemblyLine;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, LinearDisassemblyLine>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for LinearDisassemblyLine {
|
unsafe impl CoreArrayProviderInner for LinearDisassemblyLine {
|
||||||
unsafe fn free(raw: *mut BNLinearDisassemblyLine, count: usize, _context: &()) {
|
unsafe fn free(raw: *mut BNLinearDisassemblyLine, count: usize, _context: &()) {
|
||||||
BNFreeLinearDisassemblyLines(raw, count);
|
BNFreeLinearDisassemblyLines(raw, count);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for LinearDisassemblyLine {
|
|
||||||
type Wrapped = Guard<'a, LinearDisassemblyLine>;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped {
|
|
||||||
Guard::new(LinearDisassemblyLine::from_raw(raw), _context)
|
Guard::new(LinearDisassemblyLine::from_raw(raw), _context)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -136,6 +136,8 @@ where
|
|||||||
LLIL_ZX => ExprInfo::Zx(Operation::new(function, op)),
|
LLIL_ZX => ExprInfo::Zx(Operation::new(function, op)),
|
||||||
LLIL_LOW_PART => ExprInfo::LowPart(Operation::new(function, op)),
|
LLIL_LOW_PART => ExprInfo::LowPart(Operation::new(function, op)),
|
||||||
|
|
||||||
|
LLIL_REG_SPLIT => ExprInfo::RegSplit(Operation::new(function, op)),
|
||||||
|
|
||||||
LLIL_CMP_E => ExprInfo::CmpE(Operation::new(function, op)),
|
LLIL_CMP_E => ExprInfo::CmpE(Operation::new(function, op)),
|
||||||
LLIL_CMP_NE => ExprInfo::CmpNe(Operation::new(function, op)),
|
LLIL_CMP_NE => ExprInfo::CmpNe(Operation::new(function, op)),
|
||||||
LLIL_CMP_SLT => ExprInfo::CmpSlt(Operation::new(function, op)),
|
LLIL_CMP_SLT => ExprInfo::CmpSlt(Operation::new(function, op)),
|
||||||
@@ -273,6 +275,7 @@ where
|
|||||||
LLIL_LOAD => ExprInfo::Load(Operation::new(self.function, op)),
|
LLIL_LOAD => ExprInfo::Load(Operation::new(self.function, op)),
|
||||||
LLIL_POP => ExprInfo::Pop(Operation::new(self.function, op)),
|
LLIL_POP => ExprInfo::Pop(Operation::new(self.function, op)),
|
||||||
LLIL_REG => ExprInfo::Reg(Operation::new(self.function, op)),
|
LLIL_REG => ExprInfo::Reg(Operation::new(self.function, op)),
|
||||||
|
LLIL_REG_SPLIT => ExprInfo::RegSplit(Operation::new(self.function, op)),
|
||||||
LLIL_FLAG => ExprInfo::Flag(Operation::new(self.function, op)),
|
LLIL_FLAG => ExprInfo::Flag(Operation::new(self.function, op)),
|
||||||
LLIL_FLAG_BIT => ExprInfo::FlagBit(Operation::new(self.function, op)),
|
LLIL_FLAG_BIT => ExprInfo::FlagBit(Operation::new(self.function, op)),
|
||||||
LLIL_FLAG_COND => ExprInfo::FlagCond(Operation::new(self.function, op)), // TODO lifted only
|
LLIL_FLAG_COND => ExprInfo::FlagCond(Operation::new(self.function, op)), // TODO lifted only
|
||||||
@@ -327,6 +330,7 @@ where
|
|||||||
match op.operation {
|
match op.operation {
|
||||||
LLIL_LOAD_SSA => ExprInfo::Load(Operation::new(self.function, op)),
|
LLIL_LOAD_SSA => ExprInfo::Load(Operation::new(self.function, op)),
|
||||||
LLIL_REG_SSA | LLIL_REG_SSA_PARTIAL => ExprInfo::Reg(Operation::new(self.function, op)),
|
LLIL_REG_SSA | LLIL_REG_SSA_PARTIAL => ExprInfo::Reg(Operation::new(self.function, op)),
|
||||||
|
LLIL_REG_SPLIT_SSA => ExprInfo::RegSplit(Operation::new(self.function, op)),
|
||||||
LLIL_FLAG_SSA => ExprInfo::Flag(Operation::new(self.function, op)),
|
LLIL_FLAG_SSA => ExprInfo::Flag(Operation::new(self.function, op)),
|
||||||
LLIL_FLAG_BIT_SSA => ExprInfo::FlagBit(Operation::new(self.function, op)),
|
LLIL_FLAG_BIT_SSA => ExprInfo::FlagBit(Operation::new(self.function, op)),
|
||||||
_ => common_info(self.function, op),
|
_ => common_info(self.function, op),
|
||||||
@@ -383,6 +387,7 @@ where
|
|||||||
Load(Operation<'func, A, M, F, operation::Load>),
|
Load(Operation<'func, A, M, F, operation::Load>),
|
||||||
Pop(Operation<'func, A, M, F, operation::Pop>),
|
Pop(Operation<'func, A, M, F, operation::Pop>),
|
||||||
Reg(Operation<'func, A, M, F, operation::Reg>),
|
Reg(Operation<'func, A, M, F, operation::Reg>),
|
||||||
|
RegSplit(Operation<'func, A, M, F, operation::RegSplit>),
|
||||||
Const(Operation<'func, A, M, F, operation::Const>),
|
Const(Operation<'func, A, M, F, operation::Const>),
|
||||||
ConstPtr(Operation<'func, A, M, F, operation::Const>),
|
ConstPtr(Operation<'func, A, M, F, operation::Const>),
|
||||||
Flag(Operation<'func, A, M, F, operation::Flag>),
|
Flag(Operation<'func, A, M, F, operation::Flag>),
|
||||||
@@ -595,6 +600,8 @@ where
|
|||||||
|
|
||||||
Reg(ref op) => &op.op,
|
Reg(ref op) => &op.op,
|
||||||
|
|
||||||
|
RegSplit(ref op) => &op.op,
|
||||||
|
|
||||||
Flag(ref op) => &op.op,
|
Flag(ref op) => &op.op,
|
||||||
|
|
||||||
FlagBit(ref op) => &op.op,
|
FlagBit(ref op) => &op.op,
|
||||||
@@ -648,6 +655,8 @@ where
|
|||||||
|
|
||||||
Reg(ref op) => op.flag_write(),
|
Reg(ref op) => op.flag_write(),
|
||||||
|
|
||||||
|
RegSplit(ref op) => op.flag_write(),
|
||||||
|
|
||||||
Flag(ref op) => op.flag_write(),
|
Flag(ref op) => op.flag_write(),
|
||||||
|
|
||||||
FlagBit(ref op) => op.flag_write(),
|
FlagBit(ref op) => op.flag_write(),
|
||||||
@@ -735,6 +744,31 @@ where
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
RegSplit(ref op) => {
|
||||||
|
let low_reg = op.low_reg();
|
||||||
|
let high_reg = op.high_reg();
|
||||||
|
let size = op.size();
|
||||||
|
|
||||||
|
let low_size = match low_reg {
|
||||||
|
Register::Temp(_) => Some(size),
|
||||||
|
Register::ArchReg(ref r) if r.info().size() != size => Some(size),
|
||||||
|
_ => None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let high_size = match high_reg {
|
||||||
|
Register::Temp(_) => Some(size),
|
||||||
|
Register::ArchReg(ref r) if r.info().size() != size => Some(size),
|
||||||
|
_ => None,
|
||||||
|
};
|
||||||
|
|
||||||
|
match (low_size, high_size) {
|
||||||
|
(Some(ls), Some(hs)) => write!(f, "{:?}.{}:{:?}.{}", high_reg, hs, low_reg, ls),
|
||||||
|
(Some(ls), None) => write!(f, "{:?}:{:?}.{}", high_reg, low_reg, ls),
|
||||||
|
(None, Some(hs)) => write!(f, "{:?}.{}:{:?}", high_reg, hs, low_reg),
|
||||||
|
_ => write!(f, "{:?}:{:?}", high_reg, low_reg),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Flag(ref _op) => write!(f, "flag"), // TODO
|
Flag(ref _op) => write!(f, "flag"), // TODO
|
||||||
|
|
||||||
FlagBit(ref _op) => write!(f, "flag_bit"), // TODO
|
FlagBit(ref _op) => write!(f, "flag_bit"), // TODO
|
||||||
|
|||||||
@@ -99,7 +99,7 @@ where
|
|||||||
let expr_idx =
|
let expr_idx =
|
||||||
unsafe { BNGetLowLevelILIndexForInstruction(self.function.handle, self.instr_idx) };
|
unsafe { BNGetLowLevelILIndexForInstruction(self.function.handle, self.instr_idx) };
|
||||||
let op = unsafe { BNGetLowLevelILByIndex(self.function.handle, expr_idx) };
|
let op = unsafe { BNGetLowLevelILByIndex(self.function.handle, expr_idx) };
|
||||||
return op.address;
|
op.address
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn info(&self) -> InstrInfo<'func, A, M, NonSSA<V>> {
|
pub fn info(&self) -> InstrInfo<'func, A, M, NonSSA<V>> {
|
||||||
|
|||||||
@@ -607,6 +607,26 @@ where
|
|||||||
A: 'a + Architecture,
|
A: 'a + Architecture,
|
||||||
R: ExpressionResultType,
|
R: ExpressionResultType,
|
||||||
{
|
{
|
||||||
|
pub fn from_expr(expr: Expression<'a, A, Mutable, NonSSA<LiftedNonSSA>, R>) -> Self {
|
||||||
|
use binaryninjacore_sys::BNGetLowLevelILByIndex;
|
||||||
|
|
||||||
|
let instr = unsafe {
|
||||||
|
BNGetLowLevelILByIndex(expr.function.handle, expr.expr_idx)
|
||||||
|
};
|
||||||
|
|
||||||
|
ExpressionBuilder {
|
||||||
|
function: expr.function,
|
||||||
|
op: instr.operation,
|
||||||
|
size: instr.size,
|
||||||
|
flags: instr.flags,
|
||||||
|
op1: instr.operands[0],
|
||||||
|
op2: instr.operands[1],
|
||||||
|
op3: instr.operands[2],
|
||||||
|
op4: instr.operands[3],
|
||||||
|
_ty: PhantomData
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn with_flag_write(mut self, flag_write: A::FlagWrite) -> Self {
|
pub fn with_flag_write(mut self, flag_write: A::FlagWrite) -> Self {
|
||||||
// TODO verify valid id
|
// TODO verify valid id
|
||||||
self.flags = flag_write.id();
|
self.flags = flag_write.id();
|
||||||
@@ -1016,6 +1036,43 @@ where
|
|||||||
Expression::new(self, expr_idx)
|
Expression::new(self, expr_idx)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn reg_split<H: Into<Register<A::Register>>, L: Into<Register<A::Register>>>(
|
||||||
|
&self,
|
||||||
|
size: usize,
|
||||||
|
hi_reg: H,
|
||||||
|
lo_reg: L,
|
||||||
|
) -> Expression<A, Mutable, NonSSA<LiftedNonSSA>, ValueExpr> {
|
||||||
|
use binaryninjacore_sys::BNLowLevelILAddExpr;
|
||||||
|
use binaryninjacore_sys::BNLowLevelILOperation::LLIL_REG_SPLIT;
|
||||||
|
|
||||||
|
// TODO verify valid id
|
||||||
|
let hi_reg = match hi_reg.into() {
|
||||||
|
Register::ArchReg(r) => r.id(),
|
||||||
|
Register::Temp(r) => 0x8000_0000 | r,
|
||||||
|
};
|
||||||
|
|
||||||
|
// TODO verify valid id
|
||||||
|
let lo_reg = match lo_reg.into() {
|
||||||
|
Register::ArchReg(r) => r.id(),
|
||||||
|
Register::Temp(r) => 0x8000_0000 | r,
|
||||||
|
};
|
||||||
|
|
||||||
|
let expr_idx = unsafe {
|
||||||
|
BNLowLevelILAddExpr(
|
||||||
|
self.handle,
|
||||||
|
LLIL_REG_SPLIT,
|
||||||
|
size,
|
||||||
|
0,
|
||||||
|
hi_reg as u64,
|
||||||
|
lo_reg as u64,
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
|
||||||
|
Expression::new(self, expr_idx)
|
||||||
|
}
|
||||||
|
|
||||||
pub fn set_reg<'a, R, E>(
|
pub fn set_reg<'a, R, E>(
|
||||||
&'a self,
|
&'a self,
|
||||||
size: usize,
|
size: usize,
|
||||||
|
|||||||
@@ -12,8 +12,9 @@
|
|||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
use binaryninjacore_sys::BNLowLevelILInstruction;
|
use binaryninjacore_sys::{BNGetLowLevelILByIndex, BNLowLevelILInstruction};
|
||||||
|
|
||||||
|
use std::collections::BTreeMap;
|
||||||
use std::marker::PhantomData;
|
use std::marker::PhantomData;
|
||||||
use std::mem;
|
use std::mem;
|
||||||
|
|
||||||
@@ -289,6 +290,62 @@ where
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// LLIL_REG_SPLIT
|
||||||
|
pub struct RegSplit;
|
||||||
|
|
||||||
|
impl<'func, A, M, V> Operation<'func, A, M, NonSSA<V>, RegSplit>
|
||||||
|
where
|
||||||
|
A: 'func + Architecture,
|
||||||
|
M: FunctionMutability,
|
||||||
|
V: NonSSAVariant,
|
||||||
|
{
|
||||||
|
pub fn size(&self) -> usize {
|
||||||
|
self.op.size
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn low_reg(&self) -> Register<A::Register> {
|
||||||
|
let raw_id = self.op.operands[0] as u32;
|
||||||
|
|
||||||
|
if raw_id >= 0x8000_0000 {
|
||||||
|
Register::Temp(raw_id & 0x7fff_ffff)
|
||||||
|
} else {
|
||||||
|
self.function
|
||||||
|
.arch()
|
||||||
|
.register_from_id(raw_id)
|
||||||
|
.map(Register::ArchReg)
|
||||||
|
.unwrap_or_else(|| {
|
||||||
|
error!(
|
||||||
|
"got garbage register from LLIL_REG @ 0x{:x}",
|
||||||
|
self.op.address
|
||||||
|
);
|
||||||
|
|
||||||
|
Register::Temp(0)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn high_reg(&self) -> Register<A::Register> {
|
||||||
|
let raw_id = self.op.operands[1] as u32;
|
||||||
|
|
||||||
|
if raw_id >= 0x8000_0000 {
|
||||||
|
Register::Temp(raw_id & 0x7fff_ffff)
|
||||||
|
} else {
|
||||||
|
self.function
|
||||||
|
.arch()
|
||||||
|
.register_from_id(raw_id)
|
||||||
|
.map(Register::ArchReg)
|
||||||
|
.unwrap_or_else(|| {
|
||||||
|
error!(
|
||||||
|
"got garbage register from LLIL_REG @ 0x{:x}",
|
||||||
|
self.op.address
|
||||||
|
);
|
||||||
|
|
||||||
|
Register::Temp(0)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// LLIL_FLAG, LLIL_FLAG_SSA
|
// LLIL_FLAG, LLIL_FLAG_SSA
|
||||||
pub struct Flag;
|
pub struct Flag;
|
||||||
|
|
||||||
@@ -312,6 +369,36 @@ where
|
|||||||
// LLIL_JUMP_TO
|
// LLIL_JUMP_TO
|
||||||
pub struct JumpTo;
|
pub struct JumpTo;
|
||||||
|
|
||||||
|
struct TargetListIter<'func, A, M, F>
|
||||||
|
where
|
||||||
|
A: 'func + Architecture,
|
||||||
|
M: FunctionMutability,
|
||||||
|
F: FunctionForm,
|
||||||
|
{
|
||||||
|
function: &'func Function<A, M, F>,
|
||||||
|
cursor: BNLowLevelILInstruction,
|
||||||
|
cursor_operand: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'func, A, M, F> TargetListIter<'func, A, M, F>
|
||||||
|
where
|
||||||
|
A: 'func + Architecture,
|
||||||
|
M: FunctionMutability,
|
||||||
|
F: FunctionForm,
|
||||||
|
{
|
||||||
|
fn next(&mut self) -> u64 {
|
||||||
|
if self.cursor_operand >= 3 {
|
||||||
|
self.cursor = unsafe {
|
||||||
|
BNGetLowLevelILByIndex(self.function.handle, self.cursor.operands[3] as usize)
|
||||||
|
};
|
||||||
|
self.cursor_operand = 0;
|
||||||
|
}
|
||||||
|
let result = self.cursor.operands[self.cursor_operand];
|
||||||
|
self.cursor_operand += 1;
|
||||||
|
result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<'func, A, M, F> Operation<'func, A, M, F, JumpTo>
|
impl<'func, A, M, F> Operation<'func, A, M, F, JumpTo>
|
||||||
where
|
where
|
||||||
A: 'func + Architecture,
|
A: 'func + Architecture,
|
||||||
@@ -321,7 +408,26 @@ where
|
|||||||
pub fn target(&self) -> Expression<'func, A, M, F, ValueExpr> {
|
pub fn target(&self) -> Expression<'func, A, M, F, ValueExpr> {
|
||||||
Expression::new(self.function, self.op.operands[0] as usize)
|
Expression::new(self.function, self.op.operands[0] as usize)
|
||||||
}
|
}
|
||||||
// TODO target list
|
|
||||||
|
pub fn target_list(&self) -> BTreeMap<u64, usize> {
|
||||||
|
let mut result = BTreeMap::new();
|
||||||
|
let count = self.op.operands[1] as usize / 2;
|
||||||
|
let mut list = TargetListIter {
|
||||||
|
function: self.function,
|
||||||
|
cursor: unsafe {
|
||||||
|
BNGetLowLevelILByIndex(self.function.handle, self.op.operands[2] as usize)
|
||||||
|
},
|
||||||
|
cursor_operand: 0,
|
||||||
|
};
|
||||||
|
|
||||||
|
for _ in 0..count {
|
||||||
|
let value = list.next();
|
||||||
|
let target = list.next() as usize;
|
||||||
|
result.insert(value, target);
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// LLIL_CALL, LLIL_CALL_SSA
|
// LLIL_CALL, LLIL_CALL_SSA
|
||||||
@@ -382,12 +488,20 @@ where
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn true_target_idx(&self) -> usize {
|
||||||
|
self.op.operands[1] as usize
|
||||||
|
}
|
||||||
|
|
||||||
pub fn false_target(&self) -> Instruction<'func, A, M, F> {
|
pub fn false_target(&self) -> Instruction<'func, A, M, F> {
|
||||||
Instruction {
|
Instruction {
|
||||||
function: self.function,
|
function: self.function,
|
||||||
instr_idx: self.op.operands[2] as usize,
|
instr_idx: self.op.operands[2] as usize,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn false_target_idx(&self) -> usize {
|
||||||
|
self.op.operands[2] as usize
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// LLIL_GOTO
|
// LLIL_GOTO
|
||||||
@@ -405,6 +519,10 @@ where
|
|||||||
instr_idx: self.op.operands[0] as usize,
|
instr_idx: self.op.operands[0] as usize,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn target_idx(&self) -> usize {
|
||||||
|
self.op.operands[0] as usize
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// LLIL_FLAG_COND
|
// LLIL_FLAG_COND
|
||||||
@@ -640,6 +758,7 @@ impl OperationArguments for SetFlag {}
|
|||||||
impl OperationArguments for Load {}
|
impl OperationArguments for Load {}
|
||||||
impl OperationArguments for Store {}
|
impl OperationArguments for Store {}
|
||||||
impl OperationArguments for Reg {}
|
impl OperationArguments for Reg {}
|
||||||
|
impl OperationArguments for RegSplit {}
|
||||||
impl OperationArguments for Flag {}
|
impl OperationArguments for Flag {}
|
||||||
impl OperationArguments for FlagBit {}
|
impl OperationArguments for FlagBit {}
|
||||||
impl OperationArguments for Jump {}
|
impl OperationArguments for Jump {}
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
//! To use logging in your script, do something like:
|
//! To use logging in your script, do something like:
|
||||||
//!
|
//!
|
||||||
//! ```
|
//! ```no-test
|
||||||
//! use binaryninja::logger;
|
//! use binaryninja::logger;
|
||||||
//! use log::{info, LevelFilter};
|
//! use log::{info, LevelFilter};
|
||||||
//!
|
//!
|
||||||
@@ -15,7 +15,7 @@
|
|||||||
//!
|
//!
|
||||||
//! or
|
//! or
|
||||||
//!
|
//!
|
||||||
//!```
|
//!```no-test
|
||||||
//! use binaryninja::logger;
|
//! use binaryninja::logger;
|
||||||
//! use log::{info, LevelFilter};
|
//! use log::{info, LevelFilter};
|
||||||
//!
|
//!
|
||||||
@@ -29,12 +29,11 @@
|
|||||||
//! ```
|
//! ```
|
||||||
//!
|
//!
|
||||||
|
|
||||||
use crate::string::BnStr;
|
|
||||||
|
|
||||||
pub use binaryninjacore_sys::BNLogLevel as Level;
|
pub use binaryninjacore_sys::BNLogLevel as Level;
|
||||||
use binaryninjacore_sys::{BNLogListener, BNUpdateLogListeners};
|
use binaryninjacore_sys::{BNLogListener, BNUpdateLogListeners};
|
||||||
|
|
||||||
use log;
|
use log;
|
||||||
|
use std::ffi::CStr;
|
||||||
use std::os::raw::{c_char, c_void};
|
use std::os::raw::{c_char, c_void};
|
||||||
|
|
||||||
struct Logger;
|
struct Logger;
|
||||||
@@ -84,7 +83,7 @@ pub fn init(filter: log::LevelFilter) -> Result<(), log::SetLoggerError> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub trait LogListener: 'static + Sync {
|
pub trait LogListener: 'static + Sync {
|
||||||
fn log(&self, session: usize, level: Level, msg: &BnStr, logger_name: &BnStr, tid: usize);
|
fn log(&self, session: usize, level: Level, msg: &CStr, logger_name: &CStr, tid: usize);
|
||||||
fn level(&self) -> Level;
|
fn level(&self) -> Level;
|
||||||
fn close(&self) {}
|
fn close(&self) {}
|
||||||
}
|
}
|
||||||
@@ -147,8 +146,8 @@ extern "C" fn cb_log<L>(
|
|||||||
listener.log(
|
listener.log(
|
||||||
session,
|
session,
|
||||||
level,
|
level,
|
||||||
BnStr::from_raw(msg),
|
CStr::from_ptr(msg),
|
||||||
BnStr::from_raw(logger_name),
|
CStr::from_ptr(logger_name),
|
||||||
tid,
|
tid,
|
||||||
);
|
);
|
||||||
})
|
})
|
||||||
|
|||||||
122
src/metadata.rs
122
src/metadata.rs
@@ -1,7 +1,5 @@
|
|||||||
use crate::rc::{
|
use crate::rc::{Array, CoreArrayProvider, CoreArrayProviderInner, Guard, Ref, RefCountable};
|
||||||
Array, CoreArrayProvider, CoreArrayWrapper, CoreOwnedArrayProvider, Guard, Ref, RefCountable,
|
use crate::string::{BnStrCompatible, BnString, IntoJson};
|
||||||
};
|
|
||||||
use crate::string::{BnStrCompatible, BnString};
|
|
||||||
use binaryninjacore_sys::*;
|
use binaryninjacore_sys::*;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::os::raw::c_char;
|
use std::os::raw::c_char;
|
||||||
@@ -168,6 +166,19 @@ impl Metadata {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_json_string(&self) -> Result<BnString, ()> {
|
||||||
|
match self.get_type() {
|
||||||
|
MetadataType::StringDataType => {
|
||||||
|
let ptr: *mut c_char = unsafe { BNMetadataGetJsonString(self.handle) };
|
||||||
|
if ptr.is_null() {
|
||||||
|
return Err(());
|
||||||
|
}
|
||||||
|
Ok(unsafe { BnString::from_raw(ptr) })
|
||||||
|
}
|
||||||
|
_ => Err(()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn get_raw(&self) -> Result<Vec<u8>, ()> {
|
pub fn get_raw(&self) -> Result<Vec<u8>, ()> {
|
||||||
match self.get_type() {
|
match self.get_type() {
|
||||||
MetadataType::RawDataType => {
|
MetadataType::RawDataType => {
|
||||||
@@ -335,18 +346,14 @@ unsafe impl RefCountable for Metadata {
|
|||||||
impl CoreArrayProvider for Metadata {
|
impl CoreArrayProvider for Metadata {
|
||||||
type Raw = *mut BNMetadata;
|
type Raw = *mut BNMetadata;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, Metadata>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for Metadata {
|
unsafe impl CoreArrayProviderInner for Metadata {
|
||||||
unsafe fn free(raw: *mut *mut BNMetadata, _count: usize, _context: &()) {
|
unsafe fn free(raw: *mut *mut BNMetadata, _count: usize, _context: &()) {
|
||||||
BNFreeMetadataArray(raw);
|
BNFreeMetadataArray(raw);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a *mut BNMetadata, context: &'a ()) -> Self::Wrapped<'a> {
|
||||||
|
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for Metadata {
|
|
||||||
type Wrapped = Guard<'a, Metadata>;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a *mut BNMetadata, context: &'a ()) -> Guard<'a, Metadata> {
|
|
||||||
Guard::new(Metadata::from_raw(*raw), context)
|
Guard::new(Metadata::from_raw(*raw), context)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -403,12 +410,6 @@ impl From<&str> for Ref<Metadata> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: Into<Ref<Metadata>>> From<&T> for Ref<Metadata> {
|
|
||||||
fn from(value: &T) -> Self {
|
|
||||||
value.into()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&Vec<u8>> for Ref<Metadata> {
|
impl From<&Vec<u8>> for Ref<Metadata> {
|
||||||
fn from(value: &Vec<u8>) -> Self {
|
fn from(value: &Vec<u8>) -> Self {
|
||||||
unsafe { Metadata::ref_from_raw(BNCreateMetadataRawData(value.as_ptr(), value.len())) }
|
unsafe { Metadata::ref_from_raw(BNCreateMetadataRawData(value.as_ptr(), value.len())) }
|
||||||
@@ -441,16 +442,15 @@ impl From<&Array<Metadata>> for Ref<Metadata> {
|
|||||||
|
|
||||||
impl<S: BnStrCompatible> From<HashMap<S, Ref<Metadata>>> for Ref<Metadata> {
|
impl<S: BnStrCompatible> From<HashMap<S, Ref<Metadata>>> for Ref<Metadata> {
|
||||||
fn from(value: HashMap<S, Ref<Metadata>>) -> Self {
|
fn from(value: HashMap<S, Ref<Metadata>>) -> Self {
|
||||||
let mut key_refs: Vec<S::Result> = vec![];
|
let data: Vec<(S::Result, Ref<Metadata>)> = value
|
||||||
let mut keys: Vec<*const c_char> = vec![];
|
.into_iter()
|
||||||
let mut values: Vec<*mut BNMetadata> = vec![];
|
.map(|(k, v)| (k.into_bytes_with_nul(), v))
|
||||||
for (k, v) in value.into_iter() {
|
.collect();
|
||||||
key_refs.push(k.into_bytes_with_nul());
|
let mut keys: Vec<*const c_char> = data
|
||||||
values.push(v.as_ref().handle);
|
.iter()
|
||||||
}
|
.map(|(k, _)| k.as_ref().as_ptr() as *const c_char)
|
||||||
for k in &key_refs {
|
.collect();
|
||||||
keys.push(k.as_ref().as_ptr() as *const c_char);
|
let mut values: Vec<*mut BNMetadata> = data.iter().map(|(_, v)| v.handle).collect();
|
||||||
}
|
|
||||||
|
|
||||||
unsafe {
|
unsafe {
|
||||||
Metadata::ref_from_raw(BNCreateMetadataValueStore(
|
Metadata::ref_from_raw(BNCreateMetadataValueStore(
|
||||||
@@ -462,19 +462,21 @@ impl<S: BnStrCompatible> From<HashMap<S, Ref<Metadata>>> for Ref<Metadata> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<S: BnStrCompatible + Copy, T: Into<Ref<Metadata>>> From<&[(S, T)]> for Ref<Metadata> {
|
impl<S, T> From<&[(S, T)]> for Ref<Metadata>
|
||||||
|
where
|
||||||
|
S: BnStrCompatible + Copy,
|
||||||
|
for<'a> &'a T: Into<Ref<Metadata>>,
|
||||||
|
{
|
||||||
fn from(value: &[(S, T)]) -> Self {
|
fn from(value: &[(S, T)]) -> Self {
|
||||||
let mut key_refs: Vec<S::Result> = vec![];
|
let data: Vec<(S::Result, Ref<Metadata>)> = value
|
||||||
let mut keys: Vec<*const c_char> = vec![];
|
.iter()
|
||||||
let mut values: Vec<*mut BNMetadata> = vec![];
|
.map(|(k, v)| (k.into_bytes_with_nul(), v.into()))
|
||||||
for (k, v) in value.iter() {
|
.collect();
|
||||||
key_refs.push(k.into_bytes_with_nul());
|
let mut keys: Vec<*const c_char> = data
|
||||||
let value_metadata: Ref<Metadata> = v.into();
|
.iter()
|
||||||
values.push(value_metadata.handle);
|
.map(|(k, _)| k.as_ref().as_ptr() as *const c_char)
|
||||||
}
|
.collect();
|
||||||
for k in &key_refs {
|
let mut values: Vec<*mut BNMetadata> = data.iter().map(|(_, v)| v.handle).collect();
|
||||||
keys.push(k.as_ref().as_ptr() as *const c_char);
|
|
||||||
}
|
|
||||||
|
|
||||||
unsafe {
|
unsafe {
|
||||||
Metadata::ref_from_raw(BNCreateMetadataValueStore(
|
Metadata::ref_from_raw(BNCreateMetadataValueStore(
|
||||||
@@ -486,29 +488,15 @@ impl<S: BnStrCompatible + Copy, T: Into<Ref<Metadata>>> From<&[(S, T)]> for Ref<
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<S: BnStrCompatible + Copy, T: Into<Ref<Metadata>>, const N: usize> From<[(S, T); N]>
|
impl<S, T, const N: usize> From<[(S, T); N]> for Ref<Metadata>
|
||||||
for Ref<Metadata>
|
where
|
||||||
|
S: BnStrCompatible + Copy,
|
||||||
|
for<'a> &'a T: Into<Ref<Metadata>>,
|
||||||
{
|
{
|
||||||
fn from(value: [(S, T); N]) -> Self {
|
fn from(value: [(S, T); N]) -> Self {
|
||||||
let mut key_refs: Vec<S::Result> = vec![];
|
let slice = &value[..];
|
||||||
let mut keys: Vec<*const c_char> = vec![];
|
// use the `impl From<&[(S, T)]>`
|
||||||
let mut values: Vec<*mut BNMetadata> = vec![];
|
slice.into()
|
||||||
for (k, v) in value.into_iter() {
|
|
||||||
key_refs.push(k.into_bytes_with_nul());
|
|
||||||
let value_metadata: Ref<Metadata> = v.into();
|
|
||||||
values.push(value_metadata.handle);
|
|
||||||
}
|
|
||||||
for k in &key_refs {
|
|
||||||
keys.push(k.as_ref().as_ptr() as *const c_char);
|
|
||||||
}
|
|
||||||
|
|
||||||
unsafe {
|
|
||||||
Metadata::ref_from_raw(BNCreateMetadataValueStore(
|
|
||||||
keys.as_mut_ptr(),
|
|
||||||
values.as_mut_ptr(),
|
|
||||||
keys.len(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -714,3 +702,17 @@ impl TryFrom<&Metadata> for HashMap<String, Ref<Metadata>> {
|
|||||||
.map(|m| m.into_iter().map(|(k, v)| (k.to_string(), v)).collect())
|
.map(|m| m.into_iter().map(|(k, v)| (k.to_string(), v)).collect())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl IntoJson for &Metadata {
|
||||||
|
type Output = BnString;
|
||||||
|
fn get_json_string(self) -> Result<BnString, ()> {
|
||||||
|
Metadata::get_json_string(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IntoJson for Ref<Metadata> {
|
||||||
|
type Output = BnString;
|
||||||
|
fn get_json_string(self) -> Result<BnString, ()> {
|
||||||
|
Metadata::get_json_string(&self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,18 +1,18 @@
|
|||||||
use core::hash::{Hash, Hasher};
|
use core::hash::{Hash, Hasher};
|
||||||
|
use std::ffi::c_char;
|
||||||
|
|
||||||
use binaryninjacore_sys::BNFreeMediumLevelILFunction;
|
use binaryninjacore_sys::*;
|
||||||
use binaryninjacore_sys::BNGetMediumLevelILBasicBlockList;
|
|
||||||
use binaryninjacore_sys::BNGetMediumLevelILInstructionCount;
|
|
||||||
use binaryninjacore_sys::BNGetMediumLevelILOwnerFunction;
|
|
||||||
use binaryninjacore_sys::BNGetMediumLevelILSSAForm;
|
|
||||||
use binaryninjacore_sys::BNMediumLevelILFunction;
|
|
||||||
use binaryninjacore_sys::BNMediumLevelILGetInstructionStart;
|
|
||||||
use binaryninjacore_sys::BNNewMediumLevelILFunctionReference;
|
|
||||||
|
|
||||||
|
use crate::architecture::CoreArchitecture;
|
||||||
use crate::basicblock::BasicBlock;
|
use crate::basicblock::BasicBlock;
|
||||||
use crate::function::Function;
|
use crate::disassembly::DisassemblySettings;
|
||||||
use crate::function::Location;
|
use crate::flowgraph::FlowGraph;
|
||||||
use crate::rc::{Array, Ref, RefCountable};
|
use crate::function::{Function, Location};
|
||||||
|
use crate::rc::{Array, CoreArrayProvider, CoreArrayProviderInner, Ref, RefCountable};
|
||||||
|
use crate::string::BnStrCompatible;
|
||||||
|
use crate::types::{
|
||||||
|
Conf, PossibleValueSet, RegisterValue, SSAVariable, Type, UserVariableValues, Variable,
|
||||||
|
};
|
||||||
|
|
||||||
use super::{MediumLevelILBlock, MediumLevelILInstruction, MediumLevelILLiftedInstruction};
|
use super::{MediumLevelILBlock, MediumLevelILInstruction, MediumLevelILLiftedInstruction};
|
||||||
|
|
||||||
@@ -65,6 +65,19 @@ impl MediumLevelILFunction {
|
|||||||
self.instruction_from_idx(expr_idx).lift()
|
self.instruction_from_idx(expr_idx).lift()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn instruction_from_instruction_idx(&self, instr_idx: usize) -> MediumLevelILInstruction {
|
||||||
|
MediumLevelILInstruction::new(self.to_owned(), unsafe {
|
||||||
|
BNGetMediumLevelILIndexForInstruction(self.handle, instr_idx)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn lifted_instruction_from_instruction_idx(
|
||||||
|
&self,
|
||||||
|
instr_idx: usize,
|
||||||
|
) -> MediumLevelILLiftedInstruction {
|
||||||
|
self.instruction_from_instruction_idx(instr_idx).lift()
|
||||||
|
}
|
||||||
|
|
||||||
pub fn instruction_count(&self) -> usize {
|
pub fn instruction_count(&self) -> usize {
|
||||||
unsafe { BNGetMediumLevelILInstructionCount(self.handle) }
|
unsafe { BNGetMediumLevelILInstructionCount(self.handle) }
|
||||||
}
|
}
|
||||||
@@ -91,6 +104,478 @@ impl MediumLevelILFunction {
|
|||||||
|
|
||||||
unsafe { Array::new(blocks, count, context) }
|
unsafe { Array::new(blocks, count, context) }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_var_definitions<'a>(&'a self, var: &Variable) -> MediumLevelILInstructionList<'a> {
|
||||||
|
let mut count = 0;
|
||||||
|
let raw_instrs =
|
||||||
|
unsafe { BNGetMediumLevelILVariableDefinitions(self.handle, &var.raw(), &mut count) };
|
||||||
|
assert!(!raw_instrs.is_null());
|
||||||
|
let instrs = unsafe { core::slice::from_raw_parts(raw_instrs, count) };
|
||||||
|
MediumLevelILInstructionList {
|
||||||
|
mlil: self,
|
||||||
|
ptr: raw_instrs,
|
||||||
|
instr_idxs: instrs.iter(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create_user_stack_var<'a, S: BnStrCompatible, C: Into<Conf<&'a Type>>>(
|
||||||
|
self,
|
||||||
|
offset: i64,
|
||||||
|
var_type: C,
|
||||||
|
name: S,
|
||||||
|
) {
|
||||||
|
let var_type = var_type.into();
|
||||||
|
let mut raw_var_type: BNTypeWithConfidence = var_type.into();
|
||||||
|
let name = name.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
BNCreateUserStackVariable(
|
||||||
|
self.get_function().handle,
|
||||||
|
offset,
|
||||||
|
&mut raw_var_type,
|
||||||
|
name.as_ref().as_ptr() as *const c_char,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn delete_user_stack_var(self, offset: i64) {
|
||||||
|
unsafe { BNDeleteUserStackVariable(self.get_function().handle, offset) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create_user_var<'a, S: BnStrCompatible, C: Into<Conf<&'a Type>>>(
|
||||||
|
&self,
|
||||||
|
var: &Variable,
|
||||||
|
var_type: C,
|
||||||
|
name: S,
|
||||||
|
ignore_disjoint_uses: bool,
|
||||||
|
) {
|
||||||
|
let var_type = var_type.into();
|
||||||
|
let raw_var_type: BNTypeWithConfidence = var_type.into();
|
||||||
|
let name = name.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
BNCreateUserVariable(
|
||||||
|
self.get_function().handle,
|
||||||
|
&var.raw(),
|
||||||
|
&raw_var_type as *const _ as *mut _,
|
||||||
|
name.as_ref().as_ptr() as *const _,
|
||||||
|
ignore_disjoint_uses,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn delete_user_var(&self, var: &Variable) {
|
||||||
|
unsafe { BNDeleteUserVariable(self.get_function().handle, &var.raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_var_user_defined(&self, var: &Variable) -> bool {
|
||||||
|
unsafe { BNIsVariableUserDefined(self.get_function().handle, &var.raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Allows the user to specify a PossibleValueSet value for an MLIL
|
||||||
|
/// variable at its definition site.
|
||||||
|
///
|
||||||
|
/// .. warning:: Setting the variable value, triggers a reanalysis of the
|
||||||
|
/// function and allows the dataflow to compute and propagate values which
|
||||||
|
/// depend on the current variable. This implies that branch conditions
|
||||||
|
/// whose values can be determined statically will be computed, leading to
|
||||||
|
/// potential branch elimination at the HLIL layer.
|
||||||
|
///
|
||||||
|
/// * `var` - Variable for which the value is to be set
|
||||||
|
/// * `addr` - Address of the definition site of the variable
|
||||||
|
/// * `value` - Informed value of the variable
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
/// ```no_run
|
||||||
|
/// # use binaryninja::mlil::MediumLevelILFunction;
|
||||||
|
/// # use binaryninja::types::PossibleValueSet;
|
||||||
|
/// # let mlil_fun: MediumLevelILFunction = todo!();
|
||||||
|
/// let (mlil_var, arch_addr, _val) = mlil_fun.user_var_values().all().next().unwrap();
|
||||||
|
/// let def_address = arch_addr.address;
|
||||||
|
/// let var_value = PossibleValueSet::ConstantValue{value: 5};
|
||||||
|
/// mlil_fun.set_user_var_value(&mlil_var, def_address, var_value).unwrap();
|
||||||
|
/// ```
|
||||||
|
pub fn set_user_var_value(
|
||||||
|
&self,
|
||||||
|
var: &Variable,
|
||||||
|
addr: u64,
|
||||||
|
value: PossibleValueSet,
|
||||||
|
) -> Result<(), ()> {
|
||||||
|
let Some(_def_site) = self
|
||||||
|
.get_var_definitions(var)
|
||||||
|
.find(|def| def.address == addr)
|
||||||
|
else {
|
||||||
|
// Error "No definition for Variable found at given address"
|
||||||
|
return Err(());
|
||||||
|
};
|
||||||
|
let function = self.get_function();
|
||||||
|
let def_site = BNArchitectureAndAddress {
|
||||||
|
arch: function.arch().0,
|
||||||
|
address: addr,
|
||||||
|
};
|
||||||
|
let value = value.into_raw();
|
||||||
|
|
||||||
|
unsafe { BNSetUserVariableValue(function.handle, &var.raw(), &def_site, value.as_ffi()) }
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Clears a previously defined user variable value.
|
||||||
|
///
|
||||||
|
/// * `var` - Variable for which the value was informed
|
||||||
|
/// * `def_addr` - Address of the definition site of the variable
|
||||||
|
pub fn clear_user_var_value(&self, var: &Variable, addr: u64) -> Result<(), ()> {
|
||||||
|
let Some(_var_def) = self
|
||||||
|
.get_var_definitions(var)
|
||||||
|
.find(|site| site.address == addr)
|
||||||
|
else {
|
||||||
|
//error "Could not get definition for Variable"
|
||||||
|
return Err(());
|
||||||
|
};
|
||||||
|
|
||||||
|
let function = self.get_function();
|
||||||
|
let def_site = BNArchitectureAndAddress {
|
||||||
|
arch: function.arch().0,
|
||||||
|
address: addr,
|
||||||
|
};
|
||||||
|
|
||||||
|
unsafe { BNClearUserVariableValue(function.handle, &var.raw(), &def_site) };
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a map of current defined user variable values.
|
||||||
|
/// Returns a Map of user current defined user variable values and their definition sites.
|
||||||
|
pub fn user_var_values(&self) -> UserVariableValues {
|
||||||
|
let mut count = 0;
|
||||||
|
let function = self.get_function();
|
||||||
|
let var_values = unsafe { BNGetAllUserVariableValues(function.handle, &mut count) };
|
||||||
|
assert!(!var_values.is_null());
|
||||||
|
UserVariableValues {
|
||||||
|
vars: core::ptr::slice_from_raw_parts(var_values, count),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Clear all user defined variable values.
|
||||||
|
pub fn clear_user_var_values(&self) -> Result<(), ()> {
|
||||||
|
for (var, arch_and_addr, _value) in self.user_var_values().all() {
|
||||||
|
self.clear_user_var_value(&var, arch_and_addr.address)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create_auto_stack_var<'a, T: Into<Conf<&'a Type>>, S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
offset: i64,
|
||||||
|
var_type: T,
|
||||||
|
name: S,
|
||||||
|
) {
|
||||||
|
let var_type: Conf<&Type> = var_type.into();
|
||||||
|
let mut var_type = var_type.into();
|
||||||
|
let name = name.into_bytes_with_nul();
|
||||||
|
let name_c_str = name.as_ref();
|
||||||
|
unsafe {
|
||||||
|
BNCreateAutoStackVariable(
|
||||||
|
self.get_function().handle,
|
||||||
|
offset,
|
||||||
|
&mut var_type,
|
||||||
|
name_c_str.as_ptr() as *const c_char,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn delete_auto_stack_var(&self, offset: i64) {
|
||||||
|
unsafe { BNDeleteAutoStackVariable(self.get_function().handle, offset) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create_auto_var<'a, S: BnStrCompatible, C: Into<Conf<&'a Type>>>(
|
||||||
|
&self,
|
||||||
|
var: &Variable,
|
||||||
|
var_type: C,
|
||||||
|
name: S,
|
||||||
|
ignore_disjoint_uses: bool,
|
||||||
|
) {
|
||||||
|
let var_type: Conf<&Type> = var_type.into();
|
||||||
|
let mut var_type = var_type.into();
|
||||||
|
let name = name.into_bytes_with_nul();
|
||||||
|
let name_c_str = name.as_ref();
|
||||||
|
unsafe {
|
||||||
|
BNCreateAutoVariable(
|
||||||
|
self.get_function().handle,
|
||||||
|
&var.raw(),
|
||||||
|
&mut var_type,
|
||||||
|
name_c_str.as_ptr() as *const c_char,
|
||||||
|
ignore_disjoint_uses,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a list of ILReferenceSource objects (IL xrefs or cross-references)
|
||||||
|
/// that reference the given variable. The variable is a local variable that can be either on the stack,
|
||||||
|
/// in a register, or in a flag.
|
||||||
|
/// This function is related to get_hlil_var_refs(), which returns variable references collected
|
||||||
|
/// from HLIL. The two can be different in several cases, e.g., multiple variables in MLIL can be merged
|
||||||
|
/// into a single variable in HLIL.
|
||||||
|
///
|
||||||
|
/// * `var` - Variable for which to query the xref
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
/// ```no_run
|
||||||
|
/// # use binaryninja::mlil::MediumLevelILFunction;
|
||||||
|
/// # use binaryninja::types::Variable;
|
||||||
|
/// # let mlil_fun: MediumLevelILFunction = todo!();
|
||||||
|
/// # let mlil_var: Variable = todo!();
|
||||||
|
/// let instr = mlil_fun.var_refs(&mlil_var).get(0).expr();
|
||||||
|
/// ```
|
||||||
|
pub fn var_refs(&self, var: &Variable) -> Array<ILReferenceSource> {
|
||||||
|
let mut count = 0;
|
||||||
|
let refs = unsafe {
|
||||||
|
BNGetMediumLevelILVariableReferences(
|
||||||
|
self.get_function().handle,
|
||||||
|
&mut var.raw(),
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(!refs.is_null());
|
||||||
|
unsafe { Array::new(refs, count, self.to_owned()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a list of variables referenced by code in the function ``func``,
|
||||||
|
/// of the architecture ``arch``, and at the address ``addr``. If no function is specified, references from
|
||||||
|
/// all functions and containing the address will be returned. If no architecture is specified, the
|
||||||
|
/// architecture of the function will be used.
|
||||||
|
/// This function is related to get_hlil_var_refs_from(), which returns variable references collected
|
||||||
|
/// from HLIL. The two can be different in several cases, e.g., multiple variables in MLIL can be merged
|
||||||
|
/// into a single variable in HLIL.
|
||||||
|
///
|
||||||
|
/// * `addr` - virtual address to query for variable references
|
||||||
|
/// * `length` - optional length of query
|
||||||
|
/// * `arch` - optional architecture of query
|
||||||
|
pub fn var_refs_from(
|
||||||
|
&self,
|
||||||
|
addr: u64,
|
||||||
|
length: Option<u64>,
|
||||||
|
arch: Option<CoreArchitecture>,
|
||||||
|
) -> Array<VariableReferenceSource> {
|
||||||
|
let function = self.get_function();
|
||||||
|
let arch = arch.unwrap_or_else(|| function.arch());
|
||||||
|
let mut count = 0;
|
||||||
|
|
||||||
|
let refs = if let Some(length) = length {
|
||||||
|
unsafe {
|
||||||
|
BNGetMediumLevelILVariableReferencesInRange(
|
||||||
|
function.handle,
|
||||||
|
arch.0,
|
||||||
|
addr,
|
||||||
|
length,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
unsafe {
|
||||||
|
BNGetMediumLevelILVariableReferencesFrom(function.handle, arch.0, addr, &mut count)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
assert!(!refs.is_null());
|
||||||
|
unsafe { Array::new(refs, count, self.to_owned()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Current IL Address
|
||||||
|
pub fn current_address(&self) -> u64 {
|
||||||
|
unsafe { BNMediumLevelILGetCurrentAddress(self.handle) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the current IL Address
|
||||||
|
pub fn set_current_address(&self, value: u64, arch: Option<CoreArchitecture>) {
|
||||||
|
let arch = arch
|
||||||
|
.map(|x| x.0)
|
||||||
|
.unwrap_or_else(|| self.get_function().arch().0);
|
||||||
|
unsafe { BNMediumLevelILSetCurrentAddress(self.handle, arch, value) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the BasicBlock at the given MLIL `instruction`.
|
||||||
|
pub fn basic_block_containing(
|
||||||
|
&self,
|
||||||
|
instruction: &MediumLevelILInstruction,
|
||||||
|
) -> Option<BasicBlock<MediumLevelILBlock>> {
|
||||||
|
let index = instruction.index;
|
||||||
|
let block = unsafe { BNGetMediumLevelILBasicBlockForInstruction(self.handle, index) };
|
||||||
|
(!block.is_null()).then(|| unsafe {
|
||||||
|
BasicBlock::from_raw(
|
||||||
|
block,
|
||||||
|
MediumLevelILBlock {
|
||||||
|
function: self.to_owned(),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
/// ends the function and computes the list of basic blocks.
|
||||||
|
pub fn finalize(&self) {
|
||||||
|
unsafe { BNFinalizeMediumLevelILFunction(self.handle) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate SSA form given the current MLIL
|
||||||
|
///
|
||||||
|
/// * `analyze_conditionals` - whether or not to analyze conditionals
|
||||||
|
/// * `handle_aliases` - whether or not to handle aliases
|
||||||
|
/// * `known_not_aliases` - optional list of variables known to be not aliased
|
||||||
|
/// * `known_aliases` - optional list of variables known to be aliased
|
||||||
|
pub fn generate_ssa_form(
|
||||||
|
&self,
|
||||||
|
analyze_conditionals: bool,
|
||||||
|
handle_aliases: bool,
|
||||||
|
known_not_aliases: impl IntoIterator<Item = Variable>,
|
||||||
|
known_aliases: impl IntoIterator<Item = Variable>,
|
||||||
|
) {
|
||||||
|
let mut known_not_aliases: Box<[_]> =
|
||||||
|
known_not_aliases.into_iter().map(|x| x.raw()).collect();
|
||||||
|
let mut known_aliases: Box<[_]> = known_aliases.into_iter().map(|x| x.raw()).collect();
|
||||||
|
let (known_not_aliases_ptr, known_not_aliases_len) = if known_not_aliases.is_empty() {
|
||||||
|
(core::ptr::null_mut(), 0)
|
||||||
|
} else {
|
||||||
|
(known_not_aliases.as_mut_ptr(), known_not_aliases.len())
|
||||||
|
};
|
||||||
|
let (known_aliases_ptr, known_aliases_len) = if known_not_aliases.is_empty() {
|
||||||
|
(core::ptr::null_mut(), 0)
|
||||||
|
} else {
|
||||||
|
(known_aliases.as_mut_ptr(), known_aliases.len())
|
||||||
|
};
|
||||||
|
unsafe {
|
||||||
|
BNGenerateMediumLevelILSSAForm(
|
||||||
|
self.handle,
|
||||||
|
analyze_conditionals,
|
||||||
|
handle_aliases,
|
||||||
|
known_not_aliases_ptr,
|
||||||
|
known_not_aliases_len,
|
||||||
|
known_aliases_ptr,
|
||||||
|
known_aliases_len,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Gets the instruction that contains the given SSA variable's definition.
|
||||||
|
///
|
||||||
|
/// Since SSA variables can only be defined once, this will return the single instruction where that occurs.
|
||||||
|
/// For SSA variable version 0s, which don't have definitions, this will return None instead.
|
||||||
|
pub fn ssa_variable_definition(&self, var: SSAVariable) -> Option<MediumLevelILInstruction> {
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetMediumLevelILSSAVarDefinition(self.handle, &var.variable.raw(), var.version)
|
||||||
|
};
|
||||||
|
(result < self.instruction_count())
|
||||||
|
.then(|| MediumLevelILInstruction::new(self.to_owned(), result))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn ssa_memory_definition(&self, version: usize) -> Option<MediumLevelILInstruction> {
|
||||||
|
let result = unsafe { BNGetMediumLevelILSSAMemoryDefinition(self.handle, version) };
|
||||||
|
(result < self.instruction_count())
|
||||||
|
.then(|| MediumLevelILInstruction::new(self.to_owned(), result))
|
||||||
|
}
|
||||||
|
|
||||||
|
///Gets all the instructions that use the given SSA variable.
|
||||||
|
pub fn ssa_variable_uses(&self, ssa_var: SSAVariable) -> Array<MediumLevelILInstruction> {
|
||||||
|
let mut count = 0;
|
||||||
|
let uses = unsafe {
|
||||||
|
BNGetMediumLevelILSSAVarUses(
|
||||||
|
self.handle,
|
||||||
|
&ssa_var.variable.raw(),
|
||||||
|
ssa_var.version,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(!uses.is_null());
|
||||||
|
unsafe { Array::new(uses, count, self.to_owned()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn ssa_memory_uses(&self, version: usize) -> Array<MediumLevelILInstruction> {
|
||||||
|
let mut count = 0;
|
||||||
|
let uses = unsafe { BNGetMediumLevelILSSAMemoryUses(self.handle, version, &mut count) };
|
||||||
|
assert!(!uses.is_null());
|
||||||
|
unsafe { Array::new(uses, count, self.to_owned()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// determines if `ssa_var` is live at any point in the function
|
||||||
|
pub fn is_ssa_variable_live(&self, ssa_var: SSAVariable) -> bool {
|
||||||
|
unsafe {
|
||||||
|
BNIsMediumLevelILSSAVarLive(self.handle, &ssa_var.variable.raw(), ssa_var.version)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn variable_definitions(&self, variable: Variable) -> Array<MediumLevelILInstruction> {
|
||||||
|
let mut count = 0;
|
||||||
|
let defs = unsafe {
|
||||||
|
BNGetMediumLevelILVariableDefinitions(self.handle, &variable.raw(), &mut count)
|
||||||
|
};
|
||||||
|
unsafe { Array::new(defs, count, self.to_owned()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn variable_uses(&self, variable: Variable) -> Array<MediumLevelILInstruction> {
|
||||||
|
let mut count = 0;
|
||||||
|
let uses =
|
||||||
|
unsafe { BNGetMediumLevelILVariableUses(self.handle, &variable.raw(), &mut count) };
|
||||||
|
unsafe { Array::new(uses, count, self.to_owned()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Computes the list of instructions for which `var` is live.
|
||||||
|
/// If `include_last_use` is false, the last use of the variable will not be included in the
|
||||||
|
/// list (this allows for easier computation of overlaps in liveness between two variables).
|
||||||
|
/// If the variable is never used, this function will return an empty list.
|
||||||
|
///
|
||||||
|
/// `var` - the variable to query
|
||||||
|
/// `include_last_use` - whether to include the last use of the variable in the list of instructions
|
||||||
|
pub fn live_instruction_for_variable(
|
||||||
|
&self,
|
||||||
|
variable: Variable,
|
||||||
|
include_last_user: bool,
|
||||||
|
) -> Array<MediumLevelILInstruction> {
|
||||||
|
let mut count = 0;
|
||||||
|
let uses = unsafe {
|
||||||
|
BNGetMediumLevelILLiveInstructionsForVariable(
|
||||||
|
self.handle,
|
||||||
|
&variable.raw(),
|
||||||
|
include_last_user,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
unsafe { Array::new(uses, count, self.to_owned()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn ssa_variable_value(&self, ssa_var: SSAVariable) -> RegisterValue {
|
||||||
|
unsafe {
|
||||||
|
BNGetMediumLevelILSSAVarValue(self.handle, &ssa_var.variable.raw(), ssa_var.version)
|
||||||
|
}
|
||||||
|
.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create_graph(&self, settings: Option<DisassemblySettings>) -> FlowGraph {
|
||||||
|
let settings = settings.map(|x| x.handle).unwrap_or(core::ptr::null_mut());
|
||||||
|
let graph = unsafe { BNCreateMediumLevelILFunctionGraph(self.handle, settings) };
|
||||||
|
unsafe { FlowGraph::from_raw(graph) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This gets just the MLIL variables - you may be interested in the union
|
||||||
|
/// of [MediumLevelILFunction::aliased_variables] and
|
||||||
|
/// [crate::function::Function::parameter_variables] for all the
|
||||||
|
/// variables used in the function
|
||||||
|
pub fn variables(&self) -> Array<Variable> {
|
||||||
|
let mut count = 0;
|
||||||
|
let uses = unsafe { BNGetMediumLevelILVariables(self.handle, &mut count) };
|
||||||
|
unsafe { Array::new(uses, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This returns a list of Variables that are taken reference to and used
|
||||||
|
/// elsewhere. You may also wish to consider [MediumLevelILFunction::variables]
|
||||||
|
/// and [crate::function::Function::parameter_variables]
|
||||||
|
pub fn aliased_variables(&self) -> Array<Variable> {
|
||||||
|
let mut count = 0;
|
||||||
|
let uses = unsafe { BNGetMediumLevelILAliasedVariables(self.handle, &mut count) };
|
||||||
|
unsafe { Array::new(uses, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This gets just the MLIL SSA variables - you may be interested in the
|
||||||
|
/// union of [MediumLevelILFunction::aliased_variables] and
|
||||||
|
/// [crate::function::Function::parameter_variables] for all the
|
||||||
|
/// variables used in the function.
|
||||||
|
pub fn ssa_variables(&self) -> Array<Array<SSAVariable>> {
|
||||||
|
let mut count = 0;
|
||||||
|
let vars = unsafe { BNGetMediumLevelILVariables(self.handle, &mut count) };
|
||||||
|
unsafe { Array::new(vars, count, self.to_owned()) }
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToOwned for MediumLevelILFunction {
|
impl ToOwned for MediumLevelILFunction {
|
||||||
@@ -118,3 +603,125 @@ impl core::fmt::Debug for MediumLevelILFunction {
|
|||||||
write!(f, "<mlil func handle {:p}>", self.handle)
|
write!(f, "<mlil func handle {:p}>", self.handle)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct MediumLevelILInstructionList<'a> {
|
||||||
|
mlil: &'a MediumLevelILFunction,
|
||||||
|
ptr: *mut usize,
|
||||||
|
instr_idxs: core::slice::Iter<'a, usize>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for MediumLevelILInstructionList<'_> {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { BNFreeILInstructionList(self.ptr) };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Iterator for MediumLevelILInstructionList<'_> {
|
||||||
|
type Item = MediumLevelILInstruction;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
self.instr_idxs
|
||||||
|
.next()
|
||||||
|
.map(|i| self.mlil.instruction_from_instruction_idx(*i))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DoubleEndedIterator for MediumLevelILInstructionList<'_> {
|
||||||
|
fn next_back(&mut self) -> Option<Self::Item> {
|
||||||
|
self.instr_idxs
|
||||||
|
.next_back()
|
||||||
|
.map(|i| self.mlil.instruction_from_instruction_idx(*i))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExactSizeIterator for MediumLevelILInstructionList<'_> {}
|
||||||
|
impl core::iter::FusedIterator for MediumLevelILInstructionList<'_> {}
|
||||||
|
|
||||||
|
/////////////////////////
|
||||||
|
// FunctionGraphType
|
||||||
|
|
||||||
|
pub type FunctionGraphType = binaryninjacore_sys::BNFunctionGraphType;
|
||||||
|
|
||||||
|
/////////////////////////
|
||||||
|
// ILReferenceSource
|
||||||
|
|
||||||
|
pub struct ILReferenceSource {
|
||||||
|
mlil: Ref<MediumLevelILFunction>,
|
||||||
|
_func: Ref<Function>,
|
||||||
|
_arch: CoreArchitecture,
|
||||||
|
addr: u64,
|
||||||
|
type_: FunctionGraphType,
|
||||||
|
expr_id: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ILReferenceSource {
|
||||||
|
unsafe fn from_raw(value: BNILReferenceSource, mlil: Ref<MediumLevelILFunction>) -> Self {
|
||||||
|
Self {
|
||||||
|
mlil,
|
||||||
|
_func: Function::from_raw(value.func),
|
||||||
|
_arch: CoreArchitecture::from_raw(value.arch),
|
||||||
|
addr: value.addr,
|
||||||
|
type_: value.type_,
|
||||||
|
expr_id: value.exprId,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn addr(&self) -> u64 {
|
||||||
|
self.addr
|
||||||
|
}
|
||||||
|
pub fn graph_type(&self) -> FunctionGraphType {
|
||||||
|
self.type_
|
||||||
|
}
|
||||||
|
pub fn expr(&self) -> MediumLevelILInstruction {
|
||||||
|
self.mlil.instruction_from_idx(self.expr_id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for ILReferenceSource {
|
||||||
|
type Raw = BNILReferenceSource;
|
||||||
|
type Context = Ref<MediumLevelILFunction>;
|
||||||
|
type Wrapped<'a> = Self;
|
||||||
|
}
|
||||||
|
unsafe impl CoreArrayProviderInner for ILReferenceSource {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeILReferences(raw, count)
|
||||||
|
}
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Self::from_raw(*raw, context.to_owned())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/////////////////////////
|
||||||
|
// VariableReferenceSource
|
||||||
|
|
||||||
|
pub struct VariableReferenceSource {
|
||||||
|
var: Variable,
|
||||||
|
source: ILReferenceSource,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl VariableReferenceSource {
|
||||||
|
pub fn variable(&self) -> &Variable {
|
||||||
|
&self.var
|
||||||
|
}
|
||||||
|
pub fn source(&self) -> &ILReferenceSource {
|
||||||
|
&self.source
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for VariableReferenceSource {
|
||||||
|
type Raw = BNVariableReferenceSource;
|
||||||
|
type Context = Ref<MediumLevelILFunction>;
|
||||||
|
type Wrapped<'a> = Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for VariableReferenceSource {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeVariableReferenceSourceList(raw, count)
|
||||||
|
}
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Self {
|
||||||
|
var: Variable::from_raw(raw.var),
|
||||||
|
source: ILReferenceSource::from_raw(raw.source, context.to_owned()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,12 +1,12 @@
|
|||||||
use binaryninjacore_sys::BNFromVariableIdentifier;
|
use binaryninjacore_sys::*;
|
||||||
use binaryninjacore_sys::BNGetMediumLevelILByIndex;
|
|
||||||
use binaryninjacore_sys::BNMediumLevelILInstruction;
|
|
||||||
use binaryninjacore_sys::BNMediumLevelILOperation;
|
|
||||||
|
|
||||||
|
use crate::architecture::CoreIntrinsic;
|
||||||
|
use crate::disassembly::InstructionTextToken;
|
||||||
use crate::operand_iter::OperandIter;
|
use crate::operand_iter::OperandIter;
|
||||||
use crate::rc::Ref;
|
use crate::rc::{Array, CoreArrayProvider, CoreArrayProviderInner, Ref};
|
||||||
use crate::types::{
|
use crate::types::{
|
||||||
ConstantData, ILIntrinsic, RegisterValue, RegisterValueType, SSAVariable, Variable,
|
Conf, ConstantData, DataFlowQueryOption, ILBranchDependence, PossibleValueSet,
|
||||||
|
RegisterValue, RegisterValueType, SSAVariable, Type, Variable,
|
||||||
};
|
};
|
||||||
|
|
||||||
use super::lift::*;
|
use super::lift::*;
|
||||||
@@ -17,6 +17,8 @@ use super::MediumLevelILFunction;
|
|||||||
pub struct MediumLevelILInstruction {
|
pub struct MediumLevelILInstruction {
|
||||||
pub function: Ref<MediumLevelILFunction>,
|
pub function: Ref<MediumLevelILFunction>,
|
||||||
pub address: u64,
|
pub address: u64,
|
||||||
|
pub index: usize,
|
||||||
|
pub size: usize,
|
||||||
pub kind: MediumLevelILInstructionKind,
|
pub kind: MediumLevelILInstructionKind,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -166,8 +168,8 @@ impl core::fmt::Debug for MediumLevelILInstruction {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl MediumLevelILInstruction {
|
impl MediumLevelILInstruction {
|
||||||
pub(crate) fn new(function: Ref<MediumLevelILFunction>, idx: usize) -> Self {
|
pub(crate) fn new(function: Ref<MediumLevelILFunction>, index: usize) -> Self {
|
||||||
let op = unsafe { BNGetMediumLevelILByIndex(function.handle, idx) };
|
let op = unsafe { BNGetMediumLevelILByIndex(function.handle, index) };
|
||||||
use BNMediumLevelILOperation::*;
|
use BNMediumLevelILOperation::*;
|
||||||
use MediumLevelILInstructionKind as Op;
|
use MediumLevelILInstructionKind as Op;
|
||||||
let kind = match op.operation {
|
let kind = match op.operation {
|
||||||
@@ -703,7 +705,12 @@ impl MediumLevelILInstruction {
|
|||||||
}),
|
}),
|
||||||
// translated directly into a list for Expression or Variables
|
// translated directly into a list for Expression or Variables
|
||||||
// TODO MLIL_MEMORY_INTRINSIC_SSA needs to be handled properly
|
// TODO MLIL_MEMORY_INTRINSIC_SSA needs to be handled properly
|
||||||
MLIL_CALL_OUTPUT | MLIL_CALL_PARAM | MLIL_CALL_PARAM_SSA | MLIL_CALL_OUTPUT_SSA | MLIL_MEMORY_INTRINSIC_OUTPUT_SSA | MLIL_MEMORY_INTRINSIC_SSA => {
|
MLIL_CALL_OUTPUT
|
||||||
|
| MLIL_CALL_PARAM
|
||||||
|
| MLIL_CALL_PARAM_SSA
|
||||||
|
| MLIL_CALL_OUTPUT_SSA
|
||||||
|
| MLIL_MEMORY_INTRINSIC_OUTPUT_SSA
|
||||||
|
| MLIL_MEMORY_INTRINSIC_SSA => {
|
||||||
unreachable!()
|
unreachable!()
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -711,6 +718,8 @@ impl MediumLevelILInstruction {
|
|||||||
Self {
|
Self {
|
||||||
function,
|
function,
|
||||||
address: op.address,
|
address: op.address,
|
||||||
|
index,
|
||||||
|
size: op.size,
|
||||||
kind,
|
kind,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -897,7 +906,7 @@ impl MediumLevelILInstruction {
|
|||||||
output: OperandIter::new(&*self.function, op.first_output, op.num_outputs)
|
output: OperandIter::new(&*self.function, op.first_output, op.num_outputs)
|
||||||
.vars()
|
.vars()
|
||||||
.collect(),
|
.collect(),
|
||||||
intrinsic: ILIntrinsic::new(self.function.get_function().arch(), op.intrinsic),
|
intrinsic: CoreIntrinsic(self.function.get_function().arch().0, op.intrinsic),
|
||||||
params: OperandIter::new(&*self.function, op.first_param, op.num_params)
|
params: OperandIter::new(&*self.function, op.first_param, op.num_params)
|
||||||
.exprs()
|
.exprs()
|
||||||
.map(|expr| expr.lift())
|
.map(|expr| expr.lift())
|
||||||
@@ -916,7 +925,7 @@ impl MediumLevelILInstruction {
|
|||||||
output: OperandIter::new(&*self.function, op.first_output, op.num_outputs)
|
output: OperandIter::new(&*self.function, op.first_output, op.num_outputs)
|
||||||
.ssa_vars()
|
.ssa_vars()
|
||||||
.collect(),
|
.collect(),
|
||||||
intrinsic: ILIntrinsic::new(self.function.get_function().arch(), op.intrinsic),
|
intrinsic: CoreIntrinsic(self.function.get_function().arch().0, op.intrinsic),
|
||||||
params: OperandIter::new(&*self.function, op.first_param, op.num_params)
|
params: OperandIter::new(&*self.function, op.first_param, op.num_params)
|
||||||
.exprs()
|
.exprs()
|
||||||
.map(|expr| expr.lift())
|
.map(|expr| expr.lift())
|
||||||
@@ -1019,10 +1028,394 @@ impl MediumLevelILInstruction {
|
|||||||
MediumLevelILLiftedInstruction {
|
MediumLevelILLiftedInstruction {
|
||||||
function: self.function.clone(),
|
function: self.function.clone(),
|
||||||
address: self.address,
|
address: self.address,
|
||||||
|
index: self.index,
|
||||||
|
size: self.size,
|
||||||
kind,
|
kind,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn tokens(&self) -> Array<InstructionTextToken> {
|
||||||
|
let mut count = 0;
|
||||||
|
let mut tokens = core::ptr::null_mut();
|
||||||
|
assert!(unsafe {
|
||||||
|
BNGetMediumLevelILExprText(
|
||||||
|
self.function.handle,
|
||||||
|
self.function.get_function().arch().0,
|
||||||
|
self.index,
|
||||||
|
&mut tokens,
|
||||||
|
&mut count,
|
||||||
|
core::ptr::null_mut(),
|
||||||
|
)
|
||||||
|
});
|
||||||
|
unsafe { Array::new(tokens, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Value of expression if constant or a known value
|
||||||
|
pub fn value(&self) -> RegisterValue {
|
||||||
|
unsafe { BNGetMediumLevelILExprValue(self.function.handle, self.index) }.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Possible values of expression using path-sensitive static data flow analysis
|
||||||
|
pub fn possible_values(&self, options: Option<&[DataFlowQueryOption]>) -> PossibleValueSet {
|
||||||
|
let options_ptr = options
|
||||||
|
.map(|op| op.as_ptr() as *mut DataFlowQueryOption)
|
||||||
|
.unwrap_or(core::ptr::null_mut());
|
||||||
|
let options_len = options.map(|op| op.len()).unwrap_or(0);
|
||||||
|
let mut value = unsafe {
|
||||||
|
BNGetMediumLevelILPossibleExprValues(
|
||||||
|
self.function.handle,
|
||||||
|
self.index,
|
||||||
|
options_ptr,
|
||||||
|
options_len,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
let result = unsafe { PossibleValueSet::from_raw(value) };
|
||||||
|
unsafe { BNFreePossibleValueSet(&mut value) }
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn possible_ssa_variable_values(
|
||||||
|
&self,
|
||||||
|
ssa_var: SSAVariable,
|
||||||
|
options: Option<&[DataFlowQueryOption]>,
|
||||||
|
) -> PossibleValueSet {
|
||||||
|
let options_ptr = options
|
||||||
|
.map(|op| op.as_ptr() as *mut DataFlowQueryOption)
|
||||||
|
.unwrap_or(core::ptr::null_mut());
|
||||||
|
let options_len = options.map(|op| op.len()).unwrap_or(0);
|
||||||
|
let mut value = unsafe {
|
||||||
|
BNGetMediumLevelILPossibleSSAVarValues(
|
||||||
|
self.function.handle,
|
||||||
|
&ssa_var.variable.raw(),
|
||||||
|
ssa_var.version,
|
||||||
|
self.index,
|
||||||
|
options_ptr,
|
||||||
|
options_len,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
let result = unsafe { PossibleValueSet::from_raw(value) };
|
||||||
|
unsafe { BNFreePossibleValueSet(&mut value) }
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
/// return the variable version used at this instruction
|
||||||
|
pub fn ssa_variable_version(&self, var: Variable) -> SSAVariable {
|
||||||
|
let version = unsafe {
|
||||||
|
BNGetMediumLevelILSSAVarVersionAtILInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
&var.raw(),
|
||||||
|
self.index,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
SSAVariable::new(var, version)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set of branching instructions that must take the true or false path to reach this instruction
|
||||||
|
pub fn branch_dependence(&self) -> Array<BranchDependence> {
|
||||||
|
let mut count = 0;
|
||||||
|
let deps = unsafe {
|
||||||
|
BNGetAllMediumLevelILBranchDependence(self.function.handle, self.index, &mut count)
|
||||||
|
};
|
||||||
|
assert!(!deps.is_null());
|
||||||
|
unsafe { Array::new(deps, count, self.function.clone()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn branch_dependence_at(&self, instruction: MediumLevelILInstruction) -> BranchDependence {
|
||||||
|
let deps = unsafe {
|
||||||
|
BNGetMediumLevelILBranchDependence(self.function.handle, self.index, instruction.index)
|
||||||
|
};
|
||||||
|
BranchDependence {
|
||||||
|
instruction,
|
||||||
|
dependence: deps,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Version of active memory contents in SSA form for this instruction
|
||||||
|
pub fn ssa_memory_version(&self) -> usize {
|
||||||
|
unsafe {
|
||||||
|
BNGetMediumLevelILSSAMemoryVersionAtILInstruction(self.function.handle, self.index)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Type of expression
|
||||||
|
pub fn expr_type(&self) -> Option<Conf<Ref<Type>>> {
|
||||||
|
let result = unsafe { BNGetMediumLevelILExprType(self.function.handle, self.index) };
|
||||||
|
(!result.type_.is_null()).then(|| {
|
||||||
|
Conf::new(
|
||||||
|
unsafe { Type::ref_from_raw(result.type_) },
|
||||||
|
result.confidence,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set type of expression
|
||||||
|
///
|
||||||
|
/// This API is only meant for workflows or for debugging purposes, since the changes they make are not persistent
|
||||||
|
/// and get lost after a database save and reload. To make persistent changes to the analysis, one should use other
|
||||||
|
/// APIs to, for example, change the type of variables. The analysis will then propagate the type of the variable
|
||||||
|
/// and update the type of related expressions.
|
||||||
|
pub fn set_expr_type<'a, T: Into<Conf<&'a Type>>>(&self, value: T) {
|
||||||
|
let type_: Conf<&'a Type> = value.into();
|
||||||
|
let mut type_raw: BNTypeWithConfidence = BNTypeWithConfidence {
|
||||||
|
type_: type_.contents.handle,
|
||||||
|
confidence: type_.confidence,
|
||||||
|
};
|
||||||
|
unsafe { BNSetMediumLevelILExprType(self.function.handle, self.index, &mut type_raw) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn variable_for_register(&self, reg_id: u32) -> Variable {
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetMediumLevelILVariableForRegisterAtInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
reg_id,
|
||||||
|
self.index,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
unsafe { Variable::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn variable_for_flag(&self, flag_id: u32) -> Variable {
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetMediumLevelILVariableForFlagAtInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
flag_id,
|
||||||
|
self.index,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
unsafe { Variable::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn variable_for_stack_location(&self, offset: i64) -> Variable {
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetMediumLevelILVariableForStackLocationAtInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
offset,
|
||||||
|
self.index,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
unsafe { Variable::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn register_value(&self, reg_id: u32) -> RegisterValue {
|
||||||
|
unsafe {
|
||||||
|
BNGetMediumLevelILRegisterValueAtInstruction(self.function.handle, reg_id, self.index)
|
||||||
|
}
|
||||||
|
.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn register_value_after(&self, reg_id: u32) -> RegisterValue {
|
||||||
|
unsafe {
|
||||||
|
BNGetMediumLevelILRegisterValueAfterInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
reg_id,
|
||||||
|
self.index,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn possible_register_values(
|
||||||
|
&self,
|
||||||
|
reg_id: u32,
|
||||||
|
options: Option<&[DataFlowQueryOption]>,
|
||||||
|
) -> PossibleValueSet {
|
||||||
|
let options_ptr = options
|
||||||
|
.map(|op| op.as_ptr() as *mut DataFlowQueryOption)
|
||||||
|
.unwrap_or(core::ptr::null_mut());
|
||||||
|
let options_len = options.map(|op| op.len()).unwrap_or(0);
|
||||||
|
let mut value = unsafe {
|
||||||
|
BNGetMediumLevelILPossibleRegisterValuesAtInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
reg_id,
|
||||||
|
self.index,
|
||||||
|
options_ptr,
|
||||||
|
options_len,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
let result = unsafe { PossibleValueSet::from_raw(value) };
|
||||||
|
unsafe { BNFreePossibleValueSet(&mut value) }
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn possible_register_values_after(
|
||||||
|
&self,
|
||||||
|
reg_id: u32,
|
||||||
|
options: Option<&[DataFlowQueryOption]>,
|
||||||
|
) -> PossibleValueSet {
|
||||||
|
let options_ptr = options
|
||||||
|
.map(|op| op.as_ptr() as *mut DataFlowQueryOption)
|
||||||
|
.unwrap_or(core::ptr::null_mut());
|
||||||
|
let options_len = options.map(|op| op.len()).unwrap_or(0);
|
||||||
|
let mut value = unsafe {
|
||||||
|
BNGetMediumLevelILPossibleRegisterValuesAfterInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
reg_id,
|
||||||
|
self.index,
|
||||||
|
options_ptr,
|
||||||
|
options_len,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
let result = unsafe { PossibleValueSet::from_raw(value) };
|
||||||
|
unsafe { BNFreePossibleValueSet(&mut value) }
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn flag_value(&self, flag_id: u32) -> RegisterValue {
|
||||||
|
unsafe {
|
||||||
|
BNGetMediumLevelILFlagValueAtInstruction(self.function.handle, flag_id, self.index)
|
||||||
|
}
|
||||||
|
.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn flag_value_after(&self, flag_id: u32) -> RegisterValue {
|
||||||
|
unsafe {
|
||||||
|
BNGetMediumLevelILFlagValueAfterInstruction(self.function.handle, flag_id, self.index)
|
||||||
|
}
|
||||||
|
.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn possible_flag_values(
|
||||||
|
&self,
|
||||||
|
flag_id: u32,
|
||||||
|
options: Option<&[DataFlowQueryOption]>,
|
||||||
|
) -> PossibleValueSet {
|
||||||
|
let options_ptr = options
|
||||||
|
.map(|op| op.as_ptr() as *mut DataFlowQueryOption)
|
||||||
|
.unwrap_or(core::ptr::null_mut());
|
||||||
|
let options_len = options.map(|op| op.len()).unwrap_or(0);
|
||||||
|
let mut value = unsafe {
|
||||||
|
BNGetMediumLevelILPossibleFlagValuesAtInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
flag_id,
|
||||||
|
self.index,
|
||||||
|
options_ptr,
|
||||||
|
options_len,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
let result = unsafe { PossibleValueSet::from_raw(value) };
|
||||||
|
unsafe { BNFreePossibleValueSet(&mut value) }
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn possible_flag_values_after(
|
||||||
|
&self,
|
||||||
|
flag_id: u32,
|
||||||
|
options: Option<&[DataFlowQueryOption]>,
|
||||||
|
) -> PossibleValueSet {
|
||||||
|
let options_ptr = options
|
||||||
|
.map(|op| op.as_ptr() as *mut DataFlowQueryOption)
|
||||||
|
.unwrap_or(core::ptr::null_mut());
|
||||||
|
let options_len = options.map(|op| op.len()).unwrap_or(0);
|
||||||
|
let mut value = unsafe {
|
||||||
|
BNGetMediumLevelILPossibleFlagValuesAfterInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
flag_id,
|
||||||
|
self.index,
|
||||||
|
options_ptr,
|
||||||
|
options_len,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
let result = unsafe { PossibleValueSet::from_raw(value) };
|
||||||
|
unsafe { BNFreePossibleValueSet(&mut value) }
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn stack_contents(&self, offset: i64, size: usize) -> RegisterValue {
|
||||||
|
unsafe {
|
||||||
|
BNGetMediumLevelILStackContentsAtInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
offset,
|
||||||
|
size,
|
||||||
|
self.index,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn stack_contents_after(&self, offset: i64, size: usize) -> RegisterValue {
|
||||||
|
unsafe {
|
||||||
|
BNGetMediumLevelILStackContentsAfterInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
offset,
|
||||||
|
size,
|
||||||
|
self.index,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn possible_stack_contents(
|
||||||
|
&self,
|
||||||
|
offset: i64,
|
||||||
|
size: usize,
|
||||||
|
options: Option<&[DataFlowQueryOption]>,
|
||||||
|
) -> PossibleValueSet {
|
||||||
|
let options_ptr = options
|
||||||
|
.map(|op| op.as_ptr() as *mut DataFlowQueryOption)
|
||||||
|
.unwrap_or(core::ptr::null_mut());
|
||||||
|
let options_len = options.map(|op| op.len()).unwrap_or(0);
|
||||||
|
let mut value = unsafe {
|
||||||
|
BNGetMediumLevelILPossibleStackContentsAtInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
offset,
|
||||||
|
size,
|
||||||
|
self.index,
|
||||||
|
options_ptr,
|
||||||
|
options_len,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
let result = unsafe { PossibleValueSet::from_raw(value) };
|
||||||
|
unsafe { BNFreePossibleValueSet(&mut value) }
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn possible_stack_contents_after(
|
||||||
|
&self,
|
||||||
|
offset: i64,
|
||||||
|
size: usize,
|
||||||
|
options: Option<&[DataFlowQueryOption]>,
|
||||||
|
) -> PossibleValueSet {
|
||||||
|
let options_ptr = options
|
||||||
|
.map(|op| op.as_ptr() as *mut DataFlowQueryOption)
|
||||||
|
.unwrap_or(core::ptr::null_mut());
|
||||||
|
let options_len = options.map(|op| op.len()).unwrap_or(0);
|
||||||
|
let mut value = unsafe {
|
||||||
|
BNGetMediumLevelILPossibleStackContentsAfterInstruction(
|
||||||
|
self.function.handle,
|
||||||
|
offset,
|
||||||
|
size,
|
||||||
|
self.index,
|
||||||
|
options_ptr,
|
||||||
|
options_len,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
let result = unsafe { PossibleValueSet::from_raw(value) };
|
||||||
|
unsafe { BNFreePossibleValueSet(&mut value) }
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Gets the unique variable for a definition instruction. This unique variable can be passed
|
||||||
|
/// to [crate::function::Function::split_variable] to split a variable at a definition. The given `var` is the
|
||||||
|
/// assigned variable to query.
|
||||||
|
///
|
||||||
|
/// * `var` - variable to query
|
||||||
|
pub fn split_var_for_definition(&self, var: Variable) -> Variable {
|
||||||
|
let index = unsafe {
|
||||||
|
BNGetDefaultIndexForMediumLevelILVariableDefinition(
|
||||||
|
self.function.handle,
|
||||||
|
&var.raw(),
|
||||||
|
self.index,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
Variable::new(var.t, index, var.storage)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// alias for [MediumLevelILInstruction::split_var_for_definition]
|
||||||
|
#[inline]
|
||||||
|
pub fn get_split_var_for_definition(&self, var: &Variable) -> Variable {
|
||||||
|
self.split_var_for_definition(*var)
|
||||||
|
}
|
||||||
|
|
||||||
fn lift_operand(&self, expr_idx: usize) -> Box<MediumLevelILLiftedInstruction> {
|
fn lift_operand(&self, expr_idx: usize) -> Box<MediumLevelILLiftedInstruction> {
|
||||||
Box::new(self.function.lifted_instruction_from_idx(expr_idx))
|
Box::new(self.function.lifted_instruction_from_idx(expr_idx))
|
||||||
}
|
}
|
||||||
@@ -1096,6 +1489,22 @@ impl MediumLevelILInstruction {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for MediumLevelILInstruction {
|
||||||
|
type Raw = usize;
|
||||||
|
type Context = Ref<MediumLevelILFunction>;
|
||||||
|
type Wrapped<'a> = Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for MediumLevelILInstruction {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, _count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeILInstructionList(raw)
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
context.instruction_from_idx(*raw)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn get_float(value: u64, size: usize) -> f64 {
|
fn get_float(value: u64, size: usize) -> f64 {
|
||||||
match size {
|
match size {
|
||||||
4 => f32::from_bits(value as u32) as f64,
|
4 => f32::from_bits(value as u32) as f64,
|
||||||
@@ -1110,7 +1519,7 @@ fn get_raw_operation(function: &MediumLevelILFunction, idx: usize) -> BNMediumLe
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn get_var(id: u64) -> Variable {
|
fn get_var(id: u64) -> Variable {
|
||||||
unsafe { Variable::from_raw(BNFromVariableIdentifier(id)) }
|
unsafe { Variable::from_identifier(id) }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_var_ssa(id: u64, version: usize) -> SSAVariable {
|
fn get_var_ssa(id: u64, version: usize) -> SSAVariable {
|
||||||
@@ -1149,3 +1558,28 @@ fn get_call_params_ssa(
|
|||||||
assert_eq!(op.operation, BNMediumLevelILOperation::MLIL_CALL_PARAM_SSA);
|
assert_eq!(op.operation, BNMediumLevelILOperation::MLIL_CALL_PARAM_SSA);
|
||||||
OperandIter::new(function, op.operands[2] as usize, op.operands[1] as usize).exprs()
|
OperandIter::new(function, op.operands[2] as usize, op.operands[1] as usize).exprs()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Conditional branching instruction and an expected conditional result
|
||||||
|
pub struct BranchDependence {
|
||||||
|
pub instruction: MediumLevelILInstruction,
|
||||||
|
pub dependence: ILBranchDependence,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for BranchDependence {
|
||||||
|
type Raw = BNILBranchInstructionAndDependence;
|
||||||
|
type Context = Ref<MediumLevelILFunction>;
|
||||||
|
type Wrapped<'a> = Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for BranchDependence {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, _count: usize, _context: &Self::Context) {
|
||||||
|
unsafe { BNFreeILBranchDependenceList(raw) };
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Self {
|
||||||
|
instruction: MediumLevelILInstruction::new(context.clone(), raw.branch),
|
||||||
|
dependence: raw.dependence,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
143
src/mlil/lift.rs
143
src/mlil/lift.rs
@@ -1,7 +1,8 @@
|
|||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
|
|
||||||
|
use crate::architecture::CoreIntrinsic;
|
||||||
use crate::rc::Ref;
|
use crate::rc::Ref;
|
||||||
use crate::types::{ConstantData, ILIntrinsic, SSAVariable, Variable};
|
use crate::types::{ConstantData, SSAVariable, Variable};
|
||||||
|
|
||||||
use super::operation::*;
|
use super::operation::*;
|
||||||
use super::MediumLevelILFunction;
|
use super::MediumLevelILFunction;
|
||||||
@@ -9,7 +10,7 @@ use super::MediumLevelILFunction;
|
|||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub enum MediumLevelILLiftedOperand {
|
pub enum MediumLevelILLiftedOperand {
|
||||||
ConstantData(ConstantData),
|
ConstantData(ConstantData),
|
||||||
Intrinsic(ILIntrinsic),
|
Intrinsic(CoreIntrinsic),
|
||||||
Expr(MediumLevelILLiftedInstruction),
|
Expr(MediumLevelILLiftedInstruction),
|
||||||
ExprList(Vec<MediumLevelILLiftedInstruction>),
|
ExprList(Vec<MediumLevelILLiftedInstruction>),
|
||||||
Float(f64),
|
Float(f64),
|
||||||
@@ -26,6 +27,8 @@ pub enum MediumLevelILLiftedOperand {
|
|||||||
pub struct MediumLevelILLiftedInstruction {
|
pub struct MediumLevelILLiftedInstruction {
|
||||||
pub function: Ref<MediumLevelILFunction>,
|
pub function: Ref<MediumLevelILFunction>,
|
||||||
pub address: u64,
|
pub address: u64,
|
||||||
|
pub index: usize,
|
||||||
|
pub size: usize,
|
||||||
pub kind: MediumLevelILLiftedInstructionKind,
|
pub kind: MediumLevelILLiftedInstructionKind,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -164,6 +167,142 @@ pub enum MediumLevelILLiftedInstructionKind {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl MediumLevelILLiftedInstruction {
|
impl MediumLevelILLiftedInstruction {
|
||||||
|
pub fn name(&self) -> &'static str {
|
||||||
|
use MediumLevelILLiftedInstructionKind::*;
|
||||||
|
match self.kind {
|
||||||
|
Nop => "Nop",
|
||||||
|
Noret => "Noret",
|
||||||
|
Bp => "Bp",
|
||||||
|
Undef => "Undef",
|
||||||
|
Unimpl => "Unimpl",
|
||||||
|
If(_) => "If",
|
||||||
|
FloatConst(_) => "FloatConst",
|
||||||
|
Const(_) => "Const",
|
||||||
|
ConstPtr(_) => "ConstPtr",
|
||||||
|
Import(_) => "Import",
|
||||||
|
ExternPtr(_) => "ExternPtr",
|
||||||
|
ConstData(_) => "ConstData",
|
||||||
|
Jump(_) => "Jump",
|
||||||
|
RetHint(_) => "RetHint",
|
||||||
|
StoreSsa(_) => "StoreSsa",
|
||||||
|
StoreStructSsa(_) => "StoreStructSsa",
|
||||||
|
StoreStruct(_) => "StoreStruct",
|
||||||
|
Store(_) => "Store",
|
||||||
|
JumpTo(_) => "JumpTo",
|
||||||
|
Goto(_) => "Goto",
|
||||||
|
FreeVarSlot(_) => "FreeVarSlot",
|
||||||
|
SetVarField(_) => "SetVarField",
|
||||||
|
SetVar(_) => "SetVar",
|
||||||
|
FreeVarSlotSsa(_) => "FreeVarSlotSsa",
|
||||||
|
SetVarSsaField(_) => "SetVarSsaField",
|
||||||
|
SetVarAliasedField(_) => "SetVarAliasedField",
|
||||||
|
SetVarAliased(_) => "SetVarAliased",
|
||||||
|
SetVarSsa(_) => "SetVarSsa",
|
||||||
|
VarPhi(_) => "VarPhi",
|
||||||
|
MemPhi(_) => "MemPhi",
|
||||||
|
VarSplit(_) => "VarSplit",
|
||||||
|
SetVarSplit(_) => "SetVarSplit",
|
||||||
|
VarSplitSsa(_) => "VarSplitSsa",
|
||||||
|
SetVarSplitSsa(_) => "SetVarSplitSsa",
|
||||||
|
Add(_) => "Add",
|
||||||
|
Sub(_) => "Sub",
|
||||||
|
And(_) => "And",
|
||||||
|
Or(_) => "Or",
|
||||||
|
Xor(_) => "Xor",
|
||||||
|
Lsl(_) => "Lsl",
|
||||||
|
Lsr(_) => "Lsr",
|
||||||
|
Asr(_) => "Asr",
|
||||||
|
Rol(_) => "Rol",
|
||||||
|
Ror(_) => "Ror",
|
||||||
|
Mul(_) => "Mul",
|
||||||
|
MuluDp(_) => "MuluDp",
|
||||||
|
MulsDp(_) => "MulsDp",
|
||||||
|
Divu(_) => "Divu",
|
||||||
|
DivuDp(_) => "DivuDp",
|
||||||
|
Divs(_) => "Divs",
|
||||||
|
DivsDp(_) => "DivsDp",
|
||||||
|
Modu(_) => "Modu",
|
||||||
|
ModuDp(_) => "ModuDp",
|
||||||
|
Mods(_) => "Mods",
|
||||||
|
ModsDp(_) => "ModsDp",
|
||||||
|
CmpE(_) => "CmpE",
|
||||||
|
CmpNe(_) => "CmpNe",
|
||||||
|
CmpSlt(_) => "CmpSlt",
|
||||||
|
CmpUlt(_) => "CmpUlt",
|
||||||
|
CmpSle(_) => "CmpSle",
|
||||||
|
CmpUle(_) => "CmpUle",
|
||||||
|
CmpSge(_) => "CmpSge",
|
||||||
|
CmpUge(_) => "CmpUge",
|
||||||
|
CmpSgt(_) => "CmpSgt",
|
||||||
|
CmpUgt(_) => "CmpUgt",
|
||||||
|
TestBit(_) => "TestBit",
|
||||||
|
AddOverflow(_) => "AddOverflow",
|
||||||
|
FcmpE(_) => "FcmpE",
|
||||||
|
FcmpNe(_) => "FcmpNe",
|
||||||
|
FcmpLt(_) => "FcmpLt",
|
||||||
|
FcmpLe(_) => "FcmpLe",
|
||||||
|
FcmpGe(_) => "FcmpGe",
|
||||||
|
FcmpGt(_) => "FcmpGt",
|
||||||
|
FcmpO(_) => "FcmpO",
|
||||||
|
FcmpUo(_) => "FcmpUo",
|
||||||
|
Fadd(_) => "Fadd",
|
||||||
|
Fsub(_) => "Fsub",
|
||||||
|
Fmul(_) => "Fmul",
|
||||||
|
Fdiv(_) => "Fdiv",
|
||||||
|
Adc(_) => "Adc",
|
||||||
|
Sbb(_) => "Sbb",
|
||||||
|
Rlc(_) => "Rlc",
|
||||||
|
Rrc(_) => "Rrc",
|
||||||
|
Call(_) => "Call",
|
||||||
|
Tailcall(_) => "Tailcall",
|
||||||
|
Syscall(_) => "Syscall",
|
||||||
|
Intrinsic(_) => "Intrinsic",
|
||||||
|
IntrinsicSsa(_) => "IntrinsicSsa",
|
||||||
|
CallSsa(_) => "CallSsa",
|
||||||
|
TailcallSsa(_) => "TailcallSsa",
|
||||||
|
CallUntypedSsa(_) => "CallUntypedSsa",
|
||||||
|
TailcallUntypedSsa(_) => "TailcallUntypedSsa",
|
||||||
|
SyscallSsa(_) => "SyscallSsa",
|
||||||
|
SyscallUntypedSsa(_) => "SyscallUntypedSsa",
|
||||||
|
CallUntyped(_) => "CallUntyped",
|
||||||
|
TailcallUntyped(_) => "TailcallUntyped",
|
||||||
|
SyscallUntyped(_) => "SyscallUntyped",
|
||||||
|
SeparateParamList(_) => "SeparateParamList",
|
||||||
|
SharedParamSlot(_) => "SharedParamSlot",
|
||||||
|
Neg(_) => "Neg",
|
||||||
|
Not(_) => "Not",
|
||||||
|
Sx(_) => "Sx",
|
||||||
|
Zx(_) => "Zx",
|
||||||
|
LowPart(_) => "LowPart",
|
||||||
|
BoolToInt(_) => "BoolToInt",
|
||||||
|
UnimplMem(_) => "UnimplMem",
|
||||||
|
Fsqrt(_) => "Fsqrt",
|
||||||
|
Fneg(_) => "Fneg",
|
||||||
|
Fabs(_) => "Fabs",
|
||||||
|
FloatToInt(_) => "FloatToInt",
|
||||||
|
IntToFloat(_) => "IntToFloat",
|
||||||
|
FloatConv(_) => "FloatConv",
|
||||||
|
RoundToInt(_) => "RoundToInt",
|
||||||
|
Floor(_) => "Floor",
|
||||||
|
Ceil(_) => "Ceil",
|
||||||
|
Ftrunc(_) => "Ftrunc",
|
||||||
|
Load(_) => "Load",
|
||||||
|
LoadStruct(_) => "LoadStruct",
|
||||||
|
LoadStructSsa(_) => "LoadStructSsa",
|
||||||
|
LoadSsa(_) => "LoadSsa",
|
||||||
|
Ret(_) => "Ret",
|
||||||
|
Var(_) => "Var",
|
||||||
|
AddressOf(_) => "AddressOf",
|
||||||
|
VarField(_) => "VarField",
|
||||||
|
AddressOfField(_) => "AddressOfField",
|
||||||
|
VarSsa(_) => "VarSsa",
|
||||||
|
VarAliased(_) => "VarAliased",
|
||||||
|
VarSsaField(_) => "VarSsaField",
|
||||||
|
VarAliasedField(_) => "VarAliasedField",
|
||||||
|
Trap(_) => "Trap",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn operands(&self) -> Vec<(&'static str, MediumLevelILLiftedOperand)> {
|
pub fn operands(&self) -> Vec<(&'static str, MediumLevelILLiftedOperand)> {
|
||||||
use MediumLevelILLiftedInstructionKind::*;
|
use MediumLevelILLiftedInstructionKind::*;
|
||||||
use MediumLevelILLiftedOperand as Operand;
|
use MediumLevelILLiftedOperand as Operand;
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
|
|
||||||
use crate::types::{ConstantData, ILIntrinsic, SSAVariable, Variable};
|
use crate::{architecture::CoreIntrinsic, types::{ConstantData, SSAVariable, Variable}};
|
||||||
|
|
||||||
use super::MediumLevelILLiftedInstruction;
|
use super::MediumLevelILLiftedInstruction;
|
||||||
|
|
||||||
@@ -355,7 +355,7 @@ pub struct Intrinsic {
|
|||||||
#[derive(Clone, Debug, PartialEq)]
|
#[derive(Clone, Debug, PartialEq)]
|
||||||
pub struct LiftedIntrinsic {
|
pub struct LiftedIntrinsic {
|
||||||
pub output: Vec<Variable>,
|
pub output: Vec<Variable>,
|
||||||
pub intrinsic: ILIntrinsic,
|
pub intrinsic: CoreIntrinsic,
|
||||||
pub params: Vec<MediumLevelILLiftedInstruction>,
|
pub params: Vec<MediumLevelILLiftedInstruction>,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -371,7 +371,7 @@ pub struct IntrinsicSsa {
|
|||||||
#[derive(Clone, Debug, PartialEq)]
|
#[derive(Clone, Debug, PartialEq)]
|
||||||
pub struct LiftedIntrinsicSsa {
|
pub struct LiftedIntrinsicSsa {
|
||||||
pub output: Vec<SSAVariable>,
|
pub output: Vec<SSAVariable>,
|
||||||
pub intrinsic: ILIntrinsic,
|
pub intrinsic: CoreIntrinsic,
|
||||||
pub params: Vec<MediumLevelILLiftedInstruction>,
|
pub params: Vec<MediumLevelILLiftedInstruction>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
use binaryninjacore_sys::BNFromVariableIdentifier;
|
|
||||||
use binaryninjacore_sys::BNGetHighLevelILByIndex;
|
use binaryninjacore_sys::BNGetHighLevelILByIndex;
|
||||||
use binaryninjacore_sys::BNGetMediumLevelILByIndex;
|
use binaryninjacore_sys::BNGetMediumLevelILByIndex;
|
||||||
use binaryninjacore_sys::BNHighLevelILOperation;
|
use binaryninjacore_sys::BNHighLevelILOperation;
|
||||||
@@ -215,7 +214,7 @@ impl<F: ILFunction + RefCountable> ExactSizeIterator for OperandSSAVarIter<F> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_var(id: u64) -> Variable {
|
pub fn get_var(id: u64) -> Variable {
|
||||||
unsafe { Variable::from_raw(BNFromVariableIdentifier(id)) }
|
unsafe { Variable::from_identifier(id) }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_var_ssa(id: u64, version: usize) -> SSAVariable {
|
pub fn get_var_ssa(id: u64, version: usize) -> SSAVariable {
|
||||||
|
|||||||
@@ -23,6 +23,7 @@ use crate::{
|
|||||||
callingconvention::CallingConvention,
|
callingconvention::CallingConvention,
|
||||||
rc::*,
|
rc::*,
|
||||||
string::*,
|
string::*,
|
||||||
|
typelibrary::TypeLibrary,
|
||||||
types::{QualifiedName, QualifiedNameAndType, Type},
|
types::{QualifiedName, QualifiedNameAndType, Type},
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -163,6 +164,15 @@ impl Platform {
|
|||||||
unsafe { CoreArchitecture::from_raw(BNGetPlatformArchitecture(self.handle)) }
|
unsafe { CoreArchitecture::from_raw(BNGetPlatformArchitecture(self.handle)) }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_type_libraries_by_name(&self, name: &QualifiedName) -> Array<TypeLibrary> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetPlatformTypeLibrariesByName(self.handle, &name.0 as *const _ as *mut _, &mut count)
|
||||||
|
};
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
pub fn register_os<S: BnStrCompatible>(&self, os: S) {
|
pub fn register_os<S: BnStrCompatible>(&self, os: S) {
|
||||||
let os = os.into_bytes_with_nul();
|
let os = os.into_bytes_with_nul();
|
||||||
|
|
||||||
@@ -365,18 +375,14 @@ unsafe impl RefCountable for Platform {
|
|||||||
impl CoreArrayProvider for Platform {
|
impl CoreArrayProvider for Platform {
|
||||||
type Raw = *mut BNPlatform;
|
type Raw = *mut BNPlatform;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, Platform>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for Platform {
|
unsafe impl CoreArrayProviderInner for Platform {
|
||||||
unsafe fn free(raw: *mut *mut BNPlatform, count: usize, _context: &()) {
|
unsafe fn free(raw: *mut *mut BNPlatform, count: usize, _context: &()) {
|
||||||
BNFreePlatformList(raw, count);
|
BNFreePlatformList(raw, count);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a *mut BNPlatform, context: &'a ()) -> Self::Wrapped<'a> {
|
||||||
|
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for Platform {
|
|
||||||
type Wrapped = Guard<'a, Platform>;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a *mut BNPlatform, context: &'a ()) -> Guard<'a, Platform> {
|
|
||||||
debug_assert!(!raw.is_null());
|
debug_assert!(!raw.is_null());
|
||||||
Guard::new(Platform { handle: *raw }, context)
|
Guard::new(Platform { handle: *raw }, context)
|
||||||
}
|
}
|
||||||
|
|||||||
1464
src/project.rs
Normal file
1464
src/project.rs
Normal file
File diff suppressed because it is too large
Load Diff
121
src/rc.rs
121
src/rc.rs
@@ -35,7 +35,7 @@ use std::slice;
|
|||||||
// `T` does not have the `Drop` impl in order to allow more
|
// `T` does not have the `Drop` impl in order to allow more
|
||||||
// efficient handling of core owned objects we receive pointers
|
// efficient handling of core owned objects we receive pointers
|
||||||
// to in callbacks
|
// to in callbacks
|
||||||
pub unsafe trait RefCountable: ToOwned<Owned = Ref<Self>> + Sized {
|
pub(crate) unsafe trait RefCountable: ToOwned<Owned = Ref<Self>> + Sized {
|
||||||
unsafe fn inc_ref(handle: &Self) -> Ref<Self>;
|
unsafe fn inc_ref(handle: &Self) -> Ref<Self>;
|
||||||
unsafe fn dec_ref(handle: &Self);
|
unsafe fn dec_ref(handle: &Self);
|
||||||
}
|
}
|
||||||
@@ -43,10 +43,12 @@ pub unsafe trait RefCountable: ToOwned<Owned = Ref<Self>> + Sized {
|
|||||||
// Represents an 'owned' reference tracked by the core
|
// Represents an 'owned' reference tracked by the core
|
||||||
// that we are responsible for cleaning up once we're
|
// that we are responsible for cleaning up once we're
|
||||||
// done with the encapsulated value.
|
// done with the encapsulated value.
|
||||||
|
#[allow(private_bounds)]
|
||||||
pub struct Ref<T: RefCountable> {
|
pub struct Ref<T: RefCountable> {
|
||||||
contents: T,
|
contents: T,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(private_bounds)]
|
||||||
impl<T: RefCountable> Ref<T> {
|
impl<T: RefCountable> Ref<T> {
|
||||||
/// Safety: You need to make sure wherever you got the contents from incremented the ref count already. Anywhere the core passes out an object to the API does this.
|
/// Safety: You need to make sure wherever you got the contents from incremented the ref count already. Anywhere the core passes out an object to the API does this.
|
||||||
pub(crate) unsafe fn new(contents: T) -> Self {
|
pub(crate) unsafe fn new(contents: T) -> Self {
|
||||||
@@ -151,6 +153,7 @@ impl<'a, T> Guard<'a, T> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(private_bounds)]
|
||||||
impl<'a, T> Guard<'a, T>
|
impl<'a, T> Guard<'a, T>
|
||||||
where
|
where
|
||||||
T: RefCountable,
|
T: RefCountable,
|
||||||
@@ -190,23 +193,18 @@ impl<'a, T> Borrow<T> for Guard<'a, T> {
|
|||||||
pub trait CoreArrayProvider {
|
pub trait CoreArrayProvider {
|
||||||
type Raw;
|
type Raw;
|
||||||
type Context;
|
type Context;
|
||||||
}
|
type Wrapped<'a>
|
||||||
|
|
||||||
pub unsafe trait CoreOwnedArrayProvider: CoreArrayProvider {
|
|
||||||
unsafe fn free(raw: *mut Self::Raw, count: usize, context: &Self::Context);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub unsafe trait CoreArrayWrapper<'a>: CoreArrayProvider
|
|
||||||
where
|
where
|
||||||
Self::Raw: 'a,
|
Self: 'a;
|
||||||
Self::Context: 'a,
|
|
||||||
{
|
|
||||||
type Wrapped: 'a;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct Array<P: CoreOwnedArrayProvider> {
|
pub(crate) unsafe trait CoreArrayProviderInner: CoreArrayProvider {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, context: &Self::Context);
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a>;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(private_bounds)]
|
||||||
|
pub struct Array<P: CoreArrayProviderInner> {
|
||||||
contents: *mut P::Raw,
|
contents: *mut P::Raw,
|
||||||
count: usize,
|
count: usize,
|
||||||
context: P::Context,
|
context: P::Context,
|
||||||
@@ -214,18 +212,19 @@ pub struct Array<P: CoreOwnedArrayProvider> {
|
|||||||
|
|
||||||
unsafe impl<P> Sync for Array<P>
|
unsafe impl<P> Sync for Array<P>
|
||||||
where
|
where
|
||||||
P: CoreOwnedArrayProvider,
|
P: CoreArrayProviderInner,
|
||||||
P::Context: Sync,
|
P::Context: Sync,
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
unsafe impl<P> Send for Array<P>
|
unsafe impl<P> Send for Array<P>
|
||||||
where
|
where
|
||||||
P: CoreOwnedArrayProvider,
|
P: CoreArrayProviderInner,
|
||||||
P::Context: Send,
|
P::Context: Send,
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<P: CoreOwnedArrayProvider> Array<P> {
|
#[allow(private_bounds)]
|
||||||
|
impl<P: CoreArrayProviderInner> Array<P> {
|
||||||
pub(crate) unsafe fn new(raw: *mut P::Raw, count: usize, context: P::Context) -> Self {
|
pub(crate) unsafe fn new(raw: *mut P::Raw, count: usize, context: P::Context) -> Self {
|
||||||
Self {
|
Self {
|
||||||
contents: raw,
|
contents: raw,
|
||||||
@@ -243,23 +242,19 @@ impl<P: CoreOwnedArrayProvider> Array<P> {
|
|||||||
pub fn is_empty(&self) -> bool {
|
pub fn is_empty(&self) -> bool {
|
||||||
self.count == 0
|
self.count == 0
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn into_raw_parts(self) -> (*mut P::Raw, usize) {
|
|
||||||
let me = mem::ManuallyDrop::new(self);
|
|
||||||
(me.contents, me.count)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, P: 'a + CoreArrayWrapper<'a> + CoreOwnedArrayProvider> Array<P> {
|
#[allow(private_bounds)]
|
||||||
|
impl<P: CoreArrayProviderInner> Array<P> {
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn get(&'a self, index: usize) -> P::Wrapped {
|
pub fn get(&self, index: usize) -> P::Wrapped<'_> {
|
||||||
unsafe {
|
unsafe {
|
||||||
let backing = slice::from_raw_parts(self.contents, self.count);
|
let backing = slice::from_raw_parts(self.contents, self.count);
|
||||||
P::wrap_raw(&backing[index], &self.context)
|
P::wrap_raw(&backing[index], &self.context)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn iter(&'a self) -> ArrayIter<'a, P> {
|
pub fn iter(&self) -> ArrayIter<P> {
|
||||||
ArrayIter {
|
ArrayIter {
|
||||||
it: unsafe { slice::from_raw_parts(self.contents, self.count).iter() },
|
it: unsafe { slice::from_raw_parts(self.contents, self.count).iter() },
|
||||||
context: &self.context,
|
context: &self.context,
|
||||||
@@ -267,8 +262,8 @@ impl<'a, P: 'a + CoreArrayWrapper<'a> + CoreOwnedArrayProvider> Array<P> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, P: 'a + CoreArrayWrapper<'a> + CoreOwnedArrayProvider> IntoIterator for &'a Array<P> {
|
impl<'a, P: CoreArrayProviderInner> IntoIterator for &'a Array<P> {
|
||||||
type Item = P::Wrapped;
|
type Item = P::Wrapped<'a>;
|
||||||
type IntoIter = ArrayIter<'a, P>;
|
type IntoIter = ArrayIter<'a, P>;
|
||||||
|
|
||||||
fn into_iter(self) -> Self::IntoIter {
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
@@ -276,7 +271,7 @@ impl<'a, P: 'a + CoreArrayWrapper<'a> + CoreOwnedArrayProvider> IntoIterator for
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<P: CoreOwnedArrayProvider> Drop for Array<P> {
|
impl<P: CoreArrayProviderInner> Drop for Array<P> {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
unsafe {
|
unsafe {
|
||||||
P::free(self.contents, self.count, &self.context);
|
P::free(self.contents, self.count, &self.context);
|
||||||
@@ -284,7 +279,8 @@ impl<P: CoreOwnedArrayProvider> Drop for Array<P> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct ArrayGuard<P: CoreArrayProvider> {
|
#[allow(private_bounds)]
|
||||||
|
pub struct ArrayGuard<P: CoreArrayProviderInner> {
|
||||||
contents: *mut P::Raw,
|
contents: *mut P::Raw,
|
||||||
count: usize,
|
count: usize,
|
||||||
context: P::Context,
|
context: P::Context,
|
||||||
@@ -292,18 +288,19 @@ pub struct ArrayGuard<P: CoreArrayProvider> {
|
|||||||
|
|
||||||
unsafe impl<P> Sync for ArrayGuard<P>
|
unsafe impl<P> Sync for ArrayGuard<P>
|
||||||
where
|
where
|
||||||
P: CoreArrayProvider,
|
P: CoreArrayProviderInner,
|
||||||
P::Context: Sync,
|
P::Context: Sync,
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
unsafe impl<P> Send for ArrayGuard<P>
|
unsafe impl<P> Send for ArrayGuard<P>
|
||||||
where
|
where
|
||||||
P: CoreArrayProvider,
|
P: CoreArrayProviderInner,
|
||||||
P::Context: Send,
|
P::Context: Send,
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<P: CoreArrayProvider> ArrayGuard<P> {
|
#[allow(private_bounds)]
|
||||||
|
impl<P: CoreArrayProviderInner> ArrayGuard<P> {
|
||||||
pub(crate) unsafe fn new(raw: *mut P::Raw, count: usize, context: P::Context) -> Self {
|
pub(crate) unsafe fn new(raw: *mut P::Raw, count: usize, context: P::Context) -> Self {
|
||||||
Self {
|
Self {
|
||||||
contents: raw,
|
contents: raw,
|
||||||
@@ -323,16 +320,17 @@ impl<P: CoreArrayProvider> ArrayGuard<P> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, P: 'a + CoreArrayWrapper<'a> + CoreArrayProvider> ArrayGuard<P> {
|
#[allow(private_bounds)]
|
||||||
|
impl<P: CoreArrayProviderInner> ArrayGuard<P> {
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn get(&'a self, index: usize) -> P::Wrapped {
|
pub fn get(&self, index: usize) -> P::Wrapped<'_> {
|
||||||
unsafe {
|
unsafe {
|
||||||
let backing = slice::from_raw_parts(self.contents, self.count);
|
let backing = slice::from_raw_parts(self.contents, self.count);
|
||||||
P::wrap_raw(&backing[index], &self.context)
|
P::wrap_raw(&backing[index], &self.context)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn iter(&'a self) -> ArrayIter<'a, P> {
|
pub fn iter(&self) -> ArrayIter<P> {
|
||||||
ArrayIter {
|
ArrayIter {
|
||||||
it: unsafe { slice::from_raw_parts(self.contents, self.count).iter() },
|
it: unsafe { slice::from_raw_parts(self.contents, self.count).iter() },
|
||||||
context: &self.context,
|
context: &self.context,
|
||||||
@@ -340,8 +338,8 @@ impl<'a, P: 'a + CoreArrayWrapper<'a> + CoreArrayProvider> ArrayGuard<P> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, P: 'a + CoreArrayWrapper<'a> + CoreArrayProvider> IntoIterator for &'a ArrayGuard<P> {
|
impl<'a, P: CoreArrayProviderInner> IntoIterator for &'a ArrayGuard<P> {
|
||||||
type Item = P::Wrapped;
|
type Item = P::Wrapped<'a>;
|
||||||
type IntoIter = ArrayIter<'a, P>;
|
type IntoIter = ArrayIter<'a, P>;
|
||||||
|
|
||||||
fn into_iter(self) -> Self::IntoIter {
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
@@ -349,29 +347,30 @@ impl<'a, P: 'a + CoreArrayWrapper<'a> + CoreArrayProvider> IntoIterator for &'a
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(private_bounds)]
|
||||||
pub struct ArrayIter<'a, P>
|
pub struct ArrayIter<'a, P>
|
||||||
where
|
where
|
||||||
P: 'a + CoreArrayWrapper<'a>,
|
P: CoreArrayProviderInner,
|
||||||
{
|
{
|
||||||
it: slice::Iter<'a, P::Raw>,
|
it: slice::Iter<'a, P::Raw>,
|
||||||
context: &'a P::Context,
|
context: &'a P::Context,
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl<'a, P> Send for ArrayIter<'a, P>
|
unsafe impl<P> Send for ArrayIter<'_, P>
|
||||||
where
|
where
|
||||||
P: CoreArrayWrapper<'a>,
|
P: CoreArrayProviderInner,
|
||||||
P::Context: Sync,
|
P::Context: Sync,
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, P> Iterator for ArrayIter<'a, P>
|
impl<'a, P> Iterator for ArrayIter<'a, P>
|
||||||
where
|
where
|
||||||
P: 'a + CoreArrayWrapper<'a>,
|
P: 'a + CoreArrayProviderInner,
|
||||||
{
|
{
|
||||||
type Item = P::Wrapped;
|
type Item = P::Wrapped<'a>;
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn next(&mut self) -> Option<P::Wrapped> {
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
self.it
|
self.it
|
||||||
.next()
|
.next()
|
||||||
.map(|r| unsafe { P::wrap_raw(r, self.context) })
|
.map(|r| unsafe { P::wrap_raw(r, self.context) })
|
||||||
@@ -385,7 +384,7 @@ where
|
|||||||
|
|
||||||
impl<'a, P> ExactSizeIterator for ArrayIter<'a, P>
|
impl<'a, P> ExactSizeIterator for ArrayIter<'a, P>
|
||||||
where
|
where
|
||||||
P: 'a + CoreArrayWrapper<'a>,
|
P: 'a + CoreArrayProviderInner,
|
||||||
{
|
{
|
||||||
#[inline]
|
#[inline]
|
||||||
fn len(&self) -> usize {
|
fn len(&self) -> usize {
|
||||||
@@ -395,10 +394,10 @@ where
|
|||||||
|
|
||||||
impl<'a, P> DoubleEndedIterator for ArrayIter<'a, P>
|
impl<'a, P> DoubleEndedIterator for ArrayIter<'a, P>
|
||||||
where
|
where
|
||||||
P: 'a + CoreArrayWrapper<'a>,
|
P: 'a + CoreArrayProviderInner,
|
||||||
{
|
{
|
||||||
#[inline]
|
#[inline]
|
||||||
fn next_back(&mut self) -> Option<P::Wrapped> {
|
fn next_back(&mut self) -> Option<P::Wrapped<'a>> {
|
||||||
self.it
|
self.it
|
||||||
.next_back()
|
.next_back()
|
||||||
.map(|r| unsafe { P::wrap_raw(r, self.context) })
|
.map(|r| unsafe { P::wrap_raw(r, self.context) })
|
||||||
@@ -411,21 +410,23 @@ use rayon::prelude::*;
|
|||||||
#[cfg(feature = "rayon")]
|
#[cfg(feature = "rayon")]
|
||||||
use rayon::iter::plumbing::*;
|
use rayon::iter::plumbing::*;
|
||||||
|
|
||||||
|
#[allow(private_bounds)]
|
||||||
#[cfg(feature = "rayon")]
|
#[cfg(feature = "rayon")]
|
||||||
impl<'a, P> Array<P>
|
impl<P> Array<P>
|
||||||
where
|
where
|
||||||
P: 'a + CoreArrayWrapper<'a> + CoreOwnedArrayProvider,
|
P: CoreArrayProviderInner,
|
||||||
P::Context: Sync,
|
P::Context: Sync,
|
||||||
P::Wrapped: Send,
|
for<'a> P::Wrapped<'a>: Send,
|
||||||
{
|
{
|
||||||
pub fn par_iter(&'a self) -> ParArrayIter<'a, P> {
|
pub fn par_iter(&self) -> ParArrayIter<'_, P> {
|
||||||
ParArrayIter { it: self.iter() }
|
ParArrayIter { it: self.iter() }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
#[allow(private_bounds)]
|
||||||
#[cfg(feature = "rayon")]
|
#[cfg(feature = "rayon")]
|
||||||
pub struct ParArrayIter<'a, P>
|
pub struct ParArrayIter<'a, P>
|
||||||
where
|
where
|
||||||
P: 'a + CoreArrayWrapper<'a>,
|
P: CoreArrayProviderInner,
|
||||||
ArrayIter<'a, P>: Send,
|
ArrayIter<'a, P>: Send,
|
||||||
{
|
{
|
||||||
it: ArrayIter<'a, P>,
|
it: ArrayIter<'a, P>,
|
||||||
@@ -434,11 +435,11 @@ where
|
|||||||
#[cfg(feature = "rayon")]
|
#[cfg(feature = "rayon")]
|
||||||
impl<'a, P> ParallelIterator for ParArrayIter<'a, P>
|
impl<'a, P> ParallelIterator for ParArrayIter<'a, P>
|
||||||
where
|
where
|
||||||
P: 'a + CoreArrayWrapper<'a>,
|
P: 'a + CoreArrayProviderInner,
|
||||||
P::Wrapped: Send,
|
P::Wrapped<'a>: Send,
|
||||||
ArrayIter<'a, P>: Send,
|
ArrayIter<'a, P>: Send,
|
||||||
{
|
{
|
||||||
type Item = P::Wrapped;
|
type Item = P::Wrapped<'a>;
|
||||||
|
|
||||||
fn drive_unindexed<C>(self, consumer: C) -> C::Result
|
fn drive_unindexed<C>(self, consumer: C) -> C::Result
|
||||||
where
|
where
|
||||||
@@ -455,8 +456,8 @@ where
|
|||||||
#[cfg(feature = "rayon")]
|
#[cfg(feature = "rayon")]
|
||||||
impl<'a, P> IndexedParallelIterator for ParArrayIter<'a, P>
|
impl<'a, P> IndexedParallelIterator for ParArrayIter<'a, P>
|
||||||
where
|
where
|
||||||
P: 'a + CoreArrayWrapper<'a>,
|
P: 'a + CoreArrayProviderInner,
|
||||||
P::Wrapped: Send,
|
P::Wrapped<'a>: Send,
|
||||||
ArrayIter<'a, P>: Send,
|
ArrayIter<'a, P>: Send,
|
||||||
{
|
{
|
||||||
fn drive<C>(self, consumer: C) -> C::Result
|
fn drive<C>(self, consumer: C) -> C::Result
|
||||||
@@ -481,7 +482,7 @@ where
|
|||||||
#[cfg(feature = "rayon")]
|
#[cfg(feature = "rayon")]
|
||||||
struct ArrayIterProducer<'a, P>
|
struct ArrayIterProducer<'a, P>
|
||||||
where
|
where
|
||||||
P: 'a + CoreArrayWrapper<'a>,
|
P: 'a + CoreArrayProviderInner,
|
||||||
ArrayIter<'a, P>: Send,
|
ArrayIter<'a, P>: Send,
|
||||||
{
|
{
|
||||||
it: ArrayIter<'a, P>,
|
it: ArrayIter<'a, P>,
|
||||||
@@ -490,10 +491,10 @@ where
|
|||||||
#[cfg(feature = "rayon")]
|
#[cfg(feature = "rayon")]
|
||||||
impl<'a, P> Producer for ArrayIterProducer<'a, P>
|
impl<'a, P> Producer for ArrayIterProducer<'a, P>
|
||||||
where
|
where
|
||||||
P: 'a + CoreArrayWrapper<'a>,
|
P: 'a + CoreArrayProviderInner,
|
||||||
ArrayIter<'a, P>: Send,
|
ArrayIter<'a, P>: Send,
|
||||||
{
|
{
|
||||||
type Item = P::Wrapped;
|
type Item = P::Wrapped<'a>;
|
||||||
type IntoIter = ArrayIter<'a, P>;
|
type IntoIter = ArrayIter<'a, P>;
|
||||||
|
|
||||||
fn into_iter(self) -> ArrayIter<'a, P> {
|
fn into_iter(self) -> ArrayIter<'a, P> {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
use crate::architecture::CoreArchitecture;
|
use crate::architecture::CoreArchitecture;
|
||||||
use crate::function::Function;
|
use crate::function::Function;
|
||||||
use crate::rc::{CoreArrayProvider, CoreArrayWrapper, CoreOwnedArrayProvider, Ref};
|
use crate::rc::{CoreArrayProvider, CoreArrayProviderInner, Guard, Ref};
|
||||||
use binaryninjacore_sys::{BNFreeCodeReferences, BNFreeDataReferences, BNReferenceSource};
|
use binaryninjacore_sys::{BNFreeCodeReferences, BNFreeDataReferences, BNReferenceSource};
|
||||||
use std::mem::ManuallyDrop;
|
use std::mem::ManuallyDrop;
|
||||||
|
|
||||||
@@ -56,19 +56,15 @@ impl<'a> CodeReference {
|
|||||||
impl CoreArrayProvider for CodeReference {
|
impl CoreArrayProvider for CodeReference {
|
||||||
type Raw = BNReferenceSource;
|
type Raw = BNReferenceSource;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, CodeReference>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for CodeReference {
|
unsafe impl CoreArrayProviderInner for CodeReference {
|
||||||
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
BNFreeCodeReferences(raw, count)
|
BNFreeCodeReferences(raw, count)
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Guard::new(CodeReference::new(raw), &())
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for CodeReference {
|
|
||||||
type Wrapped = CodeReference;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped {
|
|
||||||
CodeReference::new(raw)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -77,18 +73,14 @@ unsafe impl<'a> CoreArrayWrapper<'a> for CodeReference {
|
|||||||
impl CoreArrayProvider for DataReference {
|
impl CoreArrayProvider for DataReference {
|
||||||
type Raw = u64;
|
type Raw = u64;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = DataReference;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for DataReference {
|
unsafe impl CoreArrayProviderInner for DataReference {
|
||||||
unsafe fn free(raw: *mut Self::Raw, _count: usize, _context: &Self::Context) {
|
unsafe fn free(raw: *mut Self::Raw, _count: usize, _context: &Self::Context) {
|
||||||
BNFreeDataReferences(raw)
|
BNFreeDataReferences(raw)
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for DataReference {
|
|
||||||
type Wrapped = DataReference;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped {
|
|
||||||
DataReference { address: *raw }
|
DataReference { address: *raw }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,13 +1,15 @@
|
|||||||
|
use crate::rc::Guard;
|
||||||
use crate::string::BnStrCompatible;
|
use crate::string::BnStrCompatible;
|
||||||
use crate::{
|
use crate::{
|
||||||
architecture::{Architecture, CoreArchitecture},
|
architecture::{Architecture, CoreArchitecture},
|
||||||
binaryview::BinaryView,
|
binaryview::BinaryView,
|
||||||
llil,
|
llil,
|
||||||
rc::{CoreArrayProvider, CoreArrayWrapper, CoreOwnedArrayProvider, Ref, RefCountable},
|
rc::{CoreArrayProvider, CoreArrayProviderInner, Ref, RefCountable},
|
||||||
symbol::Symbol,
|
symbol::Symbol,
|
||||||
};
|
};
|
||||||
use binaryninjacore_sys::*;
|
use binaryninjacore_sys::*;
|
||||||
use std::borrow::Borrow;
|
use std::borrow::Borrow;
|
||||||
|
use std::mem::MaybeUninit;
|
||||||
use std::os::raw::c_void;
|
use std::os::raw::c_void;
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||||
@@ -219,18 +221,15 @@ impl Relocation {
|
|||||||
impl CoreArrayProvider for Relocation {
|
impl CoreArrayProvider for Relocation {
|
||||||
type Raw = *mut BNRelocation;
|
type Raw = *mut BNRelocation;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, Relocation>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for Relocation {
|
unsafe impl CoreArrayProviderInner for Relocation {
|
||||||
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
BNFreeRelocationList(raw, count);
|
BNFreeRelocationList(raw, count);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Guard::new(Relocation(*raw), &())
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for Relocation {
|
|
||||||
type Wrapped = Relocation;
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped {
|
|
||||||
Relocation(*raw)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -501,12 +500,9 @@ where
|
|||||||
|
|
||||||
let name = name.into_bytes_with_nul();
|
let name = name.into_bytes_with_nul();
|
||||||
|
|
||||||
let uninit_handler = RelocationHandlerBuilder {
|
let raw = Box::leak(Box::new(MaybeUninit::<RelocationHandlerBuilder<_>>::zeroed()));
|
||||||
handler: unsafe { std::mem::zeroed() },
|
|
||||||
};
|
|
||||||
let raw = Box::into_raw(Box::new(uninit_handler));
|
|
||||||
let mut custom_handler = BNCustomRelocationHandler {
|
let mut custom_handler = BNCustomRelocationHandler {
|
||||||
context: raw as *mut _,
|
context: raw.as_mut_ptr() as *mut _,
|
||||||
freeObject: Some(cb_free::<R>),
|
freeObject: Some(cb_free::<R>),
|
||||||
getRelocationInfo: Some(cb_get_relocation_info::<R>),
|
getRelocationInfo: Some(cb_get_relocation_info::<R>),
|
||||||
applyRelocation: Some(cb_apply_relocation::<R>),
|
applyRelocation: Some(cb_apply_relocation::<R>),
|
||||||
@@ -517,13 +513,12 @@ where
|
|||||||
assert!(!handle_raw.is_null());
|
assert!(!handle_raw.is_null());
|
||||||
let handle = CoreRelocationHandler(handle_raw);
|
let handle = CoreRelocationHandler(handle_raw);
|
||||||
let custom_handle = CustomRelocationHandlerHandle {
|
let custom_handle = CustomRelocationHandlerHandle {
|
||||||
handle: raw as *mut R,
|
handle: raw.as_mut_ptr() as *mut R,
|
||||||
};
|
};
|
||||||
unsafe {
|
unsafe {
|
||||||
core::ptr::write(
|
raw.write(RelocationHandlerBuilder {
|
||||||
&mut raw.as_mut().unwrap().handler,
|
handler: func(custom_handle, CoreRelocationHandler(handle.0)),
|
||||||
func(custom_handle, CoreRelocationHandler(handle.0)),
|
});
|
||||||
);
|
|
||||||
|
|
||||||
BNArchitectureRegisterRelocationHandler(
|
BNArchitectureRegisterRelocationHandler(
|
||||||
arch.handle().as_ref().0,
|
arch.handle().as_ref().0,
|
||||||
|
|||||||
@@ -72,8 +72,11 @@ impl Section {
|
|||||||
|
|
||||||
/// You need to create a section builder, customize that section, then add it to a binary view:
|
/// You need to create a section builder, customize that section, then add it to a binary view:
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```no_run
|
||||||
/// bv.add_section(Section::new().align(4).entry_size(4))
|
/// # use binaryninja::section::Section;
|
||||||
|
/// # use binaryninja::binaryview::BinaryViewExt;
|
||||||
|
/// let bv = binaryninja::load("example").unwrap();
|
||||||
|
/// bv.add_section(Section::builder("example", 0..1024).align(4).entry_size(4))
|
||||||
/// ```
|
/// ```
|
||||||
pub fn builder<S: BnStrCompatible>(name: S, range: Range<u64>) -> SectionBuilder<S> {
|
pub fn builder<S: BnStrCompatible>(name: S, range: Range<u64>) -> SectionBuilder<S> {
|
||||||
SectionBuilder::new(name, range)
|
SectionBuilder::new(name, range)
|
||||||
@@ -171,18 +174,14 @@ unsafe impl RefCountable for Section {
|
|||||||
impl CoreArrayProvider for Section {
|
impl CoreArrayProvider for Section {
|
||||||
type Raw = *mut BNSection;
|
type Raw = *mut BNSection;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, Section>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for Section {
|
unsafe impl CoreArrayProviderInner for Section {
|
||||||
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
BNFreeSectionList(raw, count);
|
BNFreeSectionList(raw, count);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for Section {
|
|
||||||
type Wrapped = Guard<'a, Section>;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped {
|
|
||||||
Guard::new(Section::from_raw(*raw), context)
|
Guard::new(Section::from_raw(*raw), context)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -117,8 +117,11 @@ impl Segment {
|
|||||||
|
|
||||||
/// You need to create a segment builder, customize that segment, then add it to a binary view:
|
/// You need to create a segment builder, customize that segment, then add it to a binary view:
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```no_run
|
||||||
/// bv.add_segment(Segment::new().align(4).entry_size(4))
|
/// # use binaryninja::segment::Segment;
|
||||||
|
/// # use binaryninja::binaryview::BinaryViewExt;
|
||||||
|
/// let bv = binaryninja::load("example").unwrap();
|
||||||
|
/// bv.add_segment(Segment::builder(0..0x1000).writable(true).readable(true))
|
||||||
/// ```
|
/// ```
|
||||||
pub fn builder(ea_range: Range<u64>) -> SegmentBuilder {
|
pub fn builder(ea_range: Range<u64>) -> SegmentBuilder {
|
||||||
SegmentBuilder::new(ea_range)
|
SegmentBuilder::new(ea_range)
|
||||||
@@ -201,18 +204,14 @@ unsafe impl RefCountable for Segment {
|
|||||||
impl CoreArrayProvider for Segment {
|
impl CoreArrayProvider for Segment {
|
||||||
type Raw = *mut BNSegment;
|
type Raw = *mut BNSegment;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, Segment>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for Segment {
|
unsafe impl CoreArrayProviderInner for Segment {
|
||||||
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
BNFreeSegmentList(raw, count);
|
BNFreeSegmentList(raw, count);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for Segment {
|
|
||||||
type Wrapped = Guard<'a, Segment>;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped {
|
|
||||||
Guard::new(Segment::from_raw(*raw), context)
|
Guard::new(Segment::from_raw(*raw), context)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
130
src/string.rs
130
src/string.rs
@@ -14,7 +14,7 @@
|
|||||||
|
|
||||||
//! String wrappers for core-owned strings and strings being passed to the core
|
//! String wrappers for core-owned strings and strings being passed to the core
|
||||||
|
|
||||||
use std::borrow::{Borrow, Cow};
|
use std::borrow::Cow;
|
||||||
use std::ffi::{CStr, CString};
|
use std::ffi::{CStr, CString};
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::hash::{Hash, Hasher};
|
use std::hash::{Hash, Hasher};
|
||||||
@@ -33,61 +33,9 @@ pub(crate) fn raw_to_string(ptr: *const raw::c_char) -> Option<String> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// These are strings that the core will both allocate and free.
|
/// Is the quivalent of `core::ffi::CString` but using the allocation and free
|
||||||
/// We just have a reference to these strings and want to be able use them, but aren't responsible for cleanup
|
/// functions provided by binaryninja_sys.
|
||||||
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
|
#[repr(transparent)]
|
||||||
#[repr(C)]
|
|
||||||
pub struct BnStr {
|
|
||||||
raw: [u8],
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BnStr {
|
|
||||||
pub(crate) unsafe fn from_raw<'a>(ptr: *const raw::c_char) -> &'a Self {
|
|
||||||
mem::transmute(CStr::from_ptr(ptr).to_bytes_with_nul())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn as_str(&self) -> &str {
|
|
||||||
self.as_cstr().to_str().unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn as_cstr(&self) -> &CStr {
|
|
||||||
unsafe { CStr::from_bytes_with_nul_unchecked(&self.raw) }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for BnStr {
|
|
||||||
type Target = str;
|
|
||||||
|
|
||||||
fn deref(&self) -> &str {
|
|
||||||
self.as_str()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AsRef<[u8]> for BnStr {
|
|
||||||
fn as_ref(&self) -> &[u8] {
|
|
||||||
&self.raw
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AsRef<str> for BnStr {
|
|
||||||
fn as_ref(&self) -> &str {
|
|
||||||
self.as_str()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Borrow<str> for BnStr {
|
|
||||||
fn borrow(&self) -> &str {
|
|
||||||
self.as_str()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for BnStr {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
write!(f, "{}", self.as_cstr().to_string_lossy())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[repr(C)]
|
|
||||||
pub struct BnString {
|
pub struct BnString {
|
||||||
raw: *mut raw::c_char,
|
raw: *mut raw::c_char,
|
||||||
}
|
}
|
||||||
@@ -131,8 +79,28 @@ impl BnString {
|
|||||||
res
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn as_raw(&self) -> &raw::c_char {
|
||||||
|
unsafe { &*self.raw }
|
||||||
|
}
|
||||||
|
|
||||||
pub fn as_str(&self) -> &str {
|
pub fn as_str(&self) -> &str {
|
||||||
unsafe { BnStr::from_raw(self.raw).as_str() }
|
unsafe { CStr::from_ptr(self.raw).to_str().unwrap() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_bytes(&self) -> &[u8] {
|
||||||
|
self.as_str().as_bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_bytes_with_null(&self) -> &[u8] {
|
||||||
|
self.deref().to_bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
self.as_ref().len()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_empty(&self) -> bool {
|
||||||
|
self.as_ref().is_empty()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -158,16 +126,16 @@ impl Clone for BnString {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl Deref for BnString {
|
impl Deref for BnString {
|
||||||
type Target = BnStr;
|
type Target = CStr;
|
||||||
|
|
||||||
fn deref(&self) -> &BnStr {
|
fn deref(&self) -> &CStr {
|
||||||
unsafe { BnStr::from_raw(self.raw) }
|
unsafe { CStr::from_ptr(self.raw) }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AsRef<[u8]> for BnString {
|
impl AsRef<[u8]> for BnString {
|
||||||
fn as_ref(&self) -> &[u8] {
|
fn as_ref(&self) -> &[u8] {
|
||||||
self.as_cstr().to_bytes_with_nul()
|
self.to_bytes_with_nul()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -187,33 +155,29 @@ impl Eq for BnString {}
|
|||||||
|
|
||||||
impl fmt::Display for BnString {
|
impl fmt::Display for BnString {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
write!(f, "{}", self.as_cstr().to_string_lossy())
|
write!(f, "{}", self.to_string_lossy())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl fmt::Debug for BnString {
|
impl fmt::Debug for BnString {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
write!(f, "{}", self.as_cstr().to_string_lossy())
|
write!(f, "{}", self.to_string_lossy())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl CoreArrayProvider for BnString {
|
impl CoreArrayProvider for BnString {
|
||||||
type Raw = *mut raw::c_char;
|
type Raw = *mut raw::c_char;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = &'a str;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for BnString {
|
unsafe impl CoreArrayProviderInner for BnString {
|
||||||
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
use binaryninjacore_sys::BNFreeStringList;
|
use binaryninjacore_sys::BNFreeStringList;
|
||||||
BNFreeStringList(raw, count);
|
BNFreeStringList(raw, count);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
CStr::from_ptr(*raw).to_str().unwrap()
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for BnString {
|
|
||||||
type Wrapped = &'a BnStr;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped {
|
|
||||||
BnStr::from_raw(*raw)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -222,11 +186,11 @@ pub unsafe trait BnStrCompatible {
|
|||||||
fn into_bytes_with_nul(self) -> Self::Result;
|
fn into_bytes_with_nul(self) -> Self::Result;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl<'a> BnStrCompatible for &'a BnStr {
|
unsafe impl<'a> BnStrCompatible for &'a CStr {
|
||||||
type Result = &'a [u8];
|
type Result = &'a [u8];
|
||||||
|
|
||||||
fn into_bytes_with_nul(self) -> Self::Result {
|
fn into_bytes_with_nul(self) -> Self::Result {
|
||||||
self.as_cstr().to_bytes_with_nul()
|
self.to_bytes_with_nul()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -238,14 +202,6 @@ unsafe impl BnStrCompatible for BnString {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl<'a> BnStrCompatible for &'a CStr {
|
|
||||||
type Result = &'a [u8];
|
|
||||||
|
|
||||||
fn into_bytes_with_nul(self) -> Self::Result {
|
|
||||||
self.to_bytes_with_nul()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
unsafe impl BnStrCompatible for CString {
|
unsafe impl BnStrCompatible for CString {
|
||||||
type Result = Vec<u8>;
|
type Result = Vec<u8>;
|
||||||
|
|
||||||
@@ -294,3 +250,15 @@ unsafe impl BnStrCompatible for &QualifiedName {
|
|||||||
self.string().into_bytes_with_nul()
|
self.string().into_bytes_with_nul()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub trait IntoJson {
|
||||||
|
type Output: BnStrCompatible;
|
||||||
|
fn get_json_string(self) -> Result<Self::Output, ()>;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<S: BnStrCompatible> IntoJson for S {
|
||||||
|
type Output = S;
|
||||||
|
fn get_json_string(self) -> Result<Self::Output, ()> {
|
||||||
|
Ok(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -230,8 +230,10 @@ impl Symbol {
|
|||||||
|
|
||||||
/// To create a new symbol, you need to create a symbol builder, customize that symbol, then add `SymbolBuilder::create` it into a `Ref<Symbol>`:
|
/// To create a new symbol, you need to create a symbol builder, customize that symbol, then add `SymbolBuilder::create` it into a `Ref<Symbol>`:
|
||||||
///
|
///
|
||||||
/// ```
|
/// ```no_run
|
||||||
/// Symbol::new().short_name("hello").full_name("hello").create();
|
/// # use binaryninja::symbol::Symbol;
|
||||||
|
/// # use binaryninja::symbol::SymbolType;
|
||||||
|
/// Symbol::builder(SymbolType::Data, "hello", 0x1337).short_name("hello").full_name("hello").create();
|
||||||
/// ```
|
/// ```
|
||||||
pub fn builder(ty: SymbolType, raw_name: &str, addr: u64) -> SymbolBuilder {
|
pub fn builder(ty: SymbolType, raw_name: &str, addr: u64) -> SymbolBuilder {
|
||||||
SymbolBuilder::new(ty, raw_name, addr)
|
SymbolBuilder::new(ty, raw_name, addr)
|
||||||
@@ -246,24 +248,15 @@ impl Symbol {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn full_name(&self) -> BnString {
|
pub fn full_name(&self) -> BnString {
|
||||||
unsafe {
|
unsafe { BnString::from_raw(BNGetSymbolFullName(self.handle)) }
|
||||||
let name = BNGetSymbolFullName(self.handle);
|
|
||||||
BnString::from_raw(name)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn short_name(&self) -> BnString {
|
pub fn short_name(&self) -> BnString {
|
||||||
unsafe {
|
unsafe { BnString::from_raw(BNGetSymbolShortName(self.handle)) }
|
||||||
let name = BNGetSymbolShortName(self.handle);
|
|
||||||
BnString::from_raw(name)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn raw_name(&self) -> BnString {
|
pub fn raw_name(&self) -> BnString {
|
||||||
unsafe {
|
unsafe { BnString::from_raw(BNGetSymbolRawName(self.handle)) }
|
||||||
let name = BNGetSymbolRawName(self.handle);
|
|
||||||
BnString::from_raw(name)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn address(&self) -> u64 {
|
pub fn address(&self) -> u64 {
|
||||||
@@ -326,18 +319,14 @@ unsafe impl RefCountable for Symbol {
|
|||||||
impl CoreArrayProvider for Symbol {
|
impl CoreArrayProvider for Symbol {
|
||||||
type Raw = *mut BNSymbol;
|
type Raw = *mut BNSymbol;
|
||||||
type Context = ();
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, Symbol>;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl CoreOwnedArrayProvider for Symbol {
|
unsafe impl CoreArrayProviderInner for Symbol {
|
||||||
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
BNFreeSymbolList(raw, count);
|
BNFreeSymbolList(raw, count);
|
||||||
}
|
}
|
||||||
}
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
|
||||||
unsafe impl<'a> CoreArrayWrapper<'a> for Symbol {
|
|
||||||
type Wrapped = Guard<'a, Symbol>;
|
|
||||||
|
|
||||||
unsafe fn wrap_raw(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped {
|
|
||||||
Guard::new(Symbol::from_raw(*raw), context)
|
Guard::new(Symbol::from_raw(*raw), context)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
76
src/tags.rs
76
src/tags.rs
@@ -16,8 +16,10 @@
|
|||||||
|
|
||||||
use binaryninjacore_sys::*;
|
use binaryninjacore_sys::*;
|
||||||
|
|
||||||
|
use crate::architecture::CoreArchitecture;
|
||||||
use crate::binaryview::BinaryView;
|
use crate::binaryview::BinaryView;
|
||||||
|
|
||||||
|
use crate::function::Function;
|
||||||
use crate::rc::*;
|
use crate::rc::*;
|
||||||
use crate::string::*;
|
use crate::string::*;
|
||||||
|
|
||||||
@@ -77,6 +79,21 @@ impl ToOwned for Tag {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for Tag {
|
||||||
|
type Raw = *mut BNTag;
|
||||||
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Guard<'a, Self>;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for Tag {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeTagList(raw, count)
|
||||||
|
}
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Guard::new(Self { handle: *raw }, &context)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
unsafe impl Send for Tag {}
|
unsafe impl Send for Tag {}
|
||||||
unsafe impl Sync for Tag {}
|
unsafe impl Sync for Tag {}
|
||||||
|
|
||||||
@@ -115,7 +132,7 @@ impl TagType {
|
|||||||
pub fn set_icon<S: BnStrCompatible>(&self, icon: S) {
|
pub fn set_icon<S: BnStrCompatible>(&self, icon: S) {
|
||||||
let icon = icon.into_bytes_with_nul();
|
let icon = icon.into_bytes_with_nul();
|
||||||
unsafe {
|
unsafe {
|
||||||
BNTagTypeSetName(self.handle, icon.as_ref().as_ptr() as *mut _);
|
BNTagTypeSetIcon(self.handle, icon.as_ref().as_ptr() as *mut _);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -176,3 +193,60 @@ impl ToOwned for TagType {
|
|||||||
|
|
||||||
unsafe impl Send for TagType {}
|
unsafe impl Send for TagType {}
|
||||||
unsafe impl Sync for TagType {}
|
unsafe impl Sync for TagType {}
|
||||||
|
|
||||||
|
pub type TagReferenceType = BNTagReferenceType;
|
||||||
|
|
||||||
|
pub struct TagReference {
|
||||||
|
ref_type: TagReferenceType,
|
||||||
|
auto_defined: bool,
|
||||||
|
tag: Ref<Tag>,
|
||||||
|
arch: CoreArchitecture,
|
||||||
|
func: Ref<Function>,
|
||||||
|
addr: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TagReference {
|
||||||
|
unsafe fn from_borrowed_raw(value: &BNTagReference) -> Self {
|
||||||
|
Self {
|
||||||
|
ref_type: value.refType,
|
||||||
|
auto_defined: value.autoDefined,
|
||||||
|
tag: Tag { handle: value.tag }.to_owned(),
|
||||||
|
arch: CoreArchitecture::from_raw(value.arch),
|
||||||
|
func: Function { handle: value.func }.to_owned(),
|
||||||
|
addr: value.addr,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn ref_type(&self) -> TagReferenceType {
|
||||||
|
self.ref_type
|
||||||
|
}
|
||||||
|
pub fn auto(&self) -> bool {
|
||||||
|
self.auto_defined
|
||||||
|
}
|
||||||
|
pub fn tag(&self) -> &Tag {
|
||||||
|
&self.tag
|
||||||
|
}
|
||||||
|
pub fn arch(&self) -> CoreArchitecture {
|
||||||
|
self.arch
|
||||||
|
}
|
||||||
|
pub fn functions(&self) -> &Function {
|
||||||
|
&self.func
|
||||||
|
}
|
||||||
|
pub fn address(&self) -> u64 {
|
||||||
|
self.addr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for TagReference {
|
||||||
|
type Raw = BNTagReference;
|
||||||
|
type Context = ();
|
||||||
|
type Wrapped<'a> = Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for TagReference {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeTagReferences(raw, count)
|
||||||
|
}
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Self::from_borrowed_raw(raw)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
947
src/typearchive.rs
Normal file
947
src/typearchive.rs
Normal file
@@ -0,0 +1,947 @@
|
|||||||
|
use core::{ffi, mem, ptr};
|
||||||
|
|
||||||
|
use binaryninjacore_sys::*;
|
||||||
|
|
||||||
|
use crate::databuffer::DataBuffer;
|
||||||
|
use crate::metadata::Metadata;
|
||||||
|
use crate::platform::Platform;
|
||||||
|
use crate::rc::{Array, CoreArrayProvider, CoreArrayProviderInner, Ref};
|
||||||
|
use crate::string::{BnStrCompatible, BnString};
|
||||||
|
use crate::types::{QualifiedName, QualifiedNameAndType, QualifiedNameTypeAndId, Type};
|
||||||
|
|
||||||
|
/// Type Archives are a collection of types which can be shared between different analysis
|
||||||
|
/// sessions and are backed by a database file on disk. Their types can be modified, and
|
||||||
|
/// a history of previous versions of types is stored in snapshots in the archive.
|
||||||
|
#[repr(transparent)]
|
||||||
|
pub struct TypeArchive {
|
||||||
|
handle: ptr::NonNull<BNTypeArchive>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for TypeArchive {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { BNFreeTypeArchiveReference(self.as_raw()) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Clone for TypeArchive {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
unsafe {
|
||||||
|
Self::from_raw(ptr::NonNull::new(BNNewTypeArchiveReference(self.as_raw())).unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialEq for TypeArchive {
|
||||||
|
fn eq(&self, other: &Self) -> bool {
|
||||||
|
self.id() == other.id()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Eq for TypeArchive {}
|
||||||
|
|
||||||
|
impl core::hash::Hash for TypeArchive {
|
||||||
|
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||||
|
(self.handle.as_ptr() as usize).hash(state);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl core::fmt::Debug for TypeArchive {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
let path = self.path().map(|x| x.to_string());
|
||||||
|
f.debug_struct("TypeArchive").field("path", &path).finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TypeArchive {
|
||||||
|
pub(crate) unsafe fn from_raw(handle: ptr::NonNull<BNTypeArchive>) -> Self {
|
||||||
|
Self { handle }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) unsafe fn ref_from_raw(handle: &*mut BNTypeArchive) -> &Self {
|
||||||
|
assert!(!handle.is_null());
|
||||||
|
mem::transmute(handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::mut_from_ref)]
|
||||||
|
pub(crate) unsafe fn as_raw(&self) -> &mut BNTypeArchive {
|
||||||
|
&mut *self.handle.as_ptr()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Open the Type Archive at the given path, if it exists.
|
||||||
|
pub fn open<S: BnStrCompatible>(path: S) -> Option<TypeArchive> {
|
||||||
|
let path = path.into_bytes_with_nul();
|
||||||
|
let handle = unsafe { BNOpenTypeArchive(path.as_ref().as_ptr() as *const ffi::c_char) };
|
||||||
|
ptr::NonNull::new(handle).map(|handle| unsafe { TypeArchive::from_raw(handle) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a Type Archive at the given path, returning None if it could not be created.
|
||||||
|
pub fn create<S: BnStrCompatible>(path: S, platform: &Platform) -> Option<TypeArchive> {
|
||||||
|
let path = path.into_bytes_with_nul();
|
||||||
|
let handle = unsafe {
|
||||||
|
BNCreateTypeArchive(
|
||||||
|
path.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
platform.handle,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
ptr::NonNull::new(handle).map(|handle| unsafe { TypeArchive::from_raw(handle) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a Type Archive at the given path and id, returning None if it could not be created.
|
||||||
|
pub fn create_with_id<P: BnStrCompatible, I: BnStrCompatible>(
|
||||||
|
path: P,
|
||||||
|
id: I,
|
||||||
|
platform: &Platform,
|
||||||
|
) -> Option<TypeArchive> {
|
||||||
|
let path = path.into_bytes_with_nul();
|
||||||
|
let id = id.into_bytes_with_nul();
|
||||||
|
let handle = unsafe {
|
||||||
|
BNCreateTypeArchiveWithId(
|
||||||
|
path.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
platform.handle,
|
||||||
|
id.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
ptr::NonNull::new(handle).map(|handle| unsafe { TypeArchive::from_raw(handle) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a reference to the Type Archive with the known id, if one exists.
|
||||||
|
pub fn lookup_by_id<S: BnStrCompatible>(id: S) -> Option<TypeArchive> {
|
||||||
|
let id = id.into_bytes_with_nul();
|
||||||
|
let handle = unsafe { BNLookupTypeArchiveById(id.as_ref().as_ptr() as *const ffi::c_char) };
|
||||||
|
ptr::NonNull::new(handle).map(|handle| unsafe { TypeArchive::from_raw(handle) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the path to the Type Archive's file
|
||||||
|
pub fn path(&self) -> Option<BnString> {
|
||||||
|
let result = unsafe { BNGetTypeArchivePath(self.as_raw()) };
|
||||||
|
(!result.is_null()).then(|| unsafe { BnString::from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the guid for a Type Archive
|
||||||
|
pub fn id(&self) -> Option<BnString> {
|
||||||
|
let result = unsafe { BNGetTypeArchiveId(self.as_raw()) };
|
||||||
|
(!result.is_null()).then(|| unsafe { BnString::from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the associated Platform for a Type Archive
|
||||||
|
pub fn platform(&self) -> Ref<Platform> {
|
||||||
|
let result = unsafe { BNGetTypeArchivePlatform(self.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Platform::ref_from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the id of the current snapshot in the type archive
|
||||||
|
pub fn current_snapshot_id(&self) -> BnString {
|
||||||
|
let result = unsafe { BNGetTypeArchiveCurrentSnapshotId(self.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { BnString::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Revert the type archive's current snapshot to the given snapshot
|
||||||
|
pub fn set_current_snapshot_id<S: BnStrCompatible>(&self, id: S) {
|
||||||
|
let id = id.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
BNSetTypeArchiveCurrentSnapshot(
|
||||||
|
self.as_raw(),
|
||||||
|
id.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a list of every snapshot's id
|
||||||
|
pub fn all_snapshot_ids(&self) -> Array<BnString> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNGetTypeArchiveAllSnapshotIds(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the ids of the parents to the given snapshot
|
||||||
|
pub fn get_snapshot_parent_ids<S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
snapshot: S,
|
||||||
|
) -> Option<Array<BnString>> {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveSnapshotParentIds(
|
||||||
|
self.as_raw(),
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
(!result.is_null()).then(|| unsafe { Array::new(result, count, ()) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the ids of the children to the given snapshot
|
||||||
|
pub fn get_snapshot_child_ids<S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
snapshot: S,
|
||||||
|
) -> Option<Array<BnString>> {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveSnapshotChildIds(
|
||||||
|
self.as_raw(),
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
(!result.is_null()).then(|| unsafe { Array::new(result, count, ()) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add named types to the type archive. Type must have all dependant named types added
|
||||||
|
/// prior to being added, or this function will fail.
|
||||||
|
/// If the type already exists, it will be overwritten.
|
||||||
|
///
|
||||||
|
/// * `name` - Name of new type
|
||||||
|
/// * `type` - Definition of new type
|
||||||
|
pub fn add_type(&self, name: &QualifiedNameAndType) {
|
||||||
|
self.add_types(core::slice::from_ref(name))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add named types to the type archive. Types must have all dependant named
|
||||||
|
/// types prior to being added, or included in the list, or this function will fail.
|
||||||
|
/// Types already existing with any added names will be overwritten.
|
||||||
|
///
|
||||||
|
/// * `new_types` - Names and definitions of new types
|
||||||
|
pub fn add_types(&self, new_types: &[QualifiedNameAndType]) {
|
||||||
|
// SAFETY BNQualifiedNameAndType and QualifiedNameAndType are transparent
|
||||||
|
let new_types_raw: &[BNQualifiedNameAndType] = unsafe { mem::transmute(new_types) };
|
||||||
|
let result = unsafe {
|
||||||
|
BNAddTypeArchiveTypes(self.as_raw(), new_types_raw.as_ptr(), new_types.len())
|
||||||
|
};
|
||||||
|
assert!(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Change the name of an existing type in the type archive.
|
||||||
|
///
|
||||||
|
/// * `old_name` - Old type name in archive
|
||||||
|
/// * `new_name` - New type name
|
||||||
|
pub fn rename_type(&self, old_name: &QualifiedName, new_name: &QualifiedNameAndType) {
|
||||||
|
let id = self
|
||||||
|
.get_type_id(old_name, self.current_snapshot_id())
|
||||||
|
.unwrap();
|
||||||
|
return self.rename_type_by_id(id, new_name.name());
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Change the name of an existing type in the type archive.
|
||||||
|
///
|
||||||
|
/// * `id` - Old id of type in archive
|
||||||
|
/// * `new_name` - New type name
|
||||||
|
pub fn rename_type_by_id<S: BnStrCompatible>(&self, id: S, new_name: &QualifiedName) {
|
||||||
|
let id = id.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNRenameTypeArchiveType(
|
||||||
|
self.as_raw(),
|
||||||
|
id.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
&new_name.0,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete an existing type in the type archive.
|
||||||
|
pub fn delete_type(&self, name: &QualifiedName) {
|
||||||
|
let id = self.get_type_id(name, self.current_snapshot_id());
|
||||||
|
let Some(id) = id else {
|
||||||
|
panic!("Unknown type {}", name.string())
|
||||||
|
};
|
||||||
|
self.delete_type_by_id(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete an existing type in the type archive.
|
||||||
|
pub fn delete_type_by_id<S: BnStrCompatible>(&self, id: S) {
|
||||||
|
let id = id.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNDeleteTypeArchiveType(self.as_raw(), id.as_ref().as_ptr() as *const ffi::c_char)
|
||||||
|
};
|
||||||
|
assert!(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Retrieve a stored type in the archive
|
||||||
|
///
|
||||||
|
/// * `name` - Type name
|
||||||
|
/// * `snapshot` - Snapshot id to search for types
|
||||||
|
pub fn get_type_by_name<S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
name: &QualifiedName,
|
||||||
|
snapshot: S,
|
||||||
|
) -> Option<Ref<Type>> {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveTypeByName(
|
||||||
|
self.as_raw(),
|
||||||
|
&name.0,
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
(!result.is_null()).then(|| unsafe { Type::ref_from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Retrieve a stored type in the archive by id
|
||||||
|
///
|
||||||
|
/// * `id` - Type id
|
||||||
|
/// * `snapshot` - Snapshot id to search for types
|
||||||
|
pub fn get_type_by_id<I: BnStrCompatible, S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
id: I,
|
||||||
|
snapshot: S,
|
||||||
|
) -> Option<Ref<Type>> {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let id = id.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveTypeById(
|
||||||
|
self.as_raw(),
|
||||||
|
id.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
(!result.is_null()).then(|| unsafe { Type::ref_from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Retrieve a type's name by its id
|
||||||
|
///
|
||||||
|
/// * `id` - Type id
|
||||||
|
/// * `snapshot` - Snapshot id to search for types
|
||||||
|
pub fn get_type_name_by_id<I: BnStrCompatible, S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
id: I,
|
||||||
|
snapshot: S,
|
||||||
|
) -> QualifiedName {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let id = id.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveTypeName(
|
||||||
|
self.as_raw(),
|
||||||
|
id.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
QualifiedName(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Retrieve a type's id by its name
|
||||||
|
///
|
||||||
|
/// * `name` - Type name
|
||||||
|
/// * `snapshot` - Snapshot id to search for types
|
||||||
|
pub fn get_type_id<S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
name: &QualifiedName,
|
||||||
|
snapshot: S,
|
||||||
|
) -> Option<BnString> {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveTypeId(
|
||||||
|
self.as_raw(),
|
||||||
|
&name.0,
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
(!result.is_null()).then(|| unsafe { BnString::from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Retrieve all stored types in the archive at a snapshot
|
||||||
|
///
|
||||||
|
/// * `snapshot` - Snapshot id to search for types
|
||||||
|
pub fn get_types_and_ids<S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
snapshot: S,
|
||||||
|
) -> Array<QualifiedNameTypeAndId> {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveTypes(
|
||||||
|
self.as_raw(),
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a list of all types' ids in the archive at a snapshot
|
||||||
|
///
|
||||||
|
/// * `snapshot` - Snapshot id to search for types
|
||||||
|
pub fn get_type_ids<S: BnStrCompatible>(&self, snapshot: S) -> Array<BnString> {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveTypeIds(
|
||||||
|
self.as_raw(),
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a list of all types' names in the archive at a snapshot
|
||||||
|
///
|
||||||
|
/// * `snapshot` - Snapshot id to search for types
|
||||||
|
pub fn get_type_names<S: BnStrCompatible>(&self, snapshot: S) -> Array<QualifiedName> {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveTypeNames(
|
||||||
|
self.as_raw(),
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a list of all types' names and ids in the archive at a current snapshot
|
||||||
|
|
||||||
|
/// * `snapshot` - Snapshot id to search for types
|
||||||
|
pub fn get_type_names_and_ids<S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
snapshot: S,
|
||||||
|
) -> (Array<QualifiedName>, Array<BnString>) {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let mut count = 0;
|
||||||
|
let mut names = ptr::null_mut();
|
||||||
|
let mut ids = ptr::null_mut();
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveTypeNamesAndIds(
|
||||||
|
self.as_raw(),
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
&mut names,
|
||||||
|
&mut ids,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(result);
|
||||||
|
(unsafe { Array::new(names, count, ()) }, unsafe {
|
||||||
|
Array::new(ids, count, ())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get all types a given type references directly
|
||||||
|
///
|
||||||
|
/// * `id` - Source type id
|
||||||
|
/// * `snapshot` - Snapshot id to search for types
|
||||||
|
pub fn get_outgoing_direct_references<I: BnStrCompatible, S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
id: I,
|
||||||
|
snapshot: S,
|
||||||
|
) -> Array<BnString> {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let id = id.into_bytes_with_nul();
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveOutgoingDirectTypeReferences(
|
||||||
|
self.as_raw(),
|
||||||
|
id.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get all types a given type references, and any types that the referenced types reference
|
||||||
|
///
|
||||||
|
/// :param id: Source type id
|
||||||
|
/// :param snapshot: Snapshot id to search for types
|
||||||
|
pub fn get_outgoing_recursive_references<I: BnStrCompatible, S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
id: I,
|
||||||
|
snapshot: S,
|
||||||
|
) -> Array<BnString> {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let id = id.into_bytes_with_nul();
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveOutgoingRecursiveTypeReferences(
|
||||||
|
self.as_raw(),
|
||||||
|
id.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get all types that reference a given type
|
||||||
|
///
|
||||||
|
/// * `id` - Target type id
|
||||||
|
/// * `snapshot` - Snapshot id to search for types
|
||||||
|
pub fn get_incoming_direct_references<I: BnStrCompatible, S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
id: I,
|
||||||
|
snapshot: S,
|
||||||
|
) -> Array<BnString> {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let id = id.into_bytes_with_nul();
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveIncomingDirectTypeReferences(
|
||||||
|
self.as_raw(),
|
||||||
|
id.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get all types that reference a given type, and all types that reference them, recursively
|
||||||
|
///
|
||||||
|
/// * `id` - Target type id
|
||||||
|
/// * `snapshot` - Snapshot id to search for types, or empty string to search the latest snapshot
|
||||||
|
pub fn get_incoming_recursive_references<I: BnStrCompatible, S: BnStrCompatible>(
|
||||||
|
&self,
|
||||||
|
id: I,
|
||||||
|
snapshot: S,
|
||||||
|
) -> Array<BnString> {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let id = id.into_bytes_with_nul();
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe {
|
||||||
|
BNGetTypeArchiveIncomingRecursiveTypeReferences(
|
||||||
|
self.as_raw(),
|
||||||
|
id.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
&mut count,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Look up a metadata entry in the archive
|
||||||
|
pub fn query_metadata<S: BnStrCompatible>(&self, key: S) -> Option<Ref<Metadata>> {
|
||||||
|
let key = key.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNTypeArchiveQueryMetadata(self.as_raw(), key.as_ref().as_ptr() as *const ffi::c_char)
|
||||||
|
};
|
||||||
|
(!result.is_null()).then(|| unsafe { Metadata::ref_from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Store a key/value pair in the archive's metadata storage
|
||||||
|
///
|
||||||
|
/// * `key` - key value to associate the Metadata object with
|
||||||
|
/// * `md` - object to store.
|
||||||
|
pub fn store_metadata<S: BnStrCompatible>(&self, key: S, md: &Metadata) {
|
||||||
|
let key = key.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNTypeArchiveStoreMetadata(
|
||||||
|
self.as_raw(),
|
||||||
|
key.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
md.handle,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete a given metadata entry in the archive from the `key`
|
||||||
|
pub fn remove_metadata<S: BnStrCompatible>(&self, key: S) -> bool {
|
||||||
|
let key = key.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
BNTypeArchiveRemoveMetadata(self.as_raw(), key.as_ref().as_ptr() as *const ffi::c_char)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Turn a given `snapshot` id into a data stream
|
||||||
|
pub fn serialize_snapshot<S: BnStrCompatible>(&self, snapshot: S) -> DataBuffer {
|
||||||
|
let snapshot = snapshot.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNTypeArchiveSerializeSnapshot(
|
||||||
|
self.as_raw(),
|
||||||
|
snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(!result.is_null());
|
||||||
|
DataBuffer::from_raw(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Take a serialized snapshot `data` stream and create a new snapshot from it
|
||||||
|
pub fn deserialize_snapshot(&self, data: &DataBuffer) -> BnString {
|
||||||
|
let result = unsafe { BNTypeArchiveDeserializeSnapshot(self.as_raw(), data.as_raw()) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { BnString::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Register a notification listener
|
||||||
|
pub fn register_notification_callback<T: TypeArchiveNotificationCallback>(
|
||||||
|
&self,
|
||||||
|
callback: T,
|
||||||
|
) -> TypeArchiveCallbackHandle<T> {
|
||||||
|
// SAFETY free on [TypeArchiveCallbackHandle::Drop]
|
||||||
|
let callback = Box::leak(Box::new(callback));
|
||||||
|
let mut notification = BNTypeArchiveNotification {
|
||||||
|
context: callback as *mut T as *mut ffi::c_void,
|
||||||
|
typeAdded: Some(cb_type_added::<T>),
|
||||||
|
typeUpdated: Some(cb_type_updated::<T>),
|
||||||
|
typeRenamed: Some(cb_type_renamed::<T>),
|
||||||
|
typeDeleted: Some(cb_type_deleted::<T>),
|
||||||
|
};
|
||||||
|
unsafe { BNRegisterTypeArchiveNotification(self.as_raw(), &mut notification) }
|
||||||
|
TypeArchiveCallbackHandle {
|
||||||
|
callback,
|
||||||
|
type_archive: self.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// NOTE NotificationClosure is left private, there is no need for the user
|
||||||
|
// to know or use it.
|
||||||
|
#[allow(private_interfaces)]
|
||||||
|
pub fn register_notification_closure<A, U, R, D>(
|
||||||
|
&self,
|
||||||
|
type_added: A,
|
||||||
|
type_updated: U,
|
||||||
|
type_renamed: R,
|
||||||
|
type_deleted: D,
|
||||||
|
) -> TypeArchiveCallbackHandle<NotificationClosure<A, U, R, D>>
|
||||||
|
where
|
||||||
|
A: FnMut(&TypeArchive, &str, &Type),
|
||||||
|
U: FnMut(&TypeArchive, &str, &Type, &Type),
|
||||||
|
R: FnMut(&TypeArchive, &str, &QualifiedName, &QualifiedName),
|
||||||
|
D: FnMut(&TypeArchive, &str, &Type),
|
||||||
|
{
|
||||||
|
self.register_notification_callback(NotificationClosure {
|
||||||
|
fun_type_added: type_added,
|
||||||
|
fun_type_updated: type_updated,
|
||||||
|
fun_type_renamed: type_renamed,
|
||||||
|
fun_type_deleted: type_deleted,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Close a type archive, disconnecting it from any active views and closing
|
||||||
|
/// any open file handles
|
||||||
|
pub fn close(self) {
|
||||||
|
unsafe { BNCloseTypeArchive(self.as_raw()) }
|
||||||
|
// NOTE self must be dropped after, don't make it `&self`
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Determine if `file` is a Type Archive
|
||||||
|
pub fn is_type_archive<P: BnStrCompatible>(file: P) -> bool {
|
||||||
|
let file = file.into_bytes_with_nul();
|
||||||
|
unsafe { BNIsTypeArchive(file.as_ref().as_ptr() as *const ffi::c_char) }
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO implement TypeContainer
///// Get the TypeContainer interface for this Type Archive, presenting types
///// at the current snapshot in the archive.
//pub fn type_container(&self) -> TypeContainer {
//    let result = unsafe { BNGetTypeArchiveTypeContainer(self.as_raw()) };
//    unsafe { TypeContainer::from_raw(ptr::NonNull::new(result).unwrap()) }
//}
||||||
|
/// Do some function in a transaction making a new snapshot whose id is passed to func. If func throws,
|
||||||
|
/// the transaction will be rolled back and the snapshot will not be created.
|
||||||
|
///
|
||||||
|
/// * `func` - Function to call
|
||||||
|
/// * `parents` - Parent snapshot ids
|
||||||
|
///
|
||||||
|
/// Returns Created snapshot id
|
||||||
|
pub fn new_snapshot_transaction<P, F>(&self, mut function: F, parents: &[BnString]) -> BnString
|
||||||
|
where
|
||||||
|
P: BnStrCompatible,
|
||||||
|
F: FnMut(&str) -> bool,
|
||||||
|
{
|
||||||
|
unsafe extern "C" fn cb_callback<F: FnMut(&str) -> bool>(
|
||||||
|
ctxt: *mut ffi::c_void,
|
||||||
|
id: *const ffi::c_char,
|
||||||
|
) -> bool {
|
||||||
|
let fun: &mut F = &mut *(ctxt as *mut F);
|
||||||
|
fun(&ffi::CStr::from_ptr(id).to_string_lossy())
|
||||||
|
}
|
||||||
|
|
||||||
|
// SAFETY BnString and `*const ffi::c_char` are transparent
|
||||||
|
let parents_raw = parents.as_ptr() as *const *const ffi::c_char;
|
||||||
|
|
||||||
|
let result = unsafe {
|
||||||
|
BNTypeArchiveNewSnapshotTransaction(
|
||||||
|
self.as_raw(),
|
||||||
|
Some(cb_callback::<F>),
|
||||||
|
&mut function as *mut F as *mut ffi::c_void,
|
||||||
|
parents_raw,
|
||||||
|
parents.len(),
|
||||||
|
)
|
||||||
|
};
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { BnString::from_raw(result) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Merge two snapshots in the archive to produce a new snapshot
|
||||||
|
///
|
||||||
|
/// * `base_snapshot` - Common ancestor of snapshots
|
||||||
|
/// * `first_snapshot` - First snapshot to merge
|
||||||
|
/// * `second_snapshot` - Second snapshot to merge
|
||||||
|
/// * `merge_conflicts` - List of all conflicting types, id <-> target snapshot
|
||||||
|
/// * `progress` - Function to call for progress updates
|
||||||
|
///
|
||||||
|
/// Returns Snapshot id, if merge was successful, otherwise the List of
|
||||||
|
/// conflicting type ids
|
||||||
|
pub fn merge_snapshots<B, F, S, P, M, MI, MK>(
|
||||||
|
&self,
|
||||||
|
base_snapshot: B,
|
||||||
|
first_snapshot: F,
|
||||||
|
second_snapshot: S,
|
||||||
|
merge_conflicts: M,
|
||||||
|
mut progress: P,
|
||||||
|
) -> Result<BnString, Array<BnString>>
|
||||||
|
where
|
||||||
|
B: BnStrCompatible,
|
||||||
|
F: BnStrCompatible,
|
||||||
|
S: BnStrCompatible,
|
||||||
|
P: FnMut(usize, usize) -> bool,
|
||||||
|
M: IntoIterator<Item = (MI, MK)>,
|
||||||
|
MI: BnStrCompatible,
|
||||||
|
MK: BnStrCompatible,
|
||||||
|
{
|
||||||
|
unsafe extern "C" fn cb_callback<F: FnMut(usize, usize) -> bool>(
|
||||||
|
ctxt: *mut ffi::c_void,
|
||||||
|
progress: usize,
|
||||||
|
total: usize,
|
||||||
|
) -> bool {
|
||||||
|
let ctxt: &mut F = &mut *(ctxt as *mut F);
|
||||||
|
ctxt(progress, total)
|
||||||
|
}
|
||||||
|
|
||||||
|
let base_snapshot = base_snapshot.into_bytes_with_nul();
|
||||||
|
let first_snapshot = first_snapshot.into_bytes_with_nul();
|
||||||
|
let second_snapshot = second_snapshot.into_bytes_with_nul();
|
||||||
|
let (merge_keys, merge_values): (Vec<BnString>, Vec<BnString>) = merge_conflicts
|
||||||
|
.into_iter()
|
||||||
|
.map(|(k, v)| (BnString::new(k), BnString::new(v)))
|
||||||
|
.unzip();
|
||||||
|
// SAFETY BnString and `*const ffi::c_char` are transparent
|
||||||
|
let merge_keys_raw = merge_keys.as_ptr() as *const *const ffi::c_char;
|
||||||
|
let merge_values_raw = merge_values.as_ptr() as *const *const ffi::c_char;
|
||||||
|
|
||||||
|
let mut conflicts_errors = ptr::null_mut();
|
||||||
|
let mut conflicts_errors_count = 0;
|
||||||
|
|
||||||
|
let mut result = ptr::null_mut();
|
||||||
|
|
||||||
|
let success = unsafe {
|
||||||
|
BNTypeArchiveMergeSnapshots(
|
||||||
|
self.as_raw(),
|
||||||
|
base_snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
first_snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
second_snapshot.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
merge_keys_raw,
|
||||||
|
merge_values_raw,
|
||||||
|
merge_keys.len(),
|
||||||
|
&mut conflicts_errors,
|
||||||
|
&mut conflicts_errors_count,
|
||||||
|
&mut result,
|
||||||
|
Some(cb_callback::<P>),
|
||||||
|
(&mut progress) as *mut P as *mut ffi::c_void,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
if success {
|
||||||
|
assert!(!result.is_null());
|
||||||
|
Ok(unsafe { BnString::from_raw(result) })
|
||||||
|
} else {
|
||||||
|
assert!(!conflicts_errors.is_null());
|
||||||
|
Err(unsafe { Array::new(conflicts_errors, conflicts_errors_count, ()) })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CoreArrayProvider for TypeArchive {
|
||||||
|
type Raw = *mut BNTypeArchive;
|
||||||
|
type Context = ();
|
||||||
|
type Wrapped<'a> = &'a TypeArchive;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for TypeArchive {
|
||||||
|
unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
|
||||||
|
BNFreeTypeArchiveList(raw, count)
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
|
||||||
|
Self::ref_from_raw(raw)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct TypeArchiveCallbackHandle<T: TypeArchiveNotificationCallback> {
|
||||||
|
callback: *mut T,
|
||||||
|
type_archive: TypeArchive,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: TypeArchiveNotificationCallback> Drop for TypeArchiveCallbackHandle<T> {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
let mut notification = BNTypeArchiveNotification {
|
||||||
|
context: self.callback as *mut ffi::c_void,
|
||||||
|
typeAdded: Some(cb_type_added::<T>),
|
||||||
|
typeUpdated: Some(cb_type_updated::<T>),
|
||||||
|
typeRenamed: Some(cb_type_renamed::<T>),
|
||||||
|
typeDeleted: Some(cb_type_deleted::<T>),
|
||||||
|
};
|
||||||
|
// unregister the notification callback
|
||||||
|
unsafe {
|
||||||
|
BNUnregisterTypeArchiveNotification(self.type_archive.as_raw(), &mut notification)
|
||||||
|
}
|
||||||
|
// free the context created at [TypeArchive::register_notification_callback]
|
||||||
|
drop(unsafe { Box::from_raw(self.callback) });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait TypeArchiveNotificationCallback {
|
||||||
|
/// Called when a type is added to the archive
|
||||||
|
///
|
||||||
|
/// * `archive` - Source Type archive
|
||||||
|
/// * `id` - Id of type added
|
||||||
|
/// * `definition` - Definition of type
|
||||||
|
fn type_added(&mut self, _archive: &TypeArchive, _id: &str, _definition: &Type) {}
|
||||||
|
|
||||||
|
/// Called when a type in the archive is updated to a new definition
|
||||||
|
///
|
||||||
|
/// * `archive` - Source Type archive
|
||||||
|
/// * `id` - Id of type
|
||||||
|
/// * `old_definition` - Previous definition
|
||||||
|
/// * `new_definition` - Current definition
|
||||||
|
fn type_updated(
|
||||||
|
&mut self,
|
||||||
|
_archive: &TypeArchive,
|
||||||
|
_id: &str,
|
||||||
|
_old_definition: &Type,
|
||||||
|
_new_definition: &Type,
|
||||||
|
) {
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Called when a type in the archive is renamed
|
||||||
|
///
|
||||||
|
/// * `archive` - Source Type archive
|
||||||
|
/// * `id` - Type id
|
||||||
|
/// * `old_name` - Previous name
|
||||||
|
/// * `new_name` - Current name
|
||||||
|
fn type_renamed(
|
||||||
|
&mut self,
|
||||||
|
_archive: &TypeArchive,
|
||||||
|
_id: &str,
|
||||||
|
_old_name: &QualifiedName,
|
||||||
|
_new_name: &QualifiedName,
|
||||||
|
) {
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Called when a type in the archive is deleted from the archive
|
||||||
|
///
|
||||||
|
/// * `archive` - Source Type archive
|
||||||
|
/// * `id` - Id of type deleted
|
||||||
|
/// * `definition` - Definition of type deleted
|
||||||
|
fn type_deleted(&mut self, _archive: &TypeArchive, _id: &str, _definition: &Type) {}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct NotificationClosure<A, U, R, D>
|
||||||
|
where
|
||||||
|
A: FnMut(&TypeArchive, &str, &Type),
|
||||||
|
U: FnMut(&TypeArchive, &str, &Type, &Type),
|
||||||
|
R: FnMut(&TypeArchive, &str, &QualifiedName, &QualifiedName),
|
||||||
|
D: FnMut(&TypeArchive, &str, &Type),
|
||||||
|
{
|
||||||
|
fun_type_added: A,
|
||||||
|
fun_type_updated: U,
|
||||||
|
fun_type_renamed: R,
|
||||||
|
fun_type_deleted: D,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<A, U, R, D> TypeArchiveNotificationCallback for NotificationClosure<A, U, R, D>
|
||||||
|
where
|
||||||
|
A: FnMut(&TypeArchive, &str, &Type),
|
||||||
|
U: FnMut(&TypeArchive, &str, &Type, &Type),
|
||||||
|
R: FnMut(&TypeArchive, &str, &QualifiedName, &QualifiedName),
|
||||||
|
D: FnMut(&TypeArchive, &str, &Type),
|
||||||
|
{
|
||||||
|
fn type_added(&mut self, archive: &TypeArchive, id: &str, definition: &Type) {
|
||||||
|
(self.fun_type_added)(archive, id, definition)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn type_updated(
|
||||||
|
&mut self,
|
||||||
|
archive: &TypeArchive,
|
||||||
|
id: &str,
|
||||||
|
old_definition: &Type,
|
||||||
|
new_definition: &Type,
|
||||||
|
) {
|
||||||
|
(self.fun_type_updated)(archive, id, old_definition, new_definition)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn type_renamed(
|
||||||
|
&mut self,
|
||||||
|
archive: &TypeArchive,
|
||||||
|
id: &str,
|
||||||
|
old_name: &QualifiedName,
|
||||||
|
new_name: &QualifiedName,
|
||||||
|
) {
|
||||||
|
(self.fun_type_renamed)(archive, id, old_name, new_name)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn type_deleted(&mut self, archive: &TypeArchive, id: &str, definition: &Type) {
|
||||||
|
(self.fun_type_deleted)(archive, id, definition)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe extern "C" fn cb_type_added<T: TypeArchiveNotificationCallback>(
|
||||||
|
ctxt: *mut ::std::os::raw::c_void,
|
||||||
|
archive: *mut BNTypeArchive,
|
||||||
|
id: *const ::std::os::raw::c_char,
|
||||||
|
definition: *mut BNType,
|
||||||
|
) {
|
||||||
|
let ctxt: &mut T = &mut *(ctxt as *mut T);
|
||||||
|
ctxt.type_added(
|
||||||
|
unsafe { TypeArchive::ref_from_raw(&archive) },
|
||||||
|
unsafe { ffi::CStr::from_ptr(id).to_string_lossy().as_ref() },
|
||||||
|
&Type { handle: definition },
|
||||||
|
)
|
||||||
|
}
|
||||||
|
unsafe extern "C" fn cb_type_updated<T: TypeArchiveNotificationCallback>(
|
||||||
|
ctxt: *mut ::std::os::raw::c_void,
|
||||||
|
archive: *mut BNTypeArchive,
|
||||||
|
id: *const ::std::os::raw::c_char,
|
||||||
|
old_definition: *mut BNType,
|
||||||
|
new_definition: *mut BNType,
|
||||||
|
) {
|
||||||
|
let ctxt: &mut T = &mut *(ctxt as *mut T);
|
||||||
|
ctxt.type_updated(
|
||||||
|
unsafe { TypeArchive::ref_from_raw(&archive) },
|
||||||
|
unsafe { ffi::CStr::from_ptr(id).to_string_lossy().as_ref() },
|
||||||
|
&Type {
|
||||||
|
handle: old_definition,
|
||||||
|
},
|
||||||
|
&Type {
|
||||||
|
handle: new_definition,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
unsafe extern "C" fn cb_type_renamed<T: TypeArchiveNotificationCallback>(
|
||||||
|
ctxt: *mut ::std::os::raw::c_void,
|
||||||
|
archive: *mut BNTypeArchive,
|
||||||
|
id: *const ::std::os::raw::c_char,
|
||||||
|
old_name: *const BNQualifiedName,
|
||||||
|
new_name: *const BNQualifiedName,
|
||||||
|
) {
|
||||||
|
let ctxt: &mut T = &mut *(ctxt as *mut T);
|
||||||
|
let old_name = mem::ManuallyDrop::new(QualifiedName(*old_name));
|
||||||
|
let new_name = mem::ManuallyDrop::new(QualifiedName(*new_name));
|
||||||
|
ctxt.type_renamed(
|
||||||
|
unsafe { TypeArchive::ref_from_raw(&archive) },
|
||||||
|
unsafe { ffi::CStr::from_ptr(id).to_string_lossy().as_ref() },
|
||||||
|
&old_name,
|
||||||
|
&new_name,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
unsafe extern "C" fn cb_type_deleted<T: TypeArchiveNotificationCallback>(
|
||||||
|
ctxt: *mut ::std::os::raw::c_void,
|
||||||
|
archive: *mut BNTypeArchive,
|
||||||
|
id: *const ::std::os::raw::c_char,
|
||||||
|
definition: *mut BNType,
|
||||||
|
) {
|
||||||
|
let ctxt: &mut T = &mut *(ctxt as *mut T);
|
||||||
|
ctxt.type_deleted(
|
||||||
|
unsafe { TypeArchive::ref_from_raw(&archive) },
|
||||||
|
unsafe { ffi::CStr::from_ptr(id).to_string_lossy().as_ref() },
|
||||||
|
&Type { handle: definition },
|
||||||
|
)
|
||||||
|
}
|
||||||
367
src/typelibrary.rs
Normal file
367
src/typelibrary.rs
Normal file
@@ -0,0 +1,367 @@
|
|||||||
|
use binaryninjacore_sys::*;
|
||||||
|
|
||||||
|
use core::{ffi, mem, ptr};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
architecture::CoreArchitecture,
|
||||||
|
metadata::Metadata,
|
||||||
|
platform::Platform,
|
||||||
|
rc::{Array, CoreArrayProvider, CoreArrayProviderInner, Ref},
|
||||||
|
string::{BnStrCompatible, BnString},
|
||||||
|
types::{QualifiedName, QualifiedNameAndType, Type},
|
||||||
|
};
|
||||||
|
|
||||||
|
#[repr(transparent)]
|
||||||
|
pub struct TypeLibrary {
|
||||||
|
handle: ptr::NonNull<BNTypeLibrary>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TypeLibrary {
|
||||||
|
pub(crate) unsafe fn from_raw(handle: ptr::NonNull<BNTypeLibrary>) -> Self {
|
||||||
|
Self { handle }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) unsafe fn ref_from_raw(handle: &*mut BNTypeLibrary) -> &Self {
|
||||||
|
assert!(!handle.is_null());
|
||||||
|
mem::transmute(handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::mut_from_ref)]
|
||||||
|
pub(crate) unsafe fn as_raw(&self) -> &mut BNTypeLibrary {
|
||||||
|
&mut *self.handle.as_ptr()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn new_reference(&self) -> Self {
|
||||||
|
unsafe {
|
||||||
|
Self::from_raw(ptr::NonNull::new(BNNewTypeLibraryReference(self.as_raw())).unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn new_duplicated(&self) -> Self {
|
||||||
|
unsafe { Self::from_raw(ptr::NonNull::new(BNDuplicateTypeLibrary(self.as_raw())).unwrap()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates an empty type library object with a random GUID and the provided name.
|
||||||
|
pub fn new<S: BnStrCompatible>(arch: CoreArchitecture, name: S) -> TypeLibrary {
|
||||||
|
let name = name.into_bytes_with_nul();
|
||||||
|
let new_lib =
|
||||||
|
unsafe { BNNewTypeLibrary(arch.0, name.as_ref().as_ptr() as *const ffi::c_char) };
|
||||||
|
unsafe { TypeLibrary::from_raw(ptr::NonNull::new(new_lib).unwrap()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn all(arch: CoreArchitecture) -> Array<TypeLibrary> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNGetArchitectureTypeLibraries(arch.0, &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decompresses a type library file to a file on disk.
|
||||||
|
pub fn decompress_to_file<P: BnStrCompatible, O: BnStrCompatible>(path: P, output: O) -> bool {
|
||||||
|
let path = path.into_bytes_with_nul();
|
||||||
|
let output = output.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
BNTypeLibraryDecompressToFile(
|
||||||
|
path.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
output.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Loads a finalized type library instance from file
|
||||||
|
pub fn load_from_file<S: BnStrCompatible>(path: S) -> Option<TypeLibrary> {
|
||||||
|
let path = path.into_bytes_with_nul();
|
||||||
|
let handle =
|
||||||
|
unsafe { BNLoadTypeLibraryFromFile(path.as_ref().as_ptr() as *const ffi::c_char) };
|
||||||
|
ptr::NonNull::new(handle).map(|h| unsafe { TypeLibrary::from_raw(h) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Saves a finalized type library instance to file
|
||||||
|
pub fn write_to_file<S: BnStrCompatible>(&self, path: S) -> bool {
|
||||||
|
let path = path.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
BNWriteTypeLibraryToFile(self.as_raw(), path.as_ref().as_ptr() as *const ffi::c_char)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Looks up the first type library found with a matching name. Keep in mind that names are not
|
||||||
|
/// necessarily unique.
|
||||||
|
pub fn from_name<S: BnStrCompatible>(arch: CoreArchitecture, name: S) -> Option<TypeLibrary> {
|
||||||
|
let name = name.into_bytes_with_nul();
|
||||||
|
let handle = unsafe {
|
||||||
|
BNLookupTypeLibraryByName(arch.0, name.as_ref().as_ptr() as *const ffi::c_char)
|
||||||
|
};
|
||||||
|
ptr::NonNull::new(handle).map(|h| unsafe { TypeLibrary::from_raw(h) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Attempts to grab a type library associated with the provided Architecture and GUID pair
|
||||||
|
pub fn from_guid<S: BnStrCompatible>(arch: CoreArchitecture, guid: S) -> Option<TypeLibrary> {
|
||||||
|
let guid = guid.into_bytes_with_nul();
|
||||||
|
let handle = unsafe {
|
||||||
|
BNLookupTypeLibraryByGuid(arch.0, guid.as_ref().as_ptr() as *const ffi::c_char)
|
||||||
|
};
|
||||||
|
ptr::NonNull::new(handle).map(|h| unsafe { TypeLibrary::from_raw(h) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The Architecture this type library is associated with
|
||||||
|
pub fn arch(&self) -> CoreArchitecture {
|
||||||
|
let arch = unsafe { BNGetTypeLibraryArchitecture(self.as_raw()) };
|
||||||
|
assert!(!arch.is_null());
|
||||||
|
CoreArchitecture(arch)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The primary name associated with this type library
|
||||||
|
pub fn name(&self) -> Option<BnString> {
|
||||||
|
let result = unsafe { BNGetTypeLibraryName(self.as_raw()) };
|
||||||
|
(!result.is_null()).then(|| unsafe { BnString::from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sets the name of a type library instance that has not been finalized
|
||||||
|
pub fn set_name<S: BnStrCompatible>(&self, value: S) {
|
||||||
|
let value = value.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
BNSetTypeLibraryName(self.as_raw(), value.as_ref().as_ptr() as *const ffi::c_char)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The `dependency_name` of a library is the name used to record dependencies across
|
||||||
|
/// type libraries. This allows, for example, a library with the name "musl_libc" to have
|
||||||
|
/// dependencies on it recorded as "libc_generic", allowing a type library to be used across
|
||||||
|
/// multiple platforms where each has a specific libc that also provides the name "libc_generic"
|
||||||
|
/// as an `alternate_name`.
|
||||||
|
pub fn dependency_name(&self) -> Option<BnString> {
|
||||||
|
let result = unsafe { BNGetTypeLibraryDependencyName(self.as_raw()) };
|
||||||
|
(!result.is_null()).then(|| unsafe { BnString::from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sets the dependency name of a type library instance that has not been finalized
|
||||||
|
pub fn set_dependency_name<S: BnStrCompatible>(&self, value: S) {
|
||||||
|
let value = value.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
BNSetTypeLibraryDependencyName(
|
||||||
|
self.as_raw(),
|
||||||
|
value.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the GUID associated with the type library
|
||||||
|
pub fn guid(&self) -> Option<BnString> {
|
||||||
|
let result = unsafe { BNGetTypeLibraryGuid(self.as_raw()) };
|
||||||
|
(!result.is_null()).then(|| unsafe { BnString::from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sets the GUID of a type library instance that has not been finalized
|
||||||
|
pub fn set_guid<S: BnStrCompatible>(&self, value: S) {
|
||||||
|
let value = value.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
BNSetTypeLibraryGuid(self.as_raw(), value.as_ref().as_ptr() as *const ffi::c_char)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A list of extra names that will be considered a match by [Platform::get_type_libraries_by_name]
|
||||||
|
pub fn alternate_names(&self) -> Array<BnString> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNGetTypeLibraryAlternateNames(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Adds an extra name to this type library used during library lookups and dependency resolution
|
||||||
|
pub fn add_alternate_name<S: BnStrCompatible>(&self, value: S) {
|
||||||
|
let value = value.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
BNAddTypeLibraryAlternateName(
|
||||||
|
self.as_raw(),
|
||||||
|
value.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a list of all platform names that this type library will register with during platform
|
||||||
|
/// type registration.
|
||||||
|
///
|
||||||
|
/// This returns strings, not Platform objects, as type libraries can be distributed with support for
|
||||||
|
/// Platforms that may not be present.
|
||||||
|
pub fn platform_names(&self) -> Array<BnString> {
|
||||||
|
let mut count = 0;
|
||||||
|
let result = unsafe { BNGetTypeLibraryPlatforms(self.as_raw(), &mut count) };
|
||||||
|
assert!(!result.is_null());
|
||||||
|
unsafe { Array::new(result, count, ()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Associate a platform with a type library instance that has not been finalized.
|
||||||
|
///
|
||||||
|
/// This will cause the library to be searchable by [Platform::get_type_libraries_by_name]
|
||||||
|
/// when loaded.
|
||||||
|
///
|
||||||
|
/// This does not have side affects until finalization of the type library.
|
||||||
|
pub fn add_platform(&self, plat: &Platform) {
|
||||||
|
unsafe { BNAddTypeLibraryPlatform(self.as_raw(), plat.handle) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Clears the list of platforms associated with a type library instance that has not been finalized
|
||||||
|
pub fn clear_platforms(&self) {
|
||||||
|
unsafe { BNClearTypeLibraryPlatforms(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Flags a newly created type library instance as finalized and makes it available for Platform and Architecture
|
||||||
|
/// type library searches
|
||||||
|
pub fn finalize(&self) -> bool {
|
||||||
|
unsafe { BNFinalizeTypeLibrary(self.as_raw()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Retrieves a metadata associated with the given key stored in the type library
|
||||||
|
pub fn query_metadata<S: BnStrCompatible>(&self, key: S) -> Option<Metadata> {
|
||||||
|
let key = key.into_bytes_with_nul();
|
||||||
|
let result = unsafe {
|
||||||
|
BNTypeLibraryQueryMetadata(self.as_raw(), key.as_ref().as_ptr() as *const ffi::c_char)
|
||||||
|
};
|
||||||
|
(!result.is_null()).then(|| unsafe { Metadata::from_raw(result) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Stores an object for the given key in the current type library. Objects stored using
|
||||||
|
/// `store_metadata` can be retrieved from any reference to the library. Objects stored are not arbitrary python
|
||||||
|
/// objects! The values stored must be able to be held in a Metadata object. See [Metadata]
|
||||||
|
/// for more information. Python objects could obviously be serialized using pickle but this intentionally
|
||||||
|
/// a task left to the user since there is the potential security issues.
|
||||||
|
///
|
||||||
|
/// This is primarily intended as a way to store Platform specific information relevant to BinaryView implementations;
|
||||||
|
/// for example the PE BinaryViewType uses type library metadata to retrieve ordinal information, when available.
|
||||||
|
///
|
||||||
|
/// * `key` - key value to associate the Metadata object with
|
||||||
|
/// * `md` - object to store.
|
||||||
|
pub fn store_metadata<S: BnStrCompatible>(&self, key: S, md: &Metadata) {
|
||||||
|
let key = key.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
BNTypeLibraryStoreMetadata(
|
||||||
|
self.as_raw(),
|
||||||
|
key.as_ref().as_ptr() as *const ffi::c_char,
|
||||||
|
md.handle,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Removes the metadata associated with key from the current type library.
|
||||||
|
pub fn remove_metadata<S: BnStrCompatible>(&self, key: S) {
|
||||||
|
let key = key.into_bytes_with_nul();
|
||||||
|
unsafe {
|
||||||
|
BNTypeLibraryRemoveMetadata(self.as_raw(), key.as_ref().as_ptr() as *const ffi::c_char)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Retrieves the metadata associated with the current type library.
|
||||||
|
pub fn metadata(&self) -> Metadata {
|
||||||
|
let md_handle = unsafe { BNTypeLibraryGetMetadata(self.as_raw()) };
|
||||||
|
assert!(!md_handle.is_null());
|
||||||
|
unsafe { Metadata::from_raw(md_handle) }
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: implement TypeContainer
// /// Type Container for all TYPES within the Type Library. Objects are not included.
// /// The Type Container's Platform will be the first platform associated with the Type Library.
// pub fn type_container(&self) -> TypeContainer {
//     let result = unsafe { BNGetTypeLibraryTypeContainer(self.as_raw()) };
//     unsafe { TypeContainer::from_raw(ptr::NonNull::new(result).unwrap()) }
// }
|
|
||||||
|
/// Directly inserts a named object into the type library's object store.
|
||||||
|
/// This is not done recursively, so care should be taken that types referring to other types
|
||||||
|
/// through NamedTypeReferences are already appropriately prepared.
|
||||||
|
///
|
||||||
|
/// To add types and objects from an existing BinaryView, it is recommended to use
|
||||||
|
/// `export_object_to_library <binaryview.BinaryView.export_object_to_library>`, which will automatically pull in
|
||||||
|
/// all referenced types and record additional dependencies as needed.
|
||||||
|
pub fn add_named_object(&self, name: &QualifiedName, type_: &Type) {
|
||||||
|
unsafe {
|
||||||
|
BNAddTypeLibraryNamedObject(self.as_raw(), &name.0 as *const _ as *mut _, type_.handle)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Directly inserts a named type into the type library's type store.
/// This is not done recursively, so care should be taken that types referring to other types
/// through NamedTypeReferences are already appropriately prepared.
///
/// To add types and objects from an existing BinaryView, it is recommended to use
/// `export_type_to_library <binaryview.BinaryView.export_type_to_library>`, which will automatically pull in
/// all referenced types and record additional dependencies as needed.
pub fn add_named_type(&self, name: &QualifiedNameAndType, type_: &Type) {
    unsafe {
        // Constness is cast away to satisfy the FFI signature.
        BNAddTypeLibraryNamedType(self.as_raw(), &name.0 as *const _ as *mut _, type_.handle)
    }
}
|
||||||
|
|
||||||
|
/// Manually flag NamedTypeReferences to the given QualifiedName as originating from another source
/// TypeLibrary with the given dependency name.
///
/// <div class="warning">
///
/// Use this api with extreme caution.
///
/// </div>
pub fn add_type_source<S: BnStrCompatible>(&self, name: &QualifiedName, source: S) {
    // Converted to a NUL-terminated buffer so the pointer below is a valid
    // C string for the duration of the call.
    let source = source.into_bytes_with_nul();
    unsafe {
        BNAddTypeLibraryNamedTypeSource(
            self.as_raw(),
            &name.0 as *const _ as *mut _,
            source.as_ref().as_ptr() as *const ffi::c_char,
        )
    }
}
|
||||||
|
|
||||||
|
/// Direct extracts a reference to a contained object -- when
|
||||||
|
/// attempting to extract types from a library into a BinaryView, consider using
|
||||||
|
/// `import_library_object <binaryview.BinaryView.import_library_object>` instead.
|
||||||
|
pub fn get_named_object(&self, name: &QualifiedName) -> Option<Ref<Type>> {
|
||||||
|
let t =
|
||||||
|
unsafe { BNGetTypeLibraryNamedObject(self.as_raw(), &name.0 as *const _ as *mut _) };
|
||||||
|
(!t.is_null()).then(|| unsafe { Type::ref_from_raw(t) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Direct extracts a reference to a contained type -- when
|
||||||
|
/// attempting to extract types from a library into a BinaryView, consider using
|
||||||
|
/// `import_library_type <binaryview.BinaryView.import_library_type>` instead.
|
||||||
|
pub fn get_named_type(&self, name: &QualifiedName) -> Option<Ref<Type>> {
|
||||||
|
let t = unsafe { BNGetTypeLibraryNamedType(self.as_raw(), &name.0 as *const _ as *mut _) };
|
||||||
|
(!t.is_null()).then(|| unsafe { Type::ref_from_raw(t) })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// All named objects (functions, exported variables) provided by the type
/// library, as name/type pairs.
pub fn named_objects(&self) -> Array<QualifiedNameAndType> {
    let mut count = 0;
    let result = unsafe { BNGetTypeLibraryNamedObjects(self.as_raw(), &mut count) };
    // A null list from the core would indicate a broken invariant.
    assert!(!result.is_null());
    unsafe { Array::new(result, count, ()) }
}
|
||||||
|
|
||||||
|
/// All named types provided by the type library, as name/type pairs.
pub fn named_types(&self) -> Array<QualifiedNameAndType> {
    let mut count = 0;
    let result = unsafe { BNGetTypeLibraryNamedTypes(self.as_raw(), &mut count) };
    // A null list from the core would indicate a broken invariant.
    assert!(!result.is_null());
    unsafe { Array::new(result, count, ()) }
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for TypeLibrary {
    fn drop(&mut self) {
        // Release this wrapper's reference to the core type library object.
        unsafe { BNFreeTypeLibrary(self.as_raw()) }
    }
}
|
||||||
|
|
||||||
|
// Lets `TypeLibrary` values be yielded from core-allocated lists
// (`Array<TypeLibrary>`); each raw element is a pointer to a core handle.
impl CoreArrayProvider for TypeLibrary {
    type Raw = *mut BNTypeLibrary;
    type Context = ();
    type Wrapped<'a> = &'a Self;
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for TypeLibrary {
    // Frees the core-allocated list of type library handles.
    unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
        BNFreeTypeLibraryList(raw, count)
    }

    // Caller guarantees `raw` refers to a live element of a core array for
    // the duration of the borrow.
    unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
        Self::ref_from_raw(raw)
    }
}
|
||||||
1433
src/types.rs
1433
src/types.rs
File diff suppressed because it is too large
Load Diff
272
src/update.rs
Normal file
272
src/update.rs
Normal file
@@ -0,0 +1,272 @@
|
|||||||
|
use core::{ffi, mem, ptr};
|
||||||
|
use std::time::{Duration, SystemTime, UNIX_EPOCH};
|
||||||
|
|
||||||
|
use binaryninjacore_sys::*;
|
||||||
|
|
||||||
|
use crate::rc::{Array, CoreArrayProvider, CoreArrayProviderInner};
|
||||||
|
use crate::string::BnString;
|
||||||
|
|
||||||
|
pub type UpdateResult = BNUpdateResult;
|
||||||
|
|
||||||
|
/// A named update channel reported by the core updater.
///
/// `#[repr(C)]` so instances can be reinterpreted from the FFI
/// `BNUpdateChannel` — assumes the field order matches the C struct;
/// confirm against the generated bindings if either side changes.
#[repr(C)]
pub struct UpdateChannel {
    pub name: BnString,
    pub description: BnString,
    pub latest_version: BnString,
    // NOTE: private zero-sized field prevents users from constructing their
    // own UpdateChannel; instances only ever come from the core.
    _lock: core::marker::PhantomData<()>,
}
|
||||||
|
|
||||||
|
impl UpdateChannel {
|
||||||
|
pub(crate) unsafe fn ref_from_raw(handle: &BNUpdateChannel) -> &Self {
|
||||||
|
mem::transmute(handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn all() -> Result<Array<UpdateChannel>, BnString> {
|
||||||
|
let mut count = 0;
|
||||||
|
let mut errors = ptr::null_mut();
|
||||||
|
let result = unsafe { BNGetUpdateChannels(&mut count, &mut errors) };
|
||||||
|
if !errors.is_null() {
|
||||||
|
Err(unsafe { BnString::from_raw(errors) })
|
||||||
|
} else {
|
||||||
|
assert!(!result.is_null());
|
||||||
|
Ok(unsafe { Array::new(result, count, ()) })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List of versions
|
||||||
|
pub fn versions(&self) -> Result<Array<UpdateVersion>, BnString> {
|
||||||
|
let mut count = 0;
|
||||||
|
let mut errors = ptr::null_mut();
|
||||||
|
let result =
|
||||||
|
unsafe { BNGetUpdateChannelVersions(self.name.as_ptr(), &mut count, &mut errors) };
|
||||||
|
if !errors.is_null() {
|
||||||
|
Err(unsafe { BnString::from_raw(errors) })
|
||||||
|
} else {
|
||||||
|
assert!(!result.is_null());
|
||||||
|
Ok(unsafe { Array::new(result, count, ()) })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Latest version
|
||||||
|
pub fn latest_version(&self) -> Result<UpdateVersion, BnString> {
|
||||||
|
let last_version = &self.latest_version;
|
||||||
|
let versions = self.versions()?;
|
||||||
|
for version in &versions {
|
||||||
|
if &version.version == last_version {
|
||||||
|
return Ok(version.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
panic!();
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether updates are available
|
||||||
|
pub fn updates_available(&self) -> Result<bool, BnString> {
|
||||||
|
let mut errors = ptr::null_mut();
|
||||||
|
let result = unsafe {
|
||||||
|
BNAreUpdatesAvailable(
|
||||||
|
self.name.as_ptr(),
|
||||||
|
ptr::null_mut(),
|
||||||
|
ptr::null_mut(),
|
||||||
|
&mut errors,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
if !errors.is_null() {
|
||||||
|
Err(unsafe { BnString::from_raw(errors) })
|
||||||
|
} else {
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn update_to_latest(&self) -> Result<UpdateResult, BnString> {
|
||||||
|
let mut errors = ptr::null_mut();
|
||||||
|
let result = unsafe {
|
||||||
|
BNUpdateToLatestVersion(
|
||||||
|
self.name.as_ptr(),
|
||||||
|
&mut errors,
|
||||||
|
Some(cb_progress_nop),
|
||||||
|
ptr::null_mut(),
|
||||||
|
)
|
||||||
|
};
|
||||||
|
if !errors.is_null() {
|
||||||
|
Err(unsafe { BnString::from_raw(errors) })
|
||||||
|
} else {
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn update_to_latest_with_progress<F>(
|
||||||
|
&self,
|
||||||
|
mut progress: F,
|
||||||
|
) -> Result<UpdateResult, BnString>
|
||||||
|
where
|
||||||
|
F: FnMut(u64, u64) -> bool,
|
||||||
|
{
|
||||||
|
let mut errors = ptr::null_mut();
|
||||||
|
let result = unsafe {
|
||||||
|
BNUpdateToLatestVersion(
|
||||||
|
self.name.as_ptr(),
|
||||||
|
&mut errors,
|
||||||
|
Some(cb_progress::<F>),
|
||||||
|
&mut progress as *mut _ as *mut ffi::c_void,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
if !errors.is_null() {
|
||||||
|
Err(unsafe { BnString::from_raw(errors) })
|
||||||
|
} else {
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn update(&self, version: &UpdateVersion) -> Result<UpdateResult, BnString> {
|
||||||
|
let mut errors = ptr::null_mut();
|
||||||
|
let result = unsafe {
|
||||||
|
BNUpdateToVersion(
|
||||||
|
self.name.as_ptr(),
|
||||||
|
version.version.as_ptr(),
|
||||||
|
&mut errors,
|
||||||
|
Some(cb_progress_nop),
|
||||||
|
ptr::null_mut(),
|
||||||
|
)
|
||||||
|
};
|
||||||
|
if !errors.is_null() {
|
||||||
|
Err(unsafe { BnString::from_raw(errors) })
|
||||||
|
} else {
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn update_with_progress<F>(
|
||||||
|
&self,
|
||||||
|
version: &UpdateVersion,
|
||||||
|
mut progress: F,
|
||||||
|
) -> Result<UpdateResult, BnString>
|
||||||
|
where
|
||||||
|
F: FnMut(u64, u64) -> bool,
|
||||||
|
{
|
||||||
|
let mut errors = ptr::null_mut();
|
||||||
|
let result = unsafe {
|
||||||
|
BNUpdateToVersion(
|
||||||
|
self.name.as_ptr(),
|
||||||
|
version.version.as_ptr(),
|
||||||
|
&mut errors,
|
||||||
|
Some(cb_progress::<F>),
|
||||||
|
&mut progress as *mut _ as *mut ffi::c_void,
|
||||||
|
)
|
||||||
|
};
|
||||||
|
if !errors.is_null() {
|
||||||
|
Err(unsafe { BnString::from_raw(errors) })
|
||||||
|
} else {
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lets `UpdateChannel` values be yielded from core-allocated lists
// (`Array<UpdateChannel>`); raw elements are the FFI struct by value.
impl CoreArrayProvider for UpdateChannel {
    type Raw = BNUpdateChannel;
    type Context = ();
    type Wrapped<'a> = &'a Self;
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for UpdateChannel {
    // Frees the core-allocated channel list.
    unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
        BNFreeUpdateChannelList(raw, count)
    }

    // Caller guarantees `raw` refers to a live element of a core array for
    // the duration of the borrow.
    unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
        UpdateChannel::ref_from_raw(raw)
    }
}
|
||||||
|
|
||||||
|
/// A single version available on an update channel.
///
/// `#[repr(C)]` so instances can be reinterpreted from the FFI
/// `BNUpdateVersion` — assumes the field order matches the C struct;
/// confirm against the generated bindings if either side changes.
#[repr(C)]
#[derive(Clone)]
pub struct UpdateVersion {
    pub version: BnString,
    pub notes: BnString,
    // Seconds since the Unix epoch; accessed via `time`/`set_time`.
    time: u64,
    // NOTE: private zero-sized field prevents users from constructing their
    // own UpdateVersion; instances only ever come from the core.
    _lock: core::marker::PhantomData<()>,
}
|
||||||
|
|
||||||
|
impl UpdateVersion {
|
||||||
|
pub(crate) unsafe fn ref_from_raw(handle: &BNUpdateVersion) -> &Self {
|
||||||
|
mem::transmute(handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn time(&self) -> SystemTime {
|
||||||
|
UNIX_EPOCH + Duration::from_secs(self.time)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_time(&mut self, time: SystemTime) {
|
||||||
|
let epoch = time.duration_since(UNIX_EPOCH).unwrap();
|
||||||
|
self.time = epoch.as_secs();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lets `UpdateVersion` values be yielded from core-allocated lists
// (`Array<UpdateVersion>`); raw elements are the FFI struct by value.
impl CoreArrayProvider for UpdateVersion {
    type Raw = BNUpdateVersion;
    type Context = ();
    type Wrapped<'a> = &'a Self;
}
|
||||||
|
|
||||||
|
unsafe impl CoreArrayProviderInner for UpdateVersion {
    // Frees the core-allocated version list.
    unsafe fn free(raw: *mut Self::Raw, count: usize, _context: &Self::Context) {
        BNFreeUpdateChannelVersionList(raw, count)
    }

    // Caller guarantees `raw` refers to a live element of a core array for
    // the duration of the borrow.
    unsafe fn wrap_raw<'a>(raw: &'a Self::Raw, _context: &'a Self::Context) -> Self::Wrapped<'a> {
        UpdateVersion::ref_from_raw(raw)
    }
}
|
||||||
|
|
||||||
|
/// Queries whether automatic updates are enabled.
pub fn are_auto_updates_enabled() -> bool {
    unsafe { BNAreAutoUpdatesEnabled() }
}
|
||||||
|
|
||||||
|
/// Enables or disables automatic updates.
pub fn set_auto_updates_enabled(enabled: bool) {
    unsafe { BNSetAutoUpdatesEnabled(enabled) }
}
|
||||||
|
|
||||||
|
/// Returns the time since updates were last checked.
///
/// NOTE(review): the FFI name (`BNGetTimeSinceLastUpdateCheck`) suggests an
/// elapsed duration, not an absolute timestamp; the unit (presumably
/// seconds) is not visible here — confirm against the core documentation.
pub fn get_time_since_last_update_check() -> u64 {
    unsafe { BNGetTimeSinceLastUpdateCheck() }
}
|
||||||
|
|
||||||
|
/// Whether an update has been downloaded and is awaiting installation.
pub fn is_update_installation_pending() -> bool {
    unsafe { BNIsUpdateInstallationPending() }
}
|
||||||
|
|
||||||
|
/// installs any pending updates
|
||||||
|
pub fn install_pending_update() -> Result<(), BnString> {
|
||||||
|
let mut errors = ptr::null_mut();
|
||||||
|
unsafe { BNInstallPendingUpdate(&mut errors) };
|
||||||
|
if !errors.is_null() {
|
||||||
|
Err(unsafe { BnString::from_raw(errors) })
|
||||||
|
} else {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Notifies the core that an update check has just been performed.
pub fn updates_checked() {
    unsafe { BNUpdatesChecked() }
}
|
||||||
|
|
||||||
|
/// No-op progress callback used when the caller supplies no closure; always
/// returns `true`.
unsafe extern "C" fn cb_progress_nop(
    _ctxt: *mut ::std::os::raw::c_void,
    _progress: u64,
    _total: u64,
) -> bool {
    true
}
|
||||||
|
|
||||||
|
/// Trampoline that forwards core progress callbacks to a Rust closure.
///
/// SAFETY: `ctxt` must point to a live `F` (callers pass
/// `&mut progress as *mut _ as *mut c_void`) and must not outlive the
/// closure it points to.
unsafe extern "C" fn cb_progress<F: FnMut(u64, u64) -> bool>(
    ctxt: *mut ::std::os::raw::c_void,
    progress: u64,
    total: u64,
) -> bool {
    let ctxt: &mut F = &mut *(ctxt as *mut F);
    ctxt(progress, total)
}
|
||||||
Reference in New Issue
Block a user