Bug 1826304 - Update base64 in some in-tree crates. r=glandium,webdriver-reviewers,necko-reviewers,supply-chain-reviewers,jesup,jgraham,emilio

Patch the old base64 0.13 dependency to 0.21 to avoid introducing a duplicate dependency.

Differential Revision: https://phabricator.services.mozilla.com/D175116

parent ac3a6c0456
commit 0277d1fb8f

49 changed files with 4753 additions and 4608 deletions
							
								
								
									
 Cargo.lock | 31 (generated)
									
									
									
								
							|  | @ -324,7 +324,7 @@ version = "0.4.0-alpha.12" | |||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "c14dd8025f8c43525d3bcb5fba75838095ddfe99f2284a72cd1b2a9380c47ae2" | ||||
| dependencies = [ | ||||
|  "base64", | ||||
|  "base64 0.13.999", | ||||
|  "bitflags 1.3.2", | ||||
|  "cfg-if 1.0.0", | ||||
|  "core-foundation", | ||||
|  | @ -366,9 +366,16 @@ version = "1.1.0" | |||
| 
 | ||||
| [[package]] | ||||
| name = "base64" | ||||
| version = "0.13.1" | ||||
| version = "0.13.999" | ||||
| dependencies = [ | ||||
|  "base64 0.21.0", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "base64" | ||||
| version = "0.21.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" | ||||
| checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "bench-collections-gtest" | ||||
|  | @ -643,7 +650,7 @@ checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" | |||
| name = "cert_storage" | ||||
| version = "0.0.1" | ||||
| dependencies = [ | ||||
|  "base64", | ||||
|  "base64 0.21.0", | ||||
|  "byteorder", | ||||
|  "crossbeam-utils", | ||||
|  "cstr", | ||||
|  | @ -1974,7 +1981,7 @@ dependencies = [ | |||
| name = "geckodriver" | ||||
| version = "0.33.0" | ||||
| dependencies = [ | ||||
|  "base64", | ||||
|  "base64 0.21.0", | ||||
|  "chrono", | ||||
|  "clap", | ||||
|  "hyper", | ||||
|  | @ -2402,7 +2409,7 @@ version = "0.3.8" | |||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" | ||||
| dependencies = [ | ||||
|  "base64", | ||||
|  "base64 0.13.999", | ||||
|  "bitflags 1.3.2", | ||||
|  "bytes", | ||||
|  "headers-core", | ||||
|  | @ -2484,7 +2491,7 @@ dependencies = [ | |||
| name = "http3server" | ||||
| version = "0.1.1" | ||||
| dependencies = [ | ||||
|  "base64", | ||||
|  "base64 0.21.0", | ||||
|  "bindgen 0.64.0", | ||||
|  "cfg-if 1.0.0", | ||||
|  "http", | ||||
|  | @ -4076,7 +4083,7 @@ version = "1.3.1" | |||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "bd39bc6cdc9355ad1dc5eeedefee696bb35c34caf21768741e81826c0bbd7225" | ||||
| dependencies = [ | ||||
|  "base64", | ||||
|  "base64 0.13.999", | ||||
|  "indexmap", | ||||
|  "line-wrap", | ||||
|  "serde", | ||||
|  | @ -4106,7 +4113,7 @@ version = "0.9.1" | |||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "c574f8eb821cee725c8e662e5881e0e94f40838af25be4035e15b5c4d960df3e" | ||||
| dependencies = [ | ||||
|  "base64", | ||||
|  "base64 0.13.999", | ||||
|  "byteorder", | ||||
|  "getrandom", | ||||
|  "serde", | ||||
|  | @ -4421,7 +4428,7 @@ version = "0.8.0" | |||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "300a51053b1cb55c80b7a9fde4120726ddf25ca241a1cbb926626f62fb136bff" | ||||
| dependencies = [ | ||||
|  "base64", | ||||
|  "base64 0.13.999", | ||||
|  "bitflags 1.3.2", | ||||
|  "serde", | ||||
| ] | ||||
|  | @ -5074,7 +5081,7 @@ name = "sync-guid" | |||
| version = "0.1.0" | ||||
| source = "git+https://github.com/mozilla/application-services?rev=86c84c217036c12283d19368867323a66bf35883#86c84c217036c12283d19368867323a66bf35883" | ||||
| dependencies = [ | ||||
|  "base64", | ||||
|  "base64 0.13.999", | ||||
|  "rand", | ||||
|  "rusqlite", | ||||
|  "serde", | ||||
|  | @ -5937,7 +5944,7 @@ version = "0.3.100" | |||
| name = "webdriver" | ||||
| version = "0.48.0" | ||||
| dependencies = [ | ||||
|  "base64", | ||||
|  "base64 0.13.999", | ||||
|  "bytes", | ||||
|  "cookie", | ||||
|  "http", | ||||
|  |  | |||
|  | @ -115,6 +115,9 @@ parking_lot = { path = "build/rust/parking_lot" } | |||
| # Override tinyvec with smallvec | ||||
| tinyvec = { path = "build/rust/tinyvec" } | ||||
| 
 | ||||
| # Patch base64 0.13 to 0.21 | ||||
| base64 = { path = "build/rust/base64" } | ||||
| 
 | ||||
| # Patch wasi 0.10 to 0.11 | ||||
| wasi = { path = "build/rust/wasi" } | ||||
| 
 | ||||
|  |  | |||
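The override added above points the `base64` crate name at a local shim rather than pulling a second copy from crates.io: any in-tree crate that still declares `base64 = "0.13"` resolves to `build/rust/base64` (version 0.13.999, defined below), whose 0.13-style free functions forward to the real base64 0.21. A minimal sketch of what that means for an unmigrated caller; the function below is illustrative and not taken from this patch:

```rust
// Hypothetical crate that still declares `base64 = "0.13"` in its Cargo.toml.
// With the path override in place it links against the 0.13.999 shim, whose
// free functions wrap the base64 0.21 engines, so this compiles unchanged.
fn roundtrip(raw: &[u8]) -> Vec<u8> {
    let encoded = base64::encode(raw); // shim forwards to STANDARD.encode(raw)
    base64::decode(&encoded).expect("valid base64") // shim forwards to STANDARD.decode(...)
}
```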
							
								
								
									
 build/rust/base64/Cargo.toml | 11 (new file)
									
								
							|  | @ -0,0 +1,11 @@ | |||
| [package] | ||||
| name = "base64" | ||||
| version = "0.13.999" | ||||
| edition = "2018" | ||||
| license = "MPL-2.0" | ||||
| 
 | ||||
| [lib] | ||||
| path = "lib.rs" | ||||
| 
 | ||||
| [dependencies.base64] | ||||
| version = "0.21.0" | ||||
							
								
								
									
 build/rust/base64/lib.rs | 42 (new file)
									
								
							|  | @ -0,0 +1,42 @@ | |||
| /* This Source Code Form is subject to the terms of the Mozilla Public
 | ||||
|  * License, v. 2.0. If a copy of the MPL was not distributed with this | ||||
|  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 | ||||
| 
 | ||||
| pub use base64::engine::general_purpose::*; | ||||
| pub use base64::DecodeError; | ||||
| use base64::Engine; | ||||
| 
 | ||||
| // Re-implement some of the 0.13 APIs on top of 0.21
 | ||||
| 
 | ||||
| pub fn decode<T: AsRef<[u8]>>(input: T) -> Result<Vec<u8>, DecodeError> { | ||||
|     STANDARD.decode(input) | ||||
| } | ||||
| 
 | ||||
| pub fn encode<T: AsRef<[u8]>>(input: T) -> String { | ||||
|     STANDARD.encode(input) | ||||
| } | ||||
| 
 | ||||
| pub fn decode_config<T: AsRef<[u8]>>( | ||||
|     input: T, | ||||
|     engine: GeneralPurpose, | ||||
| ) -> Result<Vec<u8>, DecodeError> { | ||||
|     engine.decode(input) | ||||
| } | ||||
| 
 | ||||
| pub fn encode_config<T: AsRef<[u8]>>(input: T, engine: GeneralPurpose) -> String { | ||||
|     engine.encode(input) | ||||
| } | ||||
| 
 | ||||
| pub fn encode_config_slice<T: AsRef<[u8]>>( | ||||
|     input: T, | ||||
|     engine: GeneralPurpose, | ||||
|     output: &mut [u8], | ||||
| ) -> usize { | ||||
|     engine | ||||
|         .encode_slice(input, output) | ||||
|         .expect("Output buffer too small") | ||||
| } | ||||
| 
 | ||||
| pub fn encode_config_buf<T: AsRef<[u8]>>(input: T, engine: GeneralPurpose, buf: &mut String) { | ||||
|     engine.encode_string(input, buf) | ||||
| } | ||||
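Because the shim re-exports `base64::engine::general_purpose::*`, the constant names that 0.13 callers already use (`STANDARD`, `STANDARD_NO_PAD`, `URL_SAFE`, `URL_SAFE_NO_PAD`) still resolve, now as 0.21 `GeneralPurpose` engines, so the old `*_config` call style keeps compiling through the wrappers above. An illustrative caller, not part of this patch:

```rust
// 0.13-style call site compiled against the shim: STANDARD_NO_PAD is now a
// GeneralPurpose engine re-exported by the lib.rs above, and decode_config
// simply forwards to its decode method.
fn decode_token(token: &str) -> Option<Vec<u8>> {
    base64::decode_config(token, base64::STANDARD_NO_PAD).ok()
}
```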
|  | @ -13,7 +13,7 @@ neqo-qpack = { tag = "v0.6.4", git = "https://github.com/mozilla/neqo" } | |||
| mio = "0.6.17" | ||||
| mio-extras = "2.0.5" | ||||
| log = "0.4.0" | ||||
| base64 = "0.13" | ||||
| base64 = "0.21" | ||||
| cfg-if = "1.0" | ||||
| http = "0.2.8" | ||||
| hyper = { version = "0.14", features = ["full"] } | ||||
|  |  | |||
|  | @ -6,6 +6,7 @@ | |||
| 
 | ||||
| #![deny(warnings)] | ||||
| 
 | ||||
| use base64::prelude::*; | ||||
| use neqo_common::{event::Provider, qdebug, qinfo, qtrace, Datagram, Header}; | ||||
| use neqo_crypto::{generate_ech_keys, init_db, AllowZeroRtt, AntiReplay}; | ||||
| use neqo_http3::{ | ||||
|  | @ -50,8 +51,6 @@ use std::hash::{Hash, Hasher}; | |||
| use std::mem; | ||||
| use std::net::SocketAddr; | ||||
| 
 | ||||
| extern crate base64; | ||||
| 
 | ||||
| const MAX_TABLE_SIZE: u64 = 65536; | ||||
| const MAX_BLOCKED_STREAMS: u16 = 10; | ||||
| const PROTOCOLS: &[&str] = &["h3-29", "h3"]; | ||||
|  | @ -1094,7 +1093,7 @@ impl ServersRunner { | |||
|             self.hosts[2].port(), | ||||
|             self.hosts[3].port(), | ||||
|             self.hosts[4].port(), | ||||
|             base64::encode(&self.ech_config) | ||||
|             BASE64_STANDARD.encode(&self.ech_config) | ||||
|         ); | ||||
|         self.poll | ||||
|             .register(&self.timer, TIMER_TOKEN, Ready::readable(), PollOpt::edge()) | ||||
|  |  | |||
|  | @ -5,7 +5,7 @@ authors = ["Dana Keeler <dkeeler@mozilla.com>", "Mark Goodwin <mgoodwin@mozilla. | |||
| license = "MPL-2.0" | ||||
| 
 | ||||
| [dependencies] | ||||
| base64 = "0.13" | ||||
| base64 = "0.21.0" | ||||
| byteorder = "1.2.7" | ||||
| crossbeam-utils = "0.8" | ||||
| cstr = "0.2" | ||||
|  |  | |||
|  | @ -25,6 +25,7 @@ extern crate tempfile; | |||
| extern crate wr_malloc_size_of; | ||||
| use wr_malloc_size_of as malloc_size_of; | ||||
| 
 | ||||
| use base64::prelude::*; | ||||
| use byteorder::{LittleEndian, NetworkEndian, ReadBytesExt, WriteBytesExt}; | ||||
| use crossbeam_utils::atomic::AtomicCell; | ||||
| use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; | ||||
|  | @ -228,12 +229,12 @@ impl SecurityState { | |||
|             // errors and attempt to continue.
 | ||||
|             // Check if we have a new DN
 | ||||
|             if leading_char != '\t' && leading_char != ' ' { | ||||
|                 if let Ok(decoded_dn) = base64::decode(&l) { | ||||
|                 if let Ok(decoded_dn) = BASE64_STANDARD.decode(&l) { | ||||
|                     dn = Some(decoded_dn); | ||||
|                 } | ||||
|                 continue; | ||||
|             } | ||||
|             let l_sans_prefix = match base64::decode(&l[1..]) { | ||||
|             let l_sans_prefix = match BASE64_STANDARD.decode(&l[1..]) { | ||||
|                 Ok(decoded) => decoded, | ||||
|                 Err(_) => continue, | ||||
|             }; | ||||
|  | @ -484,7 +485,7 @@ impl SecurityState { | |||
|             Vec::with_capacity(size_of::<u8>() + coverage_entries.len() * COVERAGE_V1_ENTRY_BYTES); | ||||
|         coverage_bytes.push(COVERAGE_SERIALIZATION_VERSION); | ||||
|         for (b64_log_id, min_t, max_t) in coverage_entries { | ||||
|             let log_id = match base64::decode(&b64_log_id) { | ||||
|             let log_id = match BASE64_STANDARD.decode(&b64_log_id) { | ||||
|                 Ok(log_id) if log_id.len() == 32 => log_id, | ||||
|                 _ => { | ||||
|                     warn!("malformed log ID - skipping: {}", b64_log_id); | ||||
|  | @ -510,7 +511,7 @@ impl SecurityState { | |||
|         ); | ||||
|         enrollment_bytes.push(ENROLLMENT_SERIALIZATION_VERSION); | ||||
|         for b64_issuer_id in enrolled_issuers { | ||||
|             let issuer_id = match base64::decode(&b64_issuer_id) { | ||||
|             let issuer_id = match BASE64_STANDARD.decode(&b64_issuer_id) { | ||||
|                 Ok(issuer_id) if issuer_id.len() == 32 => issuer_id, | ||||
|                 _ => { | ||||
|                     warn!("malformed issuer ID - skipping: {}", b64_issuer_id); | ||||
|  | @ -727,14 +728,14 @@ impl SecurityState { | |||
|         )?; | ||||
| 
 | ||||
|         for (cert_der_base64, subject_base64, trust) in certs { | ||||
|             let cert_der = match base64::decode(&cert_der_base64) { | ||||
|             let cert_der = match BASE64_STANDARD.decode(&cert_der_base64) { | ||||
|                 Ok(cert_der) => cert_der, | ||||
|                 Err(e) => { | ||||
|                     warn!("error base64-decoding cert - skipping: {}", e); | ||||
|                     continue; | ||||
|                 } | ||||
|             }; | ||||
|             let subject = match base64::decode(&subject_base64) { | ||||
|             let subject = match BASE64_STANDARD.decode(&subject_base64) { | ||||
|                 Ok(subject) => subject, | ||||
|                 Err(e) => { | ||||
|                     warn!("error base64-decoding subject - skipping: {}", e); | ||||
|  | @ -786,7 +787,7 @@ impl SecurityState { | |||
|         let reader = env_and_store.env.read()?; | ||||
| 
 | ||||
|         for hash in hashes_base64 { | ||||
|             let hash = match base64::decode(&hash) { | ||||
|             let hash = match BASE64_STANDARD.decode(&hash) { | ||||
|                 Ok(hash) => hash, | ||||
|                 Err(e) => { | ||||
|                     warn!("error decoding hash - ignoring: {}", e); | ||||
|  | @ -1072,8 +1073,8 @@ impl EncodedSecurityState { | |||
|     } | ||||
| 
 | ||||
|     fn key(&self) -> Result<Vec<u8>, SecurityStateError> { | ||||
|         let key_part_1 = base64::decode(&self.key_part_1_base64)?; | ||||
|         let key_part_2 = base64::decode(&self.key_part_2_base64)?; | ||||
|         let key_part_1 = BASE64_STANDARD.decode(&self.key_part_1_base64)?; | ||||
|         let key_part_2 = BASE64_STANDARD.decode(&self.key_part_2_base64)?; | ||||
|         Ok(make_key!(self.prefix, &key_part_1, &key_part_2)) | ||||
|     } | ||||
| 
 | ||||
|  |  | |||
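The direct migrations in this patch all follow the pattern visible in the hunk above: bring the 0.21 prelude (or `Engine` plus a named engine constant) into scope and call the method on `BASE64_STANDARD` instead of the removed free functions. A condensed before/after sketch, illustrative rather than copied from any single file:

```rust
use base64::prelude::*; // BASE64_STANDARD plus the Engine trait

// base64 0.13:  base64::decode(line)
// base64 0.21:
fn decode_line(line: &str) -> Result<Vec<u8>, base64::DecodeError> {
    BASE64_STANDARD.decode(line)
}
```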
|  | @ -187,7 +187,7 @@ function run_test() { | |||
|     // revocations.txt is revoked
 | ||||
|     // subject: MCsxKTAnBgNVBAMMIEVFIFJldm9rZWQgQnkgU3ViamVjdCBhbmQgUHViS2V5
 | ||||
|     // (CN=EE Revoked By Subject and PubKey)
 | ||||
|     // pubkeyhash: VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8 (this is the
 | ||||
|     // pubkeyhash: VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8= (this is the
 | ||||
|     // shared RSA SPKI)
 | ||||
|     file = "test_onecrl/ee-revoked-by-subject-and-pubkey.pem"; | ||||
|     await verify_cert(file, SEC_ERROR_REVOKED_CERTIFICATE); | ||||
|  |  | |||
|  | @ -33,7 +33,7 @@ MBwxGjAYBgNVBAMMEVRlc3QgSW50ZXJtZWRpYXRl | |||
| # subject is base-64 encoded subject DN "CN=EE Revoked By Subject and PubKey" | ||||
| # pubKeyHash is the base-64 encoded sha256 hash of the shared RSA SPKI | ||||
| MCsxKTAnBgNVBAMMIEVFIFJldm9rZWQgQnkgU3ViamVjdCBhbmQgUHViS2V5 | ||||
| 	VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8 | ||||
| 	VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8= | ||||
| # and some more data to ensure that mixed items don't cause parsing failure | ||||
| a DN | ||||
|  a serial | ||||
|  |  | |||
|  | @ -63,6 +63,12 @@ the environment's terminal information when asked. Does its stated purpose and | |||
| no more. | ||||
| """ | ||||
| 
 | ||||
| [[audits.bytecode-alliance.audits.base64]] | ||||
| who = "Pat Hickey <phickey@fastly.com>" | ||||
| criteria = "safe-to-deploy" | ||||
| version = "0.21.0" | ||||
| notes = "This crate has no dependencies, no build.rs, and contains no unsafe code." | ||||
| 
 | ||||
| [[audits.bytecode-alliance.audits.block-buffer]] | ||||
| who = "Benjamin Bouvier <public@benj.me>" | ||||
| criteria = "safe-to-deploy" | ||||
|  |  | |||
|  | @ -21,7 +21,7 @@ license = "MPL-2.0" | |||
| repository = "https://hg.mozilla.org/mozilla-central/file/tip/testing/geckodriver" | ||||
| 
 | ||||
| [dependencies] | ||||
| base64 = "0.13" | ||||
| base64 = "0.21" | ||||
| chrono = "0.4.6" | ||||
| clap = { version = "~3.1", default-features = false, features = ["cargo", "std", "suggestions", "wrap_help"] } | ||||
| hyper = "0.14" | ||||
|  |  | |||
|  | @ -391,6 +391,8 @@ mod tests { | |||
|     use super::set_prefs; | ||||
|     use crate::browser::read_marionette_port; | ||||
|     use crate::capabilities::{FirefoxOptions, ProfileType}; | ||||
|     use base64::prelude::BASE64_STANDARD; | ||||
|     use base64::Engine; | ||||
|     use mozprofile::preferences::{Pref, PrefValue}; | ||||
|     use mozprofile::profile::Profile; | ||||
|     use serde_json::{Map, Value}; | ||||
|  | @ -403,7 +405,7 @@ mod tests { | |||
|         let mut profile_data = Vec::with_capacity(1024); | ||||
|         let mut profile = File::open("src/tests/profile.zip").unwrap(); | ||||
|         profile.read_to_end(&mut profile_data).unwrap(); | ||||
|         Value::String(base64::encode(&profile_data)) | ||||
|         Value::String(BASE64_STANDARD.encode(&profile_data)) | ||||
|     } | ||||
| 
 | ||||
|     // This is not a pretty test, mostly due to the nature of
 | ||||
|  |  | |||
|  | @ -5,6 +5,8 @@ | |||
| use crate::command::LogOptions; | ||||
| use crate::logging::Level; | ||||
| use crate::marionette::MarionetteSettings; | ||||
| use base64::prelude::BASE64_STANDARD; | ||||
| use base64::Engine; | ||||
| use mozdevice::AndroidStorageInput; | ||||
| use mozprofile::preferences::Pref; | ||||
| use mozprofile::profile::Profile; | ||||
|  | @ -555,7 +557,7 @@ impl FirefoxOptions { | |||
|             let profile_base64 = profile_json.as_str().ok_or_else(|| { | ||||
|                 WebDriverError::new(ErrorStatus::InvalidArgument, "Profile is not a string") | ||||
|             })?; | ||||
|             let profile_zip = &*base64::decode(profile_base64)?; | ||||
|             let profile_zip = &*BASE64_STANDARD.decode(profile_base64)?; | ||||
| 
 | ||||
|             // Create an emtpy profile directory
 | ||||
|             let profile = Profile::new(profile_root)?; | ||||
|  | @ -867,7 +869,7 @@ mod tests { | |||
|         let mut profile_data = Vec::with_capacity(1024); | ||||
|         let mut profile = File::open("src/tests/profile.zip").unwrap(); | ||||
|         profile.read_to_end(&mut profile_data).unwrap(); | ||||
|         Value::String(base64::encode(&profile_data)) | ||||
|         Value::String(BASE64_STANDARD.encode(&profile_data)) | ||||
|     } | ||||
| 
 | ||||
|     fn make_options( | ||||
|  |  | |||
|  | @ -3,6 +3,8 @@ | |||
|  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 | ||||
| 
 | ||||
| use crate::logging; | ||||
| use base64::prelude::BASE64_STANDARD; | ||||
| use base64::Engine; | ||||
| use hyper::Method; | ||||
| use serde::de::{self, Deserialize, Deserializer}; | ||||
| use serde_json::{self, Value}; | ||||
|  | @ -142,7 +144,9 @@ impl<'de> Deserialize<'de> for AddonInstallParameters { | |||
|                 temporary: data.temporary, | ||||
|             }, | ||||
|             Helper::Base64(ref mut data) => { | ||||
|                 let content = base64::decode(&data.addon).map_err(de::Error::custom)?; | ||||
|                 let content = BASE64_STANDARD | ||||
|                     .decode(&data.addon) | ||||
|                     .map_err(de::Error::custom)?; | ||||
| 
 | ||||
|                 let path = env::temp_dir() | ||||
|                     .as_path() | ||||
|  |  | |||
							
								
								
									
 third_party/rust/base64/.cargo-checksum.json | 2 (vendored)
									
									
								
							|  | @ -1 +1 @@ | |||
| {"files":{"Cargo.lock":"0ed67b0f1f68ba5e19475d679df711f1f2c421768462b58e327b5b3ae9111825","Cargo.toml":"ac3f54d7667cfc0baac784fcc6f0a6cc0da9b496af69917fe7abb95f7e3b4a36","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0dd882e53de11566d50f8e8e2d5a651bcf3fabee4987d70f306233cf39094ba7","README.md":"2810098d290f3df719e6f41ffca38bb954d0fe62d4e56905a9a2436c4784bebf","RELEASE-NOTES.md":"ef116f3596474c9920957f65e346d7d1954f194aa3458eac5b626781116c500b","benches/benchmarks.rs":"bc1f603c5aa87627a93eee71eaed64fbd767d013051bac00ea265c16fecb30b9","examples/base64.rs":"f397b8726df41fce0793a8c6ebe95d4651aa37ed746da305032f1e99d9c37235","examples/make_tables.rs":"392f51b3edb1b5a2c62b823337c7785a6b8535f39f09283b1913a5c68fb186bf","icon_CLion.svg":"cffa044ba75cb998ee3306991dc4a3755ec2f39ab95ddd4b74bc21988389020f","src/chunked_encoder.rs":"fba5ea5f1204b9bf11291ec3483bcb23d330101eb2f6635e33cd63e4de13b653","src/decode.rs":"ba67f53612c494281e2e4aae4350165d54ee827ea7e1170b8e02e2db81b55c6e","src/display.rs":"55f9cd05ba037b77689e2558fa174055fad5273959d1edce3a2289f94244fd5d","src/encode.rs":"8a0a6b71581b4c52c2844111a3611cf73522533452a27f5ef8d09eaa73594e2e","src/lib.rs":"c7b904fac8706bc4758c2763e7a43dc1edd99ed5641ac2355957f6aeff91eece","src/read/decoder.rs":"9a7b65e165f7aed6b007bf7436ac9ba9b03d3b03e9d5a1e16691874e21509ced","src/read/decoder_tests.rs":"aacb7624c33ed6b90e068ff9af6095c839b4088060b4c406c08dce25ce837f6d","src/read/mod.rs":"e0b714eda02d16b1ffa6f78fd09b2f963e01c881b1f7c17b39db4e904be5e746","src/tables.rs":"73ce100fd3f4917ec1e8d9277ff0b956cc2636b33145f101a7cf1a5a8b7bacc1","src/tests.rs":"202ddced9cf52205182c6202e583c4c4f929b9d225fd9d1ebdbfd389cb2df0ba","src/write/encoder.rs":"573f058a66f0a6af4215f5ae75a3e96b07a5e345975693abe30bd1e8ce2d235c","src/write/encoder_string_writer.rs":"3f9109585babe048230659f64973cb1633bbb2ed9de255177336260226127b81","src/write/encoder_tests.rs":"381d7c2871407157c36e909c928307ac0389b3d4504fb80607134e94ac59e68f","src/write/mod.rs":"1503b9457e4f5d2895b24136c3af893f0b7ce18dfe4de1096fc5d17f8d78e99b","tests/decode.rs":"da2cbd49b84e0d8b1d8a52136ba3d97cfb248920a45f9955db1e5bc5367218ce","tests/encode.rs":"5efb6904c36c6f899a05078e5c9be756fc58af1ee9940edfa8dea1ee53675364","tests/helpers.rs":"a76015e4a4e8f98213bdbaa592cd9574ccdc95a28e1b1f835a2753e09fa6037f","tests/tests.rs":"05753e5f1d4a6c75015a5342f9b5dc3073c00bdfe0a829a962f8723321c75549"},"package":"9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"} | ||||
| {"files":{"Cargo.lock":"ee9a902629f1a6cc9308158590fc298c628f323383c4fb9a5ab9fd51011b268e","Cargo.toml":"37ffe4d4bdbd21f5a1cc78596abf6e704e4131dbec830fcd6c8bec33d4caf76b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0dd882e53de11566d50f8e8e2d5a651bcf3fabee4987d70f306233cf39094ba7","README.md":"99e61de0bafd6985761b596f9aa970dee5b4d0cfbfc05cd6565b5ffa139cb34f","RELEASE-NOTES.md":"c8b9e21adecb3a89928cdfbe55d184cc234e0e6cf8f61fb8ebab48982b3a6f9c","benches/benchmarks.rs":"faf63c3d83fe1568927288cfcc7f9bd4bd15c6b531450c53d2c064386fc5c652","clippy.toml":"ee3dedc35eb156cbfbe836692846cd492f59acc7443dd43cc6d71f087d80051c","examples/base64.rs":"8c48673029aeeb1e06a2ecfd237acf8ef24349990e97f6d2c4d0fa2af36c94b3","icon_CLion.svg":"cffa044ba75cb998ee3306991dc4a3755ec2f39ab95ddd4b74bc21988389020f","src/alphabet.rs":"420b5e23da0702c401489c53721696c5d5f69631f4ca9462f4c5ef3bdc77114e","src/chunked_encoder.rs":"4dfad5b47da1c35deaaa6ed2bb1efa51d98d6d9a7ca85a37ef4a02dfa846e723","src/decode.rs":"c293bf40a821795909a458aa8d7e76005a46e6953eed7ea284da1334e117ae74","src/display.rs":"31bf3e19274a0b80dd8948a81ea535944f756ef5b88736124c940f5fe1e8c71c","src/encode.rs":"34c800de1576f425cc48fa7ed9486d7c925cf7215dfc0634d2349c97b5199595","src/engine/general_purpose/decode.rs":"be237ac84b6a1deafd335896959302b7cf9337868cd718ebf7d249ccdc43a84f","src/engine/general_purpose/decode_suffix.rs":"797729cc1d56e9786f65e765cc5bb9ab2799f9140db4b412b919512fd7542355","src/engine/general_purpose/mod.rs":"2c6fbe61fae32800d30be5dc5bde429b8a07a5f027d0d2d1227a24ed13b1b461","src/engine/mod.rs":"7cd78bb317074a6e5439e272e4943d59d6bd47b149ed76b52e6f75b45909ce52","src/engine/naive.rs":"dcebcc41a0f4a156dd516ae89824748f5a4eedeabfe8d92f6f5bd3a6d5ceb5fb","src/engine/tests.rs":"4a8ff2ab7700b49e5b33606a93af04a5f93b18ca48e760ab6ced6337ba3a4847","src/lib.rs":"b4699408a9356f88fd8a3aeffae97e54e7a249afe5d919ecf9d4092d1c8efde1","src/prelude.rs":"f82fcf5e31921060929f9e10efb2868ba7339b085ee76fc5e7077f6030fbb2cc","src/read/decoder.rs":"65f03af1f4eb8d466a9a800dc6414678195b4ac6c579cd747b5632eee219f5a4","src/read/decoder_tests.rs":"ebf40a5722a58dbe74f013a4163ab20f5ce42ceb4beaefede07562079d596604","src/read/mod.rs":"e0b714eda02d16b1ffa6f78fd09b2f963e01c881b1f7c17b39db4e904be5e746","src/tests.rs":"90cb9f8a1ccb7c4ddc4f8618208e0031fc97e0df0e5aa466d6a5cf45d25967d8","src/write/encoder.rs":"c889c853249220fe2ddaeb77ee6e2ee2945f7db88cd6658ef89ff71b81255ea8","src/write/encoder_string_writer.rs":"ac3702b1a846fd0664e78d2dd82c939073ca00577d3201a8f1fbe17a9bf85e70","src/write/encoder_tests.rs":"39572f11fdf63af47f13bb58be280221322c669504a1b4a30a9181fe465e0f90","src/write/mod.rs":"73cd98dadc9d712b3fefd9449d97e825e097397441b90588e0051e4d3b0911b9","tests/encode.rs":"072f079f0250d542ff964e8e72b7d13e2336fbee7395367ff737e140c38ac459","tests/tests.rs":"78efcf0dc4bb6ae52f7a91fcad89e44e4dce578224c36b4e6c1c306459be8500"},"package":"a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a"} | ||||
							
								
								
									
 third_party/rust/base64/Cargo.lock | 459 (generated, vendored)
									
									
								
							|  | @ -2,6 +2,12 @@ | |||
| # It is not intended for manual editing. | ||||
| version = 3 | ||||
| 
 | ||||
| [[package]] | ||||
| name = "anes" | ||||
| version = "0.1.6" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "ansi_term" | ||||
| version = "0.12.1" | ||||
|  | @ -17,20 +23,11 @@ version = "0.2.14" | |||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" | ||||
| dependencies = [ | ||||
|  "hermit-abi", | ||||
|  "hermit-abi 0.1.19", | ||||
|  "libc", | ||||
|  "winapi", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "autocfg" | ||||
| version = "0.1.8" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" | ||||
| dependencies = [ | ||||
|  "autocfg 1.1.0", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "autocfg" | ||||
| version = "1.1.0" | ||||
|  | @ -39,10 +36,12 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" | |||
| 
 | ||||
| [[package]] | ||||
| name = "base64" | ||||
| version = "0.13.1" | ||||
| version = "0.21.0" | ||||
| dependencies = [ | ||||
|  "criterion", | ||||
|  "rand", | ||||
|  "rstest", | ||||
|  "rstest_reuse", | ||||
|  "structopt", | ||||
| ] | ||||
| 
 | ||||
|  | @ -52,33 +51,12 @@ version = "1.3.2" | |||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "bstr" | ||||
| version = "0.2.17" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" | ||||
| dependencies = [ | ||||
|  "lazy_static", | ||||
|  "memchr", | ||||
|  "regex-automata", | ||||
|  "serde", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "bumpalo" | ||||
| version = "3.11.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "cast" | ||||
| version = "0.2.7" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "4c24dab4283a142afa2fdca129b80ad2c6284e073930f964c3a1293c225ee39a" | ||||
| dependencies = [ | ||||
|  "rustc_version", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "cast" | ||||
| version = "0.3.0" | ||||
|  | @ -91,6 +69,33 @@ version = "1.0.0" | |||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "ciborium" | ||||
| version = "0.2.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "b0c137568cc60b904a7724001b35ce2630fd00d5d84805fbb608ab89509d788f" | ||||
| dependencies = [ | ||||
|  "ciborium-io", | ||||
|  "ciborium-ll", | ||||
|  "serde", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "ciborium-io" | ||||
| version = "0.2.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "346de753af073cc87b52b2083a506b38ac176a44cfb05497b622e27be899b369" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "ciborium-ll" | ||||
| version = "0.2.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "213030a2b5a4e0c0892b6652260cf6ccac84827b83a85a534e178e3906c4cf1b" | ||||
| dependencies = [ | ||||
|  "ciborium-io", | ||||
|  "half", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "clap" | ||||
| version = "2.34.0" | ||||
|  | @ -101,32 +106,45 @@ dependencies = [ | |||
|  "atty", | ||||
|  "bitflags", | ||||
|  "strsim", | ||||
|  "textwrap", | ||||
|  "textwrap 0.11.0", | ||||
|  "unicode-width", | ||||
|  "vec_map", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "cloudabi" | ||||
| version = "0.0.3" | ||||
| name = "clap" | ||||
| version = "3.2.23" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" | ||||
| checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5" | ||||
| dependencies = [ | ||||
|  "bitflags", | ||||
|  "clap_lex", | ||||
|  "indexmap", | ||||
|  "textwrap 0.16.0", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "clap_lex" | ||||
| version = "0.2.4" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" | ||||
| dependencies = [ | ||||
|  "os_str_bytes", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "criterion" | ||||
| version = "0.3.2" | ||||
| version = "0.4.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "63f696897c88b57f4ffe3c69d8e1a0613c7d0e6c4833363c8560fbde9c47b966" | ||||
| checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb" | ||||
| dependencies = [ | ||||
|  "anes", | ||||
|  "atty", | ||||
|  "cast 0.2.7", | ||||
|  "clap", | ||||
|  "cast", | ||||
|  "ciborium", | ||||
|  "clap 3.2.23", | ||||
|  "criterion-plot", | ||||
|  "csv", | ||||
|  "itertools 0.9.0", | ||||
|  "itertools", | ||||
|  "lazy_static", | ||||
|  "num-traits", | ||||
|  "oorandom", | ||||
|  | @ -142,12 +160,12 @@ dependencies = [ | |||
| 
 | ||||
| [[package]] | ||||
| name = "criterion-plot" | ||||
| version = "0.4.5" | ||||
| version = "0.5.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" | ||||
| checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" | ||||
| dependencies = [ | ||||
|  "cast 0.3.0", | ||||
|  "itertools 0.10.5", | ||||
|  "cast", | ||||
|  "itertools", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
|  | @ -173,11 +191,11 @@ dependencies = [ | |||
| 
 | ||||
| [[package]] | ||||
| name = "crossbeam-epoch" | ||||
| version = "0.9.11" | ||||
| version = "0.9.13" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348" | ||||
| checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" | ||||
| dependencies = [ | ||||
|  "autocfg 1.1.0", | ||||
|  "autocfg", | ||||
|  "cfg-if", | ||||
|  "crossbeam-utils", | ||||
|  "memoffset", | ||||
|  | @ -186,35 +204,13 @@ dependencies = [ | |||
| 
 | ||||
| [[package]] | ||||
| name = "crossbeam-utils" | ||||
| version = "0.8.12" | ||||
| version = "0.8.14" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac" | ||||
| checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" | ||||
| dependencies = [ | ||||
|  "cfg-if", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "csv" | ||||
| version = "1.1.6" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "22813a6dc45b335f9bade10bf7271dc477e81113e89eb251a0bc2a8a81c536e1" | ||||
| dependencies = [ | ||||
|  "bstr", | ||||
|  "csv-core", | ||||
|  "itoa 0.4.8", | ||||
|  "ryu", | ||||
|  "serde", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "csv-core" | ||||
| version = "0.1.10" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" | ||||
| dependencies = [ | ||||
|  "memchr", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "either" | ||||
| version = "1.8.0" | ||||
|  | @ -222,10 +218,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" | |||
| checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "fuchsia-cprng" | ||||
| version = "0.1.1" | ||||
| name = "getrandom" | ||||
| version = "0.2.8" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" | ||||
| checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" | ||||
| dependencies = [ | ||||
|  "cfg-if", | ||||
|  "libc", | ||||
|  "wasi", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "half" | ||||
| version = "1.8.2" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "hashbrown" | ||||
| version = "0.12.3" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "heck" | ||||
|  | @ -246,12 +259,22 @@ dependencies = [ | |||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "itertools" | ||||
| version = "0.9.0" | ||||
| name = "hermit-abi" | ||||
| version = "0.2.6" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" | ||||
| checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" | ||||
| dependencies = [ | ||||
|  "either", | ||||
|  "libc", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "indexmap" | ||||
| version = "1.9.2" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" | ||||
| dependencies = [ | ||||
|  "autocfg", | ||||
|  "hashbrown", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
|  | @ -265,15 +288,9 @@ dependencies = [ | |||
| 
 | ||||
| [[package]] | ||||
| name = "itoa" | ||||
| version = "0.4.8" | ||||
| version = "1.0.5" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "itoa" | ||||
| version = "1.0.4" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" | ||||
| checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "js-sys" | ||||
|  | @ -292,9 +309,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" | |||
| 
 | ||||
| [[package]] | ||||
| name = "libc" | ||||
| version = "0.2.135" | ||||
| version = "0.2.139" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c" | ||||
| checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "log" | ||||
|  | @ -305,19 +322,13 @@ dependencies = [ | |||
|  "cfg-if", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "memchr" | ||||
| version = "2.5.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "memoffset" | ||||
| version = "0.6.5" | ||||
| version = "0.7.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" | ||||
| checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" | ||||
| dependencies = [ | ||||
|  "autocfg 1.1.0", | ||||
|  "autocfg", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
|  | @ -326,24 +337,24 @@ version = "0.2.15" | |||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" | ||||
| dependencies = [ | ||||
|  "autocfg 1.1.0", | ||||
|  "autocfg", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "num_cpus" | ||||
| version = "1.13.1" | ||||
| version = "1.15.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" | ||||
| checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" | ||||
| dependencies = [ | ||||
|  "hermit-abi", | ||||
|  "hermit-abi 0.2.6", | ||||
|  "libc", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "once_cell" | ||||
| version = "1.15.0" | ||||
| version = "1.17.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" | ||||
| checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "oorandom" | ||||
|  | @ -352,17 +363,45 @@ source = "registry+https://github.com/rust-lang/crates.io-index" | |||
| checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "plotters" | ||||
| version = "0.2.15" | ||||
| name = "os_str_bytes" | ||||
| version = "6.4.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "0d1685fbe7beba33de0330629da9d955ac75bd54f33d7b79f9a895590124f6bb" | ||||
| checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "plotters" | ||||
| version = "0.3.4" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97" | ||||
| dependencies = [ | ||||
|  "js-sys", | ||||
|  "num-traits", | ||||
|  "plotters-backend", | ||||
|  "plotters-svg", | ||||
|  "wasm-bindgen", | ||||
|  "web-sys", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "plotters-backend" | ||||
| version = "0.3.4" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "plotters-svg" | ||||
| version = "0.3.3" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f" | ||||
| dependencies = [ | ||||
|  "plotters-backend", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "ppv-lite86" | ||||
| version = "0.2.17" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "proc-macro-error" | ||||
| version = "1.0.4" | ||||
|  | @ -389,145 +428,67 @@ dependencies = [ | |||
| 
 | ||||
| [[package]] | ||||
| name = "proc-macro2" | ||||
| version = "1.0.47" | ||||
| version = "1.0.49" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" | ||||
| checksum = "57a8eca9f9c4ffde41714334dee777596264c7825420f521abc92b5b5deb63a5" | ||||
| dependencies = [ | ||||
|  "unicode-ident", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "quote" | ||||
| version = "1.0.21" | ||||
| version = "1.0.23" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" | ||||
| checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" | ||||
| dependencies = [ | ||||
|  "proc-macro2", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rand" | ||||
| version = "0.6.5" | ||||
| version = "0.8.5" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" | ||||
| checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" | ||||
| dependencies = [ | ||||
|  "autocfg 0.1.8", | ||||
|  "libc", | ||||
|  "rand_chacha", | ||||
|  "rand_core 0.4.2", | ||||
|  "rand_hc", | ||||
|  "rand_isaac", | ||||
|  "rand_jitter", | ||||
|  "rand_os", | ||||
|  "rand_pcg", | ||||
|  "rand_xorshift", | ||||
|  "winapi", | ||||
|  "rand_core", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rand_chacha" | ||||
| version = "0.1.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" | ||||
| dependencies = [ | ||||
|  "autocfg 0.1.8", | ||||
|  "rand_core 0.3.1", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rand_core" | ||||
| version = "0.3.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" | ||||
| checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" | ||||
| dependencies = [ | ||||
|  "rand_core 0.4.2", | ||||
|  "ppv-lite86", | ||||
|  "rand_core", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rand_core" | ||||
| version = "0.4.2" | ||||
| version = "0.6.4" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rand_hc" | ||||
| version = "0.1.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" | ||||
| checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" | ||||
| dependencies = [ | ||||
|  "rand_core 0.3.1", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rand_isaac" | ||||
| version = "0.1.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" | ||||
| dependencies = [ | ||||
|  "rand_core 0.3.1", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rand_jitter" | ||||
| version = "0.1.4" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" | ||||
| dependencies = [ | ||||
|  "libc", | ||||
|  "rand_core 0.4.2", | ||||
|  "winapi", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rand_os" | ||||
| version = "0.1.3" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" | ||||
| dependencies = [ | ||||
|  "cloudabi", | ||||
|  "fuchsia-cprng", | ||||
|  "libc", | ||||
|  "rand_core 0.4.2", | ||||
|  "rdrand", | ||||
|  "winapi", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rand_pcg" | ||||
| version = "0.1.2" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" | ||||
| dependencies = [ | ||||
|  "autocfg 0.1.8", | ||||
|  "rand_core 0.4.2", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rand_xorshift" | ||||
| version = "0.1.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" | ||||
| dependencies = [ | ||||
|  "rand_core 0.3.1", | ||||
|  "getrandom", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rayon" | ||||
| version = "1.5.3" | ||||
| version = "1.6.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d" | ||||
| checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7" | ||||
| dependencies = [ | ||||
|  "autocfg 1.1.0", | ||||
|  "crossbeam-deque", | ||||
|  "either", | ||||
|  "rayon-core", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rayon-core" | ||||
| version = "1.9.3" | ||||
| version = "1.10.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" | ||||
| checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3" | ||||
| dependencies = [ | ||||
|  "crossbeam-channel", | ||||
|  "crossbeam-deque", | ||||
|  | @ -535,35 +496,44 @@ dependencies = [ | |||
|  "num_cpus", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rdrand" | ||||
| version = "0.4.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" | ||||
| dependencies = [ | ||||
|  "rand_core 0.3.1", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "regex" | ||||
| version = "1.6.0" | ||||
| version = "1.7.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" | ||||
| checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a" | ||||
| dependencies = [ | ||||
|  "regex-syntax", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "regex-automata" | ||||
| version = "0.1.10" | ||||
| name = "regex-syntax" | ||||
| version = "0.6.28" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" | ||||
| checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "regex-syntax" | ||||
| version = "0.6.27" | ||||
| name = "rstest" | ||||
| version = "0.12.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" | ||||
| checksum = "d912f35156a3f99a66ee3e11ac2e0b3f34ac85a07e05263d05a7e2c8810d616f" | ||||
| dependencies = [ | ||||
|  "cfg-if", | ||||
|  "proc-macro2", | ||||
|  "quote", | ||||
|  "rustc_version", | ||||
|  "syn", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rstest_reuse" | ||||
| version = "0.3.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "b29d3117bce27ea307d1fb7ce12c64ba11b3fd04311a42d32bc5f0072e6e3d4d" | ||||
| dependencies = [ | ||||
|  "quote", | ||||
|  "rustc_version", | ||||
|  "syn", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "rustc_version" | ||||
|  | @ -576,9 +546,9 @@ dependencies = [ | |||
| 
 | ||||
| [[package]] | ||||
| name = "ryu" | ||||
| version = "1.0.11" | ||||
| version = "1.0.12" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" | ||||
| checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "same-file" | ||||
|  | @ -597,21 +567,24 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" | |||
| 
 | ||||
| [[package]] | ||||
| name = "semver" | ||||
| version = "1.0.14" | ||||
| version = "1.0.16" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4" | ||||
| checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "serde" | ||||
| version = "1.0.146" | ||||
| version = "1.0.152" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "6df50b7a60a0ad48e1b42eb38373eac8ff785d619fb14db917b4e63d5439361f" | ||||
| checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" | ||||
| dependencies = [ | ||||
|  "serde_derive", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "serde_derive" | ||||
| version = "1.0.146" | ||||
| version = "1.0.152" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "a714fd32ba1d66047ce7d53dabd809e9922d538f9047de13cc4cffca47b36205" | ||||
| checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" | ||||
| dependencies = [ | ||||
|  "proc-macro2", | ||||
|  "quote", | ||||
|  | @ -620,11 +593,11 @@ dependencies = [ | |||
| 
 | ||||
| [[package]] | ||||
| name = "serde_json" | ||||
| version = "1.0.87" | ||||
| version = "1.0.91" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "6ce777b7b150d76b9cf60d28b55f5847135a003f7d7350c6be7a773508ce7d45" | ||||
| checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" | ||||
| dependencies = [ | ||||
|  "itoa 1.0.4", | ||||
|  "itoa", | ||||
|  "ryu", | ||||
|  "serde", | ||||
| ] | ||||
|  | @ -641,7 +614,7 @@ version = "0.3.26" | |||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10" | ||||
| dependencies = [ | ||||
|  "clap", | ||||
|  "clap 2.34.0", | ||||
|  "lazy_static", | ||||
|  "structopt-derive", | ||||
| ] | ||||
|  | @ -661,9 +634,9 @@ dependencies = [ | |||
| 
 | ||||
| [[package]] | ||||
| name = "syn" | ||||
| version = "1.0.103" | ||||
| version = "1.0.107" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d" | ||||
| checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" | ||||
| dependencies = [ | ||||
|  "proc-macro2", | ||||
|  "quote", | ||||
|  | @ -679,6 +652,12 @@ dependencies = [ | |||
|  "unicode-width", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "textwrap" | ||||
| version = "0.16.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "tinytemplate" | ||||
| version = "1.2.1" | ||||
|  | @ -691,9 +670,9 @@ dependencies = [ | |||
| 
 | ||||
| [[package]] | ||||
| name = "unicode-ident" | ||||
| version = "1.0.5" | ||||
| version = "1.0.6" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" | ||||
| checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "unicode-segmentation" | ||||
|  | @ -730,6 +709,12 @@ dependencies = [ | |||
|  "winapi-util", | ||||
| ] | ||||
| 
 | ||||
| [[package]] | ||||
| name = "wasi" | ||||
| version = "0.11.0+wasi-snapshot-preview1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" | ||||
| 
 | ||||
| [[package]] | ||||
| name = "wasm-bindgen" | ||||
| version = "0.2.83" | ||||
|  |  | |||
							
								
								
									
 third_party/rust/base64/Cargo.toml | 23 (vendored)
									
									
								
							|  | @ -10,9 +10,10 @@ | |||
| # See Cargo.toml.orig for the original contents. | ||||
| 
 | ||||
| [package] | ||||
| edition = "2018" | ||||
| edition = "2021" | ||||
| rust-version = "1.57.0" | ||||
| name = "base64" | ||||
| version = "0.13.1" | ||||
| version = "0.21.0" | ||||
| authors = [ | ||||
|     "Alice Maz <alice@alicemaz.com>", | ||||
|     "Marshall Pierce <marshall@mpierce.org>", | ||||
|  | @ -28,24 +29,34 @@ keywords = [ | |||
|     "no_std", | ||||
| ] | ||||
| categories = ["encoding"] | ||||
| license = "MIT/Apache-2.0" | ||||
| license = "MIT OR Apache-2.0" | ||||
| repository = "https://github.com/marshallpierce/rust-base64" | ||||
| 
 | ||||
| [profile.bench] | ||||
| debug = true | ||||
| 
 | ||||
| [profile.test] | ||||
| opt-level = 3 | ||||
| 
 | ||||
| [[bench]] | ||||
| name = "benchmarks" | ||||
| harness = false | ||||
| 
 | ||||
| [dev-dependencies.criterion] | ||||
| version = "=0.3.2" | ||||
| version = "0.4.0" | ||||
| 
 | ||||
| [dev-dependencies.rand] | ||||
| version = "0.6.1" | ||||
| version = "0.8.5" | ||||
| features = ["small_rng"] | ||||
| 
 | ||||
| [dev-dependencies.rstest] | ||||
| version = "0.12.0" | ||||
| 
 | ||||
| [dev-dependencies.rstest_reuse] | ||||
| version = "0.3.0" | ||||
| 
 | ||||
| [dev-dependencies.structopt] | ||||
| version = "0.3" | ||||
| version = "0.3.26" | ||||
| 
 | ||||
| [features] | ||||
| alloc = [] | ||||
|  |  | |||
							
								
								
									
 third_party/rust/base64/README.md | 134 (vendored)
									
									
								
							|  | @ -1,7 +1,6 @@ | |||
| [base64](https://crates.io/crates/base64) | ||||
| === | ||||
| # [base64](https://crates.io/crates/base64) | ||||
| 
 | ||||
| [](https://crates.io/crates/base64) [](https://docs.rs/base64) [](https://travis-ci.org/marshallpierce/rust-base64) [](https://codecov.io/gh/marshallpierce/rust-base64) [](https://github.com/rust-secure-code/safety-dance/) | ||||
| [](https://crates.io/crates/base64) [](https://docs.rs/base64) [](https://circleci.com/gh/marshallpierce/rust-base64/tree/master) [](https://codecov.io/gh/marshallpierce/rust-base64) [](https://github.com/rust-secure-code/safety-dance/) | ||||
| 
 | ||||
| <a href="https://www.jetbrains.com/?from=rust-base64"><img src="/icon_CLion.svg" height="40px"/></a> | ||||
| 
 | ||||
|  | @ -9,34 +8,73 @@ Made with CLion. Thanks to JetBrains for supporting open source! | |||
| 
 | ||||
| It's base64. What more could anyone want? | ||||
| 
 | ||||
| This library's goals are to be *correct* and *fast*. It's thoroughly tested and widely used. It exposes functionality at multiple levels of abstraction so you can choose the level of convenience vs performance that you want, e.g. `decode_config_slice` decodes into an existing `&mut [u8]` and is pretty fast (2.6GiB/s for a 3 KiB input), whereas `decode_config` allocates a new `Vec<u8>` and returns it, which might be more convenient in some cases, but is slower (although still fast enough for almost any purpose) at 2.1 GiB/s. | ||||
| 
 | ||||
| Example | ||||
| --- | ||||
| 
 | ||||
| ```rust | ||||
| extern crate base64; | ||||
| 
 | ||||
| use base64::{encode, decode}; | ||||
| 
 | ||||
| fn main() { | ||||
|     let a = b"hello world"; | ||||
|     let b = "aGVsbG8gd29ybGQ="; | ||||
| 
 | ||||
|     assert_eq!(encode(a), b); | ||||
|     assert_eq!(a, &decode(b).unwrap()[..]); | ||||
| } | ||||
| ``` | ||||
| This library's goals are to be *correct* and *fast*. It's thoroughly tested and widely used. It exposes functionality at | ||||
| multiple levels of abstraction so you can choose the level of convenience vs performance that you want, | ||||
| e.g. `decode_engine_slice` decodes into an existing `&mut [u8]` and is pretty fast (2.6GiB/s for a 3 KiB input), | ||||
| whereas `decode_engine` allocates a new `Vec<u8>` and returns it, which might be more convenient in some cases, but is | ||||
| slower (although still fast enough for almost any purpose) at 2.1 GiB/s. | ||||
| 
 | ||||
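A minimal sketch of the two levels described above, using the 0.21 `Engine` API that this patch vendors (buffer size chosen generously just for illustration):

```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn main() {
    let encoded = STANDARD.encode(b"hello world");

    // Convenient path: allocates and returns a fresh Vec<u8>.
    let decoded = STANDARD.decode(&encoded).unwrap();
    assert_eq!(b"hello world".as_slice(), decoded.as_slice());

    // Faster path: decode into a preallocated buffer, no per-call allocation.
    // 32 bytes is comfortably larger than the decoded size of this input.
    let mut buf = [0u8; 32];
    let n = STANDARD.decode_slice(&encoded, &mut buf).unwrap();
    assert_eq!(b"hello world".as_slice(), &buf[..n]);
}
```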
| See the [docs](https://docs.rs/base64) for all the details. | ||||
| 
 | ||||
| Rust version compatibility | ||||
| --- | ||||
| ## FAQ | ||||
| 
 | ||||
| The minimum required Rust version is 1.34.0. | ||||
| ### I need to decode base64 with whitespace/null bytes/other random things interspersed in it. What should I do? | ||||
| 
 | ||||
| Developing | ||||
| --- | ||||
| Remove non-base64 characters from your input before decoding. | ||||
| 
 | ||||
| If you have a `Vec` of base64, [retain](https://doc.rust-lang.org/std/vec/struct.Vec.html#method.retain) can be used to | ||||
| strip out whatever you need removed. | ||||
| 
 | ||||
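For the `Vec` case, a rough sketch (the noisy input bytes and the retained character set are made up for illustration):

```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn main() {
    // Base64 of "hello world" with whitespace and a NUL byte sprinkled in.
    let mut raw = b"aGVs\nbG8g d29y\tbGQ=\0".to_vec();

    // Keep only bytes that belong to the standard alphabet or padding.
    raw.retain(|b| b.is_ascii_alphanumeric() || b"+/=".contains(b));

    let decoded = STANDARD.decode(&raw).unwrap();
    assert_eq!(b"hello world".as_slice(), decoded.as_slice());
}
```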
| If you have a `Read` (e.g. reading a file or network socket), there are various approaches. | ||||
| 
 | ||||
| - Use [iter_read](https://crates.io/crates/iter-read) together with `Read`'s `bytes()` to filter out unwanted bytes. | ||||
| - Implement `Read` with a `read()` impl that delegates to your actual `Read`, and then drops any bytes you don't want. | ||||
| 
 | ||||
| ### I need to line-wrap base64, e.g. for MIME/PEM. | ||||
| 
 | ||||
| [line-wrap](https://crates.io/crates/line-wrap) does just that. | ||||
| 
 | ||||
| ### I want canonical base64 encoding/decoding. | ||||
| 
 | ||||
| First, don't do this. You should no more expect Base64 to be canonical than you should expect compression algorithms to | ||||
| produce canonical output across all usage in the wild (hint: they don't). | ||||
| However, [people are drawn to their own destruction like moths to a flame](https://eprint.iacr.org/2022/361), so here we | ||||
| are. | ||||
| 
 | ||||
| There are two opportunities for non-canonical encoding (and thus, detection of the same during decoding): the final bits | ||||
| of the last encoded token in two or three token suffixes, and the `=` token used to inflate the suffix to a full four | ||||
| tokens. | ||||
| 
 | ||||
| The trailing bits issue is unavoidable: with 6 bits available in each encoded token, 1 input byte takes 2 tokens, | ||||
| with the second one having some bits unused. Same for two input bytes: 16 bits, but 3 tokens have 18 bits. Unless we | ||||
| decide to stop shipping whole bytes around, we're stuck with those extra bits that a sneaky or buggy encoder might set | ||||
| to 1 instead of 0. | ||||
| 
 | ||||
| The `=` pad bytes, on the other hand, are entirely a self-own by the Base64 standard. They do not affect decoding other | ||||
| than to provide an opportunity to say "that padding is incorrect". Exabytes of storage and transfer have no doubt been | ||||
| wasted on pointless `=` bytes. Somehow we all seem to be quite comfortable with, say, hex-encoded data just stopping | ||||
| when it's done rather than requiring a confirmation that the author of the encoder could count to four. Anyway, there | ||||
| are two ways to make pad bytes predictable: require canonical padding to the next multiple of four bytes as per the RFC, | ||||
| or, if you control all producers and consumers, save a few bytes by requiring no padding (especially applicable to the | ||||
| url-safe alphabet). | ||||
| 
 | ||||
| All `Engine` implementations must at a minimum support treating non-canonical padding of both types as an error, and | ||||
| optionally may allow other behaviors. | ||||
| 
 | ||||
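A sketch of the strict-versus-lenient decode postures described above, built with the `GeneralPurpose` engine and config types added elsewhere in this patch (the `"/x=="` / `"/w"` inputs are my own illustrative examples, and the builder names reflect the 0.21 API as I understand it):

```rust
use base64::{
    alphabet,
    engine::{DecodePaddingMode, GeneralPurpose, GeneralPurposeConfig},
    Engine as _,
};

fn main() {
    // Strict: require canonical padding, reject non-zero trailing bits.
    let strict = GeneralPurpose::new(&alphabet::STANDARD, GeneralPurposeConfig::new());

    // Lenient: ignore padding and accept sloppy trailing bits.
    let lenient = GeneralPurpose::new(
        &alphabet::STANDARD,
        GeneralPurposeConfig::new()
            .with_decode_allow_trailing_bits(true)
            .with_decode_padding_mode(DecodePaddingMode::Indifferent),
    );

    // 0xFF canonically encodes as "/w=="; "/x==" sets the unused trailing bits.
    assert!(strict.decode("/x==").is_err());
    assert_eq!(vec![0xFF], lenient.decode("/x==").unwrap());

    // Missing padding: an error for the strict engine, fine for the lenient one.
    assert!(strict.decode("/w").is_err());
    assert_eq!(vec![0xFF], lenient.decode("/w").unwrap());
}
```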
| ## Rust version compatibility | ||||
| 
 | ||||
| The minimum supported Rust version is 1.57.0. | ||||
| 
 | ||||
| # Contributing | ||||
| 
 | ||||
| Contributions are very welcome. However, because this library is used widely, and in security-sensitive contexts, all | ||||
| PRs will be carefully scrutinized. Beyond that, this sort of low level library simply needs to be 100% correct. Nobody | ||||
| wants to chase bugs in encoding of any sort. | ||||
| 
 | ||||
| All this means that it takes me a fair amount of time to review each PR, so it might take quite a while to carve out the | ||||
| free time to give each PR the attention it deserves. I will get to everyone eventually! | ||||
| 
 | ||||
| ## Developing | ||||
| 
 | ||||
| Benchmarks are in `benches/`. Running them requires nightly rust, but `rustup` makes it easy: | ||||
| 
 | ||||
|  | @ -44,23 +82,24 @@ Benchmarks are in `benches/`. Running them requires nightly rust, but `rustup` m | |||
| rustup run nightly cargo bench | ||||
| ``` | ||||
| 
 | ||||
| Decoding is aided by some pre-calculated tables, which are generated by: | ||||
| ## no_std | ||||
| 
 | ||||
| ```bash | ||||
| cargo run --example make_tables > src/tables.rs.tmp && mv src/tables.rs.tmp src/tables.rs | ||||
| ``` | ||||
| This crate supports no_std. By default the crate targets std via the `std` feature. You can deactivate | ||||
| the `default-features` to target `core` instead. In that case you lose out on all the functionality revolving | ||||
| around `std::io`, `std::error::Error`, and heap allocations. There is an additional `alloc` feature that you can activate | ||||
| to bring back the support for heap allocations. | ||||
| 
 | ||||
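As a rough illustration, the slice-based `Engine` methods are the ones that remain usable without heap allocation (a sketch written as an ordinary program rather than an actual `#![no_std]` crate):

```rust
use base64::{engine::general_purpose::STANDARD_NO_PAD, Engine as _};

fn main() {
    // No allocation needed: decode into a fixed-size buffer on the stack.
    let mut out = [0u8; 16];
    let n = STANDARD_NO_PAD.decode_slice(b"aGVsbG8", &mut out).unwrap();
    assert_eq!(b"hello".as_slice(), &out[..n]);
}
```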
| no_std | ||||
| --- | ||||
| ## Profiling | ||||
| 
 | ||||
| This crate supports no_std. By default the crate targets std via the `std` feature. You can deactivate the `default-features` to target core instead. In that case you lose out on all the functionality revolving around `std::io`, `std::error::Error` and heap allocations. There is an additional `alloc` feature that you can activate to bring back the support for heap allocations. | ||||
| On Linux, you can use [perf](https://perf.wiki.kernel.org/index.php/Main_Page) for profiling. Then compile the | ||||
| benchmarks with `rustup run nightly cargo bench --no-run`. | ||||
| 
 | ||||
| Profiling | ||||
| --- | ||||
| 
 | ||||
| On Linux, you can use [perf](https://perf.wiki.kernel.org/index.php/Main_Page) for profiling. Then compile the benchmarks with `rustup nightly run cargo bench --no-run`. | ||||
| 
 | ||||
| Run the benchmark binary with `perf` (shown here filtering to one particular benchmark, which will make the results easier to read). `perf` is only available to the root user on most systems as it fiddles with event counters in your CPU, so use `sudo`. We need to run the actual benchmark binary, hence the path into `target`. You can see the actual full path with `rustup run nightly cargo bench -v`; it will print out the commands it runs. If you use the exact path that `bench` outputs, make sure you get the one that's for the benchmarks, not the tests. You may also want to `cargo clean` so you have only one `benchmarks-` binary (they tend to accumulate). | ||||
| Run the benchmark binary with `perf` (shown here filtering to one particular benchmark, which will make the results | ||||
| easier to read). `perf` is only available to the root user on most systems as it fiddles with event counters in your | ||||
| CPU, so use `sudo`. We need to run the actual benchmark binary, hence the path into `target`. You can see the actual | ||||
| full path with `rustup run nightly cargo bench -v`; it will print out the commands it runs. If you use the exact path | ||||
| that `bench` outputs, make sure you get the one that's for the benchmarks, not the tests. You may also want | ||||
| to `cargo clean` so you have only one `benchmarks-` binary (they tend to accumulate). | ||||
| 
 | ||||
| ```bash | ||||
| sudo perf record target/release/deps/benchmarks-* --bench decode_10mib_reuse | ||||
|  | @ -72,7 +111,10 @@ Then analyze the results, again with perf: | |||
| sudo perf annotate -l | ||||
| ``` | ||||
| 
 | ||||
| You'll see a bunch of interleaved rust source and assembly like this. The section with `lib.rs:327` is telling us that 4.02% of samples saw the `movzbl` aka bit shift as the active instruction. However, this percentage is not as exact as it seems due to a phenomenon called *skid*. Basically, a consequence of how fancy modern CPUs are is that this sort of instruction profiling is inherently inaccurate, especially in branch-heavy code. | ||||
| You'll see a bunch of interleaved rust source and assembly like this. The section with `lib.rs:327` is telling us that | ||||
| 4.02% of samples saw the `movzbl` aka bit shift as the active instruction. However, this percentage is not as exact as | ||||
| it seems due to a phenomenon called *skid*. Basically, a consequence of how fancy modern CPUs are is that this sort of | ||||
| instruction profiling is inherently inaccurate, especially in branch-heavy code. | ||||
| 
 | ||||
| ```text | ||||
|  lib.rs:322    0.70 :     10698:       mov    %rdi,%rax | ||||
|  | @ -94,11 +136,10 @@ You'll see a bunch of interleaved rust source and assembly like this. The sectio | |||
|     0.00 :        106ab:       je     1090e <base64::decode_config_buf::hbf68a45fefa299c1+0x46e> | ||||
| ``` | ||||
| 
 | ||||
| ## Fuzzing | ||||
| 
 | ||||
| Fuzzing | ||||
| --- | ||||
| 
 | ||||
| This uses [cargo-fuzz](https://github.com/rust-fuzz/cargo-fuzz). See `fuzz/fuzzers` for the available fuzzing scripts. To run, use an invocation like these: | ||||
| This uses [cargo-fuzz](https://github.com/rust-fuzz/cargo-fuzz). See `fuzz/fuzzers` for the available fuzzing scripts. | ||||
| To run, use an invocation like these: | ||||
| 
 | ||||
| ```bash | ||||
| cargo +nightly fuzz run roundtrip | ||||
|  | @ -107,8 +148,7 @@ cargo +nightly fuzz run roundtrip_random_config -- -max_len=10240 | |||
| cargo +nightly fuzz run decode_random | ||||
| ``` | ||||
| 
 | ||||
| 
 | ||||
| License | ||||
| --- | ||||
| ## License | ||||
| 
 | ||||
| This project is dual-licensed under MIT and Apache 2.0. | ||||
| 
 | ||||

130 third_party/rust/base64/RELEASE-NOTES.md (vendored)

							|  | @ -1,3 +1,108 @@ | |||
| # 0.21.0 | ||||
| 
 | ||||
| (not yet released) | ||||
| 
 | ||||
| 
 | ||||
| ## Migration | ||||
| 
 | ||||
| ### Functions | ||||
| 
 | ||||
| | < 0.20 function         | 0.21 equivalent                                                                     | | ||||
| |-------------------------|-------------------------------------------------------------------------------------| | ||||
| | `encode()`              | `engine::general_purpose::STANDARD.encode()` or `prelude::BASE64_STANDARD.encode()` | | ||||
| | `encode_config()`       | `engine.encode()`                                                                   | | ||||
| | `encode_config_buf()`   | `engine.encode_string()`                                                            | | ||||
| | `encode_config_slice()` | `engine.encode_slice()`                                                             | | ||||
| | `decode()`              | `engine::general_purpose::STANDARD.decode()` or `prelude::BASE64_STANDARD.decode()` | | ||||
| | `decode_config()`       | `engine.decode()`                                                                   | | ||||
| | `decode_config_buf()`   | `engine.decode_vec()`                                                               | | ||||
| | `decode_config_slice()` | `engine.decode_slice()`                                                             | | ||||
| 
 | ||||
| The short-lived 0.20 functions were the 0.13 functions with `config` replaced with `engine`. | ||||
| 
 | ||||
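Roughly, applying the rows of this table looks like the following (a sketch against the 0.21 API; the old 0.13 calls are shown as comments):

```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn main() {
    let data = b"hello world";

    // 0.13: let encoded = base64::encode(data);
    let encoded = STANDARD.encode(data);

    // 0.13: let decoded = base64::decode(&encoded).unwrap();
    let decoded = STANDARD.decode(&encoded).unwrap();
    assert_eq!(data.as_slice(), decoded.as_slice());

    // 0.13: base64::encode_config_buf(data, base64::STANDARD, &mut buf);
    let mut buf = String::new();
    STANDARD.encode_string(data, &mut buf);

    // 0.13: base64::decode_config_buf(&buf, base64::STANDARD, &mut out).unwrap();
    let mut out = Vec::new();
    STANDARD.decode_vec(&buf, &mut out).unwrap();
    assert_eq!(data.as_slice(), out.as_slice());
}
```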
| ### Padding | ||||
| 
 | ||||
| If applicable, use the preset engines `engine::STANDARD`, `engine::STANDARD_NO_PAD`, `engine::URL_SAFE`, | ||||
| or `engine::URL_SAFE_NO_PAD`. | ||||
| The `NO_PAD` ones require that padding is absent when decoding, and the others require that | ||||
| canonical padding is present. | ||||
| 
 | ||||
| If you need the < 0.20 behavior that did not care about padding, or want to recreate < 0.20.0's predefined `Config`s | ||||
| precisely, see the following table. | ||||
| 
 | ||||
| | 0.13.1 Config   | 0.20.0+ alphabet | `encode_padding` | `decode_padding_mode` | | ||||
| |-----------------|------------------|------------------|-----------------------| | ||||
| | STANDARD        | STANDARD         | true             | Indifferent           | | ||||
| | STANDARD_NO_PAD | STANDARD         | false            | Indifferent           | | ||||
| | URL_SAFE        | URL_SAFE         | true             | Indifferent           | | ||||
| | URL_SAFE_NO_PAD | URL_SAFE         | false            | Indifferent           | | ||||
| 
 | ||||
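For example, recreating the 0.13.1 `STANDARD` row of this table might look like the sketch below (the `OLD_STANDARD` constant name is mine; the builders correspond to the table's `encode_padding` and `decode_padding_mode` columns):

```rust
use base64::{
    alphabet,
    engine::{DecodePaddingMode, GeneralPurpose, GeneralPurposeConfig},
    Engine as _,
};

// Padded output, padding ignored on decode, as in the 0.13.1 STANDARD config.
const OLD_STANDARD: GeneralPurpose = GeneralPurpose::new(
    &alphabet::STANDARD,
    GeneralPurposeConfig::new()
        .with_encode_padding(true)
        .with_decode_padding_mode(DecodePaddingMode::Indifferent),
);

fn main() {
    assert_eq!("aGk=", OLD_STANDARD.encode(b"hi"));
    // Both padded and unpadded input decode, matching the old behaviour.
    assert_eq!(b"hi".to_vec(), OLD_STANDARD.decode("aGk=").unwrap());
    assert_eq!(b"hi".to_vec(), OLD_STANDARD.decode("aGk").unwrap());
}
```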
| # 0.21.0-rc.1 | ||||
| 
 | ||||
| - Restore the ability to decode into a slice of precisely the correct length with `Engine.decode_slice_unchecked`. | ||||
| - Add `Engine` as a `pub use` in `prelude`. | ||||
| 
 | ||||
| # 0.21.0-beta.2 | ||||
| 
 | ||||
| ## Breaking changes | ||||
| 
 | ||||
| - Re-exports of preconfigured engines in `engine` are removed in favor of `base64::prelude::...` that are better suited to those who wish to `use` the entire path to a name. | ||||
| 
 | ||||
| # 0.21.0-beta.1 | ||||
| 
 | ||||
| ## Breaking changes | ||||
| 
 | ||||
| - `FastPortable` was only meant to be an interim name, and shouldn't have shipped in 0.20. It is now `GeneralPurpose` to | ||||
|   make its intended usage more clear. | ||||
| - `GeneralPurpose` and its config are now `pub use`'d in the `engine` module for convenience. | ||||
| - Change a few `from()` functions to be `new()`. `from()` causes confusing compiler errors because of confusion | ||||
|   with `From::from`, and is a little misleading because some of those invocations are not very cheap as one would | ||||
|   usually expect from a `from` call. | ||||
| - `encode*` and `decode*` top level functions are now methods on `Engine`. | ||||
| - `DEFAULT_ENGINE` was replaced by `engine::general_purpose::STANDARD` | ||||
| - Predefined engine consts `engine::general_purpose::{STANDARD, STANDARD_NO_PAD, URL_SAFE, URL_SAFE_NO_PAD}` | ||||
|     - These are `pub use`d into `engine` as well | ||||
| - The `*_slice` decode/encode functions now return an error instead of panicking when the output slice is too small | ||||
|     - As part of this, there isn't now a public way to decode into a slice _exactly_ the size needed for inputs that | ||||
|       aren't multiples of 4 tokens. If adding up to 2 bytes to always be a multiple of 3 bytes for the decode buffer is | ||||
|       a problem, file an issue. | ||||
| 
 | ||||
| ## Other changes | ||||
| 
 | ||||
| - `decoded_len_estimate()` is provided to make it easy to size decode buffers correctly. | ||||
| 
 | ||||
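A small sketch of sizing a decode buffer with it (0.21 names; illustrative only):

```rust
use base64::{decoded_len_estimate, engine::general_purpose::STANDARD, Engine as _};

fn main() {
    let encoded = STANDARD.encode(b"hello world");

    // The estimate may be slightly larger than the actual decoded length,
    // so decode_slice reports how many bytes it really wrote.
    let mut buf = vec![0u8; decoded_len_estimate(encoded.len())];
    let n = STANDARD.decode_slice(&encoded, &mut buf).unwrap();
    assert_eq!(b"hello world".as_slice(), &buf[..n]);
}
```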
| # 0.20.0 | ||||
| 
 | ||||
| ## Breaking changes | ||||
| 
 | ||||
| - Update MSRV to 1.57.0 | ||||
| - Decoding can now either ignore padding, require correct padding, or require no padding. The default is to require | ||||
|   correct padding. | ||||
|     - The `NO_PAD` config now requires that padding be absent when decoding. | ||||
| 
 | ||||
| ## 0.20.0-alpha.1 | ||||
| 
 | ||||
| ### Breaking changes | ||||
| 
 | ||||
| - Extended the `Config` concept into the `Engine` abstraction, allowing the user to pick different encoding / decoding | ||||
|   implementations. | ||||
|     - What was formerly the only algorithm is now the `FastPortable` engine, so named because it's portable (works on | ||||
|       any CPU) and relatively fast. | ||||
|     - This opens the door to a portable constant-time | ||||
|       implementation ([#153](https://github.com/marshallpierce/rust-base64/pull/153), | ||||
|       presumably `ConstantTimePortable`?) for security-sensitive applications that need side-channel resistance, and | ||||
|       CPU-specific SIMD implementations for more speed. | ||||
|     - Standard base64 per the RFC is available via `DEFAULT_ENGINE`. To use different alphabets or other settings ( | ||||
|       padding, etc), create your own engine instance. | ||||
| - `CharacterSet` is now `Alphabet` (per the RFC), and allows creating custom alphabets. The corresponding tables that | ||||
|   were previously code-generated are now built dynamically. | ||||
| - Since there are already multiple breaking changes, various functions are renamed to be more consistent and | ||||
|   discoverable. | ||||
| - MSRV is now 1.47.0 to allow various things to use `const fn`. | ||||
| - `DecoderReader` now owns its inner reader, and can expose it via `into_inner()`. For symmetry, `EncoderWriter` can do | ||||
|   the same with its writer. | ||||
| - `encoded_len` is now public so you can size encode buffers precisely. | ||||
| 
 | ||||
| # 0.13.1 | ||||
| 
 | ||||
| - More precise decode buffer sizing, avoiding unnecessary allocation in `decode_config`. | ||||
|  | @ -7,8 +112,11 @@ | |||
| - Config methods are const | ||||
| - Added `EncoderStringWriter` to allow encoding directly to a String | ||||
| - `EncoderWriter` now owns its delegate writer rather than keeping a reference to it (though refs still work) | ||||
|     - As a consequence, it is now possible to extract the delegate writer from an `EncoderWriter` via `finish()`, which returns `Result<W>` instead of `Result<()>`. If you were calling `finish()` explicitly, you will now need to use `let _ = foo.finish()` instead of just `foo.finish()` to avoid a warning about the unused value. | ||||
| - When decoding input that has both an invalid length and an invalid symbol as the last byte, `InvalidByte` will be emitted instead of `InvalidLength` to make the problem more obvious. | ||||
|     - As a consequence, it is now possible to extract the delegate writer from an `EncoderWriter` via `finish()`, which | ||||
|       returns `Result<W>` instead of `Result<()>`. If you were calling `finish()` explicitly, you will now need to | ||||
|       use `let _ = foo.finish()` instead of just `foo.finish()` to avoid a warning about the unused value. | ||||
| - When decoding input that has both an invalid length and an invalid symbol as the last byte, `InvalidByte` will be | ||||
|   emitted instead of `InvalidLength` to make the problem more obvious. | ||||
| 
 | ||||
| # 0.12.2 | ||||
| 
 | ||||
|  | @ -26,23 +134,31 @@ | |||
| - A minor performance improvement in encoding | ||||
| 
 | ||||
| # 0.11.0 | ||||
| 
 | ||||
| - Minimum rust version 1.34.0 | ||||
| - `no_std` is now supported via the two new features `alloc` and `std`. | ||||
| 
 | ||||
| # 0.10.1 | ||||
| 
 | ||||
| - Minimum rust version 1.27.2 | ||||
| - Fix bug in streaming encoding ([#90](https://github.com/marshallpierce/rust-base64/pull/90)): if the underlying writer didn't write all the bytes given to it, the remaining bytes would not be retried later. See the docs on `EncoderWriter::write`. | ||||
| - Fix bug in streaming encoding ([#90](https://github.com/marshallpierce/rust-base64/pull/90)): if the underlying writer | ||||
|   didn't write all the bytes given to it, the remaining bytes would not be retried later. See the docs | ||||
|   on `EncoderWriter::write`. | ||||
| - Make it configurable whether or not to return an error when decoding detects excess trailing bits. | ||||
| 
 | ||||
| # 0.10.0 | ||||
| 
 | ||||
| - Remove line wrapping. Line wrapping was never a great conceptual fit in this library, and other features (streaming encoding, etc) either couldn't support it or could support only special cases of it with a great increase in complexity. Line wrapping has been pulled out into a [line-wrap](https://crates.io/crates/line-wrap) crate, so it's still available if you need it. | ||||
|   - `Base64Display` creation no longer uses a `Result` because it can't fail, which means its helper methods for common | ||||
|   configs that `unwrap()` for you are no longer needed | ||||
| - Remove line wrapping. Line wrapping was never a great conceptual fit in this library, and other features (streaming | ||||
|   encoding, etc) either couldn't support it or could support only special cases of it with a great increase in | ||||
|   complexity. Line wrapping has been pulled out into a [line-wrap](https://crates.io/crates/line-wrap) crate, so it's | ||||
|   still available if you need it. | ||||
|     - `Base64Display` creation no longer uses a `Result` because it can't fail, which means its helper methods for | ||||
|       common | ||||
|       configs that `unwrap()` for you are no longer needed | ||||
| - Add a streaming encoder `Write` impl to transparently base64 as you write. | ||||
| - Remove the remaining `unsafe` code. | ||||
| - Remove whitespace stripping to simplify `no_std` support. No out of the box configs use it, and it's trivial to do yourself if needed: `filter(|b| !b" \n\t\r\x0b\x0c".contains(b)`. | ||||
| - Remove whitespace stripping to simplify `no_std` support. No out of the box configs use it, and it's trivial to do | ||||
|   yourself if needed: `filter(|b| !b" \n\t\r\x0b\x0c".contains(b))`. | ||||
| - Detect invalid trailing symbols when decoding and return an error rather than silently ignoring them. | ||||
| 
 | ||||
| # 0.9.3 | ||||

154 third_party/rust/base64/benches/benchmarks.rs (vendored)

							|  | @ -1,27 +1,22 @@ | |||
| extern crate base64; | ||||
| #[macro_use] | ||||
| extern crate criterion; | ||||
| extern crate rand; | ||||
| 
 | ||||
| use base64::display; | ||||
| use base64::{ | ||||
|     decode, decode_config_buf, decode_config_slice, encode, encode_config_buf, encode_config_slice, | ||||
|     write, Config, | ||||
|     display, | ||||
|     engine::{general_purpose::STANDARD, Engine}, | ||||
|     write, | ||||
| }; | ||||
| 
 | ||||
| use criterion::{black_box, Bencher, Criterion, ParameterizedBenchmark, Throughput}; | ||||
| use rand::{FromEntropy, Rng}; | ||||
| use criterion::{black_box, Bencher, BenchmarkId, Criterion, Throughput}; | ||||
| use rand::{Rng, SeedableRng}; | ||||
| use std::io::{self, Read, Write}; | ||||
| 
 | ||||
| const TEST_CONFIG: Config = base64::STANDARD; | ||||
| 
 | ||||
| fn do_decode_bench(b: &mut Bencher, &size: &usize) { | ||||
|     let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4); | ||||
|     fill(&mut v); | ||||
|     let encoded = encode(&v); | ||||
|     let encoded = STANDARD.encode(&v); | ||||
| 
 | ||||
|     b.iter(|| { | ||||
|         let orig = decode(&encoded); | ||||
|         let orig = STANDARD.decode(&encoded); | ||||
|         black_box(&orig); | ||||
|     }); | ||||
| } | ||||
|  | @ -29,11 +24,11 @@ fn do_decode_bench(b: &mut Bencher, &size: &usize) { | |||
| fn do_decode_bench_reuse_buf(b: &mut Bencher, &size: &usize) { | ||||
|     let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4); | ||||
|     fill(&mut v); | ||||
|     let encoded = encode(&v); | ||||
|     let encoded = STANDARD.encode(&v); | ||||
| 
 | ||||
|     let mut buf = Vec::new(); | ||||
|     b.iter(|| { | ||||
|         decode_config_buf(&encoded, TEST_CONFIG, &mut buf).unwrap(); | ||||
|         STANDARD.decode_vec(&encoded, &mut buf).unwrap(); | ||||
|         black_box(&buf); | ||||
|         buf.clear(); | ||||
|     }); | ||||
|  | @ -42,12 +37,12 @@ fn do_decode_bench_reuse_buf(b: &mut Bencher, &size: &usize) { | |||
| fn do_decode_bench_slice(b: &mut Bencher, &size: &usize) { | ||||
|     let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4); | ||||
|     fill(&mut v); | ||||
|     let encoded = encode(&v); | ||||
|     let encoded = STANDARD.encode(&v); | ||||
| 
 | ||||
|     let mut buf = Vec::new(); | ||||
|     buf.resize(size, 0); | ||||
|     b.iter(|| { | ||||
|         decode_config_slice(&encoded, TEST_CONFIG, &mut buf).unwrap(); | ||||
|         STANDARD.decode_slice(&encoded, &mut buf).unwrap(); | ||||
|         black_box(&buf); | ||||
|     }); | ||||
| } | ||||
|  | @ -55,7 +50,7 @@ fn do_decode_bench_slice(b: &mut Bencher, &size: &usize) { | |||
| fn do_decode_bench_stream(b: &mut Bencher, &size: &usize) { | ||||
|     let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4); | ||||
|     fill(&mut v); | ||||
|     let encoded = encode(&v); | ||||
|     let encoded = STANDARD.encode(&v); | ||||
| 
 | ||||
|     let mut buf = Vec::new(); | ||||
|     buf.resize(size, 0); | ||||
|  | @ -63,7 +58,7 @@ fn do_decode_bench_stream(b: &mut Bencher, &size: &usize) { | |||
| 
 | ||||
|     b.iter(|| { | ||||
|         let mut cursor = io::Cursor::new(&encoded[..]); | ||||
|         let mut decoder = base64::read::DecoderReader::new(&mut cursor, TEST_CONFIG); | ||||
|         let mut decoder = base64::read::DecoderReader::new(&mut cursor, &STANDARD); | ||||
|         decoder.read_to_end(&mut buf).unwrap(); | ||||
|         buf.clear(); | ||||
|         black_box(&buf); | ||||
|  | @ -74,7 +69,7 @@ fn do_encode_bench(b: &mut Bencher, &size: &usize) { | |||
|     let mut v: Vec<u8> = Vec::with_capacity(size); | ||||
|     fill(&mut v); | ||||
|     b.iter(|| { | ||||
|         let e = encode(&v); | ||||
|         let e = STANDARD.encode(&v); | ||||
|         black_box(&e); | ||||
|     }); | ||||
| } | ||||
|  | @ -83,7 +78,7 @@ fn do_encode_bench_display(b: &mut Bencher, &size: &usize) { | |||
|     let mut v: Vec<u8> = Vec::with_capacity(size); | ||||
|     fill(&mut v); | ||||
|     b.iter(|| { | ||||
|         let e = format!("{}", display::Base64Display::with_config(&v, TEST_CONFIG)); | ||||
|         let e = format!("{}", display::Base64Display::new(&v, &STANDARD)); | ||||
|         black_box(&e); | ||||
|     }); | ||||
| } | ||||
|  | @ -93,7 +88,7 @@ fn do_encode_bench_reuse_buf(b: &mut Bencher, &size: &usize) { | |||
|     fill(&mut v); | ||||
|     let mut buf = String::new(); | ||||
|     b.iter(|| { | ||||
|         encode_config_buf(&v, TEST_CONFIG, &mut buf); | ||||
|         STANDARD.encode_string(&v, &mut buf); | ||||
|         buf.clear(); | ||||
|     }); | ||||
| } | ||||
|  | @ -104,9 +99,7 @@ fn do_encode_bench_slice(b: &mut Bencher, &size: &usize) { | |||
|     let mut buf = Vec::new(); | ||||
|     // conservative estimate of encoded size
 | ||||
|     buf.resize(v.len() * 2, 0); | ||||
|     b.iter(|| { | ||||
|         encode_config_slice(&v, TEST_CONFIG, &mut buf); | ||||
|     }); | ||||
|     b.iter(|| STANDARD.encode_slice(&v, &mut buf).unwrap()); | ||||
| } | ||||
| 
 | ||||
| fn do_encode_bench_stream(b: &mut Bencher, &size: &usize) { | ||||
|  | @ -117,7 +110,7 @@ fn do_encode_bench_stream(b: &mut Bencher, &size: &usize) { | |||
|     buf.reserve(size * 2); | ||||
|     b.iter(|| { | ||||
|         buf.clear(); | ||||
|         let mut stream_enc = write::EncoderWriter::new(&mut buf, TEST_CONFIG); | ||||
|         let mut stream_enc = write::EncoderWriter::new(&mut buf, &STANDARD); | ||||
|         stream_enc.write_all(&v).unwrap(); | ||||
|         stream_enc.flush().unwrap(); | ||||
|     }); | ||||
|  | @ -128,7 +121,7 @@ fn do_encode_bench_string_stream(b: &mut Bencher, &size: &usize) { | |||
|     fill(&mut v); | ||||
| 
 | ||||
|     b.iter(|| { | ||||
|         let mut stream_enc = write::EncoderStringWriter::new(TEST_CONFIG); | ||||
|         let mut stream_enc = write::EncoderStringWriter::new(&STANDARD); | ||||
|         stream_enc.write_all(&v).unwrap(); | ||||
|         stream_enc.flush().unwrap(); | ||||
|         let _ = stream_enc.into_inner(); | ||||
|  | @ -142,7 +135,7 @@ fn do_encode_bench_string_reuse_buf_stream(b: &mut Bencher, &size: &usize) { | |||
|     let mut buf = String::new(); | ||||
|     b.iter(|| { | ||||
|         buf.clear(); | ||||
|         let mut stream_enc = write::EncoderStringWriter::from(&mut buf, TEST_CONFIG); | ||||
|         let mut stream_enc = write::EncoderStringWriter::from_consumer(&mut buf, &STANDARD); | ||||
|         stream_enc.write_all(&v).unwrap(); | ||||
|         stream_enc.flush().unwrap(); | ||||
|         let _ = stream_enc.into_inner(); | ||||
|  | @ -164,46 +157,85 @@ const BYTE_SIZES: [usize; 5] = [3, 50, 100, 500, 3 * 1024]; | |||
| // keep the benchmark runtime reasonable.
 | ||||
| const LARGE_BYTE_SIZES: [usize; 3] = [3 * 1024 * 1024, 10 * 1024 * 1024, 30 * 1024 * 1024]; | ||||
| 
 | ||||
| fn encode_benchmarks(byte_sizes: &[usize]) -> ParameterizedBenchmark<usize> { | ||||
|     ParameterizedBenchmark::new("encode", do_encode_bench, byte_sizes.iter().cloned()) | ||||
| fn encode_benchmarks(c: &mut Criterion, label: &str, byte_sizes: &[usize]) { | ||||
|     let mut group = c.benchmark_group(label); | ||||
|     group | ||||
|         .warm_up_time(std::time::Duration::from_millis(500)) | ||||
|         .measurement_time(std::time::Duration::from_secs(3)) | ||||
|         .throughput(|s| Throughput::Bytes(*s as u64)) | ||||
|         .with_function("encode_display", do_encode_bench_display) | ||||
|         .with_function("encode_reuse_buf", do_encode_bench_reuse_buf) | ||||
|         .with_function("encode_slice", do_encode_bench_slice) | ||||
|         .with_function("encode_reuse_buf_stream", do_encode_bench_stream) | ||||
|         .with_function("encode_string_stream", do_encode_bench_string_stream) | ||||
|         .with_function( | ||||
|             "encode_string_reuse_buf_stream", | ||||
|             do_encode_bench_string_reuse_buf_stream, | ||||
|         ) | ||||
|         .measurement_time(std::time::Duration::from_secs(3)); | ||||
| 
 | ||||
|     for size in byte_sizes { | ||||
|         group | ||||
|             .throughput(Throughput::Bytes(*size as u64)) | ||||
|             .bench_with_input(BenchmarkId::new("encode", size), size, do_encode_bench) | ||||
|             .bench_with_input( | ||||
|                 BenchmarkId::new("encode_display", size), | ||||
|                 size, | ||||
|                 do_encode_bench_display, | ||||
|             ) | ||||
|             .bench_with_input( | ||||
|                 BenchmarkId::new("encode_reuse_buf", size), | ||||
|                 size, | ||||
|                 do_encode_bench_reuse_buf, | ||||
|             ) | ||||
|             .bench_with_input( | ||||
|                 BenchmarkId::new("encode_slice", size), | ||||
|                 size, | ||||
|                 do_encode_bench_slice, | ||||
|             ) | ||||
|             .bench_with_input( | ||||
|                 BenchmarkId::new("encode_reuse_buf_stream", size), | ||||
|                 size, | ||||
|                 do_encode_bench_stream, | ||||
|             ) | ||||
|             .bench_with_input( | ||||
|                 BenchmarkId::new("encode_string_stream", size), | ||||
|                 size, | ||||
|                 do_encode_bench_string_stream, | ||||
|             ) | ||||
|             .bench_with_input( | ||||
|                 BenchmarkId::new("encode_string_reuse_buf_stream", size), | ||||
|                 size, | ||||
|                 do_encode_bench_string_reuse_buf_stream, | ||||
|             ); | ||||
|     } | ||||
| 
 | ||||
|     group.finish(); | ||||
| } | ||||
| 
 | ||||
| fn decode_benchmarks(byte_sizes: &[usize]) -> ParameterizedBenchmark<usize> { | ||||
|     ParameterizedBenchmark::new("decode", do_decode_bench, byte_sizes.iter().cloned()) | ||||
|         .warm_up_time(std::time::Duration::from_millis(500)) | ||||
|         .measurement_time(std::time::Duration::from_secs(3)) | ||||
|         .throughput(|s| Throughput::Bytes(*s as u64)) | ||||
|         .with_function("decode_reuse_buf", do_decode_bench_reuse_buf) | ||||
|         .with_function("decode_slice", do_decode_bench_slice) | ||||
|         .with_function("decode_stream", do_decode_bench_stream) | ||||
| fn decode_benchmarks(c: &mut Criterion, label: &str, byte_sizes: &[usize]) { | ||||
|     let mut group = c.benchmark_group(label); | ||||
| 
 | ||||
|     for size in byte_sizes { | ||||
|         group | ||||
|             .warm_up_time(std::time::Duration::from_millis(500)) | ||||
|             .measurement_time(std::time::Duration::from_secs(3)) | ||||
|             .throughput(Throughput::Bytes(*size as u64)) | ||||
|             .bench_with_input(BenchmarkId::new("decode", size), size, do_decode_bench) | ||||
|             .bench_with_input( | ||||
|                 BenchmarkId::new("decode_reuse_buf", size), | ||||
|                 size, | ||||
|                 do_decode_bench_reuse_buf, | ||||
|             ) | ||||
|             .bench_with_input( | ||||
|                 BenchmarkId::new("decode_slice", size), | ||||
|                 size, | ||||
|                 do_decode_bench_slice, | ||||
|             ) | ||||
|             .bench_with_input( | ||||
|                 BenchmarkId::new("decode_stream", size), | ||||
|                 size, | ||||
|                 do_decode_bench_stream, | ||||
|             ); | ||||
|     } | ||||
| 
 | ||||
|     group.finish(); | ||||
| } | ||||
| 
 | ||||
| fn bench(c: &mut Criterion) { | ||||
|     c.bench("bench_small_input", encode_benchmarks(&BYTE_SIZES[..])); | ||||
| 
 | ||||
|     c.bench( | ||||
|         "bench_large_input", | ||||
|         encode_benchmarks(&LARGE_BYTE_SIZES[..]).sample_size(10), | ||||
|     ); | ||||
| 
 | ||||
|     c.bench("bench_small_input", decode_benchmarks(&BYTE_SIZES[..])); | ||||
| 
 | ||||
|     c.bench( | ||||
|         "bench_large_input", | ||||
|         decode_benchmarks(&LARGE_BYTE_SIZES[..]).sample_size(10), | ||||
|     ); | ||||
|     encode_benchmarks(c, "encode_small_input", &BYTE_SIZES[..]); | ||||
|     encode_benchmarks(c, "encode_large_input", &LARGE_BYTE_SIZES[..]); | ||||
|     decode_benchmarks(c, "decode_small_input", &BYTE_SIZES[..]); | ||||
|     decode_benchmarks(c, "decode_large_input", &LARGE_BYTE_SIZES[..]); | ||||
| } | ||||
| 
 | ||||
| criterion_group!(benches, bench); | ||||

1 third_party/rust/base64/clippy.toml (vendored, new file)

							|  | @ -0,0 +1 @@ | |||
| msrv = "1.57.0" | ||||
							
								
								
									
50 third_party/rust/base64/examples/base64.rs (vendored)

							|  | @ -4,37 +4,28 @@ use std::path::PathBuf; | |||
| use std::process; | ||||
| use std::str::FromStr; | ||||
| 
 | ||||
| use base64::{read, write}; | ||||
| use base64::{alphabet, engine, read, write}; | ||||
| use structopt::StructOpt; | ||||
| 
 | ||||
| #[derive(Debug, StructOpt)] | ||||
| enum CharacterSet { | ||||
| enum Alphabet { | ||||
|     Standard, | ||||
|     UrlSafe, | ||||
| } | ||||
| 
 | ||||
| impl Default for CharacterSet { | ||||
| impl Default for Alphabet { | ||||
|     fn default() -> Self { | ||||
|         CharacterSet::Standard | ||||
|         Self::Standard | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl Into<base64::Config> for CharacterSet { | ||||
|     fn into(self) -> base64::Config { | ||||
|         match self { | ||||
|             CharacterSet::Standard => base64::STANDARD, | ||||
|             CharacterSet::UrlSafe => base64::URL_SAFE, | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl FromStr for CharacterSet { | ||||
| impl FromStr for Alphabet { | ||||
|     type Err = String; | ||||
|     fn from_str(s: &str) -> Result<CharacterSet, String> { | ||||
|     fn from_str(s: &str) -> Result<Self, String> { | ||||
|         match s { | ||||
|             "standard" => Ok(CharacterSet::Standard), | ||||
|             "urlsafe" => Ok(CharacterSet::UrlSafe), | ||||
|             _ => Err(format!("charset '{}' unrecognized", s)), | ||||
|             "standard" => Ok(Self::Standard), | ||||
|             "urlsafe" => Ok(Self::UrlSafe), | ||||
|             _ => Err(format!("alphabet '{}' unrecognized", s)), | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | @ -45,10 +36,10 @@ struct Opt { | |||
|     /// decode data
 | ||||
|     #[structopt(short = "d", long = "decode")] | ||||
|     decode: bool, | ||||
|     /// The character set to choose. Defaults to the standard base64 character set.
 | ||||
|     /// Supported character sets include "standard" and "urlsafe".
 | ||||
|     #[structopt(long = "charset")] | ||||
|     charset: Option<CharacterSet>, | ||||
|     /// The alphabet to choose. Defaults to the standard base64 alphabet.
 | ||||
|     /// Supported alphabets include "standard" and "urlsafe".
 | ||||
|     #[structopt(long = "alphabet")] | ||||
|     alphabet: Option<Alphabet>, | ||||
|     /// The file to encode/decode.
 | ||||
|     #[structopt(parse(from_os_str))] | ||||
|     file: Option<PathBuf>, | ||||
|  | @ -68,14 +59,23 @@ fn main() { | |||
|         } | ||||
|         Some(f) => Box::new(File::open(f).unwrap()), | ||||
|     }; | ||||
|     let config = opt.charset.unwrap_or_default().into(); | ||||
| 
 | ||||
|     let alphabet = opt.alphabet.unwrap_or_default(); | ||||
|     let engine = engine::GeneralPurpose::new( | ||||
|         &match alphabet { | ||||
|             Alphabet::Standard => alphabet::STANDARD, | ||||
|             Alphabet::UrlSafe => alphabet::URL_SAFE, | ||||
|         }, | ||||
|         engine::general_purpose::PAD, | ||||
|     ); | ||||
| 
 | ||||
|     let stdout = io::stdout(); | ||||
|     let mut stdout = stdout.lock(); | ||||
|     let r = if opt.decode { | ||||
|         let mut decoder = read::DecoderReader::new(&mut input, config); | ||||
|         let mut decoder = read::DecoderReader::new(&mut input, &engine); | ||||
|         io::copy(&mut decoder, &mut stdout) | ||||
|     } else { | ||||
|         let mut encoder = write::EncoderWriter::new(&mut stdout, config); | ||||
|         let mut encoder = write::EncoderWriter::new(&mut stdout, &engine); | ||||
|         io::copy(&mut input, &mut encoder) | ||||
|     }; | ||||
|     if let Err(e) = r { | ||||

179 third_party/rust/base64/examples/make_tables.rs (vendored)

							|  | @ -1,179 +0,0 @@ | |||
| use std::collections::{HashMap, HashSet}; | ||||
| use std::iter::Iterator; | ||||
| 
 | ||||
| fn main() { | ||||
|     println!("pub const INVALID_VALUE: u8 = 255;"); | ||||
| 
 | ||||
|     // A-Z
 | ||||
|     let standard_alphabet: Vec<u8> = (0x41..0x5B) | ||||
|         // a-z
 | ||||
|         .chain(0x61..0x7B) | ||||
|         // 0-9
 | ||||
|         .chain(0x30..0x3A) | ||||
|         // +
 | ||||
|         .chain(0x2B..0x2C) | ||||
|         // /
 | ||||
|         .chain(0x2F..0x30) | ||||
|         .collect(); | ||||
|     print_encode_table(&standard_alphabet, "STANDARD_ENCODE", 0); | ||||
|     print_decode_table(&standard_alphabet, "STANDARD_DECODE", 0); | ||||
| 
 | ||||
|     // A-Z
 | ||||
|     let url_alphabet: Vec<u8> = (0x41..0x5B) | ||||
|         // a-z
 | ||||
|         .chain(0x61..0x7B) | ||||
|         // 0-9
 | ||||
|         .chain(0x30..0x3A) | ||||
|         // -
 | ||||
|         .chain(0x2D..0x2E) | ||||
|         // _
 | ||||
|         .chain(0x5F..0x60) | ||||
|         .collect(); | ||||
|     print_encode_table(&url_alphabet, "URL_SAFE_ENCODE", 0); | ||||
|     print_decode_table(&url_alphabet, "URL_SAFE_DECODE", 0); | ||||
| 
 | ||||
|     // ./0123456789
 | ||||
|     let crypt_alphabet: Vec<u8> = (b'.'..(b'9' + 1)) | ||||
|         // A-Z
 | ||||
|         .chain(b'A'..(b'Z' + 1)) | ||||
|         // a-z
 | ||||
|         .chain(b'a'..(b'z' + 1)) | ||||
|         .collect(); | ||||
|     print_encode_table(&crypt_alphabet, "CRYPT_ENCODE", 0); | ||||
|     print_decode_table(&crypt_alphabet, "CRYPT_DECODE", 0); | ||||
| 
 | ||||
|     // ./
 | ||||
|     let bcrypt_alphabet: Vec<u8> = (b'.'..(b'/' + 1)) | ||||
|         // A-Z
 | ||||
|         .chain(b'A'..(b'Z' + 1)) | ||||
|         // a-z
 | ||||
|         .chain(b'a'..(b'z' + 1)) | ||||
|         // 0-9
 | ||||
|         .chain(b'0'..(b'9' + 1)) | ||||
|         .collect(); | ||||
|     print_encode_table(&bcrypt_alphabet, "BCRYPT_ENCODE", 0); | ||||
|     print_decode_table(&bcrypt_alphabet, "BCRYPT_DECODE", 0); | ||||
| 
 | ||||
|     // A-Z
 | ||||
|     let imap_alphabet: Vec<u8> = (0x41..0x5B) | ||||
|         // a-z
 | ||||
|         .chain(0x61..0x7B) | ||||
|         // 0-9
 | ||||
|         .chain(0x30..0x3A) | ||||
|         // +
 | ||||
|         .chain(0x2B..0x2C) | ||||
|         // ,
 | ||||
|         .chain(0x2C..0x2D) | ||||
|         .collect(); | ||||
|     print_encode_table(&imap_alphabet, "IMAP_MUTF7_ENCODE", 0); | ||||
|     print_decode_table(&imap_alphabet, "IMAP_MUTF7_DECODE", 0); | ||||
| 
 | ||||
|     // '!' - '-'
 | ||||
|     let binhex_alphabet: Vec<u8> = (0x21..0x2E) | ||||
|         // 0-9
 | ||||
|         .chain(0x30..0x3A) | ||||
|         // @-N
 | ||||
|         .chain(0x40..0x4F) | ||||
|         // P-V
 | ||||
|         .chain(0x50..0x57) | ||||
|         // X-[
 | ||||
|         .chain(0x58..0x5C) | ||||
|         // `-f
 | ||||
|         .chain(0x60..0x66) | ||||
|         // h-m
 | ||||
|         .chain(0x68..0x6E) | ||||
|         // p-r
 | ||||
|         .chain(0x70..0x73) | ||||
|         .collect(); | ||||
|     print_encode_table(&binhex_alphabet, "BINHEX_ENCODE", 0); | ||||
|     print_decode_table(&binhex_alphabet, "BINHEX_DECODE", 0); | ||||
| } | ||||
| 
 | ||||
| fn print_encode_table(alphabet: &[u8], const_name: &str, indent_depth: usize) { | ||||
|     check_alphabet(alphabet); | ||||
|     println!("#[rustfmt::skip]"); | ||||
|     println!( | ||||
|         "{:width$}pub const {}: &[u8; 64] = &[", | ||||
|         "", | ||||
|         const_name, | ||||
|         width = indent_depth | ||||
|     ); | ||||
| 
 | ||||
|     for (i, b) in alphabet.iter().enumerate() { | ||||
|         println!( | ||||
|             "{:width$}{}, // input {} (0x{:X}) => '{}' (0x{:X})", | ||||
|             "", | ||||
|             b, | ||||
|             i, | ||||
|             i, | ||||
|             String::from_utf8(vec![*b as u8]).unwrap(), | ||||
|             b, | ||||
|             width = indent_depth + 4 | ||||
|         ); | ||||
|     } | ||||
| 
 | ||||
|     println!("{:width$}];", "", width = indent_depth); | ||||
| } | ||||
| 
 | ||||
| fn print_decode_table(alphabet: &[u8], const_name: &str, indent_depth: usize) { | ||||
|     check_alphabet(alphabet); | ||||
|     // map of alphabet bytes to 6-bit morsels
 | ||||
|     let mut input_to_morsel = HashMap::<u8, u8>::new(); | ||||
| 
 | ||||
|     // standard base64 alphabet bytes, in order
 | ||||
|     for (morsel, ascii_byte) in alphabet.iter().enumerate() { | ||||
|         // truncation cast is fine here
 | ||||
|         let _ = input_to_morsel.insert(*ascii_byte, morsel as u8); | ||||
|     } | ||||
| 
 | ||||
|     println!("#[rustfmt::skip]"); | ||||
|     println!( | ||||
|         "{:width$}pub const {}: &[u8; 256] = &[", | ||||
|         "", | ||||
|         const_name, | ||||
|         width = indent_depth | ||||
|     ); | ||||
|     for ascii_byte in 0..256 { | ||||
|         let (value, comment) = match input_to_morsel.get(&(ascii_byte as u8)) { | ||||
|             None => ( | ||||
|                 "INVALID_VALUE".to_string(), | ||||
|                 format!("input {} (0x{:X})", ascii_byte, ascii_byte), | ||||
|             ), | ||||
|             Some(v) => ( | ||||
|                 format!("{}", *v), | ||||
|                 format!( | ||||
|                     "input {} (0x{:X} char '{}') => {} (0x{:X})", | ||||
|                     ascii_byte, | ||||
|                     ascii_byte, | ||||
|                     String::from_utf8(vec![ascii_byte as u8]).unwrap(), | ||||
|                     *v, | ||||
|                     *v | ||||
|                 ), | ||||
|             ), | ||||
|         }; | ||||
| 
 | ||||
|         println!( | ||||
|             "{:width$}{}, // {}", | ||||
|             "", | ||||
|             value, | ||||
|             comment, | ||||
|             width = indent_depth + 4 | ||||
|         ); | ||||
|     } | ||||
|     println!("{:width$}];", "", width = indent_depth); | ||||
| } | ||||
| 
 | ||||
| fn check_alphabet(alphabet: &[u8]) { | ||||
|     // ensure all characters are distinct
 | ||||
|     assert_eq!(64, alphabet.len()); | ||||
|     let mut set: HashSet<u8> = HashSet::new(); | ||||
|     set.extend(alphabet); | ||||
|     assert_eq!(64, set.len()); | ||||
| 
 | ||||
|     // must be ASCII to be valid as single UTF-8 bytes
 | ||||
|     for &b in alphabet { | ||||
|         assert!(b <= 0x7F_u8); | ||||
|         // = is assumed to be padding, so cannot be used as a symbol
 | ||||
|         assert_ne!(b'=', b); | ||||
|     } | ||||
| } | ||||
							
								
								
									
241 third_party/rust/base64/src/alphabet.rs (vendored, new file)

							|  | @ -0,0 +1,241 @@ | |||
| //! Provides [Alphabet] and constants for alphabets commonly used in the wild.
 | ||||
| 
 | ||||
| use crate::PAD_BYTE; | ||||
| use core::fmt; | ||||
| #[cfg(any(feature = "std", test))] | ||||
| use std::error; | ||||
| 
 | ||||
| const ALPHABET_SIZE: usize = 64; | ||||
| 
 | ||||
| /// An alphabet defines the 64 ASCII characters (symbols) used for base64.
 | ||||
| ///
 | ||||
| /// Common alphabets are provided as constants, and custom alphabets
 | ||||
| /// can be made via `from_str` or the `TryFrom<str>` implementation.
 | ||||
| ///
 | ||||
| /// ```
 | ||||
| /// let custom = base64::alphabet::Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/").unwrap();
 | ||||
| ///
 | ||||
| /// let engine = base64::engine::GeneralPurpose::new(
 | ||||
| ///     &custom,
 | ||||
| ///     base64::engine::general_purpose::PAD);
 | ||||
| /// ```
 | ||||
| #[derive(Clone, Debug, Eq, PartialEq)] | ||||
| pub struct Alphabet { | ||||
|     pub(crate) symbols: [u8; ALPHABET_SIZE], | ||||
| } | ||||
| 
 | ||||
| impl Alphabet { | ||||
|     /// Performs no checks so that it can be const.
 | ||||
|     /// Used only for known-valid strings.
 | ||||
|     const fn from_str_unchecked(alphabet: &str) -> Self { | ||||
|         let mut symbols = [0_u8; ALPHABET_SIZE]; | ||||
|         let source_bytes = alphabet.as_bytes(); | ||||
| 
 | ||||
|         // a way to copy that's allowed in const fn
 | ||||
|         let mut index = 0; | ||||
|         while index < ALPHABET_SIZE { | ||||
|             symbols[index] = source_bytes[index]; | ||||
|             index += 1; | ||||
|         } | ||||
| 
 | ||||
|         Self { symbols } | ||||
|     } | ||||
| 
 | ||||
|     /// Create an `Alphabet` from a string of 64 unique printable ASCII bytes.
 | ||||
|     ///
 | ||||
|     /// The `=` byte is not allowed as it is used for padding.
 | ||||
|     pub const fn new(alphabet: &str) -> Result<Self, ParseAlphabetError> { | ||||
|         let bytes = alphabet.as_bytes(); | ||||
|         if bytes.len() != ALPHABET_SIZE { | ||||
|             return Err(ParseAlphabetError::InvalidLength); | ||||
|         } | ||||
| 
 | ||||
|         { | ||||
|             let mut index = 0; | ||||
|             while index < ALPHABET_SIZE { | ||||
|                 let byte = bytes[index]; | ||||
| 
 | ||||
|                 // must be ascii printable. 127 (DEL) is commonly considered printable
 | ||||
|                 // for some reason but clearly unsuitable for base64.
 | ||||
|                 if !(byte >= 32_u8 && byte <= 126_u8) { | ||||
|                     return Err(ParseAlphabetError::UnprintableByte(byte)); | ||||
|                 } | ||||
|                 // = is assumed to be padding, so cannot be used as a symbol
 | ||||
|                 if byte == PAD_BYTE { | ||||
|                     return Err(ParseAlphabetError::ReservedByte(byte)); | ||||
|                 } | ||||
| 
 | ||||
|                 // Check for duplicates while staying within what const allows.
 | ||||
|                 // It's n^2, but only over 64 hot bytes, and only once, so it's likely in the single digit
 | ||||
|                 // microsecond range.
 | ||||
| 
 | ||||
|                 let mut probe_index = 0; | ||||
|                 while probe_index < ALPHABET_SIZE { | ||||
|                     if probe_index == index { | ||||
|                         probe_index += 1; | ||||
|                         continue; | ||||
|                     } | ||||
| 
 | ||||
|                     let probe_byte = bytes[probe_index]; | ||||
| 
 | ||||
|                     if byte == probe_byte { | ||||
|                         return Err(ParseAlphabetError::DuplicatedByte(byte)); | ||||
|                     } | ||||
| 
 | ||||
|                     probe_index += 1; | ||||
|                 } | ||||
| 
 | ||||
|                 index += 1; | ||||
|             } | ||||
|         } | ||||
| 
 | ||||
|         Ok(Self::from_str_unchecked(alphabet)) | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl TryFrom<&str> for Alphabet { | ||||
|     type Error = ParseAlphabetError; | ||||
| 
 | ||||
|     fn try_from(value: &str) -> Result<Self, Self::Error> { | ||||
|         Self::new(value) | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| /// Possible errors when constructing an [Alphabet] from a `str`.
 | ||||
| #[derive(Debug, Eq, PartialEq)] | ||||
| pub enum ParseAlphabetError { | ||||
|     /// Alphabets must be 64 ASCII bytes
 | ||||
|     InvalidLength, | ||||
|     /// All bytes must be unique
 | ||||
|     DuplicatedByte(u8), | ||||
|     /// All bytes must be printable (in the range `[32, 126]`).
 | ||||
|     UnprintableByte(u8), | ||||
|     /// `=` cannot be used
 | ||||
|     ReservedByte(u8), | ||||
| } | ||||
| 
 | ||||
| impl fmt::Display for ParseAlphabetError { | ||||
|     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||||
|         match self { | ||||
|             Self::InvalidLength => write!(f, "Invalid length - must be 64 bytes"), | ||||
|             Self::DuplicatedByte(b) => write!(f, "Duplicated byte: {:#04x}", b), | ||||
|             Self::UnprintableByte(b) => write!(f, "Unprintable byte: {:#04x}", b), | ||||
|             Self::ReservedByte(b) => write!(f, "Reserved byte: {:#04x}", b), | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[cfg(any(feature = "std", test))] | ||||
| impl error::Error for ParseAlphabetError {} | ||||
| 
 | ||||
| /// The standard alphabet (uses `+` and `/`).
 | ||||
| ///
 | ||||
| /// See [RFC 3548](https://tools.ietf.org/html/rfc3548#section-3).
 | ||||
| pub const STANDARD: Alphabet = Alphabet::from_str_unchecked( | ||||
|     "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/", | ||||
| ); | ||||
| 
 | ||||
| /// The URL safe alphabet (uses `-` and `_`).
 | ||||
| ///
 | ||||
| /// See [RFC 3548](https://tools.ietf.org/html/rfc3548#section-4).
 | ||||
| pub const URL_SAFE: Alphabet = Alphabet::from_str_unchecked( | ||||
|     "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_", | ||||
| ); | ||||
| 
 | ||||
| /// The `crypt(3)` alphabet (uses `.` and `/` as the first two values).
 | ||||
| ///
 | ||||
| /// Not standardized, but folk wisdom on the net asserts that this alphabet is what crypt uses.
 | ||||
| pub const CRYPT: Alphabet = Alphabet::from_str_unchecked( | ||||
|     "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", | ||||
| ); | ||||
| 
 | ||||
| /// The bcrypt alphabet.
 | ||||
| pub const BCRYPT: Alphabet = Alphabet::from_str_unchecked( | ||||
|     "./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", | ||||
| ); | ||||
| 
 | ||||
| /// The alphabet used in IMAP-modified UTF-7 (uses `+` and `,`).
 | ||||
| ///
 | ||||
| /// See [RFC 3501](https://tools.ietf.org/html/rfc3501#section-5.1.3)
 | ||||
| pub const IMAP_MUTF7: Alphabet = Alphabet::from_str_unchecked( | ||||
|     "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+,", | ||||
| ); | ||||
| 
 | ||||
| /// The alphabet used in BinHex 4.0 files.
 | ||||
| ///
 | ||||
| /// See [BinHex 4.0 Definition](http://files.stairways.com/other/binhex-40-specs-info.txt)
 | ||||
| pub const BIN_HEX: Alphabet = Alphabet::from_str_unchecked( | ||||
|     "!\"#$%&'()*+,-0123456789@ABCDEFGHIJKLMNPQRSTUVXYZ[`abcdehijklmpqr", | ||||
| ); | ||||
| 
 | ||||
| #[cfg(test)] | ||||
| mod tests { | ||||
|     use crate::alphabet::*; | ||||
|     use std::convert::TryFrom as _; | ||||
| 
 | ||||
|     #[test] | ||||
|     fn detects_duplicate_start() { | ||||
|         assert_eq!( | ||||
|             ParseAlphabetError::DuplicatedByte(b'A'), | ||||
|             Alphabet::new("AACDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/") | ||||
|                 .unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn detects_duplicate_end() { | ||||
|         assert_eq!( | ||||
|             ParseAlphabetError::DuplicatedByte(b'/'), | ||||
|             Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789//") | ||||
|                 .unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn detects_duplicate_middle() { | ||||
|         assert_eq!( | ||||
|             ParseAlphabetError::DuplicatedByte(b'Z'), | ||||
|             Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZZbcdefghijklmnopqrstuvwxyz0123456789+/") | ||||
|                 .unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn detects_length() { | ||||
|         assert_eq!( | ||||
|             ParseAlphabetError::InvalidLength, | ||||
|             Alphabet::new( | ||||
|                 "xxxxxxxxxABCDEFGHIJKLMNOPQRSTUVWXYZZbcdefghijklmnopqrstuvwxyz0123456789+/", | ||||
|             ) | ||||
|             .unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn detects_padding() { | ||||
|         assert_eq!( | ||||
|             ParseAlphabetError::ReservedByte(b'='), | ||||
|             Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+=") | ||||
|                 .unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn detects_unprintable() { | ||||
|         // form feed
 | ||||
|         assert_eq!( | ||||
|             ParseAlphabetError::UnprintableByte(0xc), | ||||
|             Alphabet::new("\x0cBCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/") | ||||
|                 .unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn same_as_unchecked() { | ||||
|         assert_eq!( | ||||
|             STANDARD, | ||||
|             Alphabet::try_from("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/") | ||||
|                 .unwrap() | ||||
|         ); | ||||
|     } | ||||
| } | ||||
							
								
								
									
108  third_party/rust/base64/src/chunked_encoder.rs (vendored)
							|  | @ -1,13 +1,12 @@ | |||
| use crate::{ | ||||
|     encode::{add_padding, encode_to_slice}, | ||||
|     Config, | ||||
| }; | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| use alloc::string::String; | ||||
| use core::cmp; | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| use core::str; | ||||
| 
 | ||||
| use crate::encode::add_padding; | ||||
| use crate::engine::{Config, Engine}; | ||||
| 
 | ||||
| /// The output mechanism for ChunkedEncoder's encoded bytes.
 | ||||
| pub trait Sink { | ||||
|     type Error; | ||||
|  | @ -19,23 +18,21 @@ pub trait Sink { | |||
| const BUF_SIZE: usize = 1024; | ||||
| 
 | ||||
| /// A base64 encoder that emits encoded bytes in chunks without heap allocation.
 | ||||
| pub struct ChunkedEncoder { | ||||
|     config: Config, | ||||
| pub struct ChunkedEncoder<'e, E: Engine + ?Sized> { | ||||
|     engine: &'e E, | ||||
|     max_input_chunk_len: usize, | ||||
| } | ||||
| 
 | ||||
| impl ChunkedEncoder { | ||||
|     pub fn new(config: Config) -> ChunkedEncoder { | ||||
| impl<'e, E: Engine + ?Sized> ChunkedEncoder<'e, E> { | ||||
|     pub fn new(engine: &'e E) -> ChunkedEncoder<'e, E> { | ||||
|         ChunkedEncoder { | ||||
|             config, | ||||
|             max_input_chunk_len: max_input_length(BUF_SIZE, config), | ||||
|             engine, | ||||
|             max_input_chunk_len: max_input_length(BUF_SIZE, engine.config().encode_padding()), | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     pub fn encode<S: Sink>(&self, bytes: &[u8], sink: &mut S) -> Result<(), S::Error> { | ||||
|         let mut encode_buf: [u8; BUF_SIZE] = [0; BUF_SIZE]; | ||||
|         let encode_table = self.config.char_set.encode_table(); | ||||
| 
 | ||||
|         let mut input_index = 0; | ||||
| 
 | ||||
|         while input_index < bytes.len() { | ||||
|  | @ -44,12 +41,12 @@ impl ChunkedEncoder { | |||
| 
 | ||||
|             let chunk = &bytes[input_index..(input_index + input_chunk_len)]; | ||||
| 
 | ||||
|             let mut b64_bytes_written = encode_to_slice(chunk, &mut encode_buf, encode_table); | ||||
|             let mut b64_bytes_written = self.engine.internal_encode(chunk, &mut encode_buf); | ||||
| 
 | ||||
|             input_index += input_chunk_len; | ||||
|             let more_input_left = input_index < bytes.len(); | ||||
| 
 | ||||
|             if self.config.pad && !more_input_left { | ||||
|             if self.engine.config().encode_padding() && !more_input_left { | ||||
|                 // no more input, add padding if needed. Buffer will have room because
 | ||||
|                 // max_input_length leaves room for it.
 | ||||
|                 b64_bytes_written += add_padding(bytes.len(), &mut encode_buf[b64_bytes_written..]); | ||||
|  | @ -69,8 +66,8 @@ impl ChunkedEncoder { | |||
| ///
 | ||||
| /// The input length will always be a multiple of 3 so that no encoding state has to be carried over
 | ||||
| /// between chunks.
 | ||||
| fn max_input_length(encoded_buf_len: usize, config: Config) -> usize { | ||||
|     let effective_buf_len = if config.pad { | ||||
| fn max_input_length(encoded_buf_len: usize, padded: bool) -> usize { | ||||
|     let effective_buf_len = if padded { | ||||
|         // make room for padding
 | ||||
|         encoded_buf_len | ||||
|             .checked_sub(2) | ||||
|  | @ -109,26 +106,28 @@ impl<'a> Sink for StringSink<'a> { | |||
| 
 | ||||
| #[cfg(test)] | ||||
| pub mod tests { | ||||
|     use super::*; | ||||
|     use crate::{encode_config_buf, tests::random_config, CharacterSet, STANDARD}; | ||||
| 
 | ||||
|     use rand::{ | ||||
|         distributions::{Distribution, Uniform}, | ||||
|         FromEntropy, Rng, | ||||
|         Rng, SeedableRng, | ||||
|     }; | ||||
| 
 | ||||
|     use crate::{ | ||||
|         alphabet::STANDARD, | ||||
|         engine::general_purpose::{GeneralPurpose, GeneralPurposeConfig, PAD}, | ||||
|         tests::random_engine, | ||||
|     }; | ||||
| 
 | ||||
|     use super::*; | ||||
| 
 | ||||
|     #[test] | ||||
|     fn chunked_encode_empty() { | ||||
|         assert_eq!("", chunked_encode_str(&[], STANDARD)); | ||||
|         assert_eq!("", chunked_encode_str(&[], PAD)); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn chunked_encode_intermediate_fast_loop() { | ||||
|         // > 8 bytes input, will enter the pretty fast loop
 | ||||
|         assert_eq!( | ||||
|             "Zm9vYmFyYmF6cXV4", | ||||
|             chunked_encode_str(b"foobarbazqux", STANDARD) | ||||
|         ); | ||||
|         assert_eq!("Zm9vYmFyYmF6cXV4", chunked_encode_str(b"foobarbazqux", PAD)); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|  | @ -136,14 +135,14 @@ pub mod tests { | |||
|         // > 32 bytes input, will enter the uber fast loop
 | ||||
|         assert_eq!( | ||||
|             "Zm9vYmFyYmF6cXV4cXV1eGNvcmdlZ3JhdWx0Z2FycGx5eg==", | ||||
|             chunked_encode_str(b"foobarbazquxquuxcorgegraultgarplyz", STANDARD) | ||||
|             chunked_encode_str(b"foobarbazquxquuxcorgegraultgarplyz", PAD) | ||||
|         ); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn chunked_encode_slow_loop_only() { | ||||
|         // < 8 bytes input, slow loop only
 | ||||
|         assert_eq!("Zm9vYmFy", chunked_encode_str(b"foobar", STANDARD)); | ||||
|         assert_eq!("Zm9vYmFy", chunked_encode_str(b"foobar", PAD)); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|  | @ -154,32 +153,27 @@ pub mod tests { | |||
| 
 | ||||
|     #[test] | ||||
|     fn max_input_length_no_pad() { | ||||
|         let config = config_with_pad(false); | ||||
|         assert_eq!(768, max_input_length(1024, config)); | ||||
|         assert_eq!(768, max_input_length(1024, false)); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn max_input_length_with_pad_decrements_one_triple() { | ||||
|         let config = config_with_pad(true); | ||||
|         assert_eq!(765, max_input_length(1024, config)); | ||||
|         assert_eq!(765, max_input_length(1024, true)); | ||||
|     } | ||||
| 
 | ||||
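|     // Worked arithmetic behind the assertions in these tests (illustrative): with
|     // padding, 1024 - 2 = 1022 usable output bytes, 1022 / 4 = 255 complete
|     // 4-char blocks, and 255 * 3 = 765 input bytes; without padding it is
|     // simply 1024 / 4 * 3 = 768.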
|     #[test] | ||||
|     fn max_input_length_with_pad_one_byte_short() { | ||||
|         let config = config_with_pad(true); | ||||
|         assert_eq!(765, max_input_length(1025, config)); | ||||
|         assert_eq!(765, max_input_length(1025, true)); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn max_input_length_with_pad_fits_exactly() { | ||||
|         let config = config_with_pad(true); | ||||
|         assert_eq!(768, max_input_length(1026, config)); | ||||
|         assert_eq!(768, max_input_length(1026, true)); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn max_input_length_cant_use_extra_single_encoded_byte() { | ||||
|         let config = Config::new(crate::CharacterSet::Standard, false); | ||||
|         assert_eq!(300, max_input_length(401, config)); | ||||
|         assert_eq!(300, max_input_length(401, false)); | ||||
|     } | ||||
| 
 | ||||
|     pub fn chunked_encode_matches_normal_encode_random<S: SinkTestHelper>(sink_test_helper: &S) { | ||||
|  | @ -197,49 +191,39 @@ pub mod tests { | |||
|                 input_buf.push(rng.gen()); | ||||
|             } | ||||
| 
 | ||||
|             let config = random_config(&mut rng); | ||||
|             let engine = random_engine(&mut rng); | ||||
| 
 | ||||
|             let chunk_encoded_string = sink_test_helper.encode_to_string(config, &input_buf); | ||||
|             encode_config_buf(&input_buf, config, &mut output_buf); | ||||
|             let chunk_encoded_string = sink_test_helper.encode_to_string(&engine, &input_buf); | ||||
|             engine.encode_string(&input_buf, &mut output_buf); | ||||
| 
 | ||||
|             assert_eq!( | ||||
|                 output_buf, chunk_encoded_string, | ||||
|                 "input len={}, config: pad={}", | ||||
|                 buf_len, config.pad | ||||
|             ); | ||||
|             assert_eq!(output_buf, chunk_encoded_string, "input len={}", buf_len); | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     fn chunked_encode_str(bytes: &[u8], config: Config) -> String { | ||||
|     fn chunked_encode_str(bytes: &[u8], config: GeneralPurposeConfig) -> String { | ||||
|         let mut s = String::new(); | ||||
|         { | ||||
|             let mut sink = StringSink::new(&mut s); | ||||
|             let encoder = ChunkedEncoder::new(config); | ||||
|             encoder.encode(bytes, &mut sink).unwrap(); | ||||
|         } | ||||
| 
 | ||||
|         return s; | ||||
|     } | ||||
|         let mut sink = StringSink::new(&mut s); | ||||
|         let engine = GeneralPurpose::new(&STANDARD, config); | ||||
|         let encoder = ChunkedEncoder::new(&engine); | ||||
|         encoder.encode(bytes, &mut sink).unwrap(); | ||||
| 
 | ||||
|     fn config_with_pad(pad: bool) -> Config { | ||||
|         Config::new(CharacterSet::Standard, pad) | ||||
|         s | ||||
|     } | ||||
| 
 | ||||
|     // An abstraction around sinks so that we can have tests that apply easily to any sink implementation
 | ||||
|     pub trait SinkTestHelper { | ||||
|         fn encode_to_string(&self, config: Config, bytes: &[u8]) -> String; | ||||
|         fn encode_to_string<E: Engine>(&self, engine: &E, bytes: &[u8]) -> String; | ||||
|     } | ||||
| 
 | ||||
|     struct StringSinkTestHelper; | ||||
| 
 | ||||
|     impl SinkTestHelper for StringSinkTestHelper { | ||||
|         fn encode_to_string(&self, config: Config, bytes: &[u8]) -> String { | ||||
|             let encoder = ChunkedEncoder::new(config); | ||||
|         fn encode_to_string<E: Engine>(&self, engine: &E, bytes: &[u8]) -> String { | ||||
|             let encoder = ChunkedEncoder::new(engine); | ||||
|             let mut s = String::new(); | ||||
|             { | ||||
|                 let mut sink = StringSink::new(&mut s); | ||||
|                 encoder.encode(bytes, &mut sink).unwrap(); | ||||
|             } | ||||
|             let mut sink = StringSink::new(&mut s); | ||||
|             encoder.encode(bytes, &mut sink).unwrap(); | ||||
| 
 | ||||
|             s | ||||
|         } | ||||
900  third_party/rust/base64/src/decode.rs (vendored)
(File diff suppressed because it is too large)

30  third_party/rust/base64/src/display.rs (vendored)
|  | @ -1,36 +1,36 @@ | |||
| //! Enables base64'd output anywhere you might use a `Display` implementation, like a format string.
 | ||||
| //!
 | ||||
| //! ```
 | ||||
| //! use base64::display::Base64Display;
 | ||||
| //! use base64::{display::Base64Display, engine::general_purpose::STANDARD};
 | ||||
| //!
 | ||||
| //! let data = vec![0x0, 0x1, 0x2, 0x3];
 | ||||
| //! let wrapper = Base64Display::with_config(&data, base64::STANDARD);
 | ||||
| //! let wrapper = Base64Display::new(&data, &STANDARD);
 | ||||
| //!
 | ||||
| //! assert_eq!("base64: AAECAw==", format!("base64: {}", wrapper));
 | ||||
| //! ```
 | ||||
| 
 | ||||
| use super::chunked_encoder::ChunkedEncoder; | ||||
| use super::Config; | ||||
| use crate::engine::Engine; | ||||
| use core::fmt::{Display, Formatter}; | ||||
| use core::{fmt, str}; | ||||
| 
 | ||||
| /// A convenience wrapper for base64'ing bytes into a format string without heap allocation.
 | ||||
| pub struct Base64Display<'a> { | ||||
| pub struct Base64Display<'a, 'e, E: Engine> { | ||||
|     bytes: &'a [u8], | ||||
|     chunked_encoder: ChunkedEncoder, | ||||
|     chunked_encoder: ChunkedEncoder<'e, E>, | ||||
| } | ||||
| 
 | ||||
| impl<'a> Base64Display<'a> { | ||||
|     /// Create a `Base64Display` with the provided config.
 | ||||
|     pub fn with_config(bytes: &[u8], config: Config) -> Base64Display { | ||||
| impl<'a, 'e, E: Engine> Base64Display<'a, 'e, E> { | ||||
|     /// Create a `Base64Display` with the provided engine.
 | ||||
|     pub fn new(bytes: &'a [u8], engine: &'e E) -> Base64Display<'a, 'e, E> { | ||||
|         Base64Display { | ||||
|             bytes, | ||||
|             chunked_encoder: ChunkedEncoder::new(config), | ||||
|             chunked_encoder: ChunkedEncoder::new(engine), | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
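| // Illustrative usage sketch (not upstream code): the wrapper accepts any `Engine`,
| // e.g. the padless URL-safe one, from a downstream crate on base64 0.21.
| //
| //     use base64::{display::Base64Display, engine::general_purpose::URL_SAFE_NO_PAD};
| //
| //     let wrapper = Base64Display::new(b"foobar", &URL_SAFE_NO_PAD);
| //     assert_eq!("Zm9vYmFy", format!("{}", wrapper));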
| impl<'a> Display for Base64Display<'a> { | ||||
| impl<'a, 'e, E: Engine> Display for Base64Display<'a, 'e, E> { | ||||
|     fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> { | ||||
|         let mut sink = FormatterSink { f: formatter }; | ||||
|         self.chunked_encoder.encode(self.bytes, &mut sink) | ||||
|  | @ -57,18 +57,18 @@ mod tests { | |||
|     use super::super::chunked_encoder::tests::{ | ||||
|         chunked_encode_matches_normal_encode_random, SinkTestHelper, | ||||
|     }; | ||||
|     use super::super::*; | ||||
|     use super::*; | ||||
|     use crate::engine::general_purpose::STANDARD; | ||||
| 
 | ||||
|     #[test] | ||||
|     fn basic_display() { | ||||
|         assert_eq!( | ||||
|             "~$Zm9vYmFy#*", | ||||
|             format!("~${}#*", Base64Display::with_config(b"foobar", STANDARD)) | ||||
|             format!("~${}#*", Base64Display::new(b"foobar", &STANDARD)) | ||||
|         ); | ||||
|         assert_eq!( | ||||
|             "~$Zm9vYmFyZg==#*", | ||||
|             format!("~${}#*", Base64Display::with_config(b"foobarf", STANDARD)) | ||||
|             format!("~${}#*", Base64Display::new(b"foobarf", &STANDARD)) | ||||
|         ); | ||||
|     } | ||||
| 
 | ||||
|  | @ -81,8 +81,8 @@ mod tests { | |||
|     struct DisplaySinkTestHelper; | ||||
| 
 | ||||
|     impl SinkTestHelper for DisplaySinkTestHelper { | ||||
|         fn encode_to_string(&self, config: Config, bytes: &[u8]) -> String { | ||||
|             format!("{}", Base64Display::with_config(bytes, config)) | ||||
|         fn encode_to_string<E: Engine>(&self, engine: &E, bytes: &[u8]) -> String { | ||||
|             format!("{}", Base64Display::new(bytes, engine)) | ||||
|         } | ||||
|     } | ||||
| } | ||||
529  third_party/rust/base64/src/encode.rs (vendored)
|  | @ -1,130 +1,59 @@ | |||
| use crate::{Config, PAD_BYTE}; | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| use crate::{chunked_encoder, STANDARD}; | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| use alloc::{string::String, vec}; | ||||
| use core::convert::TryInto; | ||||
| use alloc::string::String; | ||||
| use core::fmt; | ||||
| #[cfg(any(feature = "std", test))] | ||||
| use std::error; | ||||
| 
 | ||||
| ///Encode arbitrary octets as base64.
 | ||||
| ///Returns a String.
 | ||||
| ///Convenience for `encode_config(input, base64::STANDARD);`.
 | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| use crate::engine::general_purpose::STANDARD; | ||||
| use crate::engine::{Config, Engine}; | ||||
| use crate::PAD_BYTE; | ||||
| 
 | ||||
| /// Encode arbitrary octets as base64 using the [`STANDARD` engine](STANDARD).
 | ||||
| ///
 | ||||
| ///# Example
 | ||||
| ///
 | ||||
| ///```rust
 | ||||
| ///extern crate base64;
 | ||||
| ///
 | ||||
| ///fn main() {
 | ||||
| ///    let b64 = base64::encode(b"hello world");
 | ||||
| ///    println!("{}", b64);
 | ||||
| ///}
 | ||||
| ///```
 | ||||
| /// See [Engine::encode].
 | ||||
| #[allow(unused)] | ||||
| #[deprecated(since = "0.21.0", note = "Use Engine::encode")] | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| pub fn encode<T: AsRef<[u8]>>(input: T) -> String { | ||||
|     encode_config(input, STANDARD) | ||||
|     STANDARD.encode(input) | ||||
| } | ||||
| 
 | ||||
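| // Migration sketch (illustrative only): the deprecated free functions in this
| // module are thin wrappers over `Engine` methods, so 0.13-style call sites
| //
| //     let b64 = base64::encode(b"hello world");
| //
| // become
| //
| //     use base64::{engine::general_purpose::STANDARD, Engine as _};
| //
| //     let b64 = STANDARD.encode(b"hello world");
| //     assert_eq!("aGVsbG8gd29ybGQ=", b64);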
| ///Encode arbitrary octets as base64.
 | ||||
| ///Returns a String.
 | ||||
| ///Encode arbitrary octets as base64 using the provided `Engine` into a new `String`.
 | ||||
| ///
 | ||||
| ///# Example
 | ||||
| ///
 | ||||
| ///```rust
 | ||||
| ///extern crate base64;
 | ||||
| ///
 | ||||
| ///fn main() {
 | ||||
| ///    let b64 = base64::encode_config(b"hello world~", base64::STANDARD);
 | ||||
| ///    println!("{}", b64);
 | ||||
| ///
 | ||||
| ///    let b64_url = base64::encode_config(b"hello internet~", base64::URL_SAFE);
 | ||||
| ///    println!("{}", b64_url);
 | ||||
| ///}
 | ||||
| ///```
 | ||||
| /// See [Engine::encode].
 | ||||
| #[allow(unused)] | ||||
| #[deprecated(since = "0.21.0", note = "Use Engine::encode")] | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| pub fn encode_config<T: AsRef<[u8]>>(input: T, config: Config) -> String { | ||||
|     let mut buf = match encoded_size(input.as_ref().len(), config) { | ||||
|         Some(n) => vec![0; n], | ||||
|         None => panic!("integer overflow when calculating buffer size"), | ||||
|     }; | ||||
| 
 | ||||
|     encode_with_padding(input.as_ref(), config, buf.len(), &mut buf[..]); | ||||
| 
 | ||||
|     String::from_utf8(buf).expect("Invalid UTF8") | ||||
| pub fn encode_engine<E: Engine, T: AsRef<[u8]>>(input: T, engine: &E) -> String { | ||||
|     engine.encode(input) | ||||
| } | ||||
| 
 | ||||
| ///Encode arbitrary octets as base64.
 | ||||
| ///Writes into the supplied output buffer, which will grow the buffer if needed.
 | ||||
| ///Encode arbitrary octets as base64 into a supplied `String`.
 | ||||
| ///
 | ||||
| ///# Example
 | ||||
| ///
 | ||||
| ///```rust
 | ||||
| ///extern crate base64;
 | ||||
| ///
 | ||||
| ///fn main() {
 | ||||
| ///    let mut buf = String::new();
 | ||||
| ///    base64::encode_config_buf(b"hello world~", base64::STANDARD, &mut buf);
 | ||||
| ///    println!("{}", buf);
 | ||||
| ///
 | ||||
| ///    buf.clear();
 | ||||
| ///    base64::encode_config_buf(b"hello internet~", base64::URL_SAFE, &mut buf);
 | ||||
| ///    println!("{}", buf);
 | ||||
| ///}
 | ||||
| ///```
 | ||||
| /// See [Engine::encode_string].
 | ||||
| #[allow(unused)] | ||||
| #[deprecated(since = "0.21.0", note = "Use Engine::encode_string")] | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| pub fn encode_config_buf<T: AsRef<[u8]>>(input: T, config: Config, buf: &mut String) { | ||||
|     let input_bytes = input.as_ref(); | ||||
| 
 | ||||
|     { | ||||
|         let mut sink = chunked_encoder::StringSink::new(buf); | ||||
|         let encoder = chunked_encoder::ChunkedEncoder::new(config); | ||||
| 
 | ||||
|         encoder | ||||
|             .encode(input_bytes, &mut sink) | ||||
|             .expect("Writing to a String shouldn't fail") | ||||
|     } | ||||
| pub fn encode_engine_string<E: Engine, T: AsRef<[u8]>>( | ||||
|     input: T, | ||||
|     output_buf: &mut String, | ||||
|     engine: &E, | ||||
| ) { | ||||
|     engine.encode_string(input, output_buf) | ||||
| } | ||||
| 
 | ||||
| /// Encode arbitrary octets as base64.
 | ||||
| /// Writes into the supplied output buffer.
 | ||||
| /// Encode arbitrary octets as base64 into a supplied slice.
 | ||||
| ///
 | ||||
| /// This is useful if you wish to avoid allocation entirely (e.g. encoding into a stack-resident
 | ||||
| /// or statically-allocated buffer).
 | ||||
| ///
 | ||||
| /// # Panics
 | ||||
| ///
 | ||||
| /// If `output` is too small to hold the encoded version of `input`, a panic will result.
 | ||||
| ///
 | ||||
| /// # Example
 | ||||
| ///
 | ||||
| /// ```rust
 | ||||
| /// extern crate base64;
 | ||||
| ///
 | ||||
| /// fn main() {
 | ||||
| ///     let s = b"hello internet!";
 | ||||
| ///     let mut buf = Vec::new();
 | ||||
| ///     // make sure we'll have a slice big enough for base64 + padding
 | ||||
| ///     buf.resize(s.len() * 4 / 3 + 4, 0);
 | ||||
| ///
 | ||||
| ///     let bytes_written = base64::encode_config_slice(s,
 | ||||
| ///                             base64::STANDARD, &mut buf);
 | ||||
| ///
 | ||||
| ///     // shorten our vec down to just what was written
 | ||||
| ///     buf.resize(bytes_written, 0);
 | ||||
| ///
 | ||||
| ///     assert_eq!(s, base64::decode(&buf).unwrap().as_slice());
 | ||||
| /// }
 | ||||
| /// ```
 | ||||
| pub fn encode_config_slice<T: AsRef<[u8]>>(input: T, config: Config, output: &mut [u8]) -> usize { | ||||
|     let input_bytes = input.as_ref(); | ||||
| 
 | ||||
|     let encoded_size = encoded_size(input_bytes.len(), config) | ||||
|         .expect("usize overflow when calculating buffer size"); | ||||
| 
 | ||||
|     let mut b64_output = &mut output[0..encoded_size]; | ||||
| 
 | ||||
|     encode_with_padding(&input_bytes, config, encoded_size, &mut b64_output); | ||||
| 
 | ||||
|     encoded_size | ||||
| /// See [Engine::encode_slice].
 | ||||
| #[allow(unused)] | ||||
| #[deprecated(since = "0.21.0", note = "Use Engine::encode_slice")] | ||||
| pub fn encode_engine_slice<E: Engine, T: AsRef<[u8]>>( | ||||
|     input: T, | ||||
|     output_buf: &mut [u8], | ||||
|     engine: &E, | ||||
| ) -> Result<usize, EncodeSliceError> { | ||||
|     engine.encode_slice(input, output_buf) | ||||
| } | ||||
| 
 | ||||
| /// B64-encode and pad (if configured).
 | ||||
|  | @ -137,12 +66,17 @@ pub fn encode_config_slice<T: AsRef<[u8]>>(input: T, config: Config, output: &mu | |||
| /// `output` must be of size `encoded_size`.
 | ||||
| ///
 | ||||
| /// All bytes in `output` will be written to since it is exactly the size of the output.
 | ||||
| fn encode_with_padding(input: &[u8], config: Config, encoded_size: usize, output: &mut [u8]) { | ||||
|     debug_assert_eq!(encoded_size, output.len()); | ||||
| pub(crate) fn encode_with_padding<E: Engine + ?Sized>( | ||||
|     input: &[u8], | ||||
|     output: &mut [u8], | ||||
|     engine: &E, | ||||
|     expected_encoded_size: usize, | ||||
| ) { | ||||
|     debug_assert_eq!(expected_encoded_size, output.len()); | ||||
| 
 | ||||
|     let b64_bytes_written = encode_to_slice(input, output, config.char_set.encode_table()); | ||||
|     let b64_bytes_written = engine.internal_encode(input, output); | ||||
| 
 | ||||
|     let padding_bytes = if config.pad { | ||||
|     let padding_bytes = if engine.config().encode_padding() { | ||||
|         add_padding(input.len(), &mut output[b64_bytes_written..]) | ||||
|     } else { | ||||
|         0 | ||||
|  | @ -152,144 +86,22 @@ fn encode_with_padding(input: &[u8], config: Config, encoded_size: usize, output | |||
|         .checked_add(padding_bytes) | ||||
|         .expect("usize overflow when calculating b64 length"); | ||||
| 
 | ||||
|     debug_assert_eq!(encoded_size, encoded_bytes); | ||||
|     debug_assert_eq!(expected_encoded_size, encoded_bytes); | ||||
| } | ||||
| 
 | ||||
| #[inline] | ||||
| fn read_u64(s: &[u8]) -> u64 { | ||||
|     u64::from_be_bytes(s[..8].try_into().unwrap()) | ||||
| } | ||||
| 
 | ||||
| /// Encode input bytes to utf8 base64 bytes. Does not pad.
 | ||||
| /// `output` must be long enough to hold the encoded `input` without padding.
 | ||||
| /// Returns the number of bytes written.
 | ||||
| #[inline] | ||||
| pub fn encode_to_slice(input: &[u8], output: &mut [u8], encode_table: &[u8; 64]) -> usize { | ||||
|     let mut input_index: usize = 0; | ||||
| 
 | ||||
|     const BLOCKS_PER_FAST_LOOP: usize = 4; | ||||
|     const LOW_SIX_BITS: u64 = 0x3F; | ||||
| 
 | ||||
|     // we read 8 bytes at a time (u64) but only actually consume 6 of those bytes. Thus, we need
 | ||||
|     // 2 trailing bytes to be available to read.
 | ||||
|     let last_fast_index = input.len().saturating_sub(BLOCKS_PER_FAST_LOOP * 6 + 2); | ||||
|     let mut output_index = 0; | ||||
| 
 | ||||
|     if last_fast_index > 0 { | ||||
|         while input_index <= last_fast_index { | ||||
|             // Major performance wins from letting the optimizer do the bounds check once, mostly
 | ||||
|             // on the output side
 | ||||
|             let input_chunk = &input[input_index..(input_index + (BLOCKS_PER_FAST_LOOP * 6 + 2))]; | ||||
|             let output_chunk = &mut output[output_index..(output_index + BLOCKS_PER_FAST_LOOP * 8)]; | ||||
| 
 | ||||
|             // Hand-unrolling for 32 vs 16 or 8 bytes yields performance about equivalent
 | ||||
|             // to unsafe pointer code on a Xeon E5-1650v3. 64 byte unrolling was slightly better for
 | ||||
|             // large inputs but significantly worse for 50-byte input, unsurprisingly. I suspect
 | ||||
|             // that it's a not uncommon use case to encode smallish chunks of data (e.g. a 64-byte
 | ||||
|             // SHA-512 digest), so it would be nice if that fit in the unrolled loop at least once.
 | ||||
|             // Plus, single-digit percentage performance differences might well be quite different
 | ||||
|             // on different hardware.
 | ||||
| 
 | ||||
|             let input_u64 = read_u64(&input_chunk[0..]); | ||||
| 
 | ||||
|             output_chunk[0] = encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[1] = encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[2] = encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[3] = encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[4] = encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[5] = encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[6] = encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[7] = encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize]; | ||||
| 
 | ||||
|             let input_u64 = read_u64(&input_chunk[6..]); | ||||
| 
 | ||||
|             output_chunk[8] = encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[9] = encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[10] = encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[11] = encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[12] = encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[13] = encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[14] = encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[15] = encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize]; | ||||
| 
 | ||||
|             let input_u64 = read_u64(&input_chunk[12..]); | ||||
| 
 | ||||
|             output_chunk[16] = encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[17] = encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[18] = encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[19] = encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[20] = encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[21] = encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[22] = encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[23] = encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize]; | ||||
| 
 | ||||
|             let input_u64 = read_u64(&input_chunk[18..]); | ||||
| 
 | ||||
|             output_chunk[24] = encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[25] = encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[26] = encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[27] = encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[28] = encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[29] = encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[30] = encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize]; | ||||
|             output_chunk[31] = encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize]; | ||||
| 
 | ||||
|             output_index += BLOCKS_PER_FAST_LOOP * 8; | ||||
|             input_index += BLOCKS_PER_FAST_LOOP * 6; | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     // Encode what's left after the fast loop.
 | ||||
| 
 | ||||
|     const LOW_SIX_BITS_U8: u8 = 0x3F; | ||||
| 
 | ||||
|     let rem = input.len() % 3; | ||||
|     let start_of_rem = input.len() - rem; | ||||
| 
 | ||||
|     // start at the first index not handled by fast loop, which may be 0.
 | ||||
| 
 | ||||
|     while input_index < start_of_rem { | ||||
|         let input_chunk = &input[input_index..(input_index + 3)]; | ||||
|         let output_chunk = &mut output[output_index..(output_index + 4)]; | ||||
| 
 | ||||
|         output_chunk[0] = encode_table[(input_chunk[0] >> 2) as usize]; | ||||
|         output_chunk[1] = | ||||
|             encode_table[((input_chunk[0] << 4 | input_chunk[1] >> 4) & LOW_SIX_BITS_U8) as usize]; | ||||
|         output_chunk[2] = | ||||
|             encode_table[((input_chunk[1] << 2 | input_chunk[2] >> 6) & LOW_SIX_BITS_U8) as usize]; | ||||
|         output_chunk[3] = encode_table[(input_chunk[2] & LOW_SIX_BITS_U8) as usize]; | ||||
| 
 | ||||
|         input_index += 3; | ||||
|         output_index += 4; | ||||
|     } | ||||
| 
 | ||||
|     if rem == 2 { | ||||
|         output[output_index] = encode_table[(input[start_of_rem] >> 2) as usize]; | ||||
|         output[output_index + 1] = encode_table[((input[start_of_rem] << 4 | ||||
|             | input[start_of_rem + 1] >> 4) | ||||
|             & LOW_SIX_BITS_U8) as usize]; | ||||
|         output[output_index + 2] = | ||||
|             encode_table[((input[start_of_rem + 1] << 2) & LOW_SIX_BITS_U8) as usize]; | ||||
|         output_index += 3; | ||||
|     } else if rem == 1 { | ||||
|         output[output_index] = encode_table[(input[start_of_rem] >> 2) as usize]; | ||||
|         output[output_index + 1] = | ||||
|             encode_table[((input[start_of_rem] << 4) & LOW_SIX_BITS_U8) as usize]; | ||||
|         output_index += 2; | ||||
|     } | ||||
| 
 | ||||
|     output_index | ||||
| } | ||||
| 
 | ||||
| /// calculate the base64 encoded string size, including padding if appropriate
 | ||||
| pub fn encoded_size(bytes_len: usize, config: Config) -> Option<usize> { | ||||
| /// Calculate the base64 encoded length for a given input length, optionally including any
 | ||||
| /// appropriate padding bytes.
 | ||||
| ///
 | ||||
| /// Returns `None` if the encoded length can't be represented in `usize`. This will happen for
 | ||||
| /// input lengths in approximately the top quarter of the range of `usize`.
 | ||||
| pub fn encoded_len(bytes_len: usize, padding: bool) -> Option<usize> { | ||||
|     let rem = bytes_len % 3; | ||||
| 
 | ||||
|     let complete_input_chunks = bytes_len / 3; | ||||
|     let complete_chunk_output = complete_input_chunks.checked_mul(4); | ||||
| 
 | ||||
|     if rem > 0 { | ||||
|         if config.pad { | ||||
|         if padding { | ||||
|             complete_chunk_output.and_then(|c| c.checked_add(4)) | ||||
|         } else { | ||||
|             let encoded_rem = match rem { | ||||
|  | @ -305,10 +117,12 @@ pub fn encoded_size(bytes_len: usize, config: Config) -> Option<usize> { | |||
| } | ||||
| 
 | ||||
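| // Worked example (illustrative, assuming the crate-root re-export of `encoded_len`):
| // a 5-byte input is one complete 3-byte chunk (4 output chars) plus a 2-byte
| // remainder, so the remainder needs a padded 4-char block or 3 unpadded chars.
| //
| //     assert_eq!(Some(8), base64::encoded_len(5, true));
| //     assert_eq!(Some(7), base64::encoded_len(5, false));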
| /// Write padding characters.
 | ||||
| /// `input_len` is the size of the original, not encoded, input.
 | ||||
| /// `output` is the slice where padding should be written, of length at least 2.
 | ||||
| ///
 | ||||
| /// Returns the number of padding bytes written.
 | ||||
| pub fn add_padding(input_len: usize, output: &mut [u8]) -> usize { | ||||
| pub(crate) fn add_padding(input_len: usize, output: &mut [u8]) -> usize { | ||||
|     // TODO base on encoded len to use cheaper mod by 4 (aka & 3)
 | ||||
|     let rem = input_len % 3; | ||||
|     let mut bytes_written = 0; | ||||
|     for _ in 0..((3 - rem) % 3) { | ||||
|  | @ -319,79 +133,102 @@ pub fn add_padding(input_len: usize, output: &mut [u8]) -> usize { | |||
|     bytes_written | ||||
| } | ||||
| 
 | ||||
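| // Worked example (illustrative): `input_len % 3 == 1` leaves a 2-char encoded
| // remainder, so two `=` bytes are written; `input_len % 3 == 2` leaves three
| // chars, so one `=` byte is written; a multiple of 3 needs none.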
| /// Errors that can occur while encoding into a slice.
 | ||||
| #[derive(Clone, Debug, PartialEq, Eq)] | ||||
| pub enum EncodeSliceError { | ||||
|     /// The provided slice is too small.
 | ||||
|     OutputSliceTooSmall, | ||||
| } | ||||
| 
 | ||||
| impl fmt::Display for EncodeSliceError { | ||||
|     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||||
|         match self { | ||||
|             Self::OutputSliceTooSmall => write!(f, "Output slice too small"), | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[cfg(any(feature = "std", test))] | ||||
| impl error::Error for EncodeSliceError { | ||||
|     fn cause(&self) -> Option<&dyn error::Error> { | ||||
|         None | ||||
|     } | ||||
| } | ||||
| 
 | ||||
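| // Illustrative sketch (not upstream code): `Engine::encode_slice` surfaces this
| // error instead of panicking when the destination slice is too short. From a
| // downstream crate on base64 0.21:
| //
| //     use base64::{engine::general_purpose::STANDARD, Engine as _};
| //
| //     let mut exact = [0u8; 4];
| //     assert_eq!(Ok(4), STANDARD.encode_slice(b"abc", &mut exact));
| //
| //     let mut too_short = [0u8; 3];
| //     assert!(STANDARD.encode_slice(b"abc", &mut too_short).is_err());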
| #[cfg(test)] | ||||
| mod tests { | ||||
|     use super::*; | ||||
|     use crate::{ | ||||
|         decode::decode_config_buf, | ||||
|         tests::{assert_encode_sanity, random_config}, | ||||
|         Config, STANDARD, URL_SAFE_NO_PAD, | ||||
|     }; | ||||
| 
 | ||||
|     use crate::{ | ||||
|         alphabet, | ||||
|         engine::general_purpose::{GeneralPurpose, NO_PAD, STANDARD}, | ||||
|         tests::{assert_encode_sanity, random_config, random_engine}, | ||||
|     }; | ||||
|     use rand::{ | ||||
|         distributions::{Distribution, Uniform}, | ||||
|         FromEntropy, Rng, | ||||
|         Rng, SeedableRng, | ||||
|     }; | ||||
|     use std; | ||||
|     use std::str; | ||||
| 
 | ||||
|     const URL_SAFE_NO_PAD_ENGINE: GeneralPurpose = GeneralPurpose::new(&alphabet::URL_SAFE, NO_PAD); | ||||
| 
 | ||||
|     #[test] | ||||
|     fn encoded_size_correct_standard() { | ||||
|         assert_encoded_length(0, 0, STANDARD); | ||||
|         assert_encoded_length(0, 0, &STANDARD, true); | ||||
| 
 | ||||
|         assert_encoded_length(1, 4, STANDARD); | ||||
|         assert_encoded_length(2, 4, STANDARD); | ||||
|         assert_encoded_length(3, 4, STANDARD); | ||||
|         assert_encoded_length(1, 4, &STANDARD, true); | ||||
|         assert_encoded_length(2, 4, &STANDARD, true); | ||||
|         assert_encoded_length(3, 4, &STANDARD, true); | ||||
| 
 | ||||
|         assert_encoded_length(4, 8, STANDARD); | ||||
|         assert_encoded_length(5, 8, STANDARD); | ||||
|         assert_encoded_length(6, 8, STANDARD); | ||||
|         assert_encoded_length(4, 8, &STANDARD, true); | ||||
|         assert_encoded_length(5, 8, &STANDARD, true); | ||||
|         assert_encoded_length(6, 8, &STANDARD, true); | ||||
| 
 | ||||
|         assert_encoded_length(7, 12, STANDARD); | ||||
|         assert_encoded_length(8, 12, STANDARD); | ||||
|         assert_encoded_length(9, 12, STANDARD); | ||||
|         assert_encoded_length(7, 12, &STANDARD, true); | ||||
|         assert_encoded_length(8, 12, &STANDARD, true); | ||||
|         assert_encoded_length(9, 12, &STANDARD, true); | ||||
| 
 | ||||
|         assert_encoded_length(54, 72, STANDARD); | ||||
|         assert_encoded_length(54, 72, &STANDARD, true); | ||||
| 
 | ||||
|         assert_encoded_length(55, 76, STANDARD); | ||||
|         assert_encoded_length(56, 76, STANDARD); | ||||
|         assert_encoded_length(57, 76, STANDARD); | ||||
|         assert_encoded_length(55, 76, &STANDARD, true); | ||||
|         assert_encoded_length(56, 76, &STANDARD, true); | ||||
|         assert_encoded_length(57, 76, &STANDARD, true); | ||||
| 
 | ||||
|         assert_encoded_length(58, 80, STANDARD); | ||||
|         assert_encoded_length(58, 80, &STANDARD, true); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn encoded_size_correct_no_pad() { | ||||
|         assert_encoded_length(0, 0, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(0, 0, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
| 
 | ||||
|         assert_encoded_length(1, 2, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(2, 3, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(3, 4, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(1, 2, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
|         assert_encoded_length(2, 3, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
|         assert_encoded_length(3, 4, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
| 
 | ||||
|         assert_encoded_length(4, 6, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(5, 7, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(6, 8, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(4, 6, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
|         assert_encoded_length(5, 7, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
|         assert_encoded_length(6, 8, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
| 
 | ||||
|         assert_encoded_length(7, 10, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(8, 11, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(9, 12, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(7, 10, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
|         assert_encoded_length(8, 11, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
|         assert_encoded_length(9, 12, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
| 
 | ||||
|         assert_encoded_length(54, 72, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(54, 72, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
| 
 | ||||
|         assert_encoded_length(55, 74, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(56, 75, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(57, 76, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(55, 74, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
|         assert_encoded_length(56, 75, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
|         assert_encoded_length(57, 76, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
| 
 | ||||
|         assert_encoded_length(58, 78, URL_SAFE_NO_PAD); | ||||
|         assert_encoded_length(58, 78, &URL_SAFE_NO_PAD_ENGINE, false); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn encoded_size_overflow() { | ||||
|         assert_eq!(None, encoded_size(std::usize::MAX, STANDARD)); | ||||
|         assert_eq!(None, encoded_len(usize::MAX, true)); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn encode_config_buf_into_nonempty_buffer_doesnt_clobber_prefix() { | ||||
|     fn encode_engine_string_into_nonempty_buffer_doesnt_clobber_prefix() { | ||||
|         let mut orig_data = Vec::new(); | ||||
|         let mut prefix = String::new(); | ||||
|         let mut encoded_data_no_prefix = String::new(); | ||||
|  | @ -424,29 +261,39 @@ mod tests { | |||
|             } | ||||
|             encoded_data_with_prefix.push_str(&prefix); | ||||
| 
 | ||||
|             let config = random_config(&mut rng); | ||||
|             encode_config_buf(&orig_data, config, &mut encoded_data_no_prefix); | ||||
|             encode_config_buf(&orig_data, config, &mut encoded_data_with_prefix); | ||||
|             let engine = random_engine(&mut rng); | ||||
|             engine.encode_string(&orig_data, &mut encoded_data_no_prefix); | ||||
|             engine.encode_string(&orig_data, &mut encoded_data_with_prefix); | ||||
| 
 | ||||
|             assert_eq!( | ||||
|                 encoded_data_no_prefix.len() + prefix_len, | ||||
|                 encoded_data_with_prefix.len() | ||||
|             ); | ||||
|             assert_encode_sanity(&encoded_data_no_prefix, config, input_len); | ||||
|             assert_encode_sanity(&encoded_data_with_prefix[prefix_len..], config, input_len); | ||||
|             assert_encode_sanity( | ||||
|                 &encoded_data_no_prefix, | ||||
|                 engine.config().encode_padding(), | ||||
|                 input_len, | ||||
|             ); | ||||
|             assert_encode_sanity( | ||||
|                 &encoded_data_with_prefix[prefix_len..], | ||||
|                 engine.config().encode_padding(), | ||||
|                 input_len, | ||||
|             ); | ||||
| 
 | ||||
|             // append plain encode onto prefix
 | ||||
|             prefix.push_str(&mut encoded_data_no_prefix); | ||||
|             prefix.push_str(&encoded_data_no_prefix); | ||||
| 
 | ||||
|             assert_eq!(prefix, encoded_data_with_prefix); | ||||
| 
 | ||||
|             decode_config_buf(&encoded_data_no_prefix, config, &mut decoded).unwrap(); | ||||
|             engine | ||||
|                 .decode_vec(&encoded_data_no_prefix, &mut decoded) | ||||
|                 .unwrap(); | ||||
|             assert_eq!(orig_data, decoded); | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn encode_config_slice_into_nonempty_buffer_doesnt_clobber_suffix() { | ||||
|     fn encode_engine_slice_into_nonempty_buffer_doesnt_clobber_suffix() { | ||||
|         let mut orig_data = Vec::new(); | ||||
|         let mut encoded_data = Vec::new(); | ||||
|         let mut encoded_data_original_state = Vec::new(); | ||||
|  | @ -475,18 +322,18 @@ mod tests { | |||
| 
 | ||||
|             encoded_data_original_state.extend_from_slice(&encoded_data); | ||||
| 
 | ||||
|             let config = random_config(&mut rng); | ||||
|             let engine = random_engine(&mut rng); | ||||
| 
 | ||||
|             let encoded_size = encoded_size(input_len, config).unwrap(); | ||||
|             let encoded_size = encoded_len(input_len, engine.config().encode_padding()).unwrap(); | ||||
| 
 | ||||
|             assert_eq!( | ||||
|                 encoded_size, | ||||
|                 encode_config_slice(&orig_data, config, &mut encoded_data) | ||||
|                 engine.encode_slice(&orig_data, &mut encoded_data).unwrap() | ||||
|             ); | ||||
| 
 | ||||
|             assert_encode_sanity( | ||||
|                 std::str::from_utf8(&encoded_data[0..encoded_size]).unwrap(), | ||||
|                 config, | ||||
|                 str::from_utf8(&encoded_data[0..encoded_size]).unwrap(), | ||||
|                 engine.config().encode_padding(), | ||||
|                 input_len, | ||||
|             ); | ||||
| 
 | ||||
|  | @ -495,50 +342,9 @@ mod tests { | |||
|                 &encoded_data_original_state[encoded_size..] | ||||
|             ); | ||||
| 
 | ||||
|             decode_config_buf(&encoded_data[0..encoded_size], config, &mut decoded).unwrap(); | ||||
|             assert_eq!(orig_data, decoded); | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn encode_config_slice_fits_into_precisely_sized_slice() { | ||||
|         let mut orig_data = Vec::new(); | ||||
|         let mut encoded_data = Vec::new(); | ||||
|         let mut decoded = Vec::new(); | ||||
| 
 | ||||
|         let input_len_range = Uniform::new(0, 1000); | ||||
| 
 | ||||
|         let mut rng = rand::rngs::SmallRng::from_entropy(); | ||||
| 
 | ||||
|         for _ in 0..10_000 { | ||||
|             orig_data.clear(); | ||||
|             encoded_data.clear(); | ||||
|             decoded.clear(); | ||||
| 
 | ||||
|             let input_len = input_len_range.sample(&mut rng); | ||||
| 
 | ||||
|             for _ in 0..input_len { | ||||
|                 orig_data.push(rng.gen()); | ||||
|             } | ||||
| 
 | ||||
|             let config = random_config(&mut rng); | ||||
| 
 | ||||
|             let encoded_size = encoded_size(input_len, config).unwrap(); | ||||
| 
 | ||||
|             encoded_data.resize(encoded_size, 0); | ||||
| 
 | ||||
|             assert_eq!( | ||||
|                 encoded_size, | ||||
|                 encode_config_slice(&orig_data, config, &mut encoded_data) | ||||
|             ); | ||||
| 
 | ||||
|             assert_encode_sanity( | ||||
|                 std::str::from_utf8(&encoded_data[0..encoded_size]).unwrap(), | ||||
|                 config, | ||||
|                 input_len, | ||||
|             ); | ||||
| 
 | ||||
|             decode_config_buf(&encoded_data[0..encoded_size], config, &mut decoded).unwrap(); | ||||
|             engine | ||||
|                 .decode_vec(&encoded_data[0..encoded_size], &mut decoded) | ||||
|                 .unwrap(); | ||||
|             assert_eq!(orig_data, decoded); | ||||
|         } | ||||
|     } | ||||
|  | @ -563,17 +369,17 @@ mod tests { | |||
|             } | ||||
| 
 | ||||
|             let config = random_config(&mut rng); | ||||
|             let engine = random_engine(&mut rng); | ||||
| 
 | ||||
|             // fill up the output buffer with garbage
 | ||||
|             let encoded_size = encoded_size(input_len, config).unwrap(); | ||||
|             let encoded_size = encoded_len(input_len, config.encode_padding()).unwrap(); | ||||
|             for _ in 0..encoded_size { | ||||
|                 output.push(rng.gen()); | ||||
|             } | ||||
| 
 | ||||
|             let orig_output_buf = output.to_vec(); | ||||
|             let orig_output_buf = output.clone(); | ||||
| 
 | ||||
|             let bytes_written = | ||||
|                 encode_to_slice(&input, &mut output, config.char_set.encode_table()); | ||||
|             let bytes_written = engine.internal_encode(&input, &mut output); | ||||
| 
 | ||||
|             // make sure the part beyond bytes_written is the same garbage it was before
 | ||||
|             assert_eq!(orig_output_buf[bytes_written..], output[bytes_written..]); | ||||
|  | @ -602,17 +408,17 @@ mod tests { | |||
|                 input.push(rng.gen()); | ||||
|             } | ||||
| 
 | ||||
|             let config = random_config(&mut rng); | ||||
|             let engine = random_engine(&mut rng); | ||||
| 
 | ||||
|             // fill up the output buffer with garbage
 | ||||
|             let encoded_size = encoded_size(input_len, config).unwrap(); | ||||
|             let encoded_size = encoded_len(input_len, engine.config().encode_padding()).unwrap(); | ||||
|             for _ in 0..encoded_size + 1000 { | ||||
|                 output.push(rng.gen()); | ||||
|             } | ||||
| 
 | ||||
|             let orig_output_buf = output.to_vec(); | ||||
|             let orig_output_buf = output.clone(); | ||||
| 
 | ||||
|             encode_with_padding(&input, config, encoded_size, &mut output[0..encoded_size]); | ||||
|             encode_with_padding(&input, &mut output[0..encoded_size], &engine, encoded_size); | ||||
| 
 | ||||
|             // make sure the part beyond b64 is the same garbage it was before
 | ||||
|             assert_eq!(orig_output_buf[encoded_size..], output[encoded_size..]); | ||||
|  | @ -637,7 +443,7 @@ mod tests { | |||
|                 output.push(rng.gen()); | ||||
|             } | ||||
| 
 | ||||
|             let orig_output_buf = output.to_vec(); | ||||
|             let orig_output_buf = output.clone(); | ||||
| 
 | ||||
|             let bytes_written = add_padding(input_len, &mut output); | ||||
| 
 | ||||
|  | @ -649,8 +455,13 @@ mod tests { | |||
|         } | ||||
|     } | ||||
| 
 | ||||
|     fn assert_encoded_length(input_len: usize, encoded_len: usize, config: Config) { | ||||
|         assert_eq!(encoded_len, encoded_size(input_len, config).unwrap()); | ||||
|     fn assert_encoded_length<E: Engine>( | ||||
|         input_len: usize, | ||||
|         enc_len: usize, | ||||
|         engine: &E, | ||||
|         padded: bool, | ||||
|     ) { | ||||
|         assert_eq!(enc_len, encoded_len(input_len, padded).unwrap()); | ||||
| 
 | ||||
|         let mut bytes: Vec<u8> = Vec::new(); | ||||
|         let mut rng = rand::rngs::SmallRng::from_entropy(); | ||||
|  | @ -659,17 +470,19 @@ mod tests { | |||
|             bytes.push(rng.gen()); | ||||
|         } | ||||
| 
 | ||||
|         let encoded = encode_config(&bytes, config); | ||||
|         assert_encode_sanity(&encoded, config, input_len); | ||||
|         let encoded = engine.encode(&bytes); | ||||
|         assert_encode_sanity(&encoded, padded, input_len); | ||||
| 
 | ||||
|         assert_eq!(encoded_len, encoded.len()); | ||||
|         assert_eq!(enc_len, encoded.len()); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn encode_imap() { | ||||
|         assert_eq!( | ||||
|             encode_config(b"\xFB\xFF", crate::IMAP_MUTF7), | ||||
|             encode_config(b"\xFB\xFF", crate::STANDARD_NO_PAD).replace("/", ",") | ||||
|             &GeneralPurpose::new(&alphabet::IMAP_MUTF7, NO_PAD).encode(b"\xFB\xFF"), | ||||
|             &GeneralPurpose::new(&alphabet::STANDARD, NO_PAD) | ||||
|                 .encode(b"\xFB\xFF") | ||||
|                 .replace('/', ",") | ||||
|         ); | ||||
|     } | ||||
| } | ||||
348  third_party/rust/base64/src/engine/general_purpose/decode.rs (vendored, new file)
|  | @ -0,0 +1,348 @@ | |||
| use crate::{ | ||||
|     engine::{general_purpose::INVALID_VALUE, DecodeEstimate, DecodePaddingMode}, | ||||
|     DecodeError, PAD_BYTE, | ||||
| }; | ||||
| 
 | ||||
| // decode logic operates on chunks of 8 input bytes without padding
 | ||||
| const INPUT_CHUNK_LEN: usize = 8; | ||||
| const DECODED_CHUNK_LEN: usize = 6; | ||||
| 
 | ||||
| // we read a u64 and write a u64, but a u64 of input only yields 6 bytes of output, so the last
 | ||||
| // 2 bytes of any output u64 should not be counted as written to (but must be available in a
 | ||||
| // slice).
 | ||||
| const DECODED_CHUNK_SUFFIX: usize = 2; | ||||
| 
 | ||||
| // how many u64's of input to handle at a time
 | ||||
| const CHUNKS_PER_FAST_LOOP_BLOCK: usize = 4; | ||||
| 
 | ||||
| const INPUT_BLOCK_LEN: usize = CHUNKS_PER_FAST_LOOP_BLOCK * INPUT_CHUNK_LEN; | ||||
| 
 | ||||
| // includes the trailing 2 bytes for the final u64 write
 | ||||
| const DECODED_BLOCK_LEN: usize = | ||||
|     CHUNKS_PER_FAST_LOOP_BLOCK * DECODED_CHUNK_LEN + DECODED_CHUNK_SUFFIX; | ||||
| 
 | ||||
| #[doc(hidden)] | ||||
| pub struct GeneralPurposeEstimate { | ||||
|     /// Total number of decode chunks, including a possibly partial last chunk
 | ||||
|     num_chunks: usize, | ||||
|     decoded_len_estimate: usize, | ||||
| } | ||||
| 
 | ||||
| impl GeneralPurposeEstimate { | ||||
|     pub(crate) fn new(encoded_len: usize) -> Self { | ||||
|         Self { | ||||
|             num_chunks: encoded_len | ||||
|                 .checked_add(INPUT_CHUNK_LEN - 1) | ||||
|                 .expect("Overflow when calculating number of chunks in input") | ||||
|                 / INPUT_CHUNK_LEN, | ||||
|             decoded_len_estimate: encoded_len | ||||
|                 .checked_add(3) | ||||
|                 .expect("Overflow when calculating decoded len estimate") | ||||
|                 / 4 | ||||
|                 * 3, | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl DecodeEstimate for GeneralPurposeEstimate { | ||||
|     fn decoded_len_estimate(&self) -> usize { | ||||
|         self.decoded_len_estimate | ||||
|     } | ||||
| } | ||||
| 
 | ||||
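| // Worked example (illustrative): for a 25-byte encoded input, `num_chunks` is
| // (25 + 7) / 8 = 4 (three full 8-byte chunks plus a partial one) and
| // `decoded_len_estimate` is (25 + 3) / 4 * 3 = 21, i.e. the length rounded up
| // to whole 4-char blocks times 3 output bytes per block.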
| /// Helper to avoid duplicating num_chunks calculation, which is costly on short inputs.
 | ||||
| /// Returns the number of bytes written, or an error.
 | ||||
| // We're on the fragile edge of compiler heuristics here. If this is not inlined, it's slow. If it's
 | ||||
| // inlined(always), it's slow in a different way. Plain ol' #[inline] makes the benchmarks happiest at the moment,
 | ||||
| // but this is fragile and the best setting changes with only minor code modifications.
 | ||||
| #[inline] | ||||
| pub(crate) fn decode_helper( | ||||
|     input: &[u8], | ||||
|     estimate: GeneralPurposeEstimate, | ||||
|     output: &mut [u8], | ||||
|     decode_table: &[u8; 256], | ||||
|     decode_allow_trailing_bits: bool, | ||||
|     padding_mode: DecodePaddingMode, | ||||
| ) -> Result<usize, DecodeError> { | ||||
|     let remainder_len = input.len() % INPUT_CHUNK_LEN; | ||||
| 
 | ||||
|     // Because the fast decode loop writes in groups of 8 bytes (unrolled to
 | ||||
|     // CHUNKS_PER_FAST_LOOP_BLOCK times 8 bytes, where possible) and outputs 8 bytes at a time (of
 | ||||
|     // which only 6 are valid data), we need to be sure that we stop using the fast decode loop
 | ||||
|     // soon enough that there will always be 2 more bytes of valid data written after that loop.
 | ||||
|     let trailing_bytes_to_skip = match remainder_len { | ||||
|         // if input is a multiple of the chunk size, ignore the last chunk as it may have padding,
 | ||||
|         // and the fast decode logic cannot handle padding
 | ||||
|         0 => INPUT_CHUNK_LEN, | ||||
|         // 1 and 5 trailing bytes are illegal: can't decode 6 bits of input into a byte
 | ||||
|         1 | 5 => { | ||||
|             // trailing whitespace is so common that it's worth it to check the last byte to
 | ||||
|             // possibly return a better error message
 | ||||
|             if let Some(b) = input.last() { | ||||
|                 if *b != PAD_BYTE && decode_table[*b as usize] == INVALID_VALUE { | ||||
|                     return Err(DecodeError::InvalidByte(input.len() - 1, *b)); | ||||
|                 } | ||||
|             } | ||||
| 
 | ||||
|             return Err(DecodeError::InvalidLength); | ||||
|         } | ||||
|         // This will decode to one output byte, which isn't enough to overwrite the 2 extra bytes
 | ||||
|         // written by the fast decode loop. So, we have to ignore both these 2 bytes and the
 | ||||
|         // previous chunk.
 | ||||
|         2 => INPUT_CHUNK_LEN + 2, | ||||
|         // If this is 3 un-padded chars, then it would actually decode to 2 bytes. However, if this
 | ||||
|         // is an erroneous 2 chars + 1 pad char that would decode to 1 byte, then it should fail
 | ||||
|         // with an error, not panic from going past the bounds of the output slice, so we let it
 | ||||
|         // use stage 3 + 4.
 | ||||
|         3 => INPUT_CHUNK_LEN + 3, | ||||
|         // This can also decode to one output byte because it may be 2 input chars + 2 padding
 | ||||
|         // chars, which would decode to 1 byte.
 | ||||
|         4 => INPUT_CHUNK_LEN + 4, | ||||
|         // Everything else is a legal decode len (given that we don't require padding), and will
 | ||||
|         // decode to at least 2 bytes of output.
 | ||||
|         _ => remainder_len, | ||||
|     }; | ||||
| 
 | ||||
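|     // Worked example (illustrative): a 10-byte input has remainder_len = 2, so
|     // trailing_bytes_to_skip = INPUT_CHUNK_LEN + 2 = 10 and the fast loops below
|     // see zero bytes; the whole input is left to the precise/remainder stages,
|     // which never write the 2 spill-over bytes.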
|     // rounded up to include partial chunks
 | ||||
|     let mut remaining_chunks = estimate.num_chunks; | ||||
| 
 | ||||
|     let mut input_index = 0; | ||||
|     let mut output_index = 0; | ||||
| 
 | ||||
|     { | ||||
|         let length_of_fast_decode_chunks = input.len().saturating_sub(trailing_bytes_to_skip); | ||||
| 
 | ||||
|         // Fast loop, stage 1
 | ||||
|         // manual unroll to CHUNKS_PER_FAST_LOOP_BLOCK of u64s to amortize slice bounds checks
 | ||||
|         if let Some(max_start_index) = length_of_fast_decode_chunks.checked_sub(INPUT_BLOCK_LEN) { | ||||
|             while input_index <= max_start_index { | ||||
|                 let input_slice = &input[input_index..(input_index + INPUT_BLOCK_LEN)]; | ||||
|                 let output_slice = &mut output[output_index..(output_index + DECODED_BLOCK_LEN)]; | ||||
| 
 | ||||
|                 decode_chunk( | ||||
|                     &input_slice[0..], | ||||
|                     input_index, | ||||
|                     decode_table, | ||||
|                     &mut output_slice[0..], | ||||
|                 )?; | ||||
|                 decode_chunk( | ||||
|                     &input_slice[8..], | ||||
|                     input_index + 8, | ||||
|                     decode_table, | ||||
|                     &mut output_slice[6..], | ||||
|                 )?; | ||||
|                 decode_chunk( | ||||
|                     &input_slice[16..], | ||||
|                     input_index + 16, | ||||
|                     decode_table, | ||||
|                     &mut output_slice[12..], | ||||
|                 )?; | ||||
|                 decode_chunk( | ||||
|                     &input_slice[24..], | ||||
|                     input_index + 24, | ||||
|                     decode_table, | ||||
|                     &mut output_slice[18..], | ||||
|                 )?; | ||||
| 
 | ||||
|                 input_index += INPUT_BLOCK_LEN; | ||||
|                 output_index += DECODED_BLOCK_LEN - DECODED_CHUNK_SUFFIX; | ||||
|                 remaining_chunks -= CHUNKS_PER_FAST_LOOP_BLOCK; | ||||
|             } | ||||
|         } | ||||
| 
 | ||||
|         // Fast loop, stage 2 (aka still pretty fast loop)
 | ||||
|         // 8 bytes at a time for whatever we didn't do in stage 1.
 | ||||
|         if let Some(max_start_index) = length_of_fast_decode_chunks.checked_sub(INPUT_CHUNK_LEN) { | ||||
|             while input_index < max_start_index { | ||||
|                 decode_chunk( | ||||
|                     &input[input_index..(input_index + INPUT_CHUNK_LEN)], | ||||
|                     input_index, | ||||
|                     decode_table, | ||||
|                     &mut output | ||||
|                         [output_index..(output_index + DECODED_CHUNK_LEN + DECODED_CHUNK_SUFFIX)], | ||||
|                 )?; | ||||
| 
 | ||||
|                 output_index += DECODED_CHUNK_LEN; | ||||
|                 input_index += INPUT_CHUNK_LEN; | ||||
|                 remaining_chunks -= 1; | ||||
|             } | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     // Stage 3
 | ||||
|     // If input length was such that a chunk had to be deferred until after the fast loop
 | ||||
|     // because decoding it would have produced 2 trailing bytes that wouldn't then be
 | ||||
|     // overwritten, we decode that chunk here. This way is slower but doesn't write the 2
 | ||||
|     // trailing bytes.
 | ||||
|     // However, we still need to avoid the last chunk (partial or complete) because it could
 | ||||
|     // have padding, so we always do 1 fewer to avoid the last chunk.
 | ||||
|     for _ in 1..remaining_chunks { | ||||
|         decode_chunk_precise( | ||||
|             &input[input_index..], | ||||
|             input_index, | ||||
|             decode_table, | ||||
|             &mut output[output_index..(output_index + DECODED_CHUNK_LEN)], | ||||
|         )?; | ||||
| 
 | ||||
|         input_index += INPUT_CHUNK_LEN; | ||||
|         output_index += DECODED_CHUNK_LEN; | ||||
|     } | ||||
| 
 | ||||
| // always have one more (possibly partial) block of up to 8 input bytes
 | ||||
|     debug_assert!(input.len() - input_index > 1 || input.is_empty()); | ||||
|     debug_assert!(input.len() - input_index <= 8); | ||||
| 
 | ||||
|     super::decode_suffix::decode_suffix( | ||||
|         input, | ||||
|         input_index, | ||||
|         output, | ||||
|         output_index, | ||||
|         decode_table, | ||||
|         decode_allow_trailing_bits, | ||||
|         padding_mode, | ||||
|     ) | ||||
| } | ||||
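// Editor's note: a minimal, hedged sketch (not part of the upstream file) showing how the
// remainder handling above surfaces through the crate's public API. It assumes only the
// `STANDARD` engine re-exported by this crate and the field-less `DecodeError::InvalidLength`
// variant used in this version.
#[cfg(test)]
mod remainder_len_examples {
    use crate::{engine::general_purpose::STANDARD, DecodeError, Engine as _};

    #[test]
    fn remainders_of_1_and_5_are_rejected_early() {
        // A single leftover symbol carries only 6 bits, which cannot form a whole output
        // byte, so `decode_helper` returns `InvalidLength` before reaching the fast loops.
        assert_eq!(Err(DecodeError::InvalidLength), STANDARD.decode("AAAAA"));
        // Same for 8 + 1 = 9 symbols: the final chunk again has 1 leftover symbol.
        assert_eq!(Err(DecodeError::InvalidLength), STANDARD.decode("AAAAAAAAA"));
    }
}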
| 
 | ||||
| /// Decode 8 bytes of input into 6 bytes of output. 8 bytes of output will be written, but only the
 | ||||
| /// first 6 of those contain meaningful data.
 | ||||
| ///
 | ||||
| /// `input` is the bytes to decode, of which the first 8 bytes will be processed.
 | ||||
| /// `index_at_start_of_input` is the offset in the overall input (used for reporting errors
 | ||||
| /// accurately)
 | ||||
| /// `decode_table` is the lookup table for the particular base64 alphabet.
 | ||||
| /// `output` will have its first 8 bytes overwritten, of which only the first 6 are valid decoded
 | ||||
| /// data.
 | ||||
| // yes, really inline (worth 30-50% speedup)
 | ||||
| #[inline(always)] | ||||
| fn decode_chunk( | ||||
|     input: &[u8], | ||||
|     index_at_start_of_input: usize, | ||||
|     decode_table: &[u8; 256], | ||||
|     output: &mut [u8], | ||||
| ) -> Result<(), DecodeError> { | ||||
|     let morsel = decode_table[input[0] as usize]; | ||||
|     if morsel == INVALID_VALUE { | ||||
|         return Err(DecodeError::InvalidByte(index_at_start_of_input, input[0])); | ||||
|     } | ||||
|     let mut accum = (morsel as u64) << 58; | ||||
| 
 | ||||
|     let morsel = decode_table[input[1] as usize]; | ||||
|     if morsel == INVALID_VALUE { | ||||
|         return Err(DecodeError::InvalidByte( | ||||
|             index_at_start_of_input + 1, | ||||
|             input[1], | ||||
|         )); | ||||
|     } | ||||
|     accum |= (morsel as u64) << 52; | ||||
| 
 | ||||
|     let morsel = decode_table[input[2] as usize]; | ||||
|     if morsel == INVALID_VALUE { | ||||
|         return Err(DecodeError::InvalidByte( | ||||
|             index_at_start_of_input + 2, | ||||
|             input[2], | ||||
|         )); | ||||
|     } | ||||
|     accum |= (morsel as u64) << 46; | ||||
| 
 | ||||
|     let morsel = decode_table[input[3] as usize]; | ||||
|     if morsel == INVALID_VALUE { | ||||
|         return Err(DecodeError::InvalidByte( | ||||
|             index_at_start_of_input + 3, | ||||
|             input[3], | ||||
|         )); | ||||
|     } | ||||
|     accum |= (morsel as u64) << 40; | ||||
| 
 | ||||
|     let morsel = decode_table[input[4] as usize]; | ||||
|     if morsel == INVALID_VALUE { | ||||
|         return Err(DecodeError::InvalidByte( | ||||
|             index_at_start_of_input + 4, | ||||
|             input[4], | ||||
|         )); | ||||
|     } | ||||
|     accum |= (morsel as u64) << 34; | ||||
| 
 | ||||
|     let morsel = decode_table[input[5] as usize]; | ||||
|     if morsel == INVALID_VALUE { | ||||
|         return Err(DecodeError::InvalidByte( | ||||
|             index_at_start_of_input + 5, | ||||
|             input[5], | ||||
|         )); | ||||
|     } | ||||
|     accum |= (morsel as u64) << 28; | ||||
| 
 | ||||
|     let morsel = decode_table[input[6] as usize]; | ||||
|     if morsel == INVALID_VALUE { | ||||
|         return Err(DecodeError::InvalidByte( | ||||
|             index_at_start_of_input + 6, | ||||
|             input[6], | ||||
|         )); | ||||
|     } | ||||
|     accum |= (morsel as u64) << 22; | ||||
| 
 | ||||
|     let morsel = decode_table[input[7] as usize]; | ||||
|     if morsel == INVALID_VALUE { | ||||
|         return Err(DecodeError::InvalidByte( | ||||
|             index_at_start_of_input + 7, | ||||
|             input[7], | ||||
|         )); | ||||
|     } | ||||
|     accum |= (morsel as u64) << 16; | ||||
| 
 | ||||
|     write_u64(output, accum); | ||||
| 
 | ||||
|     Ok(()) | ||||
| } | ||||
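// Editor's note (illustrative comment, not upstream): layout of `accum` as built in
// `decode_chunk` above, for one 8-symbol chunk. Each 6-bit morsel is packed from the top of
// the u64 down, so the 48 decoded bits occupy bits 63..16 and the low 16 bits stay zero:
//
//   bits 63..58  morsel 0        bits 39..34  morsel 4
//   bits 57..52  morsel 1        bits 33..28  morsel 5
//   bits 51..46  morsel 2        bits 27..22  morsel 6
//   bits 45..40  morsel 3        bits 21..16  morsel 7
//
// `write_u64` then emits the big-endian bytes, so the first 6 output bytes are the decoded
// data and the last 2 are the zeroed tail that callers must overwrite or ignore.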
| 
 | ||||
| /// Decode an 8-byte chunk, but only write the 6 bytes actually decoded instead of including 2
 | ||||
| /// trailing garbage bytes.
 | ||||
| #[inline] | ||||
| fn decode_chunk_precise( | ||||
|     input: &[u8], | ||||
|     index_at_start_of_input: usize, | ||||
|     decode_table: &[u8; 256], | ||||
|     output: &mut [u8], | ||||
| ) -> Result<(), DecodeError> { | ||||
|     let mut tmp_buf = [0_u8; 8]; | ||||
| 
 | ||||
|     decode_chunk( | ||||
|         input, | ||||
|         index_at_start_of_input, | ||||
|         decode_table, | ||||
|         &mut tmp_buf[..], | ||||
|     )?; | ||||
| 
 | ||||
|     output[0..6].copy_from_slice(&tmp_buf[0..6]); | ||||
| 
 | ||||
|     Ok(()) | ||||
| } | ||||
| 
 | ||||
| #[inline] | ||||
| fn write_u64(output: &mut [u8], value: u64) { | ||||
|     output[..8].copy_from_slice(&value.to_be_bytes()); | ||||
| } | ||||
| 
 | ||||
| #[cfg(test)] | ||||
| mod tests { | ||||
|     use super::*; | ||||
| 
 | ||||
|     use crate::engine::general_purpose::STANDARD; | ||||
| 
 | ||||
|     #[test] | ||||
|     fn decode_chunk_precise_writes_only_6_bytes() { | ||||
|         let input = b"Zm9vYmFy"; // "foobar"
 | ||||
|         let mut output = [0_u8, 1, 2, 3, 4, 5, 6, 7]; | ||||
| 
 | ||||
|         decode_chunk_precise(&input[..], 0, &STANDARD.decode_table, &mut output).unwrap(); | ||||
|         assert_eq!(&vec![b'f', b'o', b'o', b'b', b'a', b'r', 6, 7], &output); | ||||
|     } | ||||
| 
 | ||||
|     #[test] | ||||
|     fn decode_chunk_writes_8_bytes() { | ||||
|         let input = b"Zm9vYmFy"; // "foobar"
 | ||||
|         let mut output = [0_u8, 1, 2, 3, 4, 5, 6, 7]; | ||||
| 
 | ||||
|         decode_chunk(&input[..], 0, &STANDARD.decode_table, &mut output).unwrap(); | ||||
|         assert_eq!(&vec![b'f', b'o', b'o', b'b', b'a', b'r', 0, 0], &output); | ||||
|     } | ||||
| } | ||||
							
								
								
									
161  third_party/rust/base64/src/engine/general_purpose/decode_suffix.rs  (vendored, Normal file)
							|  | @ -0,0 +1,161 @@ | |||
| use crate::{ | ||||
|     engine::{general_purpose::INVALID_VALUE, DecodePaddingMode}, | ||||
|     DecodeError, PAD_BYTE, | ||||
| }; | ||||
| 
 | ||||
| /// Decode the last 1-8 bytes, checking for trailing set bits and padding per the provided
 | ||||
| /// parameters.
 | ||||
| ///
 | ||||
| /// Returns the total number of bytes decoded, including the ones indicated as already written by
 | ||||
| /// `output_index`.
 | ||||
| pub(crate) fn decode_suffix( | ||||
|     input: &[u8], | ||||
|     input_index: usize, | ||||
|     output: &mut [u8], | ||||
|     mut output_index: usize, | ||||
|     decode_table: &[u8; 256], | ||||
|     decode_allow_trailing_bits: bool, | ||||
|     padding_mode: DecodePaddingMode, | ||||
| ) -> Result<usize, DecodeError> { | ||||
|     // Decode any leftovers that aren't a complete input block of 8 bytes.
 | ||||
|     // Use a u64 as a stack-resident 8 byte buffer.
 | ||||
|     let mut leftover_bits: u64 = 0; | ||||
|     let mut morsels_in_leftover = 0; | ||||
|     let mut padding_bytes = 0; | ||||
|     let mut first_padding_index: usize = 0; | ||||
|     let mut last_symbol = 0_u8; | ||||
|     let start_of_leftovers = input_index; | ||||
| 
 | ||||
|     for (i, &b) in input[start_of_leftovers..].iter().enumerate() { | ||||
|         // '=' padding
 | ||||
|         if b == PAD_BYTE { | ||||
|             // There can be bad padding bytes in a few ways:
 | ||||
|             // 1 - Padding with non-padding characters after it
 | ||||
|             // 2 - Padding after zero or one characters in the current quad (should only
 | ||||
|             //     be after 2 or 3 chars)
 | ||||
|             // 3 - More than two characters of padding. If 3 or 4 padding chars
 | ||||
|             //     are in the same quad, that implies it will be caught by #2.
 | ||||
|             //     If it spreads from one quad to another, it will be an invalid byte
 | ||||
|             //     in the first quad.
 | ||||
|             // 4 - Non-canonical padding -- 1 byte when it should be 2, etc.
 | ||||
|             //     Per config, non-canonical but still functional non- or partially-padded base64
 | ||||
|             //     may be treated as an error condition.
 | ||||
| 
 | ||||
|             if i % 4 < 2 { | ||||
|                 // Check for case #2.
 | ||||
|                 let bad_padding_index = start_of_leftovers | ||||
|                     + if padding_bytes > 0 { | ||||
|                         // If we've already seen padding, report the first padding index.
 | ||||
|                         // This is to be consistent with the normal decode logic: it will report an
 | ||||
|                         // error on the first padding character (since it doesn't expect to see
 | ||||
|                         // anything but actual encoded data).
 | ||||
|                         // This could only happen if the padding started in the previous quad since
 | ||||
|                         // otherwise this case would have been hit at i % 4 == 0 if it was the same
 | ||||
|                         // quad.
 | ||||
|                         first_padding_index | ||||
|                     } else { | ||||
|                         // haven't seen padding before, just use where we are now
 | ||||
|                         i | ||||
|                     }; | ||||
|                 return Err(DecodeError::InvalidByte(bad_padding_index, b)); | ||||
|             } | ||||
| 
 | ||||
|             if padding_bytes == 0 { | ||||
|                 first_padding_index = i; | ||||
|             } | ||||
| 
 | ||||
|             padding_bytes += 1; | ||||
|             continue; | ||||
|         } | ||||
| 
 | ||||
|         // Check for case #1.
 | ||||
|         // To make '=' handling consistent with the main loop, don't allow
 | ||||
|         // non-suffix '=' in trailing chunk either. Report error as first
 | ||||
|         // erroneous padding.
 | ||||
|         if padding_bytes > 0 { | ||||
|             return Err(DecodeError::InvalidByte( | ||||
|                 start_of_leftovers + first_padding_index, | ||||
|                 PAD_BYTE, | ||||
|             )); | ||||
|         } | ||||
| 
 | ||||
|         last_symbol = b; | ||||
| 
 | ||||
|         // can use up to 8 * 6 = 48 bits of the u64, if last chunk has no padding.
 | ||||
|         // Pack the leftovers from left to right.
 | ||||
|         let shift = 64 - (morsels_in_leftover + 1) * 6; | ||||
|         let morsel = decode_table[b as usize]; | ||||
|         if morsel == INVALID_VALUE { | ||||
|             return Err(DecodeError::InvalidByte(start_of_leftovers + i, b)); | ||||
|         } | ||||
| 
 | ||||
|         leftover_bits |= (morsel as u64) << shift; | ||||
|         morsels_in_leftover += 1; | ||||
|     } | ||||
| 
 | ||||
|     match padding_mode { | ||||
|         DecodePaddingMode::Indifferent => { /* everything we care about was already checked */ } | ||||
|         DecodePaddingMode::RequireCanonical => { | ||||
|             if (padding_bytes + morsels_in_leftover) % 4 != 0 { | ||||
|                 return Err(DecodeError::InvalidPadding); | ||||
|             } | ||||
|         } | ||||
|         DecodePaddingMode::RequireNone => { | ||||
|             if padding_bytes > 0 { | ||||
|                 // check at the end to make sure we let the cases of padding that should be InvalidByte
 | ||||
|                 // get hit
 | ||||
|                 return Err(DecodeError::InvalidPadding); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     // When encoding 1 trailing byte (e.g. 0xFF), 2 base64 bytes ("/w") are needed.
 | ||||
|     // / is the symbol for 63 (0x3F, bottom 6 bits all set) and w is 48 (0x30, top 2 bits
 | ||||
|     // of bottom 6 bits set).
 | ||||
|     // When decoding two symbols back to one trailing byte, any final symbol higher than
 | ||||
|     // w would still decode to the original byte because we only care about the top two
 | ||||
|     // bits in the bottom 6, but would be a non-canonical encoding. So, we calculate a
 | ||||
|     // mask based on how many bits are used for just the canonical encoding, and optionally
 | ||||
|     // error if any other bits are set. In the example of one encoded byte -> 2 symbols,
 | ||||
|     // 2 symbols can technically encode 12 bits, but the last 4 are non canonical, and
 | ||||
|     // useless since there are no more symbols to provide the necessary 4 additional bits
 | ||||
|     // to finish the second original byte.
 | ||||
| 
 | ||||
|     let leftover_bits_ready_to_append = match morsels_in_leftover { | ||||
|         0 => 0, | ||||
|         2 => 8, | ||||
|         3 => 16, | ||||
|         4 => 24, | ||||
|         6 => 32, | ||||
|         7 => 40, | ||||
|         8 => 48, | ||||
|         // can also be detected as case #2 bad padding above
 | ||||
|         _ => unreachable!( | ||||
|             "Impossible: must only have 0 to 8 input bytes in last chunk, with no invalid lengths" | ||||
|         ), | ||||
|     }; | ||||
| 
 | ||||
|     // if there are bits set outside the bits we care about, last symbol encodes trailing bits that
 | ||||
|     // will not be included in the output
 | ||||
|     let mask = !0 >> leftover_bits_ready_to_append; | ||||
|     if !decode_allow_trailing_bits && (leftover_bits & mask) != 0 { | ||||
|         // last morsel is at `morsels_in_leftover` - 1
 | ||||
|         return Err(DecodeError::InvalidLastSymbol( | ||||
|             start_of_leftovers + morsels_in_leftover - 1, | ||||
|             last_symbol, | ||||
|         )); | ||||
|     } | ||||
| 
 | ||||
|     // TODO benchmark simply converting to big endian bytes
 | ||||
|     let mut leftover_bits_appended_to_buf = 0; | ||||
|     while leftover_bits_appended_to_buf < leftover_bits_ready_to_append { | ||||
|         // `as` simply truncates the higher bits, which is what we want here
 | ||||
|         let selected_bits = (leftover_bits >> (56 - leftover_bits_appended_to_buf)) as u8; | ||||
|         output[output_index] = selected_bits; | ||||
|         output_index += 1; | ||||
| 
 | ||||
|         leftover_bits_appended_to_buf += 8; | ||||
|     } | ||||
| 
 | ||||
|     Ok(output_index) | ||||
| } | ||||
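// Editor's note: a hedged, illustrative test (not part of the upstream file) for the
// trailing-bits check above. "aGl=" carries 3 symbols + 1 pad; the low 2 bits of 'l' are
// non-zero, so the strict STANDARD engine reports InvalidLastSymbol, while an engine built
// with `with_decode_allow_trailing_bits(true)` silently drops those bits.
#[cfg(test)]
mod trailing_bits_examples {
    use crate::{
        alphabet,
        engine::{general_purpose, GeneralPurpose, GeneralPurposeConfig},
        DecodeError, Engine as _,
    };

    #[test]
    fn non_canonical_final_symbol() {
        // Strict (default) config: the error points at the offending symbol.
        assert_eq!(
            Err(DecodeError::InvalidLastSymbol(2, b'l')),
            general_purpose::STANDARD.decode("aGl=")
        );

        // Lenient config: the stray bits are ignored and "hi" is recovered.
        let lenient = GeneralPurpose::new(
            &alphabet::STANDARD,
            GeneralPurposeConfig::new().with_decode_allow_trailing_bits(true),
        );
        assert_eq!(b"hi".to_vec(), lenient.decode("aGl=").unwrap());
    }
}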
							
								
								
									
349  third_party/rust/base64/src/engine/general_purpose/mod.rs  (vendored, Normal file)
							|  | @ -0,0 +1,349 @@ | |||
| //! Provides the [GeneralPurpose] engine and associated config types.
 | ||||
| use crate::{ | ||||
|     alphabet, | ||||
|     alphabet::Alphabet, | ||||
|     engine::{Config, DecodePaddingMode}, | ||||
|     DecodeError, | ||||
| }; | ||||
| use core::convert::TryInto; | ||||
| 
 | ||||
| mod decode; | ||||
| pub(crate) mod decode_suffix; | ||||
| pub use decode::GeneralPurposeEstimate; | ||||
| 
 | ||||
| pub(crate) const INVALID_VALUE: u8 = 255; | ||||
| 
 | ||||
| /// A general-purpose base64 engine.
 | ||||
| ///
 | ||||
| /// - It uses no vector CPU instructions, so it will work on any system.
 | ||||
| /// - It is reasonably fast (~2-3GiB/s).
 | ||||
| /// - It is not constant-time, though, so it is vulnerable to timing side-channel attacks. For loading cryptographic keys, etc., it is suggested to use the forthcoming constant-time implementation.
 | ||||
| pub struct GeneralPurpose { | ||||
|     encode_table: [u8; 64], | ||||
|     decode_table: [u8; 256], | ||||
|     config: GeneralPurposeConfig, | ||||
| } | ||||
| 
 | ||||
| impl GeneralPurpose { | ||||
|     /// Create a `GeneralPurpose` engine from an [Alphabet].
 | ||||
|     ///
 | ||||
|     /// While not very expensive to initialize, ideally these should be cached
 | ||||
|     /// if the engine will be used repeatedly.
 | ||||
|     pub const fn new(alphabet: &Alphabet, config: GeneralPurposeConfig) -> Self { | ||||
|         Self { | ||||
|             encode_table: encode_table(alphabet), | ||||
|             decode_table: decode_table(alphabet), | ||||
|             config, | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl super::Engine for GeneralPurpose { | ||||
|     type Config = GeneralPurposeConfig; | ||||
|     type DecodeEstimate = GeneralPurposeEstimate; | ||||
| 
 | ||||
|     fn internal_encode(&self, input: &[u8], output: &mut [u8]) -> usize { | ||||
|         let mut input_index: usize = 0; | ||||
| 
 | ||||
|         const BLOCKS_PER_FAST_LOOP: usize = 4; | ||||
|         const LOW_SIX_BITS: u64 = 0x3F; | ||||
| 
 | ||||
|         // we read 8 bytes at a time (u64) but only actually consume 6 of those bytes. Thus, we need
 | ||||
| // 2 trailing bytes to be available to read.
 | ||||
|         let last_fast_index = input.len().saturating_sub(BLOCKS_PER_FAST_LOOP * 6 + 2); | ||||
|         let mut output_index = 0; | ||||
| 
 | ||||
|         if last_fast_index > 0 { | ||||
|             while input_index <= last_fast_index { | ||||
|                 // Major performance wins from letting the optimizer do the bounds check once, mostly
 | ||||
|                 // on the output side
 | ||||
|                 let input_chunk = | ||||
|                     &input[input_index..(input_index + (BLOCKS_PER_FAST_LOOP * 6 + 2))]; | ||||
|                 let output_chunk = | ||||
|                     &mut output[output_index..(output_index + BLOCKS_PER_FAST_LOOP * 8)]; | ||||
| 
 | ||||
| // Hand-unrolling for 32 vs 16 or 8 bytes yields performance about equivalent
 | ||||
|                 // to unsafe pointer code on a Xeon E5-1650v3. 64 byte unrolling was slightly better for
 | ||||
|                 // large inputs but significantly worse for 50-byte input, unsurprisingly. I suspect
 | ||||
|                 // that it's a not uncommon use case to encode smallish chunks of data (e.g. a 64-byte
 | ||||
|                 // SHA-512 digest), so it would be nice if that fit in the unrolled loop at least once.
 | ||||
|                 // Plus, single-digit percentage performance differences might well be quite different
 | ||||
|                 // on different hardware.
 | ||||
| 
 | ||||
|                 let input_u64 = read_u64(&input_chunk[0..]); | ||||
| 
 | ||||
|                 output_chunk[0] = self.encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[1] = self.encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[2] = self.encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[3] = self.encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[4] = self.encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[5] = self.encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[6] = self.encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[7] = self.encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize]; | ||||
| 
 | ||||
|                 let input_u64 = read_u64(&input_chunk[6..]); | ||||
| 
 | ||||
|                 output_chunk[8] = self.encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[9] = self.encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[10] = self.encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[11] = self.encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[12] = self.encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[13] = self.encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[14] = self.encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[15] = self.encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize]; | ||||
| 
 | ||||
|                 let input_u64 = read_u64(&input_chunk[12..]); | ||||
| 
 | ||||
|                 output_chunk[16] = self.encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[17] = self.encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[18] = self.encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[19] = self.encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[20] = self.encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[21] = self.encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[22] = self.encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[23] = self.encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize]; | ||||
| 
 | ||||
|                 let input_u64 = read_u64(&input_chunk[18..]); | ||||
| 
 | ||||
|                 output_chunk[24] = self.encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[25] = self.encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[26] = self.encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[27] = self.encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[28] = self.encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[29] = self.encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[30] = self.encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize]; | ||||
|                 output_chunk[31] = self.encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize]; | ||||
| 
 | ||||
|                 output_index += BLOCKS_PER_FAST_LOOP * 8; | ||||
|                 input_index += BLOCKS_PER_FAST_LOOP * 6; | ||||
|             } | ||||
|         } | ||||
| 
 | ||||
|         // Encode what's left after the fast loop.
 | ||||
| 
 | ||||
|         const LOW_SIX_BITS_U8: u8 = 0x3F; | ||||
| 
 | ||||
|         let rem = input.len() % 3; | ||||
|         let start_of_rem = input.len() - rem; | ||||
| 
 | ||||
|         // start at the first index not handled by fast loop, which may be 0.
 | ||||
| 
 | ||||
|         while input_index < start_of_rem { | ||||
|             let input_chunk = &input[input_index..(input_index + 3)]; | ||||
|             let output_chunk = &mut output[output_index..(output_index + 4)]; | ||||
| 
 | ||||
|             output_chunk[0] = self.encode_table[(input_chunk[0] >> 2) as usize]; | ||||
|             output_chunk[1] = self.encode_table | ||||
|                 [((input_chunk[0] << 4 | input_chunk[1] >> 4) & LOW_SIX_BITS_U8) as usize]; | ||||
|             output_chunk[2] = self.encode_table | ||||
|                 [((input_chunk[1] << 2 | input_chunk[2] >> 6) & LOW_SIX_BITS_U8) as usize]; | ||||
|             output_chunk[3] = self.encode_table[(input_chunk[2] & LOW_SIX_BITS_U8) as usize]; | ||||
| 
 | ||||
|             input_index += 3; | ||||
|             output_index += 4; | ||||
|         } | ||||
| 
 | ||||
|         if rem == 2 { | ||||
|             output[output_index] = self.encode_table[(input[start_of_rem] >> 2) as usize]; | ||||
|             output[output_index + 1] = | ||||
|                 self.encode_table[((input[start_of_rem] << 4 | input[start_of_rem + 1] >> 4) | ||||
|                     & LOW_SIX_BITS_U8) as usize]; | ||||
|             output[output_index + 2] = | ||||
|                 self.encode_table[((input[start_of_rem + 1] << 2) & LOW_SIX_BITS_U8) as usize]; | ||||
|             output_index += 3; | ||||
|         } else if rem == 1 { | ||||
|             output[output_index] = self.encode_table[(input[start_of_rem] >> 2) as usize]; | ||||
|             output[output_index + 1] = | ||||
|                 self.encode_table[((input[start_of_rem] << 4) & LOW_SIX_BITS_U8) as usize]; | ||||
|             output_index += 2; | ||||
|         } | ||||
| 
 | ||||
|         output_index | ||||
|     } | ||||
| 
 | ||||
|     fn internal_decoded_len_estimate(&self, input_len: usize) -> Self::DecodeEstimate { | ||||
|         GeneralPurposeEstimate::new(input_len) | ||||
|     } | ||||
| 
 | ||||
|     fn internal_decode( | ||||
|         &self, | ||||
|         input: &[u8], | ||||
|         output: &mut [u8], | ||||
|         estimate: Self::DecodeEstimate, | ||||
|     ) -> Result<usize, DecodeError> { | ||||
|         decode::decode_helper( | ||||
|             input, | ||||
|             estimate, | ||||
|             output, | ||||
|             &self.decode_table, | ||||
|             self.config.decode_allow_trailing_bits, | ||||
|             self.config.decode_padding_mode, | ||||
|         ) | ||||
|     } | ||||
| 
 | ||||
|     fn config(&self) -> &Self::Config { | ||||
|         &self.config | ||||
|     } | ||||
| } | ||||
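// Editor's note: a hedged sketch (not upstream code) of the remainder handling in
// `internal_encode` above, as seen through the public API: 1 leftover input byte yields
// 2 symbols and 2 leftover bytes yield 3 symbols, with padding appended (or not) outside
// the engine according to its config. Uses RFC 4648 test vectors.
#[cfg(test)]
mod encode_remainder_examples {
    use crate::{engine::general_purpose, Engine as _};

    #[test]
    fn remainders_of_one_and_two_bytes() {
        assert_eq!("Zg==", general_purpose::STANDARD.encode("f")); // rem == 1 -> 2 symbols + "=="
        assert_eq!("Zm8=", general_purpose::STANDARD.encode("fo")); // rem == 2 -> 3 symbols + "="
        assert_eq!("Zm9v", general_purpose::STANDARD.encode("foo")); // rem == 0 -> full quad
    }
}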
| 
 | ||||
| /// Returns a table mapping a 6-bit index to the ASCII byte encoding of the index
 | ||||
| pub(crate) const fn encode_table(alphabet: &Alphabet) -> [u8; 64] { | ||||
|     // the encode table is just the alphabet:
 | ||||
|     // 6-bit index lookup -> printable byte
 | ||||
|     let mut encode_table = [0_u8; 64]; | ||||
|     { | ||||
|         let mut index = 0; | ||||
|         while index < 64 { | ||||
|             encode_table[index] = alphabet.symbols[index]; | ||||
|             index += 1; | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     encode_table | ||||
| } | ||||
| 
 | ||||
| /// Returns a table mapping base64 bytes as the lookup index to either:
 | ||||
| /// - [INVALID_VALUE] for bytes that aren't members of the alphabet
 | ||||
| /// - a byte whose lower 6 bits are the value that was encoded into the index byte
 | ||||
| pub(crate) const fn decode_table(alphabet: &Alphabet) -> [u8; 256] { | ||||
|     let mut decode_table = [INVALID_VALUE; 256]; | ||||
| 
 | ||||
|     // Since the table is full of `INVALID_VALUE` already, we only need to overwrite
 | ||||
|     // the parts that are valid.
 | ||||
|     let mut index = 0; | ||||
|     while index < 64 { | ||||
|         // The index in the alphabet is the 6-bit value we care about.
 | ||||
|         // Since the index is in 0-63, it is safe to cast to u8.
 | ||||
|         decode_table[alphabet.symbols[index] as usize] = index as u8; | ||||
|         index += 1; | ||||
|     } | ||||
| 
 | ||||
|     decode_table | ||||
| } | ||||
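// Editor's note: illustrative test (not upstream) of the two tables built above: the encode
// table maps 6-bit values to alphabet bytes, and the decode table is its inverse, with every
// non-alphabet byte (including '=') left at INVALID_VALUE.
#[cfg(test)]
mod table_examples {
    use super::*;

    #[test]
    fn tables_are_inverses_for_the_standard_alphabet() {
        let enc = encode_table(&alphabet::STANDARD);
        let dec = decode_table(&alphabet::STANDARD);

        assert_eq!(b'A', enc[0]);
        assert_eq!(b'/', enc[63]);
        assert_eq!(0, dec[b'A' as usize]);
        assert_eq!(63, dec[b'/' as usize]);
        assert_eq!(INVALID_VALUE, dec[b'=' as usize]);

        // Every encoded symbol decodes back to its 6-bit value.
        for (value, &symbol) in enc.iter().enumerate() {
            assert_eq!(value as u8, dec[symbol as usize]);
        }
    }
}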
| 
 | ||||
| #[inline] | ||||
| fn read_u64(s: &[u8]) -> u64 { | ||||
|     u64::from_be_bytes(s[..8].try_into().unwrap()) | ||||
| } | ||||
| 
 | ||||
| /// Contains configuration parameters for base64 encoding and decoding.
 | ||||
| ///
 | ||||
| /// ```
 | ||||
| /// # use base64::engine::GeneralPurposeConfig;
 | ||||
| /// let config = GeneralPurposeConfig::new()
 | ||||
| ///     .with_encode_padding(false);
 | ||||
| ///     // further customize using `.with_*` methods as needed
 | ||||
| /// ```
 | ||||
| ///
 | ||||
| /// The constants [PAD] and [NO_PAD] cover most use cases.
 | ||||
| ///
 | ||||
| /// To specify the characters used, see [Alphabet].
 | ||||
| #[derive(Clone, Copy, Debug)] | ||||
| pub struct GeneralPurposeConfig { | ||||
|     encode_padding: bool, | ||||
|     decode_allow_trailing_bits: bool, | ||||
|     decode_padding_mode: DecodePaddingMode, | ||||
| } | ||||
| 
 | ||||
| impl GeneralPurposeConfig { | ||||
|     /// Create a new config with `padding` = `true`, `decode_allow_trailing_bits` = `false`, and
 | ||||
|     /// `decode_padding_mode = DecodePaddingMode::RequireCanonical`.
 | ||||
|     ///
 | ||||
|     /// This probably matches most people's expectations, but consider disabling padding to save
 | ||||
|     /// a few bytes unless you specifically need it for compatibility with some legacy system.
 | ||||
|     pub const fn new() -> Self { | ||||
|         Self { | ||||
|             // RFC states that padding must be applied by default
 | ||||
|             encode_padding: true, | ||||
|             decode_allow_trailing_bits: false, | ||||
|             decode_padding_mode: DecodePaddingMode::RequireCanonical, | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     /// Create a new config based on `self` with an updated `padding` setting.
 | ||||
|     ///
 | ||||
|     /// If `padding` is `true`, encoding will append either 1 or 2 `=` padding characters as needed
 | ||||
|     /// to produce an output whose length is a multiple of 4.
 | ||||
|     ///
 | ||||
|     /// Padding is not needed for correct decoding and only serves to waste bytes, but it's in the
 | ||||
|     /// [spec](https://datatracker.ietf.org/doc/html/rfc4648#section-3.2).
 | ||||
|     ///
 | ||||
|     /// For new applications, consider not using padding if the decoders you're using don't require
 | ||||
|     /// padding to be present.
 | ||||
|     pub const fn with_encode_padding(self, padding: bool) -> Self { | ||||
|         Self { | ||||
|             encode_padding: padding, | ||||
|             ..self | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     /// Create a new config based on `self` with an updated `decode_allow_trailing_bits` setting.
 | ||||
|     ///
 | ||||
|     /// Most users will not need to configure this. It's useful if you need to decode base64
 | ||||
|     /// produced by a buggy encoder that has bits set in the unused space on the last base64
 | ||||
|     /// character as per [forgiving-base64 decode](https://infra.spec.whatwg.org/#forgiving-base64-decode).
 | ||||
|     /// If invalid trailing bits are present and this is `true`, those bits will
 | ||||
|     /// be silently ignored, else `DecodeError::InvalidLastSymbol` will be emitted.
 | ||||
|     pub const fn with_decode_allow_trailing_bits(self, allow: bool) -> Self { | ||||
|         Self { | ||||
|             decode_allow_trailing_bits: allow, | ||||
|             ..self | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     /// Create a new config based on `self` with an updated `decode_padding_mode` setting.
 | ||||
|     ///
 | ||||
|     /// Padding is not useful in terms of representing encoded data -- it makes no difference to
 | ||||
|     /// the decoder if padding is present or not, so if you have some un-padded input to decode, it
 | ||||
|     /// is perfectly fine to use `DecodePaddingMode::Indifferent` to prevent errors from being
 | ||||
|     /// emitted.
 | ||||
|     ///
 | ||||
|     /// However, since in practice
 | ||||
|     /// [people who learned nothing from BER vs DER seem to expect base64 to have one canonical encoding](https://eprint.iacr.org/2022/361),
 | ||||
|     /// the default setting is the stricter `DecodePaddingMode::RequireCanonical`.
 | ||||
|     ///
 | ||||
|     /// Or, if "canonical" in your circumstance means _no_ padding rather than padding to the
 | ||||
|     /// next multiple of four, there's `DecodePaddingMode::RequireNone`.
 | ||||
|     pub const fn with_decode_padding_mode(self, mode: DecodePaddingMode) -> Self { | ||||
|         Self { | ||||
|             decode_padding_mode: mode, | ||||
|             ..self | ||||
|         } | ||||
|     } | ||||
| } | ||||
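// Editor's note: hedged sketch (not upstream code) of chaining the `const fn` builders above
// into a config and pairing it with an engine, both as `const` items as the engine docs
// recommend. The "forgiving" name and the unpadded expectation are illustrative assumptions.
#[cfg(test)]
mod config_builder_example {
    use super::*;

    const FORGIVING: GeneralPurposeConfig = GeneralPurposeConfig::new()
        .with_encode_padding(false)
        .with_decode_allow_trailing_bits(true)
        .with_decode_padding_mode(DecodePaddingMode::Indifferent);

    const FORGIVING_ENGINE: GeneralPurpose = GeneralPurpose::new(&alphabet::STANDARD, FORGIVING);

    #[test]
    fn encodes_without_padding() {
        use crate::Engine as _;
        // RFC 4648 vector "foob" is "Zm9vYg==" when padded; this config omits the "==".
        assert_eq!("Zm9vYg", FORGIVING_ENGINE.encode("foob"));
    }
}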
| 
 | ||||
| impl Default for GeneralPurposeConfig { | ||||
|     /// Delegates to [GeneralPurposeConfig::new].
 | ||||
|     fn default() -> Self { | ||||
|         Self::new() | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl Config for GeneralPurposeConfig { | ||||
|     fn encode_padding(&self) -> bool { | ||||
|         self.encode_padding | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| /// A [GeneralPurpose] engine using the [alphabet::STANDARD] base64 alphabet and [PAD] config.
 | ||||
| pub const STANDARD: GeneralPurpose = GeneralPurpose::new(&alphabet::STANDARD, PAD); | ||||
| 
 | ||||
| /// A [GeneralPurpose] engine using the [alphabet::STANDARD] base64 alphabet and [NO_PAD] config.
 | ||||
| pub const STANDARD_NO_PAD: GeneralPurpose = GeneralPurpose::new(&alphabet::STANDARD, NO_PAD); | ||||
| 
 | ||||
| /// A [GeneralPurpose] engine using the [alphabet::URL_SAFE] base64 alphabet and [PAD] config.
 | ||||
| pub const URL_SAFE: GeneralPurpose = GeneralPurpose::new(&alphabet::URL_SAFE, PAD); | ||||
| 
 | ||||
| /// A [GeneralPurpose] engine using the [alphabet::URL_SAFE] base64 alphabet and [NO_PAD] config.
 | ||||
| pub const URL_SAFE_NO_PAD: GeneralPurpose = GeneralPurpose::new(&alphabet::URL_SAFE, NO_PAD); | ||||
| 
 | ||||
| /// Include padding bytes when encoding, and require that they be present when decoding.
 | ||||
| ///
 | ||||
| /// This is the standard per the base64 RFC, but consider using [NO_PAD] instead as padding serves
 | ||||
| /// little purpose in practice.
 | ||||
| pub const PAD: GeneralPurposeConfig = GeneralPurposeConfig::new(); | ||||
| 
 | ||||
| /// Don't add padding when encoding, and require no padding when decoding.
 | ||||
| pub const NO_PAD: GeneralPurposeConfig = GeneralPurposeConfig::new() | ||||
|     .with_encode_padding(false) | ||||
|     .with_decode_padding_mode(DecodePaddingMode::RequireNone); | ||||
							
								
								
									
410  third_party/rust/base64/src/engine/mod.rs  (vendored, Normal file)
							|  | @ -0,0 +1,410 @@ | |||
| //! Provides the [Engine] abstraction and out of the box implementations.
 | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| use crate::chunked_encoder; | ||||
| use crate::{ | ||||
|     encode::{encode_with_padding, EncodeSliceError}, | ||||
|     encoded_len, DecodeError, DecodeSliceError, | ||||
| }; | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| use alloc::vec::Vec; | ||||
| 
 | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| use alloc::{string::String, vec}; | ||||
| 
 | ||||
| pub mod general_purpose; | ||||
| 
 | ||||
| #[cfg(test)] | ||||
| mod naive; | ||||
| 
 | ||||
| #[cfg(test)] | ||||
| mod tests; | ||||
| 
 | ||||
| pub use general_purpose::{GeneralPurpose, GeneralPurposeConfig}; | ||||
| 
 | ||||
| /// An `Engine` provides low-level encoding and decoding operations that all other higher-level parts of the API use. Users of the library will generally not need to implement this.
 | ||||
| ///
 | ||||
| /// Different implementations offer different characteristics. The library currently ships with
 | ||||
| /// [GeneralPurpose] that offers good speed and works on any CPU, with more choices
 | ||||
| /// coming later, like a constant-time one when side channel resistance is called for, and vendor-specific vectorized ones for more speed.
 | ||||
| ///
 | ||||
| /// See [general_purpose::STANDARD_NO_PAD] if you just want standard base64. Otherwise, when possible, it's
 | ||||
| /// recommended to store the engine in a `const` so that references to it won't pose any lifetime
 | ||||
| /// issues, and to avoid repeating the cost of engine setup.
 | ||||
| ///
 | ||||
| /// Since almost nobody will need to implement `Engine`, docs for internal methods are hidden.
 | ||||
| // When adding an implementation of Engine, include them in the engine test suite:
 | ||||
| // - add an implementation of [engine::tests::EngineWrapper]
 | ||||
| // - add the implementation to the `all_engines` macro
 | ||||
| // All tests run on all engines listed in the macro.
 | ||||
| pub trait Engine: Send + Sync { | ||||
|     /// The config type used by this engine
 | ||||
|     type Config: Config; | ||||
|     /// The decode estimate used by this engine
 | ||||
|     type DecodeEstimate: DecodeEstimate; | ||||
| 
 | ||||
|     /// This is not meant to be called directly; it is only for `Engine` implementors.
 | ||||
|     /// See the other `encode*` functions on this trait.
 | ||||
|     ///
 | ||||
|     /// Encode the `input` bytes into the `output` buffer based on the mapping in `encode_table`.
 | ||||
|     ///
 | ||||
|     /// `output` will be long enough to hold the encoded data.
 | ||||
|     ///
 | ||||
|     /// Returns the number of bytes written.
 | ||||
|     ///
 | ||||
|     /// No padding should be written; that is handled separately.
 | ||||
|     ///
 | ||||
|     /// Must not write any bytes into the output slice other than the encoded data.
 | ||||
|     #[doc(hidden)] | ||||
|     fn internal_encode(&self, input: &[u8], output: &mut [u8]) -> usize; | ||||
| 
 | ||||
|     /// This is not meant to be called directly; it is only for `Engine` implementors.
 | ||||
|     ///
 | ||||
|     /// As an optimization to prevent the decoded length from being calculated twice, it is
 | ||||
|     /// sometimes helpful to have a conservative estimate of the decoded size before doing the
 | ||||
|     /// decoding, so this calculation is done separately and passed to [Engine::decode()] as needed.
 | ||||
|     ///
 | ||||
|     /// # Panics
 | ||||
|     ///
 | ||||
|     /// Panics if decoded length estimation overflows.
 | ||||
|     #[doc(hidden)] | ||||
|     fn internal_decoded_len_estimate(&self, input_len: usize) -> Self::DecodeEstimate; | ||||
| 
 | ||||
|     /// This is not meant to be called directly; it is only for `Engine` implementors.
 | ||||
|     /// See the other `decode*` functions on this trait.
 | ||||
|     ///
 | ||||
|     /// Decode `input` base64 bytes into the `output` buffer.
 | ||||
|     ///
 | ||||
|     /// `decode_estimate` is the result of [Engine::internal_decoded_len_estimate()], which is passed in to avoid
 | ||||
|     /// calculating it again (expensive on short inputs).
 | ||||
|     ///
 | ||||
|     /// Returns the number of bytes written to `output`.
 | ||||
|     ///
 | ||||
|     /// Each complete 4-byte chunk of encoded data decodes to 3 bytes of decoded data, but this
 | ||||
|     /// function must also handle the final possibly partial chunk.
 | ||||
|     /// If the input length is not a multiple of 4, or uses padding bytes to reach a multiple of 4,
 | ||||
|     /// the trailing 2 or 3 bytes must decode to 1 or 2 bytes, respectively, as per the
 | ||||
|     /// [RFC](https://tools.ietf.org/html/rfc4648#section-3.5).
 | ||||
|     ///
 | ||||
|     /// Decoding must not write any bytes into the output slice other than the decoded data.
 | ||||
|     ///
 | ||||
|     /// Non-canonical trailing bits in the final tokens or non-canonical padding must be reported as
 | ||||
|     /// errors unless the engine is configured otherwise.
 | ||||
|     ///
 | ||||
|     /// # Panics
 | ||||
|     ///
 | ||||
|     /// Panics if `output` is too small.
 | ||||
|     #[doc(hidden)] | ||||
|     fn internal_decode( | ||||
|         &self, | ||||
|         input: &[u8], | ||||
|         output: &mut [u8], | ||||
|         decode_estimate: Self::DecodeEstimate, | ||||
|     ) -> Result<usize, DecodeError>; | ||||
| 
 | ||||
|     /// Returns the config for this engine.
 | ||||
|     fn config(&self) -> &Self::Config; | ||||
| 
 | ||||
|     /// Encode arbitrary octets as base64 using the provided `Engine`.
 | ||||
|     /// Returns a `String`.
 | ||||
|     ///
 | ||||
|     /// # Example
 | ||||
|     ///
 | ||||
|     /// ```rust
 | ||||
|     /// use base64::{Engine as _, engine::{self, general_purpose}, alphabet};
 | ||||
|     ///
 | ||||
|     /// let b64 = general_purpose::STANDARD.encode(b"hello world~");
 | ||||
|     /// println!("{}", b64);
 | ||||
|     ///
 | ||||
|     /// const CUSTOM_ENGINE: engine::GeneralPurpose =
 | ||||
|     ///     engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::NO_PAD);
 | ||||
|     ///
 | ||||
|     /// let b64_url = CUSTOM_ENGINE.encode(b"hello internet~");
|     /// ```
 | ||||
|     #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
|     fn encode<T: AsRef<[u8]>>(&self, input: T) -> String { | ||||
|         let encoded_size = encoded_len(input.as_ref().len(), self.config().encode_padding()) | ||||
|             .expect("integer overflow when calculating buffer size"); | ||||
|         let mut buf = vec![0; encoded_size]; | ||||
| 
 | ||||
|         encode_with_padding(input.as_ref(), &mut buf[..], self, encoded_size); | ||||
| 
 | ||||
|         String::from_utf8(buf).expect("Invalid UTF8") | ||||
|     } | ||||
| 
 | ||||
|     /// Encode arbitrary octets as base64 into a supplied `String`.
 | ||||
|     /// Writes into the supplied `String`, which may allocate if its internal buffer isn't big enough.
 | ||||
|     ///
 | ||||
|     /// # Example
 | ||||
|     ///
 | ||||
|     /// ```rust
 | ||||
|     /// use base64::{Engine as _, engine::{self, general_purpose}, alphabet};
 | ||||
|     /// const CUSTOM_ENGINE: engine::GeneralPurpose =
 | ||||
|     ///     engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::NO_PAD);
 | ||||
|     ///
 | ||||
|     /// fn main() {
 | ||||
|     ///     let mut buf = String::new();
 | ||||
|     ///     general_purpose::STANDARD.encode_string(b"hello world~", &mut buf);
 | ||||
|     ///     println!("{}", buf);
 | ||||
|     ///
 | ||||
|     ///     buf.clear();
 | ||||
|     ///     CUSTOM_ENGINE.encode_string(b"hello internet~", &mut buf);
 | ||||
|     ///     println!("{}", buf);
 | ||||
|     /// }
 | ||||
|     /// ```
 | ||||
|     #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
|     fn encode_string<T: AsRef<[u8]>>(&self, input: T, output_buf: &mut String) { | ||||
|         let input_bytes = input.as_ref(); | ||||
| 
 | ||||
|         { | ||||
|             let mut sink = chunked_encoder::StringSink::new(output_buf); | ||||
| 
 | ||||
|             chunked_encoder::ChunkedEncoder::new(self) | ||||
|                 .encode(input_bytes, &mut sink) | ||||
|                 .expect("Writing to a String shouldn't fail"); | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     /// Encode arbitrary octets as base64 into a supplied slice.
 | ||||
|     /// Writes into the supplied output buffer.
 | ||||
|     ///
 | ||||
|     /// This is useful if you wish to avoid allocation entirely (e.g. encoding into a stack-resident
 | ||||
|     /// or statically-allocated buffer).
 | ||||
|     ///
 | ||||
|     /// # Example
 | ||||
|     ///
 | ||||
|     /// ```rust
 | ||||
|     /// use base64::{Engine as _, engine::general_purpose};
 | ||||
|     /// let s = b"hello internet!";
 | ||||
|     /// let mut buf = Vec::new();
 | ||||
|     /// // make sure we'll have a slice big enough for base64 + padding
 | ||||
|     /// buf.resize(s.len() * 4 / 3 + 4, 0);
 | ||||
|     ///
 | ||||
|     /// let bytes_written = general_purpose::STANDARD.encode_slice(s, &mut buf).unwrap();
 | ||||
|     ///
 | ||||
|     /// // shorten our vec down to just what was written
 | ||||
|     /// buf.truncate(bytes_written);
 | ||||
|     ///
 | ||||
|     /// assert_eq!(s, general_purpose::STANDARD.decode(&buf).unwrap().as_slice());
 | ||||
|     /// ```
 | ||||
|     fn encode_slice<T: AsRef<[u8]>>( | ||||
|         &self, | ||||
|         input: T, | ||||
|         output_buf: &mut [u8], | ||||
|     ) -> Result<usize, EncodeSliceError> { | ||||
|         let input_bytes = input.as_ref(); | ||||
| 
 | ||||
|         let encoded_size = encoded_len(input_bytes.len(), self.config().encode_padding()) | ||||
|             .expect("usize overflow when calculating buffer size"); | ||||
| 
 | ||||
|         if output_buf.len() < encoded_size { | ||||
|             return Err(EncodeSliceError::OutputSliceTooSmall); | ||||
|         } | ||||
| 
 | ||||
|         let b64_output = &mut output_buf[0..encoded_size]; | ||||
| 
 | ||||
|         encode_with_padding(input_bytes, b64_output, self, encoded_size); | ||||
| 
 | ||||
|         Ok(encoded_size) | ||||
|     } | ||||
| 
 | ||||
|     /// Decode from string reference as octets using the specified [Engine].
 | ||||
|     /// Returns a `Result` containing a `Vec<u8>`.
 | ||||
|     ///
 | ||||
|     /// # Example
 | ||||
|     ///
 | ||||
|     /// ```rust
 | ||||
|     /// use base64::{Engine as _, alphabet, engine::{self, general_purpose}};
 | ||||
|     ///
 | ||||
|     /// let bytes = general_purpose::STANDARD
 | ||||
|     ///     .decode("aGVsbG8gd29ybGR+Cg==").unwrap();
 | ||||
|     /// println!("{:?}", bytes);
 | ||||
|     ///
 | ||||
|     /// // custom engine setup
 | ||||
|     /// let bytes_url = engine::GeneralPurpose::new(
 | ||||
|     ///              &alphabet::URL_SAFE,
 | ||||
|     ///              general_purpose::NO_PAD)
 | ||||
|     ///     .decode("aGVsbG8gaW50ZXJuZXR-Cg").unwrap();
 | ||||
|     /// println!("{:?}", bytes_url);
 | ||||
|     /// ```
 | ||||
|     ///
 | ||||
|     /// # Panics
 | ||||
|     ///
 | ||||
|     /// Panics if decoded length estimation overflows.
 | ||||
|     /// This would happen for sizes within a few bytes of the maximum value of `usize`.
 | ||||
|     #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
|     fn decode<T: AsRef<[u8]>>(&self, input: T) -> Result<Vec<u8>, DecodeError> { | ||||
|         let input_bytes = input.as_ref(); | ||||
| 
 | ||||
|         let estimate = self.internal_decoded_len_estimate(input_bytes.len()); | ||||
|         let mut buffer = vec![0; estimate.decoded_len_estimate()]; | ||||
| 
 | ||||
|         let bytes_written = self.internal_decode(input_bytes, &mut buffer, estimate)?; | ||||
|         buffer.truncate(bytes_written); | ||||
| 
 | ||||
|         Ok(buffer) | ||||
|     } | ||||
| 
 | ||||
|     /// Decode from string reference as octets.
 | ||||
|     /// Writes into the supplied `Vec`, which may allocate if its internal buffer isn't big enough.
 | ||||
|     /// Returns a `Result` containing an empty tuple, aka `()`.
 | ||||
|     ///
 | ||||
|     /// # Example
 | ||||
|     ///
 | ||||
|     /// ```rust
 | ||||
|     /// use base64::{Engine as _, alphabet, engine::{self, general_purpose}};
 | ||||
|     /// const CUSTOM_ENGINE: engine::GeneralPurpose =
 | ||||
|     ///     engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::PAD);
 | ||||
|     ///
 | ||||
|     /// fn main() {
 | ||||
|     ///     use base64::Engine;
 | ||||
|     ///     let mut buffer = Vec::<u8>::new();
 | ||||
|     ///     // with the default engine
 | ||||
|     ///     general_purpose::STANDARD
 | ||||
|     ///         .decode_vec("aGVsbG8gd29ybGR+Cg==", &mut buffer,).unwrap();
 | ||||
|     ///     println!("{:?}", buffer);
 | ||||
|     ///
 | ||||
|     ///     buffer.clear();
 | ||||
|     ///
 | ||||
|     ///     // with a custom engine
 | ||||
|     ///     CUSTOM_ENGINE.decode_vec(
 | ||||
|     ///         "aGVsbG8gaW50ZXJuZXR-Cg==",
 | ||||
|     ///         &mut buffer,
 | ||||
|     ///     ).unwrap();
 | ||||
|     ///     println!("{:?}", buffer);
 | ||||
|     /// }
 | ||||
|     /// ```
 | ||||
|     ///
 | ||||
|     /// # Panics
 | ||||
|     ///
 | ||||
|     /// Panics if decoded length estimation overflows.
 | ||||
|     /// This would happen for sizes within a few bytes of the maximum value of `usize`.
 | ||||
|     #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
|     fn decode_vec<T: AsRef<[u8]>>( | ||||
|         &self, | ||||
|         input: T, | ||||
|         buffer: &mut Vec<u8>, | ||||
|     ) -> Result<(), DecodeError> { | ||||
|         let input_bytes = input.as_ref(); | ||||
| 
 | ||||
|         let starting_output_len = buffer.len(); | ||||
| 
 | ||||
|         let estimate = self.internal_decoded_len_estimate(input_bytes.len()); | ||||
|         let total_len_estimate = estimate | ||||
|             .decoded_len_estimate() | ||||
|             .checked_add(starting_output_len) | ||||
|             .expect("Overflow when calculating output buffer length"); | ||||
|         buffer.resize(total_len_estimate, 0); | ||||
| 
 | ||||
|         let buffer_slice = &mut buffer.as_mut_slice()[starting_output_len..]; | ||||
|         let bytes_written = self.internal_decode(input_bytes, buffer_slice, estimate)?; | ||||
| 
 | ||||
|         buffer.truncate(starting_output_len + bytes_written); | ||||
| 
 | ||||
|         Ok(()) | ||||
|     } | ||||
| 
 | ||||
|     /// Decode the input into the provided output slice.
 | ||||
|     ///
 | ||||
|     /// Returns an error if `output` is smaller than the estimated decoded length.
 | ||||
|     ///
 | ||||
|     /// This will not write any bytes past exactly what is decoded (no stray garbage bytes at the end).
 | ||||
|     ///
 | ||||
|     /// See [crate::decoded_len_estimate] for calculating buffer sizes.
 | ||||
|     ///
 | ||||
|     /// See [Engine::decode_slice_unchecked] for a version that panics instead of returning an error
 | ||||
|     /// if the output buffer is too small.
 | ||||
|     ///
 | ||||
|     /// # Panics
 | ||||
|     ///
 | ||||
|     /// Panics if decoded length estimation overflows.
 | ||||
|     /// This would happen for sizes within a few bytes of the maximum value of `usize`.
 | ||||
|     fn decode_slice<T: AsRef<[u8]>>( | ||||
|         &self, | ||||
|         input: T, | ||||
|         output: &mut [u8], | ||||
|     ) -> Result<usize, DecodeSliceError> { | ||||
|         let input_bytes = input.as_ref(); | ||||
| 
 | ||||
|         let estimate = self.internal_decoded_len_estimate(input_bytes.len()); | ||||
|         if output.len() < estimate.decoded_len_estimate() { | ||||
|             return Err(DecodeSliceError::OutputSliceTooSmall); | ||||
|         } | ||||
| 
 | ||||
|         self.internal_decode(input_bytes, output, estimate) | ||||
|             .map_err(|e| e.into()) | ||||
|     } | ||||
| 
 | ||||
|     /// Decode the input into the provided output slice.
 | ||||
|     ///
 | ||||
|     /// This will not write any bytes past exactly what is decoded (no stray garbage bytes at the end).
 | ||||
|     ///
 | ||||
|     /// See [crate::decoded_len_estimate] for calculating buffer sizes.
 | ||||
|     ///
 | ||||
|     /// See [Engine::decode_slice] for a version that returns an error instead of panicking if the output
 | ||||
|     /// buffer is too small.
 | ||||
|     ///
 | ||||
|     /// # Panics
 | ||||
|     ///
 | ||||
|     /// Panics if decoded length estimation overflows.
 | ||||
|     /// This would happen for sizes within a few bytes of the maximum value of `usize`.
 | ||||
|     ///
 | ||||
|     /// Panics if the provided output buffer is too small for the decoded data.
 | ||||
|     fn decode_slice_unchecked<T: AsRef<[u8]>>( | ||||
|         &self, | ||||
|         input: T, | ||||
|         output: &mut [u8], | ||||
|     ) -> Result<usize, DecodeError> { | ||||
|         let input_bytes = input.as_ref(); | ||||
| 
 | ||||
|         self.internal_decode( | ||||
|             input_bytes, | ||||
|             output, | ||||
|             self.internal_decoded_len_estimate(input_bytes.len()), | ||||
|         ) | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| /// The minimal level of configuration that engines must support.
 | ||||
| pub trait Config { | ||||
|     /// Returns `true` if padding should be added after the encoded output.
 | ||||
|     ///
 | ||||
|     /// Padding is added outside the engine's encode() since the engine may be used
 | ||||
|     /// to encode only a chunk of the overall output, so it can't always know when
 | ||||
|     /// the output is "done" and would therefore need padding (if configured).
 | ||||
|     // It could be provided as a separate parameter when encoding, but that feels like
 | ||||
|     // leaking an implementation detail to the user, and it's hopefully more convenient
 | ||||
|     // to have to only pass one thing (the engine) to any part of the API.
 | ||||
|     fn encode_padding(&self) -> bool; | ||||
| } | ||||
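As a sketch of what this trait exposes, `GeneralPurposeConfig` (the one engine config in this version) reports its padding choice through `encode_padding()`; the builder method below is the one used elsewhere in this patch:

```rust
use base64::engine::{Config as _, GeneralPurposeConfig};

let padded = GeneralPurposeConfig::new(); // padding is on by default
let unpadded = padded.with_encode_padding(false); // configs are Copy, so `padded` is unchanged
assert!(padded.encode_padding());
assert!(!unpadded.encode_padding());
```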
| 
 | ||||
| /// The decode estimate used by an engine implementation. Users do not need to interact with this;
 | ||||
| /// it is only for engine implementors.
 | ||||
| ///
 | ||||
| /// Implementors may store relevant data here when constructing this to avoid having to calculate
 | ||||
| /// them again during actual decoding.
 | ||||
| pub trait DecodeEstimate { | ||||
|     /// Returns a conservative (err on the side of too big) estimate of the decoded length to use
 | ||||
|     /// for pre-allocating buffers, etc.
 | ||||
|     ///
 | ||||
|     /// The estimate must be no larger than the next largest complete triple of decoded bytes.
 | ||||
|     /// That is, the final quad of tokens to decode may be assumed to be complete with no padding.
 | ||||
|     ///
 | ||||
|     /// # Panics
 | ||||
|     ///
 | ||||
|     /// Panics if decoded length estimation overflows.
 | ||||
|     /// This would happen for sizes within a few bytes of the maximum value of `usize`.
 | ||||
|     fn decoded_len_estimate(&self) -> usize; | ||||
| } | ||||
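For callers, the same rounding is visible through the crate-root `decoded_len_estimate` helper re-exported later in this diff; the exact values below assume the general-purpose engine's next-complete-triple behaviour described above:

```rust
// 3 or 4 encoded bytes both round up to one full triple of decoded bytes...
assert_eq!(3, base64::decoded_len_estimate(3));
assert_eq!(3, base64::decoded_len_estimate(4));
// ...and a 5th byte starts the next quad, which is assumed to be complete.
assert_eq!(6, base64::decoded_len_estimate(5));
```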
| 
 | ||||
| /// Controls how pad bytes are handled when decoding.
 | ||||
| ///
 | ||||
| /// Each [Engine] must support at least the behavior indicated by
 | ||||
| /// [DecodePaddingMode::RequireCanonical], and may support other modes.
 | ||||
| #[derive(Clone, Copy, Debug, PartialEq, Eq)] | ||||
| pub enum DecodePaddingMode { | ||||
|     /// Canonical padding is allowed, but any fewer padding bytes than that is also allowed.
 | ||||
|     Indifferent, | ||||
|     /// Padding must be canonical (0, 1, or 2 `=` as needed to produce a 4 byte suffix).
 | ||||
|     RequireCanonical, | ||||
|     /// Padding must be absent -- for when you want predictable padding, without any wasted bytes.
 | ||||
|     RequireNone, | ||||
| } | ||||
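A short sketch of how the modes differ in practice, using the `GeneralPurpose` engine and the builder methods that appear elsewhere in this patch:

```rust
use base64::{
    alphabet,
    engine::{self, DecodePaddingMode, Engine as _},
};

let relaxed = engine::GeneralPurpose::new(
    &alphabet::STANDARD,
    engine::GeneralPurposeConfig::new().with_decode_padding_mode(DecodePaddingMode::Indifferent),
);
// Under `Indifferent`, both the canonically padded and the unpadded form decode.
assert_eq!(b"data".to_vec(), relaxed.decode("ZGF0YQ==").unwrap());
assert_eq!(b"data".to_vec(), relaxed.decode("ZGF0YQ").unwrap());
```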
							
								
								
									
219  third_party/rust/base64/src/engine/naive.rs  (vendored, Normal file)

							|  | @ -0,0 +1,219 @@ | |||
| use crate::{ | ||||
|     alphabet::Alphabet, | ||||
|     engine::{ | ||||
|         general_purpose::{self, decode_table, encode_table}, | ||||
|         Config, DecodeEstimate, DecodePaddingMode, Engine, | ||||
|     }, | ||||
|     DecodeError, PAD_BYTE, | ||||
| }; | ||||
| use alloc::ops::BitOr; | ||||
| use std::ops::{BitAnd, Shl, Shr}; | ||||
| 
 | ||||
| /// Comparatively simple implementation that can be used as something to compare against in tests
 | ||||
| pub struct Naive { | ||||
|     encode_table: [u8; 64], | ||||
|     decode_table: [u8; 256], | ||||
|     config: NaiveConfig, | ||||
| } | ||||
| 
 | ||||
| impl Naive { | ||||
|     const ENCODE_INPUT_CHUNK_SIZE: usize = 3; | ||||
|     const DECODE_INPUT_CHUNK_SIZE: usize = 4; | ||||
| 
 | ||||
|     pub const fn new(alphabet: &Alphabet, config: NaiveConfig) -> Self { | ||||
|         Self { | ||||
|             encode_table: encode_table(alphabet), | ||||
|             decode_table: decode_table(alphabet), | ||||
|             config, | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     fn decode_byte_into_u32(&self, offset: usize, byte: u8) -> Result<u32, DecodeError> { | ||||
|         let decoded = self.decode_table[byte as usize]; | ||||
| 
 | ||||
|         if decoded == general_purpose::INVALID_VALUE { | ||||
|             return Err(DecodeError::InvalidByte(offset, byte)); | ||||
|         } | ||||
| 
 | ||||
|         Ok(decoded as u32) | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl Engine for Naive { | ||||
|     type Config = NaiveConfig; | ||||
|     type DecodeEstimate = NaiveEstimate; | ||||
| 
 | ||||
|     fn internal_encode(&self, input: &[u8], output: &mut [u8]) -> usize { | ||||
|         // complete chunks first
 | ||||
| 
 | ||||
|         const LOW_SIX_BITS: u32 = 0x3F; | ||||
| 
 | ||||
|         let rem = input.len() % Self::ENCODE_INPUT_CHUNK_SIZE; | ||||
|         // will never underflow
 | ||||
|         let complete_chunk_len = input.len() - rem; | ||||
| 
 | ||||
|         let mut input_index = 0_usize; | ||||
|         let mut output_index = 0_usize; | ||||
|         if let Some(last_complete_chunk_index) = | ||||
|             complete_chunk_len.checked_sub(Self::ENCODE_INPUT_CHUNK_SIZE) | ||||
|         { | ||||
|             while input_index <= last_complete_chunk_index { | ||||
|                 let chunk = &input[input_index..input_index + Self::ENCODE_INPUT_CHUNK_SIZE]; | ||||
| 
 | ||||
|                 // populate low 24 bits from 3 bytes
 | ||||
|                 let chunk_int: u32 = | ||||
|                     (chunk[0] as u32).shl(16) | (chunk[1] as u32).shl(8) | (chunk[2] as u32); | ||||
|                 // encode 4x 6-bit output bytes
 | ||||
|                 output[output_index] = self.encode_table[chunk_int.shr(18) as usize]; | ||||
|                 output[output_index + 1] = | ||||
|                     self.encode_table[chunk_int.shr(12_u8).bitand(LOW_SIX_BITS) as usize]; | ||||
|                 output[output_index + 2] = | ||||
|                     self.encode_table[chunk_int.shr(6_u8).bitand(LOW_SIX_BITS) as usize]; | ||||
|                 output[output_index + 3] = | ||||
|                     self.encode_table[chunk_int.bitand(LOW_SIX_BITS) as usize]; | ||||
| 
 | ||||
|                 input_index += Self::ENCODE_INPUT_CHUNK_SIZE; | ||||
|                 output_index += 4; | ||||
|             } | ||||
|         } | ||||
| 
 | ||||
|         // then leftovers
 | ||||
|         if rem == 2 { | ||||
|             let chunk = &input[input_index..input_index + 2]; | ||||
| 
 | ||||
|             // high six bits of chunk[0]
 | ||||
|             output[output_index] = self.encode_table[chunk[0].shr(2) as usize]; | ||||
|             // bottom 2 bits of [0], high 4 bits of [1]
 | ||||
|             output[output_index + 1] = | ||||
|                 self.encode_table[(chunk[0].shl(4_u8).bitor(chunk[1].shr(4_u8)) as u32) | ||||
|                     .bitand(LOW_SIX_BITS) as usize]; | ||||
|             // bottom 4 bits of [1], with the 2 bottom bits as zero
 | ||||
|             output[output_index + 2] = | ||||
|                 self.encode_table[(chunk[1].shl(2_u8) as u32).bitand(LOW_SIX_BITS) as usize]; | ||||
| 
 | ||||
|             output_index += 3; | ||||
|         } else if rem == 1 { | ||||
|             let byte = input[input_index]; | ||||
|             output[output_index] = self.encode_table[byte.shr(2) as usize]; | ||||
|             output[output_index + 1] = | ||||
|                 self.encode_table[(byte.shl(4_u8) as u32).bitand(LOW_SIX_BITS) as usize]; | ||||
|             output_index += 2; | ||||
|         } | ||||
| 
 | ||||
|         output_index | ||||
|     } | ||||
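A quick illustrative check of the chunk arithmetic above (editorial sketch, not part of the vendored file):

```rust
// The 3-byte chunk b"Man" packs into one 24-bit value whose four 6-bit groups
// index the encode table.
let chunk_int: u32 = ((b'M' as u32) << 16) | ((b'a' as u32) << 8) | (b'n' as u32);
assert_eq!(0x4D_61_6E, chunk_int);
let groups = [
    chunk_int >> 18,
    (chunk_int >> 12) & 0x3F,
    (chunk_int >> 6) & 0x3F,
    chunk_int & 0x3F,
];
// 19, 22, 5 and 46 map to "TWFu" in the standard alphabet.
assert_eq!([19, 22, 5, 46], groups);
```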
| 
 | ||||
|     fn internal_decoded_len_estimate(&self, input_len: usize) -> Self::DecodeEstimate { | ||||
|         NaiveEstimate::new(input_len) | ||||
|     } | ||||
| 
 | ||||
|     fn internal_decode( | ||||
|         &self, | ||||
|         input: &[u8], | ||||
|         output: &mut [u8], | ||||
|         estimate: Self::DecodeEstimate, | ||||
|     ) -> Result<usize, DecodeError> { | ||||
|         if estimate.rem == 1 { | ||||
|             // trailing whitespace is so common that it's worth it to check the last byte to
 | ||||
|             // possibly return a better error message
 | ||||
|             if let Some(b) = input.last() { | ||||
|                 if *b != PAD_BYTE | ||||
|                     && self.decode_table[*b as usize] == general_purpose::INVALID_VALUE | ||||
|                 { | ||||
|                     return Err(DecodeError::InvalidByte(input.len() - 1, *b)); | ||||
|                 } | ||||
|             } | ||||
| 
 | ||||
|             return Err(DecodeError::InvalidLength); | ||||
|         } | ||||
| 
 | ||||
|         let mut input_index = 0_usize; | ||||
|         let mut output_index = 0_usize; | ||||
|         const BOTTOM_BYTE: u32 = 0xFF; | ||||
| 
 | ||||
|         // can only use the main loop on non-trailing chunks
 | ||||
|         if input.len() > Self::DECODE_INPUT_CHUNK_SIZE { | ||||
|             // skip the last chunk, whether it's partial or full, since it might
 | ||||
|             // have padding, and start at the beginning of the chunk before that
 | ||||
|             let last_complete_chunk_start_index = estimate.complete_chunk_len | ||||
|                 - if estimate.rem == 0 { | ||||
|                     // Trailing chunk is also full chunk, so there must be at least 2 chunks, and
 | ||||
|                     // this won't underflow
 | ||||
|                     Self::DECODE_INPUT_CHUNK_SIZE * 2 | ||||
|                 } else { | ||||
|                     // Trailing chunk is partial, so it's already excluded in
 | ||||
|                     // complete_chunk_len
 | ||||
|                     Self::DECODE_INPUT_CHUNK_SIZE | ||||
|                 }; | ||||
| 
 | ||||
|             while input_index <= last_complete_chunk_start_index { | ||||
|                 let chunk = &input[input_index..input_index + Self::DECODE_INPUT_CHUNK_SIZE]; | ||||
|                 let decoded_int: u32 = self.decode_byte_into_u32(input_index, chunk[0])?.shl(18) | ||||
|                     | self | ||||
|                         .decode_byte_into_u32(input_index + 1, chunk[1])? | ||||
|                         .shl(12) | ||||
|                     | self.decode_byte_into_u32(input_index + 2, chunk[2])?.shl(6) | ||||
|                     | self.decode_byte_into_u32(input_index + 3, chunk[3])?; | ||||
| 
 | ||||
|                 output[output_index] = decoded_int.shr(16_u8).bitand(BOTTOM_BYTE) as u8; | ||||
|                 output[output_index + 1] = decoded_int.shr(8_u8).bitand(BOTTOM_BYTE) as u8; | ||||
|                 output[output_index + 2] = decoded_int.bitand(BOTTOM_BYTE) as u8; | ||||
| 
 | ||||
|                 input_index += Self::DECODE_INPUT_CHUNK_SIZE; | ||||
|                 output_index += 3; | ||||
|             } | ||||
|         } | ||||
| 
 | ||||
|         general_purpose::decode_suffix::decode_suffix( | ||||
|             input, | ||||
|             input_index, | ||||
|             output, | ||||
|             output_index, | ||||
|             &self.decode_table, | ||||
|             self.config.decode_allow_trailing_bits, | ||||
|             self.config.decode_padding_mode, | ||||
|         ) | ||||
|     } | ||||
| 
 | ||||
|     fn config(&self) -> &Self::Config { | ||||
|         &self.config | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| pub struct NaiveEstimate { | ||||
|     /// remainder from dividing input by `Naive::DECODE_INPUT_CHUNK_SIZE`
 | ||||
|     rem: usize, | ||||
|     /// Length of input that is in complete `Naive::DECODE_INPUT_CHUNK_SIZE`-length chunks
 | ||||
|     complete_chunk_len: usize, | ||||
| } | ||||
| 
 | ||||
| impl NaiveEstimate { | ||||
|     fn new(input_len: usize) -> Self { | ||||
|         let rem = input_len % Naive::DECODE_INPUT_CHUNK_SIZE; | ||||
|         let complete_chunk_len = input_len - rem; | ||||
| 
 | ||||
|         Self { | ||||
|             rem, | ||||
|             complete_chunk_len, | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl DecodeEstimate for NaiveEstimate { | ||||
|     fn decoded_len_estimate(&self) -> usize { | ||||
|         ((self.complete_chunk_len / 4) + ((self.rem > 0) as usize)) * 3 | ||||
|     } | ||||
| } | ||||
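A worked instance of the formula above (illustrative only): a 10-byte input has `rem == 2` and `complete_chunk_len == 8`, so the estimate is `((8 / 4) + 1) * 3 == 9`, while such an input can decode to at most 7 bytes, so the estimate errs on the large side as `DecodeEstimate` requires.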
| 
 | ||||
| #[derive(Clone, Copy, Debug)] | ||||
| pub struct NaiveConfig { | ||||
|     pub encode_padding: bool, | ||||
|     pub decode_allow_trailing_bits: bool, | ||||
|     pub decode_padding_mode: DecodePaddingMode, | ||||
| } | ||||
| 
 | ||||
| impl Config for NaiveConfig { | ||||
|     fn encode_padding(&self) -> bool { | ||||
|         self.encode_padding | ||||
|     } | ||||
| } | ||||
							
								
								
									
1430  third_party/rust/base64/src/engine/tests.rs  (vendored, Normal file; file diff suppressed because it is too large)

306  third_party/rust/base64/src/lib.rs  (vendored)
							|  | @ -1,61 +1,123 @@ | |||
| //! # Configs
 | ||||
| //! # Getting started
 | ||||
| //!
 | ||||
| //! There isn't just one type of Base64; that would be too simple. You need to choose a character
 | ||||
| //! set (standard, URL-safe, etc) and padding suffix (yes/no).
 | ||||
| //! The `Config` struct encapsulates this info. There are some common configs included: `STANDARD`,
 | ||||
| //! `URL_SAFE`, etc. You can also make your own `Config` if needed.
 | ||||
| //! 1. Perhaps one of the preconfigured engines in [engine::general_purpose] will suit, e.g.
 | ||||
| //! [engine::general_purpose::STANDARD_NO_PAD].
 | ||||
| //!     - These are re-exported in [prelude] with a `BASE64_` prefix for those who prefer to
 | ||||
| //!       `use base64::prelude::*` or equivalent, e.g. [prelude::BASE64_STANDARD_NO_PAD]
 | ||||
| //! 1. If not, choose which alphabet you want. Most usage will want [alphabet::STANDARD] or [alphabet::URL_SAFE].
 | ||||
| //! 1. Choose which [Engine] implementation you want. For the moment there is only one: [engine::GeneralPurpose].
 | ||||
| //! 1. Configure the engine appropriately using the engine's `Config` type.
 | ||||
| //!     - This is where you'll select whether to add padding (when encoding) or expect it (when
 | ||||
| //!     decoding). If given the choice, prefer no padding.
 | ||||
| //! 1. Build the engine using the selected alphabet and config.
 | ||||
| //!
 | ||||
| //! The functions that don't have `config` in the name (e.g. `encode()` and `decode()`) use the
 | ||||
| //! `STANDARD` config.
 | ||||
| //! For more detail, see below.
 | ||||
| //!
 | ||||
| //! The functions that write to a slice (the ones that end in `_slice`) are generally the fastest
 | ||||
| //! because they don't need to resize anything. If it fits in your workflow and you care about
 | ||||
| //! performance, keep using the same buffer (growing as need be) and use the `_slice` methods for
 | ||||
| //! the best performance.
 | ||||
| //! ## Alphabets
 | ||||
| //!
 | ||||
| //! An [alphabet::Alphabet] defines what ASCII symbols are used to encode to or decode from.
 | ||||
| //!
 | ||||
| //! Constants in [alphabet] like [alphabet::STANDARD] or [alphabet::URL_SAFE] provide commonly used
 | ||||
| //! alphabets, but you can also build your own custom [alphabet::Alphabet] if needed.
 | ||||
| //!
 | ||||
| //! ## Engines
 | ||||
| //!
 | ||||
| //! Once you have an `Alphabet`, you can pick which `Engine` you want. A few parts of the public
 | ||||
| //! API provide a default, but otherwise the user must provide an `Engine` to use.
 | ||||
| //!
 | ||||
| //! See [Engine] for more.
 | ||||
| //!
 | ||||
| //! ## Config
 | ||||
| //!
 | ||||
| //! In addition to an `Alphabet`, constructing an `Engine` also requires an [engine::Config]. Each
 | ||||
| //! `Engine` has a corresponding `Config` implementation since different `Engine`s may offer different
 | ||||
| //! levels of configurability.
 | ||||
| //!
 | ||||
| //! # Encoding
 | ||||
| //!
 | ||||
| //! Several different encoding functions are available to you depending on your desire for
 | ||||
| //! Several different encoding methods on [Engine] are available to you depending on your desire for
 | ||||
| //! convenience vs performance.
 | ||||
| //!
 | ||||
| //! | Function                | Output                       | Allocates                      |
 | ||||
| //! | ----------------------- | ---------------------------- | ------------------------------ |
 | ||||
| //! | `encode`                | Returns a new `String`       | Always                         |
 | ||||
| //! | `encode_config`         | Returns a new `String`       | Always                         |
 | ||||
| //! | `encode_config_buf`     | Appends to provided `String` | Only if `String` needs to grow |
 | ||||
| //! | `encode_config_slice`   | Writes to provided `&[u8]`   | Never                          |
 | ||||
| //! | Method                   | Output                       | Allocates                      |
 | ||||
| //! | ------------------------ | ---------------------------- | ------------------------------ |
 | ||||
| //! | [Engine::encode]         | Returns a new `String`       | Always                         |
 | ||||
| //! | [Engine::encode_string]  | Appends to provided `String` | Only if `String` needs to grow |
 | ||||
| //! | [Engine::encode_slice]   | Writes to provided `&[u8]`   | Never - fastest                |
 | ||||
| //!
 | ||||
| //! All of the encoding functions that take a `Config` will pad as per the config.
 | ||||
| //! All of the encoding methods will pad as per the engine's config.
 | ||||
| //!
 | ||||
| //! # Decoding
 | ||||
| //!
 | ||||
| //! Just as for encoding, there are different decoding functions available.
 | ||||
| //! Just as for encoding, there are different decoding methods available.
 | ||||
| //!
 | ||||
| //! | Function                | Output                        | Allocates                      |
 | ||||
| //! | ----------------------- | ----------------------------- | ------------------------------ |
 | ||||
| //! | `decode`                | Returns a new `Vec<u8>`       | Always                         |
 | ||||
| //! | `decode_config`         | Returns a new `Vec<u8>`       | Always                         |
 | ||||
| //! | `decode_config_buf`     | Appends to provided `Vec<u8>` | Only if `Vec` needs to grow    |
 | ||||
| //! | `decode_config_slice`   | Writes to provided `&[u8]`    | Never                          |
 | ||||
| //! | Method                   | Output                        | Allocates                      |
 | ||||
| //! | ------------------------ | ----------------------------- | ------------------------------ |
 | ||||
| //! | [Engine::decode]         | Returns a new `Vec<u8>`       | Always                         |
 | ||||
| //! | [Engine::decode_vec]     | Appends to provided `Vec<u8>` | Only if `Vec` needs to grow    |
 | ||||
| //! | [Engine::decode_slice]   | Writes to provided `&[u8]`    | Never - fastest                |
 | ||||
| //!
 | ||||
| //! Unlike encoding, where all possible input is valid, decoding can fail (see `DecodeError`).
 | ||||
| //! Unlike encoding, where all possible input is valid, decoding can fail (see [DecodeError]).
 | ||||
| //!
 | ||||
| //! Input can be invalid because it has invalid characters or invalid padding. (No padding at all is
 | ||||
| //! valid, but excess padding is not.) Whitespace in the input is invalid.
 | ||||
| //! Input can be invalid because it has invalid characters or invalid padding. The nature of how
 | ||||
| //! padding is checked depends on the engine's config.
 | ||||
| //! Whitespace in the input is invalid, just like any other non-base64 byte.
 | ||||
| //!
 | ||||
| //! # `Read` and `Write`
 | ||||
| //!
 | ||||
| //! To map a `Read` of b64 bytes to the decoded bytes, wrap a reader (file, network socket, etc)
 | ||||
| //! with `base64::read::DecoderReader`. To write raw bytes and have them b64 encoded on the fly,
 | ||||
| //! wrap a writer with `base64::write::EncoderWriter`. There is some performance overhead (15% or
 | ||||
| //! so) because of the necessary buffer shuffling -- still fast enough that almost nobody cares.
 | ||||
| //! Also, these implementations do not heap allocate.
 | ||||
| //! To decode a [std::io::Read] of b64 bytes, wrap a reader (file, network socket, etc) with
 | ||||
| //! [read::DecoderReader].
 | ||||
| //!
 | ||||
| //! To write raw bytes and have them b64 encoded on the fly, wrap a [std::io::Write] with
 | ||||
| //! [write::EncoderWriter].
 | ||||
| //!
 | ||||
| //! There is some performance overhead (15% or so) because of the necessary buffer shuffling --
 | ||||
| //! still fast enough that almost nobody cares. Also, these implementations do not heap allocate.
 | ||||
| //!
 | ||||
| //! # `Display`
 | ||||
| //!
 | ||||
| //! See [display] for how to transparently base64-encode data via a `Display` implementation.
 | ||||
| //!
 | ||||
| //! # Examples
 | ||||
| //!
 | ||||
| //! ## Using predefined engines
 | ||||
| //!
 | ||||
| //! ```
 | ||||
| //! use base64::{Engine as _, engine::general_purpose};
 | ||||
| //!
 | ||||
| //! let orig = b"data";
 | ||||
| //! let encoded: String = general_purpose::STANDARD_NO_PAD.encode(orig);
 | ||||
| //! assert_eq!("ZGF0YQ", encoded);
 | ||||
| //! assert_eq!(orig.as_slice(), &general_purpose::STANDARD_NO_PAD.decode(encoded).unwrap());
 | ||||
| //!
 | ||||
| //! // or, URL-safe
 | ||||
| //! let encoded_url = general_purpose::URL_SAFE_NO_PAD.encode(orig);
 | ||||
| //! ```
 | ||||
| //!
 | ||||
| //! ## Custom alphabet, config, and engine
 | ||||
| //!
 | ||||
| //! ```
 | ||||
| //! use base64::{engine, alphabet, Engine as _};
 | ||||
| //!
 | ||||
| //! // bizarro-world base64: +/ as the first symbols instead of the last
 | ||||
| //! let alphabet =
 | ||||
| //!     alphabet::Alphabet::new("+/ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")
 | ||||
| //!     .unwrap();
 | ||||
| //!
 | ||||
| //! // a very weird config that encodes with padding but requires no padding when decoding...?
 | ||||
| //! let crazy_config = engine::GeneralPurposeConfig::new()
 | ||||
| //!     .with_decode_allow_trailing_bits(true)
 | ||||
| //!     .with_encode_padding(true)
 | ||||
| //!     .with_decode_padding_mode(engine::DecodePaddingMode::RequireNone);
 | ||||
| //!
 | ||||
| //! let crazy_engine = engine::GeneralPurpose::new(&alphabet, crazy_config);
 | ||||
| //!
 | ||||
| //! let encoded = crazy_engine.encode(b"abc 123");
 | ||||
| //!
 | ||||
| //! ```
 | ||||
| //!
 | ||||
| //! # Panics
 | ||||
| //!
 | ||||
| //! If length calculations result in overflowing `usize`, a panic will result.
 | ||||
| //!
 | ||||
| //! The `_slice` flavors of encode or decode will panic if the provided output slice is too small.
 | ||||
| 
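The buffer-reusing methods from the tables above (`encode_string` and `decode_vec`) are what the allocation notes refer to; a minimal sketch, assuming the `general_purpose::STANDARD` engine:

```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

let mut b64 = String::new();
let mut raw = Vec::new();
// Reusing the same buffers across iterations only allocates when they must grow.
for msg in [&b"first message"[..], &b"second"[..]] {
    b64.clear();
    raw.clear();
    STANDARD.encode_string(msg, &mut b64);
    STANDARD.decode_vec(&b64, &mut raw).unwrap();
    assert_eq!(msg, &raw[..]);
}
```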
 | ||||
| #![cfg_attr(feature = "cargo-clippy", allow(clippy::cast_lossless))] | ||||
| #![deny(
 | ||||
|  | @ -69,6 +131,9 @@ | |||
|     warnings | ||||
| )] | ||||
| #![forbid(unsafe_code)] | ||||
| // Allow globally until https://github.com/rust-lang/rust-clippy/issues/8768 is resolved.
 | ||||
| // The desired state is to allow it only for the rstest_reuse import.
 | ||||
| #![allow(clippy::single_component_path_imports)] | ||||
| #![cfg_attr(not(any(feature = "std", test)), no_std)] | ||||
| 
 | ||||
| #[cfg(all(feature = "alloc", not(any(feature = "std", test))))] | ||||
|  | @ -76,170 +141,39 @@ extern crate alloc; | |||
| #[cfg(any(feature = "std", test))] | ||||
| extern crate std as alloc; | ||||
| 
 | ||||
| // has to be included at top level because of the way rstest_reuse defines its macros
 | ||||
| #[cfg(test)] | ||||
| use rstest_reuse; | ||||
| 
 | ||||
| mod chunked_encoder; | ||||
| pub mod display; | ||||
| #[cfg(any(feature = "std", test))] | ||||
| pub mod read; | ||||
| mod tables; | ||||
| #[cfg(any(feature = "std", test))] | ||||
| pub mod write; | ||||
| 
 | ||||
| pub mod engine; | ||||
| pub use engine::Engine; | ||||
| 
 | ||||
| pub mod alphabet; | ||||
| 
 | ||||
| mod encode; | ||||
| pub use crate::encode::encode_config_slice; | ||||
| #[allow(deprecated)] | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| pub use crate::encode::{encode, encode_config, encode_config_buf}; | ||||
| pub use crate::encode::{encode, encode_engine, encode_engine_string}; | ||||
| #[allow(deprecated)] | ||||
| pub use crate::encode::{encode_engine_slice, encoded_len, EncodeSliceError}; | ||||
| 
 | ||||
| mod decode; | ||||
| #[allow(deprecated)] | ||||
| #[cfg(any(feature = "alloc", feature = "std", test))] | ||||
| pub use crate::decode::{decode, decode_config, decode_config_buf}; | ||||
| pub use crate::decode::{decode_config_slice, DecodeError}; | ||||
| pub use crate::decode::{decode, decode_engine, decode_engine_vec}; | ||||
| #[allow(deprecated)] | ||||
| pub use crate::decode::{decode_engine_slice, decoded_len_estimate, DecodeError, DecodeSliceError}; | ||||
| 
 | ||||
| pub mod prelude; | ||||
| 
 | ||||
| #[cfg(test)] | ||||
| mod tests; | ||||
| 
 | ||||
| /// Available encoding character sets
 | ||||
| #[derive(Clone, Copy, Debug)] | ||||
| pub enum CharacterSet { | ||||
|     /// The standard character set (uses `+` and `/`).
 | ||||
|     ///
 | ||||
|     /// See [RFC 3548](https://tools.ietf.org/html/rfc3548#section-3).
 | ||||
|     Standard, | ||||
|     /// The URL safe character set (uses `-` and `_`).
 | ||||
|     ///
 | ||||
|     /// See [RFC 3548](https://tools.ietf.org/html/rfc3548#section-4).
 | ||||
|     UrlSafe, | ||||
|     /// The `crypt(3)` character set (uses `./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz`).
 | ||||
|     ///
 | ||||
|     /// Not standardized, but folk wisdom on the net asserts that this alphabet is what crypt uses.
 | ||||
|     Crypt, | ||||
|     /// The bcrypt character set (uses `./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789`).
 | ||||
|     Bcrypt, | ||||
|     /// The character set used in IMAP-modified UTF-7 (uses `+` and `,`).
 | ||||
|     ///
 | ||||
|     /// See [RFC 3501](https://tools.ietf.org/html/rfc3501#section-5.1.3)
 | ||||
|     ImapMutf7, | ||||
|     /// The character set used in BinHex 4.0 files.
 | ||||
|     ///
 | ||||
|     /// See [BinHex 4.0 Definition](http://files.stairways.com/other/binhex-40-specs-info.txt)
 | ||||
|     BinHex, | ||||
| } | ||||
| 
 | ||||
| impl CharacterSet { | ||||
|     fn encode_table(self) -> &'static [u8; 64] { | ||||
|         match self { | ||||
|             CharacterSet::Standard => tables::STANDARD_ENCODE, | ||||
|             CharacterSet::UrlSafe => tables::URL_SAFE_ENCODE, | ||||
|             CharacterSet::Crypt => tables::CRYPT_ENCODE, | ||||
|             CharacterSet::Bcrypt => tables::BCRYPT_ENCODE, | ||||
|             CharacterSet::ImapMutf7 => tables::IMAP_MUTF7_ENCODE, | ||||
|             CharacterSet::BinHex => tables::BINHEX_ENCODE, | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     fn decode_table(self) -> &'static [u8; 256] { | ||||
|         match self { | ||||
|             CharacterSet::Standard => tables::STANDARD_DECODE, | ||||
|             CharacterSet::UrlSafe => tables::URL_SAFE_DECODE, | ||||
|             CharacterSet::Crypt => tables::CRYPT_DECODE, | ||||
|             CharacterSet::Bcrypt => tables::BCRYPT_DECODE, | ||||
|             CharacterSet::ImapMutf7 => tables::IMAP_MUTF7_DECODE, | ||||
|             CharacterSet::BinHex => tables::BINHEX_DECODE, | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| /// Contains configuration parameters for base64 encoding
 | ||||
| #[derive(Clone, Copy, Debug)] | ||||
| pub struct Config { | ||||
|     /// Character set to use
 | ||||
|     char_set: CharacterSet, | ||||
|     /// True to pad output with `=` characters
 | ||||
|     pad: bool, | ||||
|     /// True to ignore excess nonzero bits in the last few symbols, otherwise an error is returned.
 | ||||
|     decode_allow_trailing_bits: bool, | ||||
| } | ||||
| 
 | ||||
| impl Config { | ||||
|     /// Create a new `Config`.
 | ||||
|     pub const fn new(char_set: CharacterSet, pad: bool) -> Config { | ||||
|         Config { | ||||
|             char_set, | ||||
|             pad, | ||||
|             decode_allow_trailing_bits: false, | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     /// Sets whether to pad output with `=` characters.
 | ||||
|     pub const fn pad(self, pad: bool) -> Config { | ||||
|         Config { pad, ..self } | ||||
|     } | ||||
| 
 | ||||
|     /// Sets whether to emit errors for nonzero trailing bits.
 | ||||
|     ///
 | ||||
|     /// This is useful when implementing
 | ||||
|     /// [forgiving-base64 decode](https://infra.spec.whatwg.org/#forgiving-base64-decode).
 | ||||
|     pub const fn decode_allow_trailing_bits(self, allow: bool) -> Config { | ||||
|         Config { | ||||
|             decode_allow_trailing_bits: allow, | ||||
|             ..self | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| /// Standard character set with padding.
 | ||||
| pub const STANDARD: Config = Config { | ||||
|     char_set: CharacterSet::Standard, | ||||
|     pad: true, | ||||
|     decode_allow_trailing_bits: false, | ||||
| }; | ||||
| 
 | ||||
| /// Standard character set without padding.
 | ||||
| pub const STANDARD_NO_PAD: Config = Config { | ||||
|     char_set: CharacterSet::Standard, | ||||
|     pad: false, | ||||
|     decode_allow_trailing_bits: false, | ||||
| }; | ||||
| 
 | ||||
| /// URL-safe character set with padding
 | ||||
| pub const URL_SAFE: Config = Config { | ||||
|     char_set: CharacterSet::UrlSafe, | ||||
|     pad: true, | ||||
|     decode_allow_trailing_bits: false, | ||||
| }; | ||||
| 
 | ||||
| /// URL-safe character set without padding
 | ||||
| pub const URL_SAFE_NO_PAD: Config = Config { | ||||
|     char_set: CharacterSet::UrlSafe, | ||||
|     pad: false, | ||||
|     decode_allow_trailing_bits: false, | ||||
| }; | ||||
| 
 | ||||
| /// As per `crypt(3)` requirements
 | ||||
| pub const CRYPT: Config = Config { | ||||
|     char_set: CharacterSet::Crypt, | ||||
|     pad: false, | ||||
|     decode_allow_trailing_bits: false, | ||||
| }; | ||||
| 
 | ||||
| /// Bcrypt character set
 | ||||
| pub const BCRYPT: Config = Config { | ||||
|     char_set: CharacterSet::Bcrypt, | ||||
|     pad: false, | ||||
|     decode_allow_trailing_bits: false, | ||||
| }; | ||||
| 
 | ||||
| /// IMAP modified UTF-7 requirements
 | ||||
| pub const IMAP_MUTF7: Config = Config { | ||||
|     char_set: CharacterSet::ImapMutf7, | ||||
|     pad: false, | ||||
|     decode_allow_trailing_bits: false, | ||||
| }; | ||||
| 
 | ||||
| /// BinHex character set
 | ||||
| pub const BINHEX: Config = Config { | ||||
|     char_set: CharacterSet::BinHex, | ||||
|     pad: false, | ||||
|     decode_allow_trailing_bits: false, | ||||
| }; | ||||
| 
 | ||||
| const PAD_BYTE: u8 = b'='; | ||||
|  |  | |||
							
								
								
									
19  third_party/rust/base64/src/prelude.rs  (vendored, Normal file)
							|  | @ -0,0 +1,19 @@ | |||
| //! Preconfigured engines for common use cases.
 | ||||
| //!
 | ||||
| //! These are re-exports of `const` engines in [crate::engine::general_purpose], renamed with a `BASE64_`
 | ||||
| //! prefix for those who prefer to `use` the entire path to a name.
 | ||||
| //!
 | ||||
| //! # Examples
 | ||||
| //!
 | ||||
| //! ```
 | ||||
| //! use base64::prelude::{Engine as _, BASE64_STANDARD_NO_PAD};
 | ||||
| //!
 | ||||
| //! assert_eq!("c29tZSBieXRlcw", &BASE64_STANDARD_NO_PAD.encode(b"some bytes"));
 | ||||
| //! ```
 | ||||
| 
 | ||||
| pub use crate::engine::Engine; | ||||
| 
 | ||||
| pub use crate::engine::general_purpose::STANDARD as BASE64_STANDARD; | ||||
| pub use crate::engine::general_purpose::STANDARD_NO_PAD as BASE64_STANDARD_NO_PAD; | ||||
| pub use crate::engine::general_purpose::URL_SAFE as BASE64_URL_SAFE; | ||||
| pub use crate::engine::general_purpose::URL_SAFE_NO_PAD as BASE64_URL_SAFE_NO_PAD; | ||||
							
								
								
									
79  third_party/rust/base64/src/read/decoder.rs  (vendored)
							|  | @ -1,5 +1,4 @@ | |||
| use crate::{decode_config_slice, Config, DecodeError}; | ||||
| use std::io::Read; | ||||
| use crate::{engine::Engine, DecodeError}; | ||||
| use std::{cmp, fmt, io}; | ||||
| 
 | ||||
| // This should be large, but it has to fit on the stack.
 | ||||
|  | @ -16,11 +15,13 @@ const DECODED_CHUNK_SIZE: usize = 3; | |||
| /// ```
 | ||||
| /// use std::io::Read;
 | ||||
| /// use std::io::Cursor;
 | ||||
| /// use base64::engine::general_purpose;
 | ||||
| ///
 | ||||
| /// // use a cursor as the simplest possible `Read` -- in real code this is probably a file, etc.
 | ||||
| /// let mut wrapped_reader = Cursor::new(b"YXNkZg==");
 | ||||
| /// let mut decoder = base64::read::DecoderReader::new(
 | ||||
| ///     &mut wrapped_reader, base64::STANDARD);
 | ||||
| ///     &mut wrapped_reader,
 | ||||
| ///     &general_purpose::STANDARD);
 | ||||
| ///
 | ||||
| /// // handle errors as you normally would
 | ||||
| /// let mut result = Vec::new();
 | ||||
|  | @ -29,10 +30,10 @@ const DECODED_CHUNK_SIZE: usize = 3; | |||
| /// assert_eq!(b"asdf", &result[..]);
 | ||||
| ///
 | ||||
| /// ```
 | ||||
| pub struct DecoderReader<'a, R: 'a + io::Read> { | ||||
|     config: Config, | ||||
| pub struct DecoderReader<'e, E: Engine, R: io::Read> { | ||||
|     engine: &'e E, | ||||
|     /// Where b64 data is read from
 | ||||
|     r: &'a mut R, | ||||
|     inner: R, | ||||
| 
 | ||||
|     // Holds b64 data read from the delegate reader.
 | ||||
|     b64_buffer: [u8; BUF_SIZE], | ||||
|  | @ -54,10 +55,9 @@ pub struct DecoderReader<'a, R: 'a + io::Read> { | |||
|     total_b64_decoded: usize, | ||||
| } | ||||
| 
 | ||||
| impl<'a, R: io::Read> fmt::Debug for DecoderReader<'a, R> { | ||||
| impl<'e, E: Engine, R: io::Read> fmt::Debug for DecoderReader<'e, E, R> { | ||||
|     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||||
|         f.debug_struct("DecoderReader") | ||||
|             .field("config", &self.config) | ||||
|             .field("b64_offset", &self.b64_offset) | ||||
|             .field("b64_len", &self.b64_len) | ||||
|             .field("decoded_buffer", &self.decoded_buffer) | ||||
|  | @ -68,12 +68,12 @@ impl<'a, R: io::Read> fmt::Debug for DecoderReader<'a, R> { | |||
|     } | ||||
| } | ||||
| 
 | ||||
| impl<'a, R: io::Read> DecoderReader<'a, R> { | ||||
| impl<'e, E: Engine, R: io::Read> DecoderReader<'e, E, R> { | ||||
|     /// Create a new decoder that will read from the provided reader `r`.
 | ||||
|     pub fn new(r: &'a mut R, config: Config) -> Self { | ||||
|     pub fn new(reader: R, engine: &'e E) -> Self { | ||||
|         DecoderReader { | ||||
|             config, | ||||
|             r, | ||||
|             engine, | ||||
|             inner: reader, | ||||
|             b64_buffer: [0; BUF_SIZE], | ||||
|             b64_offset: 0, | ||||
|             b64_len: 0, | ||||
|  | @ -89,7 +89,7 @@ impl<'a, R: io::Read> DecoderReader<'a, R> { | |||
|     /// Returns a Result with the number of (decoded) bytes copied.
 | ||||
|     fn flush_decoded_buf(&mut self, buf: &mut [u8]) -> io::Result<usize> { | ||||
|         debug_assert!(self.decoded_len > 0); | ||||
|         debug_assert!(buf.len() > 0); | ||||
|         debug_assert!(!buf.is_empty()); | ||||
| 
 | ||||
|         let copy_len = cmp::min(self.decoded_len, buf.len()); | ||||
|         debug_assert!(copy_len > 0); | ||||
|  | @ -114,13 +114,13 @@ impl<'a, R: io::Read> DecoderReader<'a, R> { | |||
|         debug_assert!(self.b64_offset + self.b64_len < BUF_SIZE); | ||||
| 
 | ||||
|         let read = self | ||||
|             .r | ||||
|             .inner | ||||
|             .read(&mut self.b64_buffer[self.b64_offset + self.b64_len..])?; | ||||
|         self.b64_len += read; | ||||
| 
 | ||||
|         debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE); | ||||
| 
 | ||||
|         return Ok(read); | ||||
|         Ok(read) | ||||
|     } | ||||
| 
 | ||||
|     /// Decode the requested number of bytes from the b64 buffer into the provided buffer. It's the
 | ||||
|  | @ -130,23 +130,26 @@ impl<'a, R: io::Read> DecoderReader<'a, R> { | |||
|     fn decode_to_buf(&mut self, num_bytes: usize, buf: &mut [u8]) -> io::Result<usize> { | ||||
|         debug_assert!(self.b64_len >= num_bytes); | ||||
|         debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE); | ||||
|         debug_assert!(buf.len() > 0); | ||||
|         debug_assert!(!buf.is_empty()); | ||||
| 
 | ||||
|         let decoded = decode_config_slice( | ||||
|             &self.b64_buffer[self.b64_offset..self.b64_offset + num_bytes], | ||||
|             self.config, | ||||
|             &mut buf[..], | ||||
|         ) | ||||
|         .map_err(|e| match e { | ||||
|             DecodeError::InvalidByte(offset, byte) => { | ||||
|                 DecodeError::InvalidByte(self.total_b64_decoded + offset, byte) | ||||
|             } | ||||
|             DecodeError::InvalidLength => DecodeError::InvalidLength, | ||||
|             DecodeError::InvalidLastSymbol(offset, byte) => { | ||||
|                 DecodeError::InvalidLastSymbol(self.total_b64_decoded + offset, byte) | ||||
|             } | ||||
|         }) | ||||
|         .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?; | ||||
|         let decoded = self | ||||
|             .engine | ||||
|             .internal_decode( | ||||
|                 &self.b64_buffer[self.b64_offset..self.b64_offset + num_bytes], | ||||
|                 buf, | ||||
|                 self.engine.internal_decoded_len_estimate(num_bytes), | ||||
|             ) | ||||
|             .map_err(|e| match e { | ||||
|                 DecodeError::InvalidByte(offset, byte) => { | ||||
|                     DecodeError::InvalidByte(self.total_b64_decoded + offset, byte) | ||||
|                 } | ||||
|                 DecodeError::InvalidLength => DecodeError::InvalidLength, | ||||
|                 DecodeError::InvalidLastSymbol(offset, byte) => { | ||||
|                     DecodeError::InvalidLastSymbol(self.total_b64_decoded + offset, byte) | ||||
|                 } | ||||
|                 DecodeError::InvalidPadding => DecodeError::InvalidPadding, | ||||
|             }) | ||||
|             .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?; | ||||
| 
 | ||||
|         self.total_b64_decoded += num_bytes; | ||||
|         self.b64_offset += num_bytes; | ||||
|  | @ -156,9 +159,19 @@ impl<'a, R: io::Read> DecoderReader<'a, R> { | |||
| 
 | ||||
|         Ok(decoded) | ||||
|     } | ||||
| 
 | ||||
|     /// Unwraps this `DecoderReader`, returning the base reader which it reads base64 encoded
 | ||||
|     /// input from.
 | ||||
|     ///
 | ||||
|     /// Because `DecoderReader` performs internal buffering, the state of the inner reader is
 | ||||
|     /// unspecified. This function is mainly provided because the inner reader type may provide
 | ||||
|     /// additional functionality beyond the `Read` implementation which may still be useful.
 | ||||
|     pub fn into_inner(self) -> R { | ||||
|         self.inner | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl<'a, R: Read> Read for DecoderReader<'a, R> { | ||||
| impl<'e, E: Engine, R: io::Read> io::Read for DecoderReader<'e, E, R> { | ||||
|     /// Decode input from the wrapped reader.
 | ||||
|     ///
 | ||||
|     /// Under non-error circumstances, this returns `Ok` with the value being the number of bytes
 | ||||
|  | @ -172,7 +185,7 @@ impl<'a, R: Read> Read for DecoderReader<'a, R> { | |||
|     /// Any errors emitted by the delegate reader are returned. Decoding errors due to invalid
 | ||||
|     /// base64 are also possible, and will have `io::ErrorKind::InvalidData`.
 | ||||
|     fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { | ||||
|         if buf.len() == 0 { | ||||
|         if buf.is_empty() { | ||||
|             return Ok(0); | ||||
|         } | ||||
| 
 | ||||
|  |  | |||
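To round out the reader docs above, a small sketch of the error path: invalid input surfaces through the `Read` impl as `io::ErrorKind::InvalidData`, using the `DecoderReader::new(reader, &engine)` signature introduced in this diff:

```rust
use std::io::{self, Read as _};

use base64::{engine::general_purpose, read::DecoderReader};

// '!' and '*' are not in the standard alphabet, so decoding must fail.
let mut src = io::Cursor::new(&b"bad!*data"[..]);
let mut decoder = DecoderReader::new(&mut src, &general_purpose::STANDARD);
let mut out = Vec::new();
let err = decoder.read_to_end(&mut out).unwrap_err();
assert_eq!(io::ErrorKind::InvalidData, err.kind());
```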
|  | @ -1,12 +1,17 @@ | |||
| use std::io::{self, Read}; | ||||
| use std::{ | ||||
|     cmp, | ||||
|     io::{self, Read as _}, | ||||
|     iter, | ||||
| }; | ||||
| 
 | ||||
| use rand::{Rng, RngCore}; | ||||
| use std::{cmp, iter}; | ||||
| use rand::{Rng as _, RngCore as _}; | ||||
| 
 | ||||
| use super::decoder::{DecoderReader, BUF_SIZE}; | ||||
| use crate::encode::encode_config_buf; | ||||
| use crate::tests::random_config; | ||||
| use crate::{decode_config_buf, DecodeError, STANDARD}; | ||||
| use crate::{ | ||||
|     engine::{general_purpose::STANDARD, Engine, GeneralPurpose}, | ||||
|     tests::{random_alphabet, random_config, random_engine}, | ||||
|     DecodeError, | ||||
| }; | ||||
| 
 | ||||
| #[test] | ||||
| fn simple() { | ||||
|  | @ -27,7 +32,7 @@ fn simple() { | |||
|         // Read n bytes at a time.
 | ||||
|         for n in 1..base64data.len() + 1 { | ||||
|             let mut wrapped_reader = io::Cursor::new(base64data); | ||||
|             let mut decoder = DecoderReader::new(&mut wrapped_reader, STANDARD); | ||||
|             let mut decoder = DecoderReader::new(&mut wrapped_reader, &STANDARD); | ||||
| 
 | ||||
|             // handle errors as you normally would
 | ||||
|             let mut text_got = Vec::new(); | ||||
|  | @ -59,7 +64,7 @@ fn trailing_junk() { | |||
|         // Read n bytes at a time.
 | ||||
|         for n in 1..base64data.len() + 1 { | ||||
|             let mut wrapped_reader = io::Cursor::new(base64data); | ||||
|             let mut decoder = DecoderReader::new(&mut wrapped_reader, STANDARD); | ||||
|             let mut decoder = DecoderReader::new(&mut wrapped_reader, &STANDARD); | ||||
| 
 | ||||
|             // handle errors as you normally would
 | ||||
|             let mut buffer = vec![0u8; n]; | ||||
|  | @ -92,14 +97,14 @@ fn handles_short_read_from_delegate() { | |||
|         b64.clear(); | ||||
|         decoded.clear(); | ||||
| 
 | ||||
|         let size = rng.gen_range(0, 10 * BUF_SIZE); | ||||
|         let size = rng.gen_range(0..(10 * BUF_SIZE)); | ||||
|         bytes.extend(iter::repeat(0).take(size)); | ||||
|         bytes.truncate(size); | ||||
|         rng.fill_bytes(&mut bytes[..size]); | ||||
|         assert_eq!(size, bytes.len()); | ||||
| 
 | ||||
|         let config = random_config(&mut rng); | ||||
|         encode_config_buf(&bytes[..], config, &mut b64); | ||||
|         let engine = random_engine(&mut rng); | ||||
|         engine.encode_string(&bytes[..], &mut b64); | ||||
| 
 | ||||
|         let mut wrapped_reader = io::Cursor::new(b64.as_bytes()); | ||||
|         let mut short_reader = RandomShortRead { | ||||
|  | @ -107,7 +112,7 @@ fn handles_short_read_from_delegate() { | |||
|             rng: &mut rng, | ||||
|         }; | ||||
| 
 | ||||
|         let mut decoder = DecoderReader::new(&mut short_reader, config); | ||||
|         let mut decoder = DecoderReader::new(&mut short_reader, &engine); | ||||
| 
 | ||||
|         let decoded_len = decoder.read_to_end(&mut decoded).unwrap(); | ||||
|         assert_eq!(size, decoded_len); | ||||
|  | @ -127,7 +132,7 @@ fn read_in_short_increments() { | |||
|         b64.clear(); | ||||
|         decoded.clear(); | ||||
| 
 | ||||
|         let size = rng.gen_range(0, 10 * BUF_SIZE); | ||||
|         let size = rng.gen_range(0..(10 * BUF_SIZE)); | ||||
|         bytes.extend(iter::repeat(0).take(size)); | ||||
|         // leave room to play around with larger buffers
 | ||||
|         decoded.extend(iter::repeat(0).take(size * 3)); | ||||
|  | @ -135,12 +140,12 @@ fn read_in_short_increments() { | |||
|         rng.fill_bytes(&mut bytes[..]); | ||||
|         assert_eq!(size, bytes.len()); | ||||
| 
 | ||||
|         let config = random_config(&mut rng); | ||||
|         let engine = random_engine(&mut rng); | ||||
| 
 | ||||
|         encode_config_buf(&bytes[..], config, &mut b64); | ||||
|         engine.encode_string(&bytes[..], &mut b64); | ||||
| 
 | ||||
|         let mut wrapped_reader = io::Cursor::new(&b64[..]); | ||||
|         let mut decoder = DecoderReader::new(&mut wrapped_reader, config); | ||||
|         let mut decoder = DecoderReader::new(&mut wrapped_reader, &engine); | ||||
| 
 | ||||
|         consume_with_short_reads_and_validate(&mut rng, &bytes[..], &mut decoded, &mut decoder); | ||||
|     } | ||||
|  | @ -158,7 +163,7 @@ fn read_in_short_increments_with_short_delegate_reads() { | |||
|         b64.clear(); | ||||
|         decoded.clear(); | ||||
| 
 | ||||
|         let size = rng.gen_range(0, 10 * BUF_SIZE); | ||||
|         let size = rng.gen_range(0..(10 * BUF_SIZE)); | ||||
|         bytes.extend(iter::repeat(0).take(size)); | ||||
|         // leave room to play around with larger buffers
 | ||||
|         decoded.extend(iter::repeat(0).take(size * 3)); | ||||
|  | @ -166,18 +171,23 @@ fn read_in_short_increments_with_short_delegate_reads() { | |||
|         rng.fill_bytes(&mut bytes[..]); | ||||
|         assert_eq!(size, bytes.len()); | ||||
| 
 | ||||
|         let config = random_config(&mut rng); | ||||
|         let engine = random_engine(&mut rng); | ||||
| 
 | ||||
|         encode_config_buf(&bytes[..], config, &mut b64); | ||||
|         engine.encode_string(&bytes[..], &mut b64); | ||||
| 
 | ||||
|         let mut base_reader = io::Cursor::new(&b64[..]); | ||||
|         let mut decoder = DecoderReader::new(&mut base_reader, config); | ||||
|         let mut decoder = DecoderReader::new(&mut base_reader, &engine); | ||||
|         let mut short_reader = RandomShortRead { | ||||
|             delegate: &mut decoder, | ||||
|             rng: &mut rand::thread_rng(), | ||||
|         }; | ||||
| 
 | ||||
|         consume_with_short_reads_and_validate(&mut rng, &bytes[..], &mut decoded, &mut short_reader) | ||||
|         consume_with_short_reads_and_validate( | ||||
|             &mut rng, | ||||
|             &bytes[..], | ||||
|             &mut decoded, | ||||
|             &mut short_reader, | ||||
|         ); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
|  | @ -195,32 +205,32 @@ fn reports_invalid_last_symbol_correctly() { | |||
|         b64.clear(); | ||||
|         b64_bytes.clear(); | ||||
| 
 | ||||
|         let size = rng.gen_range(1, 10 * BUF_SIZE); | ||||
|         let size = rng.gen_range(1..(10 * BUF_SIZE)); | ||||
|         bytes.extend(iter::repeat(0).take(size)); | ||||
|         decoded.extend(iter::repeat(0).take(size)); | ||||
|         rng.fill_bytes(&mut bytes[..]); | ||||
|         assert_eq!(size, bytes.len()); | ||||
| 
 | ||||
|         let mut config = random_config(&mut rng); | ||||
|         let config = random_config(&mut rng); | ||||
|         let alphabet = random_alphabet(&mut rng); | ||||
|         // changing padding will cause invalid padding errors when we twiddle the last byte
 | ||||
|         config.pad = false; | ||||
| 
 | ||||
|         encode_config_buf(&bytes[..], config, &mut b64); | ||||
|         let engine = GeneralPurpose::new(alphabet, config.with_encode_padding(false)); | ||||
|         engine.encode_string(&bytes[..], &mut b64); | ||||
|         b64_bytes.extend(b64.bytes()); | ||||
|         assert_eq!(b64_bytes.len(), b64.len()); | ||||
| 
 | ||||
|         // change the last character to every possible symbol. Should behave the same as bulk
 | ||||
|         // decoding whether invalid or valid.
 | ||||
|         for &s1 in config.char_set.encode_table().iter() { | ||||
|         for &s1 in alphabet.symbols.iter() { | ||||
|             decoded.clear(); | ||||
|             bulk_decoded.clear(); | ||||
| 
 | ||||
|             // replace the last
 | ||||
|             *b64_bytes.last_mut().unwrap() = s1; | ||||
|             let bulk_res = decode_config_buf(&b64_bytes[..], config, &mut bulk_decoded); | ||||
|             let bulk_res = engine.decode_vec(&b64_bytes[..], &mut bulk_decoded); | ||||
| 
 | ||||
|             let mut wrapped_reader = io::Cursor::new(&b64_bytes[..]); | ||||
|             let mut decoder = DecoderReader::new(&mut wrapped_reader, config); | ||||
|             let mut decoder = DecoderReader::new(&mut wrapped_reader, &engine); | ||||
| 
 | ||||
|             let stream_res = decoder.read_to_end(&mut decoded).map(|_| ()).map_err(|e| { | ||||
|                 e.into_inner() | ||||
|  | @ -244,20 +254,21 @@ fn reports_invalid_byte_correctly() { | |||
|         b64.clear(); | ||||
|         decoded.clear(); | ||||
| 
 | ||||
|         let size = rng.gen_range(1, 10 * BUF_SIZE); | ||||
|         let size = rng.gen_range(1..(10 * BUF_SIZE)); | ||||
|         bytes.extend(iter::repeat(0).take(size)); | ||||
|         rng.fill_bytes(&mut bytes[..size]); | ||||
|         assert_eq!(size, bytes.len()); | ||||
| 
 | ||||
|         let config = random_config(&mut rng); | ||||
|         encode_config_buf(&bytes[..], config, &mut b64); | ||||
|         let engine = random_engine(&mut rng); | ||||
| 
 | ||||
|         engine.encode_string(&bytes[..], &mut b64); | ||||
|         // replace one byte, somewhere, with '*', which is invalid
 | ||||
|         let bad_byte_pos = rng.gen_range(0, &b64.len()); | ||||
|         let bad_byte_pos = rng.gen_range(0..b64.len()); | ||||
|         let mut b64_bytes = b64.bytes().collect::<Vec<u8>>(); | ||||
|         b64_bytes[bad_byte_pos] = b'*'; | ||||
| 
 | ||||
|         let mut wrapped_reader = io::Cursor::new(b64_bytes.clone()); | ||||
|         let mut decoder = DecoderReader::new(&mut wrapped_reader, config); | ||||
|         let mut decoder = DecoderReader::new(&mut wrapped_reader, &engine); | ||||
| 
 | ||||
|         // some gymnastics to avoid double-moving the io::Error, which is not Copy
 | ||||
|         let read_decode_err = decoder | ||||
|  | @ -273,7 +284,7 @@ fn reports_invalid_byte_correctly() { | |||
|             .and_then(|o| o); | ||||
| 
 | ||||
|         let mut bulk_buf = Vec::new(); | ||||
|         let bulk_decode_err = decode_config_buf(&b64_bytes[..], config, &mut bulk_buf).err(); | ||||
|         let bulk_decode_err = engine.decode_vec(&b64_bytes[..], &mut bulk_buf).err(); | ||||
| 
 | ||||
|         // it's tricky to predict where the invalid data's offset will be since if it's in the last
 | ||||
|         // chunk it will be reported at the first padding location because it's treated as invalid
 | ||||
|  | @ -285,12 +296,12 @@ fn reports_invalid_byte_correctly() { | |||
|     } | ||||
| } | ||||
| 
 | ||||
| fn consume_with_short_reads_and_validate<R: Read>( | ||||
| fn consume_with_short_reads_and_validate<R: io::Read>( | ||||
|     rng: &mut rand::rngs::ThreadRng, | ||||
|     expected_bytes: &[u8], | ||||
|     decoded: &mut Vec<u8>, | ||||
|     decoded: &mut [u8], | ||||
|     short_reader: &mut R, | ||||
| ) -> () { | ||||
| ) { | ||||
|     let mut total_read = 0_usize; | ||||
|     loop { | ||||
|         assert!( | ||||
|  | @ -302,13 +313,13 @@ fn consume_with_short_reads_and_validate<R: Read>( | |||
|         if total_read == expected_bytes.len() { | ||||
|             assert_eq!(expected_bytes, &decoded[..total_read]); | ||||
|             // should be done
 | ||||
|             assert_eq!(0, short_reader.read(&mut decoded[..]).unwrap()); | ||||
|             assert_eq!(0, short_reader.read(&mut *decoded).unwrap()); | ||||
|             // didn't write anything
 | ||||
|             assert_eq!(expected_bytes, &decoded[..total_read]); | ||||
| 
 | ||||
|             break; | ||||
|         } | ||||
|         let decode_len = rng.gen_range(1, cmp::max(2, expected_bytes.len() * 2)); | ||||
|         let decode_len = rng.gen_range(1..cmp::max(2, expected_bytes.len() * 2)); | ||||
| 
 | ||||
|         let read = short_reader | ||||
|             .read(&mut decoded[total_read..total_read + decode_len]) | ||||
|  | @ -328,7 +339,7 @@ struct RandomShortRead<'a, 'b, R: io::Read, N: rand::Rng> { | |||
| impl<'a, 'b, R: io::Read, N: rand::Rng> io::Read for RandomShortRead<'a, 'b, R, N> { | ||||
|     fn read(&mut self, buf: &mut [u8]) -> Result<usize, io::Error> { | ||||
|         // avoid 0 since it means EOF for non-empty buffers
 | ||||
|         let effective_len = cmp::min(self.rng.gen_range(1, 20), buf.len()); | ||||
|         let effective_len = cmp::min(self.rng.gen_range(1..20), buf.len()); | ||||
| 
 | ||||
|         self.delegate.read(&mut buf[..effective_len]) | ||||
|     } | ||||
|  |  | |||
							
								
								
									
1957  third_party/rust/base64/src/tables.rs  (vendored; file diff suppressed because it is too large)

80  third_party/rust/base64/src/tests.rs  (vendored)
							|  | @ -1,11 +1,19 @@ | |||
| use crate::{decode_config, encode::encoded_size, encode_config_buf, CharacterSet, Config}; | ||||
| 
 | ||||
| use std::str; | ||||
| 
 | ||||
| use rand::{ | ||||
|     distributions::{Distribution, Uniform}, | ||||
|     distributions, | ||||
|     distributions::{Distribution as _, Uniform}, | ||||
|     seq::SliceRandom, | ||||
|     FromEntropy, Rng, | ||||
|     Rng, SeedableRng, | ||||
| }; | ||||
| 
 | ||||
| use crate::{ | ||||
|     alphabet, | ||||
|     encode::encoded_len, | ||||
|     engine::{ | ||||
|         general_purpose::{GeneralPurpose, GeneralPurposeConfig}, | ||||
|         Config, DecodePaddingMode, Engine, | ||||
|     }, | ||||
| }; | ||||
| 
 | ||||
| #[test] | ||||
|  | @ -19,10 +27,10 @@ fn roundtrip_random_config_long() { | |||
|     roundtrip_random_config(Uniform::new(0, 1000), 10_000); | ||||
| } | ||||
| 
 | ||||
| pub fn assert_encode_sanity(encoded: &str, config: Config, input_len: usize) { | ||||
| pub fn assert_encode_sanity(encoded: &str, padded: bool, input_len: usize) { | ||||
|     let input_rem = input_len % 3; | ||||
|     let expected_padding_len = if input_rem > 0 { | ||||
|         if config.pad { | ||||
|         if padded { | ||||
|             3 - input_rem | ||||
|         } else { | ||||
|             0 | ||||
|  | @ -31,7 +39,7 @@ pub fn assert_encode_sanity(encoded: &str, config: Config, input_len: usize) { | |||
|         0 | ||||
|     }; | ||||
| 
 | ||||
|     let expected_encoded_len = encoded_size(input_len, config).unwrap(); | ||||
|     let expected_encoded_len = encoded_len(input_len, padded).unwrap(); | ||||
| 
 | ||||
|     assert_eq!(expected_encoded_len, encoded.len()); | ||||
| 
 | ||||
|  | @ -53,29 +61,57 @@ fn roundtrip_random_config(input_len_range: Uniform<usize>, iterations: u32) { | |||
| 
 | ||||
|         let input_len = input_len_range.sample(&mut rng); | ||||
| 
 | ||||
|         let config = random_config(&mut rng); | ||||
|         let engine = random_engine(&mut rng); | ||||
| 
 | ||||
|         for _ in 0..input_len { | ||||
|             input_buf.push(rng.gen()); | ||||
|         } | ||||
| 
 | ||||
|         encode_config_buf(&input_buf, config, &mut encoded_buf); | ||||
|         engine.encode_string(&input_buf, &mut encoded_buf); | ||||
| 
 | ||||
|         assert_encode_sanity(&encoded_buf, config, input_len); | ||||
|         assert_encode_sanity(&encoded_buf, engine.config().encode_padding(), input_len); | ||||
| 
 | ||||
|         assert_eq!(input_buf, decode_config(&encoded_buf, config).unwrap()); | ||||
|         assert_eq!(input_buf, engine.decode(&encoded_buf).unwrap()); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| pub fn random_config<R: Rng>(rng: &mut R) -> Config { | ||||
|     const CHARSETS: &[CharacterSet] = &[ | ||||
|         CharacterSet::UrlSafe, | ||||
|         CharacterSet::Standard, | ||||
|         CharacterSet::Crypt, | ||||
|         CharacterSet::ImapMutf7, | ||||
|         CharacterSet::BinHex, | ||||
|     ]; | ||||
|     let charset = *CHARSETS.choose(rng).unwrap(); | ||||
| 
 | ||||
|     Config::new(charset, rng.gen()) | ||||
| pub fn random_config<R: Rng>(rng: &mut R) -> GeneralPurposeConfig { | ||||
|     let mode = rng.gen(); | ||||
|     GeneralPurposeConfig::new() | ||||
|         .with_encode_padding(match mode { | ||||
|             DecodePaddingMode::Indifferent => rng.gen(), | ||||
|             DecodePaddingMode::RequireCanonical => true, | ||||
|             DecodePaddingMode::RequireNone => false, | ||||
|         }) | ||||
|         .with_decode_padding_mode(mode) | ||||
|         .with_decode_allow_trailing_bits(rng.gen()) | ||||
| } | ||||
| 
 | ||||
| impl distributions::Distribution<DecodePaddingMode> for distributions::Standard { | ||||
|     fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> DecodePaddingMode { | ||||
|         match rng.gen_range(0..=2) { | ||||
|             0 => DecodePaddingMode::Indifferent, | ||||
|             1 => DecodePaddingMode::RequireCanonical, | ||||
|             _ => DecodePaddingMode::RequireNone, | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| pub fn random_alphabet<R: Rng>(rng: &mut R) -> &'static alphabet::Alphabet { | ||||
|     ALPHABETS.choose(rng).unwrap() | ||||
| } | ||||
| 
 | ||||
| pub fn random_engine<R: Rng>(rng: &mut R) -> GeneralPurpose { | ||||
|     let alphabet = random_alphabet(rng); | ||||
|     let config = random_config(rng); | ||||
|     GeneralPurpose::new(alphabet, config) | ||||
| } | ||||
| 
 | ||||
| const ALPHABETS: &[alphabet::Alphabet] = &[ | ||||
|     alphabet::URL_SAFE, | ||||
|     alphabet::STANDARD, | ||||
|     alphabet::CRYPT, | ||||
|     alphabet::BCRYPT, | ||||
|     alphabet::IMAP_MUTF7, | ||||
|     alphabet::BIN_HEX, | ||||
| ]; | ||||
|  |  | |||
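Note on the hunk above: the 0.13 `Config`/`CharacterSet` test helpers (`encode_config_buf`, `decode_config`, `random_config`) are replaced by engine-based equivalents. As a rough sketch of the 0.21 calling convention the new helpers exercise — illustrative only, not part of the vendored sources — a round trip through a hand-built engine looks like this:

    use base64::{alphabet, engine::general_purpose::{GeneralPurpose, PAD}, Engine as _};

    fn roundtrip() {
        // explicit engine construction; general_purpose::STANDARD is the prebuilt equivalent
        let engine = GeneralPurpose::new(&alphabet::STANDARD, PAD);
        let encoded = engine.encode(b"hello");          // was encode_config_buf / encode_config
        let decoded = engine.decode(&encoded).unwrap(); // was decode_config
        assert_eq!(b"hello".to_vec(), decoded);
    }

The prebuilt `general_purpose::STANDARD` and friends cover the common cases without constructing an engine by hand.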
							
								
								
									
78    third_party/rust/base64/src/write/encoder.rs (vendored)
							|  | @ -1,8 +1,7 @@ | |||
| use crate::encode::encode_to_slice; | ||||
| use crate::{encode_config_slice, Config}; | ||||
| use crate::engine::Engine; | ||||
| use std::{ | ||||
|     cmp, fmt, | ||||
|     io::{ErrorKind, Result, Write}, | ||||
|     cmp, fmt, io, | ||||
|     io::{ErrorKind, Result}, | ||||
| }; | ||||
| 
 | ||||
| pub(crate) const BUF_SIZE: usize = 1024; | ||||
|  | @ -23,9 +22,10 @@ const MIN_ENCODE_CHUNK_SIZE: usize = 3; | |||
| ///
 | ||||
| /// ```
 | ||||
| /// use std::io::Write;
 | ||||
| /// use base64::engine::general_purpose;
 | ||||
| ///
 | ||||
| /// // use a vec as the simplest possible `Write` -- in real code this is probably a file, etc.
 | ||||
| /// let mut enc = base64::write::EncoderWriter::new(Vec::new(), base64::STANDARD);
 | ||||
| /// let mut enc = base64::write::EncoderWriter::new(Vec::new(), &general_purpose::STANDARD);
 | ||||
| ///
 | ||||
| /// // handle errors as you normally would
 | ||||
| /// enc.write_all(b"asdf").unwrap();
 | ||||
|  | @ -53,8 +53,15 @@ const MIN_ENCODE_CHUNK_SIZE: usize = 3; | |||
| ///
 | ||||
| /// It has some minor performance loss compared to encoding slices (a couple percent).
 | ||||
| /// It does not do any heap allocation.
 | ||||
| pub struct EncoderWriter<W: Write> { | ||||
|     config: Config, | ||||
| ///
 | ||||
| /// # Limitations
 | ||||
| ///
 | ||||
| /// Owing to the specification of the `write` and `flush` methods on the `Write` trait and their
 | ||||
| /// implications for a buffering implementation, these methods may not behave as expected. In
 | ||||
| /// particular, calling `write_all` on this interface may fail with `io::ErrorKind::WriteZero`.
 | ||||
| /// See the documentation of the `Write` trait implementation for further details.
 | ||||
| pub struct EncoderWriter<'e, E: Engine, W: io::Write> { | ||||
|     engine: &'e E, | ||||
|     /// Where encoded data is written to. It's an Option as it's None immediately before Drop is
 | ||||
|     /// called so that finish() can return the underlying writer. None implies that finish() has
 | ||||
|     /// been called successfully.
 | ||||
|  | @ -73,7 +80,7 @@ pub struct EncoderWriter<W: Write> { | |||
|     panicked: bool, | ||||
| } | ||||
| 
 | ||||
| impl<W: Write> fmt::Debug for EncoderWriter<W> { | ||||
| impl<'e, E: Engine, W: io::Write> fmt::Debug for EncoderWriter<'e, E, W> { | ||||
|     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||||
|         write!( | ||||
|             f, | ||||
|  | @ -86,12 +93,12 @@ impl<W: Write> fmt::Debug for EncoderWriter<W> { | |||
|     } | ||||
| } | ||||
| 
 | ||||
| impl<W: Write> EncoderWriter<W> { | ||||
|     /// Create a new encoder that will write to the provided delegate writer `w`.
 | ||||
|     pub fn new(w: W, config: Config) -> EncoderWriter<W> { | ||||
| impl<'e, E: Engine, W: io::Write> EncoderWriter<'e, E, W> { | ||||
|     /// Create a new encoder that will write to the provided delegate writer.
 | ||||
|     pub fn new(delegate: W, engine: &'e E) -> EncoderWriter<'e, E, W> { | ||||
|         EncoderWriter { | ||||
|             config, | ||||
|             delegate: Some(w), | ||||
|             engine, | ||||
|             delegate: Some(delegate), | ||||
|             extra_input: [0u8; MIN_ENCODE_CHUNK_SIZE], | ||||
|             extra_input_occupied_len: 0, | ||||
|             output: [0u8; BUF_SIZE], | ||||
|  | @ -120,7 +127,7 @@ impl<W: Write> EncoderWriter<W> { | |||
|         // If we could consume self in finish(), we wouldn't have to worry about this case, but
 | ||||
|         // finish() is retryable in the face of I/O errors, so we can't consume here.
 | ||||
|         if self.delegate.is_none() { | ||||
|             panic!("Encoder has already had finish() called") | ||||
|             panic!("Encoder has already had finish() called"); | ||||
|         }; | ||||
| 
 | ||||
|         self.write_final_leftovers()?; | ||||
|  | @ -141,11 +148,13 @@ impl<W: Write> EncoderWriter<W> { | |||
|         self.write_all_encoded_output()?; | ||||
| 
 | ||||
|         if self.extra_input_occupied_len > 0 { | ||||
|             let encoded_len = encode_config_slice( | ||||
|                 &self.extra_input[..self.extra_input_occupied_len], | ||||
|                 self.config, | ||||
|                 &mut self.output[..], | ||||
|             ); | ||||
|             let encoded_len = self | ||||
|                 .engine | ||||
|                 .encode_slice( | ||||
|                     &self.extra_input[..self.extra_input_occupied_len], | ||||
|                     &mut self.output[..], | ||||
|                 ) | ||||
|                 .expect("buffer is large enough"); | ||||
| 
 | ||||
|             self.output_occupied_len = encoded_len; | ||||
| 
 | ||||
|  | @ -182,7 +191,7 @@ impl<W: Write> EncoderWriter<W> { | |||
|                 self.output_occupied_len = current_output_len.checked_sub(consumed).unwrap(); | ||||
|                 // If we're blocking on I/O, the minor inefficiency of copying bytes to the
 | ||||
|                 // start of the buffer is the least of our concerns...
 | ||||
|                 // Rotate moves more than we need to, but copy_within isn't stabilized yet.
 | ||||
|                 // TODO Rotate moves more than we need to; copy_within now stable.
 | ||||
|                 self.output.rotate_left(consumed); | ||||
|             } else { | ||||
|                 self.output_occupied_len = 0; | ||||
|  | @ -215,9 +224,28 @@ impl<W: Write> EncoderWriter<W> { | |||
|         debug_assert_eq!(0, self.output_occupied_len); | ||||
|         Ok(()) | ||||
|     } | ||||
| 
 | ||||
|     /// Unwraps this `EncoderWriter`, returning the base writer it writes base64 encoded output
 | ||||
|     /// to.
 | ||||
|     ///
 | ||||
|     /// Normally this method should not be needed, since `finish()` returns the inner writer if
 | ||||
|     /// it completes successfully. That will also ensure all data has been flushed, which the
 | ||||
|     /// `into_inner()` function does *not* do.
 | ||||
|     ///
 | ||||
|     /// Calling this method after `finish()` has completed successfully will panic, since the
 | ||||
|     /// writer has already been returned.
 | ||||
|     ///
 | ||||
|     /// This method may be useful if the writer implements additional APIs beyond the `Write`
 | ||||
|     /// trait. Note that the inner writer might be in an error state or have an incomplete
 | ||||
|     /// base64 string written to it.
 | ||||
|     pub fn into_inner(mut self) -> W { | ||||
|         self.delegate | ||||
|             .take() | ||||
|             .expect("Encoder has already had finish() called") | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl<W: Write> Write for EncoderWriter<W> { | ||||
| impl<'e, E: Engine, W: io::Write> io::Write for EncoderWriter<'e, E, W> { | ||||
|     /// Encode input and then write to the delegate writer.
 | ||||
|     ///
 | ||||
|     /// Under non-error circumstances, this returns `Ok` with the value being the number of bytes
 | ||||
|  | @ -286,10 +314,9 @@ impl<W: Write> Write for EncoderWriter<W> { | |||
|                 self.extra_input[self.extra_input_occupied_len..MIN_ENCODE_CHUNK_SIZE] | ||||
|                     .copy_from_slice(&input[0..extra_input_read_len]); | ||||
| 
 | ||||
|                 let len = encode_to_slice( | ||||
|                 let len = self.engine.internal_encode( | ||||
|                     &self.extra_input[0..MIN_ENCODE_CHUNK_SIZE], | ||||
|                     &mut self.output[..], | ||||
|                     self.config.char_set.encode_table(), | ||||
|                 ); | ||||
|                 debug_assert_eq!(4, len); | ||||
| 
 | ||||
|  | @ -335,10 +362,9 @@ impl<W: Write> Write for EncoderWriter<W> { | |||
|         debug_assert_eq!(0, max_input_len % MIN_ENCODE_CHUNK_SIZE); | ||||
|         debug_assert_eq!(0, input_chunks_to_encode_len % MIN_ENCODE_CHUNK_SIZE); | ||||
| 
 | ||||
|         encoded_size += encode_to_slice( | ||||
|         encoded_size += self.engine.internal_encode( | ||||
|             &input[..(input_chunks_to_encode_len)], | ||||
|             &mut self.output[encoded_size..], | ||||
|             self.config.char_set.encode_table(), | ||||
|         ); | ||||
| 
 | ||||
|         // not updating `self.output_occupied_len` here because if the below write fails, it should
 | ||||
|  | @ -371,7 +397,7 @@ impl<W: Write> Write for EncoderWriter<W> { | |||
|     } | ||||
| } | ||||
| 
 | ||||
| impl<W: Write> Drop for EncoderWriter<W> { | ||||
| impl<'e, E: Engine, W: io::Write> Drop for EncoderWriter<'e, E, W> { | ||||
|     fn drop(&mut self) { | ||||
|         if !self.panicked { | ||||
|             // like `BufWriter`, ignore errors during drop
 | ||||
|  |  | |||
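The encoder now borrows an `&Engine` for its lifetime instead of copying a `Config`, and gains `into_inner()` for recovering the delegate without flushing. A minimal caller of the new constructor, mirroring the doc example in the hunk above (hypothetical usage, assuming the default `general_purpose::STANDARD` engine):

    use std::io::Write;
    use base64::{engine::general_purpose, write::EncoderWriter};

    fn encode_to_vec(data: &[u8]) -> std::io::Result<Vec<u8>> {
        // the writer borrows the engine; finish() emits any trailing partial chunk plus padding
        let mut enc = EncoderWriter::new(Vec::new(), &general_purpose::STANDARD);
        enc.write_all(data)?;
        enc.finish()
    }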
|  | @ -1,10 +1,10 @@ | |||
| use super::encoder::EncoderWriter; | ||||
| use crate::Config; | ||||
| use crate::engine::Engine; | ||||
| use std::io; | ||||
| use std::io::Write; | ||||
| 
 | ||||
| /// A `Write` implementation that base64-encodes data using the provided config and accumulates the
 | ||||
| /// resulting base64 in memory, which is then exposed as a String via `into_inner()`.
 | ||||
| /// resulting base64 utf8 `&str` in a [StrConsumer] implementation (typically `String`), which is
 | ||||
| /// then exposed via `into_inner()`.
 | ||||
| ///
 | ||||
| /// # Examples
 | ||||
| ///
 | ||||
|  | @ -12,8 +12,9 @@ use std::io::Write; | |||
| ///
 | ||||
| /// ```
 | ||||
| /// use std::io::Write;
 | ||||
| /// use base64::engine::general_purpose;
 | ||||
| ///
 | ||||
| /// let mut enc = base64::write::EncoderStringWriter::new(base64::STANDARD);
 | ||||
| /// let mut enc = base64::write::EncoderStringWriter::new(&general_purpose::STANDARD);
 | ||||
| ///
 | ||||
| /// enc.write_all(b"asdf").unwrap();
 | ||||
| ///
 | ||||
|  | @ -23,14 +24,17 @@ use std::io::Write; | |||
| /// assert_eq!("YXNkZg==", &b64_string);
 | ||||
| /// ```
 | ||||
| ///
 | ||||
| /// Or, append to an existing String:
 | ||||
| /// Or, append to an existing `String`, which implements `StrConsumer`:
 | ||||
| ///
 | ||||
| /// ```
 | ||||
| /// use std::io::Write;
 | ||||
| /// use base64::engine::general_purpose;
 | ||||
| ///
 | ||||
| /// let mut buf = String::from("base64: ");
 | ||||
| ///
 | ||||
| /// let mut enc = base64::write::EncoderStringWriter::from(&mut buf, base64::STANDARD);
 | ||||
| /// let mut enc = base64::write::EncoderStringWriter::from_consumer(
 | ||||
| ///     &mut buf,
 | ||||
| ///     &general_purpose::STANDARD);
 | ||||
| ///
 | ||||
| /// enc.write_all(b"asdf").unwrap();
 | ||||
| ///
 | ||||
|  | @ -49,40 +53,38 @@ use std::io::Write; | |||
| ///
 | ||||
| /// Because it has to validate that the base64 is UTF-8, it is about 80% as fast as writing plain
 | ||||
| /// bytes to a `io::Write`.
 | ||||
| pub struct EncoderStringWriter<S: StrConsumer> { | ||||
|     encoder: EncoderWriter<Utf8SingleCodeUnitWriter<S>>, | ||||
| pub struct EncoderStringWriter<'e, E: Engine, S: StrConsumer> { | ||||
|     encoder: EncoderWriter<'e, E, Utf8SingleCodeUnitWriter<S>>, | ||||
| } | ||||
| 
 | ||||
| impl<S: StrConsumer> EncoderStringWriter<S> { | ||||
| impl<'e, E: Engine, S: StrConsumer> EncoderStringWriter<'e, E, S> { | ||||
|     /// Create a EncoderStringWriter that will append to the provided `StrConsumer`.
 | ||||
|     pub fn from(str_consumer: S, config: Config) -> Self { | ||||
|     pub fn from_consumer(str_consumer: S, engine: &'e E) -> Self { | ||||
|         EncoderStringWriter { | ||||
|             encoder: EncoderWriter::new(Utf8SingleCodeUnitWriter { str_consumer }, config), | ||||
|             encoder: EncoderWriter::new(Utf8SingleCodeUnitWriter { str_consumer }, engine), | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|     /// Encode all remaining buffered data, including any trailing incomplete input triples and
 | ||||
|     /// associated padding.
 | ||||
|     ///
 | ||||
|     /// Once this succeeds, no further writes or calls to this method are allowed.
 | ||||
|     ///
 | ||||
|     /// Returns the base64-encoded form of the accumulated written data.
 | ||||
|     pub fn into_inner(mut self) -> S { | ||||
|         self.encoder | ||||
|             .finish() | ||||
|             .expect("Writing to a Vec<u8> should never fail") | ||||
|             .expect("Writing to a consumer should never fail") | ||||
|             .str_consumer | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl EncoderStringWriter<String> { | ||||
|     /// Create a EncoderStringWriter that will encode into a new String with the provided config.
 | ||||
|     pub fn new(config: Config) -> Self { | ||||
|         EncoderStringWriter::from(String::new(), config) | ||||
| impl<'e, E: Engine> EncoderStringWriter<'e, E, String> { | ||||
|     /// Create a EncoderStringWriter that will encode into a new `String` with the provided config.
 | ||||
|     pub fn new(engine: &'e E) -> Self { | ||||
|         EncoderStringWriter::from_consumer(String::new(), engine) | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl<S: StrConsumer> Write for EncoderStringWriter<S> { | ||||
| impl<'e, E: Engine, S: StrConsumer> io::Write for EncoderStringWriter<'e, E, S> { | ||||
|     fn write(&mut self, buf: &[u8]) -> io::Result<usize> { | ||||
|         self.encoder.write(buf) | ||||
|     } | ||||
|  | @ -101,14 +103,14 @@ pub trait StrConsumer { | |||
| /// As for io::Write, `StrConsumer` is implemented automatically for `&mut S`.
 | ||||
| impl<S: StrConsumer + ?Sized> StrConsumer for &mut S { | ||||
|     fn consume(&mut self, buf: &str) { | ||||
|         (**self).consume(buf) | ||||
|         (**self).consume(buf); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| /// Pushes the str onto the end of the String
 | ||||
| impl StrConsumer for String { | ||||
|     fn consume(&mut self, buf: &str) { | ||||
|         self.push_str(buf) | ||||
|         self.push_str(buf); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
|  | @ -138,9 +140,9 @@ impl<S: StrConsumer> io::Write for Utf8SingleCodeUnitWriter<S> { | |||
| 
 | ||||
| #[cfg(test)] | ||||
| mod tests { | ||||
|     use crate::encode_config_buf; | ||||
|     use crate::tests::random_config; | ||||
|     use crate::write::encoder_string_writer::EncoderStringWriter; | ||||
|     use crate::{ | ||||
|         engine::Engine, tests::random_engine, write::encoder_string_writer::EncoderStringWriter, | ||||
|     }; | ||||
|     use rand::Rng; | ||||
|     use std::io::Write; | ||||
| 
 | ||||
|  | @ -160,10 +162,10 @@ mod tests { | |||
|                 orig_data.push(rng.gen()); | ||||
|             } | ||||
| 
 | ||||
|             let config = random_config(&mut rng); | ||||
|             encode_config_buf(&orig_data, config, &mut normal_encoded); | ||||
|             let engine = random_engine(&mut rng); | ||||
|             engine.encode_string(&orig_data, &mut normal_encoded); | ||||
| 
 | ||||
|             let mut stream_encoder = EncoderStringWriter::new(config); | ||||
|             let mut stream_encoder = EncoderStringWriter::new(&engine); | ||||
|             // Write the first i bytes, then the rest
 | ||||
|             stream_encoder.write_all(&orig_data[0..i]).unwrap(); | ||||
|             stream_encoder.write_all(&orig_data[i..]).unwrap(); | ||||
|  |  | |||
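`EncoderStringWriter::from` becomes `from_consumer` and takes an `&Engine`; anything implementing `StrConsumer` (notably `String` and `&mut String`) still works as the sink. A small illustrative sketch of appending to an existing `String` under the new signature, not taken from the vendored code:

    use std::io::Write;
    use base64::{engine::general_purpose, write::EncoderStringWriter};

    fn append_base64(mut buf: String, data: &[u8]) -> String {
        // was: EncoderStringWriter::from(&mut buf, base64::STANDARD)
        let mut enc = EncoderStringWriter::from_consumer(&mut buf, &general_purpose::STANDARD);
        enc.write_all(data).unwrap();
        enc.into_inner(); // finishes the encode; the consumer is the borrowed String
        buf
    }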
							
								
								
									
158    third_party/rust/base64/src/write/encoder_tests.rs (vendored)
							|  | @ -1,29 +1,39 @@ | |||
| use super::EncoderWriter; | ||||
| use crate::tests::random_config; | ||||
| use crate::{encode_config, encode_config_buf, STANDARD_NO_PAD, URL_SAFE}; | ||||
| 
 | ||||
| use std::io::{Cursor, Write}; | ||||
| use std::{cmp, io, str}; | ||||
| 
 | ||||
| use rand::Rng; | ||||
| 
 | ||||
| use crate::{ | ||||
|     alphabet::{STANDARD, URL_SAFE}, | ||||
|     engine::{ | ||||
|         general_purpose::{GeneralPurpose, NO_PAD, PAD}, | ||||
|         Engine, | ||||
|     }, | ||||
|     tests::random_engine, | ||||
| }; | ||||
| 
 | ||||
| use super::EncoderWriter; | ||||
| 
 | ||||
| const URL_SAFE_ENGINE: GeneralPurpose = GeneralPurpose::new(&URL_SAFE, PAD); | ||||
| const NO_PAD_ENGINE: GeneralPurpose = GeneralPurpose::new(&STANDARD, NO_PAD); | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_three_bytes() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, URL_SAFE); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &URL_SAFE_ENGINE); | ||||
| 
 | ||||
|         let sz = enc.write(b"abc").unwrap(); | ||||
|         assert_eq!(sz, 3); | ||||
|     } | ||||
|     assert_eq!(&c.get_ref()[..], encode_config("abc", URL_SAFE).as_bytes()); | ||||
|     assert_eq!(&c.get_ref()[..], URL_SAFE_ENGINE.encode("abc").as_bytes()); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_nine_bytes_two_writes() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, URL_SAFE); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &URL_SAFE_ENGINE); | ||||
| 
 | ||||
|         let sz = enc.write(b"abcdef").unwrap(); | ||||
|         assert_eq!(sz, 6); | ||||
|  | @ -32,7 +42,7 @@ fn encode_nine_bytes_two_writes() { | |||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("abcdefghi", URL_SAFE).as_bytes() | ||||
|         URL_SAFE_ENGINE.encode("abcdefghi").as_bytes() | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
|  | @ -40,21 +50,21 @@ fn encode_nine_bytes_two_writes() { | |||
| fn encode_one_then_two_bytes() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, URL_SAFE); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &URL_SAFE_ENGINE); | ||||
| 
 | ||||
|         let sz = enc.write(b"a").unwrap(); | ||||
|         assert_eq!(sz, 1); | ||||
|         let sz = enc.write(b"bc").unwrap(); | ||||
|         assert_eq!(sz, 2); | ||||
|     } | ||||
|     assert_eq!(&c.get_ref()[..], encode_config("abc", URL_SAFE).as_bytes()); | ||||
|     assert_eq!(&c.get_ref()[..], URL_SAFE_ENGINE.encode("abc").as_bytes()); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_one_then_five_bytes() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, URL_SAFE); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &URL_SAFE_ENGINE); | ||||
| 
 | ||||
|         let sz = enc.write(b"a").unwrap(); | ||||
|         assert_eq!(sz, 1); | ||||
|  | @ -63,7 +73,7 @@ fn encode_one_then_five_bytes() { | |||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("abcdef", URL_SAFE).as_bytes() | ||||
|         URL_SAFE_ENGINE.encode("abcdef").as_bytes() | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
|  | @ -71,7 +81,7 @@ fn encode_one_then_five_bytes() { | |||
| fn encode_1_2_3_bytes() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, URL_SAFE); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &URL_SAFE_ENGINE); | ||||
| 
 | ||||
|         let sz = enc.write(b"a").unwrap(); | ||||
|         assert_eq!(sz, 1); | ||||
|  | @ -82,7 +92,7 @@ fn encode_1_2_3_bytes() { | |||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("abcdef", URL_SAFE).as_bytes() | ||||
|         URL_SAFE_ENGINE.encode("abcdef").as_bytes() | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
|  | @ -90,20 +100,20 @@ fn encode_1_2_3_bytes() { | |||
| fn encode_with_padding() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, URL_SAFE); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &URL_SAFE_ENGINE); | ||||
| 
 | ||||
|         enc.write_all(b"abcd").unwrap(); | ||||
| 
 | ||||
|         enc.flush().unwrap(); | ||||
|     } | ||||
|     assert_eq!(&c.get_ref()[..], encode_config("abcd", URL_SAFE).as_bytes()); | ||||
|     assert_eq!(&c.get_ref()[..], URL_SAFE_ENGINE.encode("abcd").as_bytes()); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_with_padding_multiple_writes() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, URL_SAFE); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &URL_SAFE_ENGINE); | ||||
| 
 | ||||
|         assert_eq!(1, enc.write(b"a").unwrap()); | ||||
|         assert_eq!(2, enc.write(b"bc").unwrap()); | ||||
|  | @ -114,7 +124,7 @@ fn encode_with_padding_multiple_writes() { | |||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("abcdefg", URL_SAFE).as_bytes() | ||||
|         URL_SAFE_ENGINE.encode("abcdefg").as_bytes() | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
|  | @ -122,7 +132,7 @@ fn encode_with_padding_multiple_writes() { | |||
| fn finish_writes_extra_byte() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, URL_SAFE); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &URL_SAFE_ENGINE); | ||||
| 
 | ||||
|         assert_eq!(6, enc.write(b"abcdef").unwrap()); | ||||
| 
 | ||||
|  | @ -134,7 +144,7 @@ fn finish_writes_extra_byte() { | |||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("abcdefg", URL_SAFE).as_bytes() | ||||
|         URL_SAFE_ENGINE.encode("abcdefg").as_bytes() | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
|  | @ -142,17 +152,14 @@ fn finish_writes_extra_byte() { | |||
| fn write_partial_chunk_encodes_partial_chunk() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &NO_PAD_ENGINE); | ||||
| 
 | ||||
|         // nothing encoded yet
 | ||||
|         assert_eq!(2, enc.write(b"ab").unwrap()); | ||||
|         // encoded here
 | ||||
|         let _ = enc.finish().unwrap(); | ||||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("ab", STANDARD_NO_PAD).as_bytes() | ||||
|     ); | ||||
|     assert_eq!(&c.get_ref()[..], NO_PAD_ENGINE.encode("ab").as_bytes()); | ||||
|     assert_eq!(3, c.get_ref().len()); | ||||
| } | ||||
| 
 | ||||
|  | @ -160,15 +167,12 @@ fn write_partial_chunk_encodes_partial_chunk() { | |||
| fn write_1_chunk_encodes_complete_chunk() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &NO_PAD_ENGINE); | ||||
| 
 | ||||
|         assert_eq!(3, enc.write(b"abc").unwrap()); | ||||
|         let _ = enc.finish().unwrap(); | ||||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("abc", STANDARD_NO_PAD).as_bytes() | ||||
|     ); | ||||
|     assert_eq!(&c.get_ref()[..], NO_PAD_ENGINE.encode("abc").as_bytes()); | ||||
|     assert_eq!(4, c.get_ref().len()); | ||||
| } | ||||
| 
 | ||||
|  | @ -176,16 +180,13 @@ fn write_1_chunk_encodes_complete_chunk() { | |||
| fn write_1_chunk_and_partial_encodes_only_complete_chunk() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &NO_PAD_ENGINE); | ||||
| 
 | ||||
|         // "d" not written
 | ||||
|         // "d" not consumed since it's not a full chunk
 | ||||
|         assert_eq!(3, enc.write(b"abcd").unwrap()); | ||||
|         let _ = enc.finish().unwrap(); | ||||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("abc", STANDARD_NO_PAD).as_bytes() | ||||
|     ); | ||||
|     assert_eq!(&c.get_ref()[..], NO_PAD_ENGINE.encode("abc").as_bytes()); | ||||
|     assert_eq!(4, c.get_ref().len()); | ||||
| } | ||||
| 
 | ||||
|  | @ -193,16 +194,13 @@ fn write_1_chunk_and_partial_encodes_only_complete_chunk() { | |||
| fn write_2_partials_to_exactly_complete_chunk_encodes_complete_chunk() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &NO_PAD_ENGINE); | ||||
| 
 | ||||
|         assert_eq!(1, enc.write(b"a").unwrap()); | ||||
|         assert_eq!(2, enc.write(b"bc").unwrap()); | ||||
|         let _ = enc.finish().unwrap(); | ||||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("abc", STANDARD_NO_PAD).as_bytes() | ||||
|     ); | ||||
|     assert_eq!(&c.get_ref()[..], NO_PAD_ENGINE.encode("abc").as_bytes()); | ||||
|     assert_eq!(4, c.get_ref().len()); | ||||
| } | ||||
| 
 | ||||
|  | @ -211,17 +209,14 @@ fn write_partial_then_enough_to_complete_chunk_but_not_complete_another_chunk_en | |||
| ) { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &NO_PAD_ENGINE); | ||||
| 
 | ||||
|         assert_eq!(1, enc.write(b"a").unwrap()); | ||||
|         // doesn't consume "d"
 | ||||
|         assert_eq!(2, enc.write(b"bcd").unwrap()); | ||||
|         let _ = enc.finish().unwrap(); | ||||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("abc", STANDARD_NO_PAD).as_bytes() | ||||
|     ); | ||||
|     assert_eq!(&c.get_ref()[..], NO_PAD_ENGINE.encode("abc").as_bytes()); | ||||
|     assert_eq!(4, c.get_ref().len()); | ||||
| } | ||||
| 
 | ||||
|  | @ -229,17 +224,14 @@ fn write_partial_then_enough_to_complete_chunk_but_not_complete_another_chunk_en | |||
| fn write_partial_then_enough_to_complete_chunk_and_another_chunk_encodes_complete_chunks() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &NO_PAD_ENGINE); | ||||
| 
 | ||||
|         assert_eq!(1, enc.write(b"a").unwrap()); | ||||
|         // completes partial chunk, and another chunk
 | ||||
|         assert_eq!(5, enc.write(b"bcdef").unwrap()); | ||||
|         let _ = enc.finish().unwrap(); | ||||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("abcdef", STANDARD_NO_PAD).as_bytes() | ||||
|     ); | ||||
|     assert_eq!(&c.get_ref()[..], NO_PAD_ENGINE.encode("abcdef").as_bytes()); | ||||
|     assert_eq!(8, c.get_ref().len()); | ||||
| } | ||||
| 
 | ||||
|  | @ -248,7 +240,7 @@ fn write_partial_then_enough_to_complete_chunk_and_another_chunk_and_another_par | |||
| ) { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &NO_PAD_ENGINE); | ||||
| 
 | ||||
|         assert_eq!(1, enc.write(b"a").unwrap()); | ||||
|         // completes partial chunk, and another chunk, with one more partial chunk that's not
 | ||||
|  | @ -256,10 +248,7 @@ fn write_partial_then_enough_to_complete_chunk_and_another_chunk_and_another_par | |||
|         assert_eq!(5, enc.write(b"bcdefe").unwrap()); | ||||
|         let _ = enc.finish().unwrap(); | ||||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("abcdef", STANDARD_NO_PAD).as_bytes() | ||||
|     ); | ||||
|     assert_eq!(&c.get_ref()[..], NO_PAD_ENGINE.encode("abcdef").as_bytes()); | ||||
|     assert_eq!(8, c.get_ref().len()); | ||||
| } | ||||
| 
 | ||||
|  | @ -267,13 +256,10 @@ fn write_partial_then_enough_to_complete_chunk_and_another_chunk_and_another_par | |||
| fn drop_calls_finish_for_you() { | ||||
|     let mut c = Cursor::new(Vec::new()); | ||||
|     { | ||||
|         let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD); | ||||
|         let mut enc = EncoderWriter::new(&mut c, &NO_PAD_ENGINE); | ||||
|         assert_eq!(1, enc.write(b"a").unwrap()); | ||||
|     } | ||||
|     assert_eq!( | ||||
|         &c.get_ref()[..], | ||||
|         encode_config("a", STANDARD_NO_PAD).as_bytes() | ||||
|     ); | ||||
|     assert_eq!(&c.get_ref()[..], NO_PAD_ENGINE.encode("a").as_bytes()); | ||||
|     assert_eq!(2, c.get_ref().len()); | ||||
| } | ||||
| 
 | ||||
|  | @ -295,11 +281,11 @@ fn every_possible_split_of_input() { | |||
|             orig_data.push(rng.gen()); | ||||
|         } | ||||
| 
 | ||||
|         let config = random_config(&mut rng); | ||||
|         encode_config_buf(&orig_data, config, &mut normal_encoded); | ||||
|         let engine = random_engine(&mut rng); | ||||
|         engine.encode_string(&orig_data, &mut normal_encoded); | ||||
| 
 | ||||
|         { | ||||
|             let mut stream_encoder = EncoderWriter::new(&mut stream_encoded, config); | ||||
|             let mut stream_encoder = EncoderWriter::new(&mut stream_encoded, &engine); | ||||
|             // Write the first i bytes, then the rest
 | ||||
|             stream_encoder.write_all(&orig_data[0..i]).unwrap(); | ||||
|             stream_encoder.write_all(&orig_data[i..]).unwrap(); | ||||
|  | @ -312,12 +298,12 @@ fn every_possible_split_of_input() { | |||
| #[test] | ||||
| fn encode_random_config_matches_normal_encode_reasonable_input_len() { | ||||
|     // choose up to 2 * buf size, so ~half the time it'll use a full buffer
 | ||||
|     do_encode_random_config_matches_normal_encode(super::encoder::BUF_SIZE * 2) | ||||
|     do_encode_random_config_matches_normal_encode(super::encoder::BUF_SIZE * 2); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_random_config_matches_normal_encode_tiny_input_len() { | ||||
|     do_encode_random_config_matches_normal_encode(10) | ||||
|     do_encode_random_config_matches_normal_encode(10); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
|  | @ -332,14 +318,14 @@ fn retrying_writes_that_error_with_interrupted_works() { | |||
|         stream_encoded.clear(); | ||||
|         normal_encoded.clear(); | ||||
| 
 | ||||
|         let orig_len: usize = rng.gen_range(100, 20_000); | ||||
|         let orig_len: usize = rng.gen_range(100..20_000); | ||||
|         for _ in 0..orig_len { | ||||
|             orig_data.push(rng.gen()); | ||||
|         } | ||||
| 
 | ||||
|         // encode the normal way
 | ||||
|         let config = random_config(&mut rng); | ||||
|         encode_config_buf(&orig_data, config, &mut normal_encoded); | ||||
|         let engine = random_engine(&mut rng); | ||||
|         engine.encode_string(&orig_data, &mut normal_encoded); | ||||
| 
 | ||||
|         // encode via the stream encoder
 | ||||
|         { | ||||
|  | @ -350,12 +336,12 @@ fn retrying_writes_that_error_with_interrupted_works() { | |||
|                 fraction: 0.8, | ||||
|             }; | ||||
| 
 | ||||
|             let mut stream_encoder = EncoderWriter::new(&mut interrupting_writer, config); | ||||
|             let mut stream_encoder = EncoderWriter::new(&mut interrupting_writer, &engine); | ||||
|             let mut bytes_consumed = 0; | ||||
|             while bytes_consumed < orig_len { | ||||
|                 // use short inputs since we want to use `extra` a lot as that's what needs rollback
 | ||||
|                 // when errors occur
 | ||||
|                 let input_len: usize = cmp::min(rng.gen_range(0, 10), orig_len - bytes_consumed); | ||||
|                 let input_len: usize = cmp::min(rng.gen_range(0..10), orig_len - bytes_consumed); | ||||
| 
 | ||||
|                 retry_interrupted_write_all( | ||||
|                     &mut stream_encoder, | ||||
|  | @ -396,14 +382,14 @@ fn writes_that_only_write_part_of_input_and_sometimes_interrupt_produce_correct_ | |||
|         stream_encoded.clear(); | ||||
|         normal_encoded.clear(); | ||||
| 
 | ||||
|         let orig_len: usize = rng.gen_range(100, 20_000); | ||||
|         let orig_len: usize = rng.gen_range(100..20_000); | ||||
|         for _ in 0..orig_len { | ||||
|             orig_data.push(rng.gen()); | ||||
|         } | ||||
| 
 | ||||
|         // encode the normal way
 | ||||
|         let config = random_config(&mut rng); | ||||
|         encode_config_buf(&orig_data, config, &mut normal_encoded); | ||||
|         let engine = random_engine(&mut rng); | ||||
|         engine.encode_string(&orig_data, &mut normal_encoded); | ||||
| 
 | ||||
|         // encode via the stream encoder
 | ||||
|         { | ||||
|  | @ -415,11 +401,11 @@ fn writes_that_only_write_part_of_input_and_sometimes_interrupt_produce_correct_ | |||
|                 no_interrupt_fraction: 0.1, | ||||
|             }; | ||||
| 
 | ||||
|             let mut stream_encoder = EncoderWriter::new(&mut partial_writer, config); | ||||
|             let mut stream_encoder = EncoderWriter::new(&mut partial_writer, &engine); | ||||
|             let mut bytes_consumed = 0; | ||||
|             while bytes_consumed < orig_len { | ||||
|                 // use at most medium-length inputs to exercise retry logic more aggressively
 | ||||
|                 let input_len: usize = cmp::min(rng.gen_range(0, 100), orig_len - bytes_consumed); | ||||
|                 let input_len: usize = cmp::min(rng.gen_range(0..100), orig_len - bytes_consumed); | ||||
| 
 | ||||
|                 let res = | ||||
|                     stream_encoder.write(&orig_data[bytes_consumed..bytes_consumed + input_len]); | ||||
|  | @ -475,22 +461,22 @@ fn do_encode_random_config_matches_normal_encode(max_input_len: usize) { | |||
|         stream_encoded.clear(); | ||||
|         normal_encoded.clear(); | ||||
| 
 | ||||
|         let orig_len: usize = rng.gen_range(100, 20_000); | ||||
|         let orig_len: usize = rng.gen_range(100..20_000); | ||||
|         for _ in 0..orig_len { | ||||
|             orig_data.push(rng.gen()); | ||||
|         } | ||||
| 
 | ||||
|         // encode the normal way
 | ||||
|         let config = random_config(&mut rng); | ||||
|         encode_config_buf(&orig_data, config, &mut normal_encoded); | ||||
|         let engine = random_engine(&mut rng); | ||||
|         engine.encode_string(&orig_data, &mut normal_encoded); | ||||
| 
 | ||||
|         // encode via the stream encoder
 | ||||
|         { | ||||
|             let mut stream_encoder = EncoderWriter::new(&mut stream_encoded, config); | ||||
|             let mut stream_encoder = EncoderWriter::new(&mut stream_encoded, &engine); | ||||
|             let mut bytes_consumed = 0; | ||||
|             while bytes_consumed < orig_len { | ||||
|                 let input_len: usize = | ||||
|                     cmp::min(rng.gen_range(0, max_input_len), orig_len - bytes_consumed); | ||||
|                     cmp::min(rng.gen_range(0..max_input_len), orig_len - bytes_consumed); | ||||
| 
 | ||||
|                 // write a little bit of the data
 | ||||
|                 stream_encoder | ||||
|  | @ -520,7 +506,7 @@ struct InterruptingWriter<'a, W: 'a + Write, R: 'a + Rng> { | |||
| 
 | ||||
| impl<'a, W: Write, R: Rng> Write for InterruptingWriter<'a, W, R> { | ||||
|     fn write(&mut self, buf: &[u8]) -> io::Result<usize> { | ||||
|         if self.rng.gen_range(0.0, 1.0) <= self.fraction { | ||||
|         if self.rng.gen_range(0.0..1.0) <= self.fraction { | ||||
|             return Err(io::Error::new(io::ErrorKind::Interrupted, "interrupted")); | ||||
|         } | ||||
| 
 | ||||
|  | @ -528,7 +514,7 @@ impl<'a, W: Write, R: Rng> Write for InterruptingWriter<'a, W, R> { | |||
|     } | ||||
| 
 | ||||
|     fn flush(&mut self) -> io::Result<()> { | ||||
|         if self.rng.gen_range(0.0, 1.0) <= self.fraction { | ||||
|         if self.rng.gen_range(0.0..1.0) <= self.fraction { | ||||
|             return Err(io::Error::new(io::ErrorKind::Interrupted, "interrupted")); | ||||
|         } | ||||
| 
 | ||||
|  | @ -548,17 +534,17 @@ struct PartialInterruptingWriter<'a, W: 'a + Write, R: 'a + Rng> { | |||
| 
 | ||||
| impl<'a, W: Write, R: Rng> Write for PartialInterruptingWriter<'a, W, R> { | ||||
|     fn write(&mut self, buf: &[u8]) -> io::Result<usize> { | ||||
|         if self.rng.gen_range(0.0, 1.0) > self.no_interrupt_fraction { | ||||
|         if self.rng.gen_range(0.0..1.0) > self.no_interrupt_fraction { | ||||
|             return Err(io::Error::new(io::ErrorKind::Interrupted, "interrupted")); | ||||
|         } | ||||
| 
 | ||||
|         if self.rng.gen_range(0.0, 1.0) <= self.full_input_fraction || buf.len() == 0 { | ||||
|         if self.rng.gen_range(0.0..1.0) <= self.full_input_fraction || buf.is_empty() { | ||||
|             // pass through the buf untouched
 | ||||
|             self.w.write(buf) | ||||
|         } else { | ||||
|             // only use a prefix of it
 | ||||
|             self.w | ||||
|                 .write(&buf[0..(self.rng.gen_range(0, buf.len() - 1))]) | ||||
|                 .write(&buf[0..(self.rng.gen_range(0..(buf.len() - 1)))]) | ||||
|         } | ||||
|     } | ||||
| 
 | ||||
|  |  | |||
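Besides the engine migration, these tests also track a rand upgrade: `FromEntropy` is gone (folded into `SeedableRng`) and `gen_range` now takes a range expression rather than two endpoints. A tiny sketch of the new style, assuming rand 0.8 and the always-available `StdRng`:

    use rand::rngs::StdRng;
    use rand::{Rng, SeedableRng};

    fn sample_len() -> usize {
        // was: a FromEntropy-based constructor under the older rand API
        let mut rng = StdRng::from_entropy();
        // was: rng.gen_range(100, 20_000)
        rng.gen_range(100..20_000)
    }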
							
								
								
									
7    third_party/rust/base64/src/write/mod.rs (vendored)
							|  | @ -1,8 +1,11 @@ | |||
| //! Implementations of `io::Write` to transparently handle base64.
 | ||||
| mod encoder; | ||||
| mod encoder_string_writer; | ||||
| pub use self::encoder::EncoderWriter; | ||||
| pub use self::encoder_string_writer::EncoderStringWriter; | ||||
| 
 | ||||
| pub use self::{ | ||||
|     encoder::EncoderWriter, | ||||
|     encoder_string_writer::{EncoderStringWriter, StrConsumer}, | ||||
| }; | ||||
| 
 | ||||
| #[cfg(test)] | ||||
| mod encoder_tests; | ||||
|  |  | |||
							
								
								
									
330    third_party/rust/base64/tests/decode.rs (vendored)
							|  | @ -1,330 +0,0 @@ | |||
| extern crate base64; | ||||
| 
 | ||||
| use base64::*; | ||||
| 
 | ||||
| mod helpers; | ||||
| 
 | ||||
| use self::helpers::*; | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_rfc4648_0() { | ||||
|     compare_decode("", ""); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_rfc4648_1() { | ||||
|     compare_decode("f", "Zg=="); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_rfc4648_1_just_a_bit_of_padding() { | ||||
|     // allows less padding than required
 | ||||
|     compare_decode("f", "Zg="); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_rfc4648_1_no_padding() { | ||||
|     compare_decode("f", "Zg"); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_rfc4648_2() { | ||||
|     compare_decode("fo", "Zm8="); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_rfc4648_2_no_padding() { | ||||
|     compare_decode("fo", "Zm8"); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_rfc4648_3() { | ||||
|     compare_decode("foo", "Zm9v"); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_rfc4648_4() { | ||||
|     compare_decode("foob", "Zm9vYg=="); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_rfc4648_4_no_padding() { | ||||
|     compare_decode("foob", "Zm9vYg"); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_rfc4648_5() { | ||||
|     compare_decode("fooba", "Zm9vYmE="); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_rfc4648_5_no_padding() { | ||||
|     compare_decode("fooba", "Zm9vYmE"); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_rfc4648_6() { | ||||
|     compare_decode("foobar", "Zm9vYmFy"); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_reject_null() { | ||||
|     assert_eq!( | ||||
|         DecodeError::InvalidByte(3, 0x0), | ||||
|         decode_config("YWx\0pY2U==", config_std_pad()).unwrap_err() | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_single_pad_byte_after_2_chars_in_trailing_quad_ok() { | ||||
|     for num_quads in 0..25 { | ||||
|         let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect(); | ||||
|         s.push_str("Zg="); | ||||
| 
 | ||||
|         let input_len = num_quads * 3 + 1; | ||||
| 
 | ||||
|         // Since there are 3 bytes in the trailing quad, want to be sure this allows for the fact
 | ||||
|         // that it could be bad padding rather than assuming that it will decode to 2 bytes and
 | ||||
|         // therefore allow 1 extra round of fast decode logic (stage 1 / 2).
 | ||||
| 
 | ||||
|         let mut decoded = Vec::new(); | ||||
|         decoded.resize(input_len, 0); | ||||
| 
 | ||||
|         assert_eq!( | ||||
|             input_len, | ||||
|             decode_config_slice(&s, STANDARD, &mut decoded).unwrap() | ||||
|         ); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| //this is a MAY in the rfc: https://tools.ietf.org/html/rfc4648#section-3.3
 | ||||
| #[test] | ||||
| fn decode_1_pad_byte_in_fast_loop_then_extra_padding_chunk_error() { | ||||
|     for num_quads in 0..25 { | ||||
|         let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect(); | ||||
|         s.push_str("YWxpY2U====="); | ||||
| 
 | ||||
|         // since the first 8 bytes are handled in stage 1 or 2, the padding is detected as a
 | ||||
|         // generic invalid byte, not specifcally a padding issue.
 | ||||
|         // Could argue that the *next* padding byte (in the next quad) is technically the first
 | ||||
|         // erroneous one, but reporting that accurately is more complex and probably nobody cares
 | ||||
|         assert_eq!( | ||||
|             DecodeError::InvalidByte(num_quads * 4 + 7, b'='), | ||||
|             decode(&s).unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_2_pad_bytes_in_leftovers_then_extra_padding_chunk_error() { | ||||
|     for num_quads in 0..25 { | ||||
|         let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect(); | ||||
|         s.push_str("YWxpY2UABB===="); | ||||
| 
 | ||||
|         // 6 bytes (4 padding) after last 8-byte chunk, so it's decoded by stage 4.
 | ||||
|         // First padding byte is invalid.
 | ||||
|         assert_eq!( | ||||
|             DecodeError::InvalidByte(num_quads * 4 + 10, b'='), | ||||
|             decode(&s).unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_valid_bytes_after_padding_in_leftovers_error() { | ||||
|     for num_quads in 0..25 { | ||||
|         let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect(); | ||||
|         s.push_str("YWxpY2UABB=B"); | ||||
| 
 | ||||
|         // 4 bytes after last 8-byte chunk, so it's decoded by stage 4.
 | ||||
|         // First (and only) padding byte is invalid.
 | ||||
|         assert_eq!( | ||||
|             DecodeError::InvalidByte(num_quads * 4 + 10, b'='), | ||||
|             decode(&s).unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_absurd_pad_error() { | ||||
|     for num_quads in 0..25 { | ||||
|         let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect(); | ||||
|         s.push_str("==Y=Wx===pY=2U====="); | ||||
| 
 | ||||
|         // Plenty of remaining bytes, so handled by stage 1 or 2.
 | ||||
|         // first padding byte
 | ||||
|         assert_eq!( | ||||
|             DecodeError::InvalidByte(num_quads * 4, b'='), | ||||
|             decode(&s).unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_extra_padding_after_1_pad_bytes_in_trailing_quad_returns_error() { | ||||
|     for num_quads in 0..25 { | ||||
|         let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect(); | ||||
|         s.push_str("EEE==="); | ||||
| 
 | ||||
|         // handled by stage 1, 2, or 4 depending on length
 | ||||
|         // first padding byte -- which would be legal if it was the only padding
 | ||||
|         assert_eq!( | ||||
|             DecodeError::InvalidByte(num_quads * 4 + 3, b'='), | ||||
|             decode(&s).unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_extra_padding_after_2_pad_bytes_in_trailing_quad_2_returns_error() { | ||||
|     for num_quads in 0..25 { | ||||
|         let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect(); | ||||
|         s.push_str("EE===="); | ||||
| 
 | ||||
|         // handled by stage 1, 2, or 4 depending on length
 | ||||
|         // first padding byte -- which would be legal if it was by itself
 | ||||
|         assert_eq!( | ||||
|             DecodeError::InvalidByte(num_quads * 4 + 2, b'='), | ||||
|             decode(&s).unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_start_quad_with_padding_returns_error() { | ||||
|     for num_quads in 0..25 { | ||||
|         // add enough padding to ensure that we'll hit all 4 stages at the different lengths
 | ||||
|         for pad_bytes in 1..32 { | ||||
|             let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect(); | ||||
|             let padding: String = std::iter::repeat("=").take(pad_bytes).collect(); | ||||
|             s.push_str(&padding); | ||||
| 
 | ||||
|             if pad_bytes % 4 == 1 { | ||||
|                 // detected in early length check
 | ||||
|                 assert_eq!(DecodeError::InvalidLength, decode(&s).unwrap_err()); | ||||
|             } else { | ||||
|                 // padding lengths 2 - 8 are handled by stage 4
 | ||||
|                 // padding length >= 8 will hit at least one chunk at stages 1, 2, 3 at different
 | ||||
|                 // prefix lengths
 | ||||
|                 assert_eq!( | ||||
|                     DecodeError::InvalidByte(num_quads * 4, b'='), | ||||
|                     decode(&s).unwrap_err() | ||||
|                 ); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_padding_followed_by_non_padding_returns_error() { | ||||
|     for num_quads in 0..25 { | ||||
|         for pad_bytes in 0..31 { | ||||
|             let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect(); | ||||
|             let padding: String = std::iter::repeat("=").take(pad_bytes).collect(); | ||||
|             s.push_str(&padding); | ||||
|             s.push_str("E"); | ||||
| 
 | ||||
|             if pad_bytes % 4 == 0 { | ||||
|                 assert_eq!(DecodeError::InvalidLength, decode(&s).unwrap_err()); | ||||
|             } else { | ||||
|                 // pad len 1 - 8 will be handled by stage 4
 | ||||
|                 // pad len 9 (suffix len 10) will have 8 bytes of padding handled by stage 3
 | ||||
|                 // first padding byte
 | ||||
|                 assert_eq!( | ||||
|                     DecodeError::InvalidByte(num_quads * 4, b'='), | ||||
|                     decode(&s).unwrap_err() | ||||
|                 ); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_one_char_in_quad_with_padding_error() { | ||||
|     for num_quads in 0..25 { | ||||
|         let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect(); | ||||
|         s.push_str("E="); | ||||
| 
 | ||||
|         assert_eq!( | ||||
|             DecodeError::InvalidByte(num_quads * 4 + 1, b'='), | ||||
|             decode(&s).unwrap_err() | ||||
|         ); | ||||
| 
 | ||||
|         // more padding doesn't change the error
 | ||||
|         s.push_str("="); | ||||
|         assert_eq!( | ||||
|             DecodeError::InvalidByte(num_quads * 4 + 1, b'='), | ||||
|             decode(&s).unwrap_err() | ||||
|         ); | ||||
| 
 | ||||
|         s.push_str("="); | ||||
|         assert_eq!( | ||||
|             DecodeError::InvalidByte(num_quads * 4 + 1, b'='), | ||||
|             decode(&s).unwrap_err() | ||||
|         ); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_one_char_in_quad_without_padding_error() { | ||||
|     for num_quads in 0..25 { | ||||
|         let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect(); | ||||
|         s.push('E'); | ||||
| 
 | ||||
|         assert_eq!(DecodeError::InvalidLength, decode(&s).unwrap_err()); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_reject_invalid_bytes_with_correct_error() { | ||||
|     for length in 1..100 { | ||||
|         for index in 0_usize..length { | ||||
|             for invalid_byte in " \t\n\r\x0C\x0B\x00%*.".bytes() { | ||||
|                 let prefix: String = std::iter::repeat("A").take(index).collect(); | ||||
|                 let suffix: String = std::iter::repeat("B").take(length - index - 1).collect(); | ||||
| 
 | ||||
|                 let input = prefix + &String::from_utf8(vec![invalid_byte]).unwrap() + &suffix; | ||||
|                 assert_eq!( | ||||
|                     length, | ||||
|                     input.len(), | ||||
|                     "length {} error position {}", | ||||
|                     length, | ||||
|                     index | ||||
|                 ); | ||||
| 
 | ||||
|                 if length % 4 == 1 && !suffix.is_empty() { | ||||
|                     assert_eq!(DecodeError::InvalidLength, decode(&input).unwrap_err()); | ||||
|                 } else { | ||||
|                     assert_eq!( | ||||
|                         DecodeError::InvalidByte(index, invalid_byte), | ||||
|                         decode(&input).unwrap_err() | ||||
|                     ); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_imap() { | ||||
|     assert_eq!( | ||||
|         decode_config(b"+,,+", crate::IMAP_MUTF7), | ||||
|         decode_config(b"+//+", crate::STANDARD_NO_PAD) | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn decode_invalid_trailing_bytes() { | ||||
|     // The case of trailing newlines is common enough to warrant a test for a good error
 | ||||
|     // message.
 | ||||
|     assert_eq!( | ||||
|         Err(DecodeError::InvalidByte(8, b'\n')), | ||||
|         decode(b"Zm9vCg==\n") | ||||
|     ); | ||||
|     // extra padding, however, is still InvalidLength
 | ||||
|     assert_eq!(Err(DecodeError::InvalidLength), decode(b"Zm9vCg===")); | ||||
| } | ||||
| 
 | ||||
| fn config_std_pad() -> Config { | ||||
|     Config::new(CharacterSet::Standard, true) | ||||
| } | ||||
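The removed integration tests covered the 0.13 free functions `decode`/`decode_config`; under 0.21 the same behavior is reached through an engine's `decode`. A couple of illustrative assertions (a sketch, not taken verbatim from the deleted file):

    use base64::{engine::general_purpose::STANDARD, Engine as _};

    fn decode_checks() {
        // was: decode("Zm9vYmFy") with the implicit standard config
        assert_eq!(b"foobar".to_vec(), STANDARD.decode("Zm9vYmFy").unwrap());
        // invalid trailing bytes (e.g. a newline) still surface a DecodeError
        assert!(STANDARD.decode("Zm9vYmFy\n").is_err());
    }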
							
								
								
									
55    third_party/rust/base64/tests/encode.rs (vendored)
							|  | @ -1,44 +1,9 @@ | |||
| extern crate base64; | ||||
| 
 | ||||
| use base64::*; | ||||
| use base64::{ | ||||
|     alphabet::URL_SAFE, engine::general_purpose::PAD, engine::general_purpose::STANDARD, *, | ||||
| }; | ||||
| 
 | ||||
| fn compare_encode(expected: &str, target: &[u8]) { | ||||
|     assert_eq!(expected, encode(target)); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_rfc4648_0() { | ||||
|     compare_encode("", b""); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_rfc4648_1() { | ||||
|     compare_encode("Zg==", b"f"); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_rfc4648_2() { | ||||
|     compare_encode("Zm8=", b"fo"); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_rfc4648_3() { | ||||
|     compare_encode("Zm9v", b"foo"); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_rfc4648_4() { | ||||
|     compare_encode("Zm9vYg==", b"foob"); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_rfc4648_5() { | ||||
|     compare_encode("Zm9vYmE=", b"fooba"); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_rfc4648_6() { | ||||
|     compare_encode("Zm9vYmFy", b"foobar"); | ||||
|     assert_eq!(expected, STANDARD.encode(target)); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
|  | @ -90,16 +55,6 @@ fn encode_all_bytes_url() { | |||
|          -AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq\ | ||||
|          -wsbKztLW2t7i5uru8vb6_wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t_g4eLj5OXm5-jp6uvs7e7v8PHy\ | ||||
|          8_T19vf4-fr7_P3-_w==",
 | ||||
|         encode_config(&bytes, URL_SAFE) | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_url_safe_without_padding() { | ||||
|     let encoded = encode_config(b"alice", URL_SAFE_NO_PAD); | ||||
|     assert_eq!(&encoded, "YWxpY2U"); | ||||
|     assert_eq!( | ||||
|         String::from_utf8(decode(&encoded).unwrap()).unwrap(), | ||||
|         "alice" | ||||
|         &engine::GeneralPurpose::new(&URL_SAFE, PAD).encode(&bytes) | ||||
|     ); | ||||
| } | ||||
|  |  | |||
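For orientation, the encode-side migration visible in this file boils down to replacing the 0.13 free functions with Engine values. A hedged sketch, assuming the 0.21 API these updated tests target (the prebuilt STANDARD engine, or a GeneralPurpose engine built from an alphabet plus a padding config, as in the rewritten encode_all_bytes_url test):

    use base64::{
        alphabet,
        engine::{self, general_purpose, Engine as _},
    };

    fn main() {
        let data = b"foobar";

        // 0.13: encode(data); 0.21: the prebuilt standard engine.
        assert_eq!("Zm9vYmFy", general_purpose::STANDARD.encode(data));

        // 0.13: encode_config(data, URL_SAFE); 0.21: assemble the engine explicitly.
        let url_safe_padded =
            engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::PAD);
        assert_eq!("Zm9vYmFy", url_safe_padded.encode(data));
    }

The "foobar"/"Zm9vYmFy" pair is the RFC 4648 vector already used by encode_rfc4648_6 above.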
							
								
								
									
14  third_party/rust/base64/tests/helpers.rs  vendored
							|  | @ -1,14 +0,0 @@ | |||
| extern crate base64; | ||||
| 
 | ||||
| use base64::*; | ||||
| 
 | ||||
| pub fn compare_decode(expected: &str, target: &str) { | ||||
|     assert_eq!( | ||||
|         expected, | ||||
|         String::from_utf8(decode(target).unwrap()).unwrap() | ||||
|     ); | ||||
|     assert_eq!( | ||||
|         expected, | ||||
|         String::from_utf8(decode(target.as_bytes()).unwrap()).unwrap() | ||||
|     ); | ||||
| } | ||||
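The deleted helper above only wrapped a decode-and-compare assertion. A hedged sketch of the same check expressed against the 0.21 Engine API (a hypothetical stand-in for illustration, not part of the updated test suite):

    use base64::{engine::general_purpose::STANDARD, Engine as _};

    // Decode `target` and assert it matches `expected`, accepting the input both as
    // &str and as raw bytes, just as the removed compare_decode helper did.
    fn compare_decode(expected: &str, target: &str) {
        assert_eq!(
            expected,
            String::from_utf8(STANDARD.decode(target).unwrap()).unwrap()
        );
        assert_eq!(
            expected,
            String::from_utf8(STANDARD.decode(target.as_bytes()).unwrap()).unwrap()
        );
    }

    fn main() {
        compare_decode("alice", "YWxpY2U=");
    }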
							
								
								
									
129  third_party/rust/base64/tests/tests.rs  vendored
							|  | @ -1,18 +1,15 @@ | |||
| extern crate base64; | ||||
| extern crate rand; | ||||
| 
 | ||||
| use rand::{FromEntropy, Rng}; | ||||
| use rand::{Rng, SeedableRng}; | ||||
| 
 | ||||
| use base64::engine::{general_purpose::STANDARD, Engine}; | ||||
| use base64::*; | ||||
| 
 | ||||
| mod helpers; | ||||
| use self::helpers::*; | ||||
| use base64::engine::general_purpose::{GeneralPurpose, NO_PAD}; | ||||
| 
 | ||||
| // generate random contents of the specified length and test encode/decode roundtrip
 | ||||
| fn roundtrip_random( | ||||
| fn roundtrip_random<E: Engine>( | ||||
|     byte_buf: &mut Vec<u8>, | ||||
|     str_buf: &mut String, | ||||
|     config: Config, | ||||
|     engine: &E, | ||||
|     byte_len: usize, | ||||
|     approx_values_per_byte: u8, | ||||
|     max_rounds: u64, | ||||
|  | @ -30,8 +27,8 @@ fn roundtrip_random( | |||
|             byte_buf.push(r.gen::<u8>()); | ||||
|         } | ||||
| 
 | ||||
|         encode_config_buf(&byte_buf, config, str_buf); | ||||
|         decode_config_buf(&str_buf, config, &mut decode_buf).unwrap(); | ||||
|         engine.encode_string(&byte_buf, str_buf); | ||||
|         engine.decode_vec(&str_buf, &mut decode_buf).unwrap(); | ||||
| 
 | ||||
|         assert_eq!(byte_buf, &decode_buf); | ||||
|     } | ||||
|  | @ -52,17 +49,13 @@ fn calculate_number_of_rounds(byte_len: usize, approx_values_per_byte: u8, max: | |||
|     prod | ||||
| } | ||||
| 
 | ||||
| fn no_pad_config() -> Config { | ||||
|     Config::new(CharacterSet::Standard, false) | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn roundtrip_random_short_standard() { | ||||
|     let mut byte_buf: Vec<u8> = Vec::new(); | ||||
|     let mut str_buf = String::new(); | ||||
| 
 | ||||
|     for input_len in 0..40 { | ||||
|         roundtrip_random(&mut byte_buf, &mut str_buf, STANDARD, input_len, 4, 10000); | ||||
|         roundtrip_random(&mut byte_buf, &mut str_buf, &STANDARD, input_len, 4, 10000); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
|  | @ -72,7 +65,7 @@ fn roundtrip_random_with_fast_loop_standard() { | |||
|     let mut str_buf = String::new(); | ||||
| 
 | ||||
|     for input_len in 40..100 { | ||||
|         roundtrip_random(&mut byte_buf, &mut str_buf, STANDARD, input_len, 4, 1000); | ||||
|         roundtrip_random(&mut byte_buf, &mut str_buf, &STANDARD, input_len, 4, 1000); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
|  | @ -81,15 +74,9 @@ fn roundtrip_random_short_no_padding() { | |||
|     let mut byte_buf: Vec<u8> = Vec::new(); | ||||
|     let mut str_buf = String::new(); | ||||
| 
 | ||||
|     let engine = GeneralPurpose::new(&alphabet::STANDARD, NO_PAD); | ||||
|     for input_len in 0..40 { | ||||
|         roundtrip_random( | ||||
|             &mut byte_buf, | ||||
|             &mut str_buf, | ||||
|             no_pad_config(), | ||||
|             input_len, | ||||
|             4, | ||||
|             10000, | ||||
|         ); | ||||
|         roundtrip_random(&mut byte_buf, &mut str_buf, &engine, input_len, 4, 10000); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
|  | @ -98,15 +85,10 @@ fn roundtrip_random_no_padding() { | |||
|     let mut byte_buf: Vec<u8> = Vec::new(); | ||||
|     let mut str_buf = String::new(); | ||||
| 
 | ||||
|     let engine = GeneralPurpose::new(&alphabet::STANDARD, NO_PAD); | ||||
| 
 | ||||
|     for input_len in 40..100 { | ||||
|         roundtrip_random( | ||||
|             &mut byte_buf, | ||||
|             &mut str_buf, | ||||
|             no_pad_config(), | ||||
|             input_len, | ||||
|             4, | ||||
|             1000, | ||||
|         ); | ||||
|         roundtrip_random(&mut byte_buf, &mut str_buf, &engine, input_len, 4, 1000); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
|  | @ -120,13 +102,14 @@ fn roundtrip_decode_trailing_10_bytes() { | |||
|     // to handle that case.
 | ||||
| 
 | ||||
|     for num_quads in 0..25 { | ||||
|         let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect(); | ||||
|         let mut s: String = "ABCD".repeat(num_quads); | ||||
|         s.push_str("EFGHIJKLZg"); | ||||
| 
 | ||||
|         let decoded = decode(&s).unwrap(); | ||||
|         let engine = GeneralPurpose::new(&alphabet::STANDARD, NO_PAD); | ||||
|         let decoded = engine.decode(&s).unwrap(); | ||||
|         assert_eq!(num_quads * 3 + 7, decoded.len()); | ||||
| 
 | ||||
|         assert_eq!(s, encode_config(&decoded, STANDARD_NO_PAD)); | ||||
|         assert_eq!(s, engine.encode(&decoded)); | ||||
|     } | ||||
| } | ||||
| 
 | ||||
|  | @ -140,55 +123,39 @@ fn display_wrapper_matches_normal_encode() { | |||
|     bytes.push(255); | ||||
| 
 | ||||
|     assert_eq!( | ||||
|         encode(&bytes), | ||||
|         format!( | ||||
|             "{}", | ||||
|             base64::display::Base64Display::with_config(&bytes, STANDARD) | ||||
|         ) | ||||
|         STANDARD.encode(&bytes), | ||||
|         format!("{}", display::Base64Display::new(&bytes, &STANDARD)) | ||||
|     ); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn because_we_can() { | ||||
|     compare_decode("alice", "YWxpY2U="); | ||||
|     compare_decode("alice", &encode(b"alice")); | ||||
|     compare_decode("alice", &encode(&decode(&encode(b"alice")).unwrap())); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| fn encode_config_slice_can_use_inline_buffer() { | ||||
|     let mut buf: [u8; 22] = [0; 22]; | ||||
|     let mut larger_buf: [u8; 24] = [0; 24]; | ||||
|     let mut input: [u8; 16] = [0; 16]; | ||||
| 
 | ||||
|     let mut rng = rand::rngs::SmallRng::from_entropy(); | ||||
|     for elt in &mut input { | ||||
|         *elt = rng.gen(); | ||||
| fn encode_engine_slice_error_when_buffer_too_small() { | ||||
|     for num_triples in 1..100 { | ||||
|         let input = "AAA".repeat(num_triples); | ||||
|         let mut vec = vec![0; (num_triples - 1) * 4]; | ||||
|         assert_eq!( | ||||
|             EncodeSliceError::OutputSliceTooSmall, | ||||
|             STANDARD.encode_slice(&input, &mut vec).unwrap_err() | ||||
|         ); | ||||
|         vec.push(0); | ||||
|         assert_eq!( | ||||
|             EncodeSliceError::OutputSliceTooSmall, | ||||
|             STANDARD.encode_slice(&input, &mut vec).unwrap_err() | ||||
|         ); | ||||
|         vec.push(0); | ||||
|         assert_eq!( | ||||
|             EncodeSliceError::OutputSliceTooSmall, | ||||
|             STANDARD.encode_slice(&input, &mut vec).unwrap_err() | ||||
|         ); | ||||
|         vec.push(0); | ||||
|         assert_eq!( | ||||
|             EncodeSliceError::OutputSliceTooSmall, | ||||
|             STANDARD.encode_slice(&input, &mut vec).unwrap_err() | ||||
|         ); | ||||
|         vec.push(0); | ||||
|         assert_eq!( | ||||
|             num_triples * 4, | ||||
|             STANDARD.encode_slice(&input, &mut vec).unwrap() | ||||
|         ); | ||||
|     } | ||||
| 
 | ||||
|     assert_eq!(22, encode_config_slice(&input, STANDARD_NO_PAD, &mut buf)); | ||||
|     let decoded = decode_config(&buf, STANDARD_NO_PAD).unwrap(); | ||||
| 
 | ||||
|     assert_eq!(decoded, input); | ||||
| 
 | ||||
|     // let's try it again with padding
 | ||||
| 
 | ||||
|     assert_eq!(24, encode_config_slice(&input, STANDARD, &mut larger_buf)); | ||||
|     let decoded = decode_config(&buf, STANDARD).unwrap(); | ||||
| 
 | ||||
|     assert_eq!(decoded, input); | ||||
| } | ||||
| 
 | ||||
| #[test] | ||||
| #[should_panic(expected = "index 24 out of range for slice of length 22")] | ||||
| fn encode_config_slice_panics_when_buffer_too_small() { | ||||
|     let mut buf: [u8; 22] = [0; 22]; | ||||
|     let mut input: [u8; 16] = [0; 16]; | ||||
| 
 | ||||
|     let mut rng = rand::rngs::SmallRng::from_entropy(); | ||||
|     for elt in &mut input { | ||||
|         *elt = rng.gen(); | ||||
|     } | ||||
| 
 | ||||
|     encode_config_slice(&input, STANDARD, &mut buf); | ||||
| } | ||||
|  |  | |||