Bug 1877942 - neqo v0.7.0, r=necko-reviewers,glandium,supply-chain-reviewers,valentin
Differential Revision: https://phabricator.services.mozilla.com/D200461
parent 9f654f22ea
commit 4a12f611f8

215 changed files with 8650 additions and 3089 deletions
@@ -20,6 +20,11 @@ git = "https://github.com/chris-zen/coremidi.git"
 rev = "fc68464b5445caf111e41f643a2e69ccce0b4f83"
 replace-with = "vendored-sources"
 
+[source."git+https://github.com/cloudflare/quiche?rev=09ea4b244096a013071cfe2175bbf2945fb7f8d1"]
+git = "https://github.com/cloudflare/quiche"
+rev = "09ea4b244096a013071cfe2175bbf2945fb7f8d1"
+replace-with = "vendored-sources"
+
 [source."git+https://github.com/franziskuskiefer/cose-rust?rev=43c22248d136c8b38fe42ea709d08da6355cf04b"]
 git = "https://github.com/franziskuskiefer/cose-rust"
 rev = "43c22248d136c8b38fe42ea709d08da6355cf04b"
@@ -85,9 +90,9 @@ git = "https://github.com/mozilla/mp4parse-rust"
 rev = "a138e40ec1c603615873e524b5b22e11c0ec4820"
 replace-with = "vendored-sources"
 
-[source."git+https://github.com/mozilla/neqo?tag=v0.6.8"]
+[source."git+https://github.com/mozilla/neqo?tag=v0.7.0"]
 git = "https://github.com/mozilla/neqo"
-tag = "v0.6.8"
+tag = "v0.7.0"
 replace-with = "vendored-sources"
 
 [source."git+https://github.com/mozilla/uniffi-rs.git?rev=afb29ebdc1d9edf15021b1c5332fc9f285bbe13b"]
Cargo.lock (generated, 57 lines changed)
@@ -1519,6 +1519,26 @@ dependencies = [
  "packed_simd",
 ]
 
+[[package]]
+name = "enum-map"
+version = "2.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6866f3bfdf8207509a033af1a75a7b08abda06bbaaeae6669323fd5a097df2e9"
+dependencies = [
+ "enum-map-derive",
+]
+
+[[package]]
+name = "enum-map-derive"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f282cfdfe92516eb26c2af8589c274c7c17681f5ecc03c18255fe741c6aa64eb"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "enumset"
 version = "1.1.2"
@@ -3811,9 +3831,10 @@ dependencies = [
 
 [[package]]
 name = "neqo-common"
-version = "0.6.8"
-source = "git+https://github.com/mozilla/neqo?tag=v0.6.8#83735a88217a6b3a6a9d3cd5d9243040c5e41319"
+version = "0.7.0"
+source = "git+https://github.com/mozilla/neqo?tag=v0.7.0#9489511f7c82786f55bc9c713cddbff825507ed7"
 dependencies = [
+ "enum-map",
  "env_logger",
  "lazy_static",
  "log",
@@ -3824,10 +3845,10 @@ dependencies = [
 
 [[package]]
 name = "neqo-crypto"
-version = "0.6.8"
-source = "git+https://github.com/mozilla/neqo?tag=v0.6.8#83735a88217a6b3a6a9d3cd5d9243040c5e41319"
+version = "0.7.0"
+source = "git+https://github.com/mozilla/neqo?tag=v0.7.0#9489511f7c82786f55bc9c713cddbff825507ed7"
 dependencies = [
- "bindgen 0.64.999",
+ "bindgen 0.69.2",
  "log",
  "mozbuild",
  "neqo-common",
@@ -3838,8 +3859,8 @@ dependencies = [
 
 [[package]]
 name = "neqo-http3"
-version = "0.6.8"
-source = "git+https://github.com/mozilla/neqo?tag=v0.6.8#83735a88217a6b3a6a9d3cd5d9243040c5e41319"
+version = "0.7.0"
+source = "git+https://github.com/mozilla/neqo?tag=v0.7.0#9489511f7c82786f55bc9c713cddbff825507ed7"
 dependencies = [
  "enumset",
  "lazy_static",
@@ -3856,8 +3877,8 @@ dependencies = [
 
 [[package]]
 name = "neqo-qpack"
-version = "0.6.8"
-source = "git+https://github.com/mozilla/neqo?tag=v0.6.8#83735a88217a6b3a6a9d3cd5d9243040c5e41319"
+version = "0.7.0"
+source = "git+https://github.com/mozilla/neqo?tag=v0.7.0#9489511f7c82786f55bc9c713cddbff825507ed7"
 dependencies = [
  "lazy_static",
  "log",
@@ -3870,8 +3891,8 @@ dependencies = [
 
 [[package]]
 name = "neqo-transport"
-version = "0.6.8"
-source = "git+https://github.com/mozilla/neqo?tag=v0.6.8#83735a88217a6b3a6a9d3cd5d9243040c5e41319"
+version = "0.7.0"
+source = "git+https://github.com/mozilla/neqo?tag=v0.7.0#9489511f7c82786f55bc9c713cddbff825507ed7"
 dependencies = [
  "indexmap 1.9.3",
  "lazy_static",
@@ -4517,14 +4538,13 @@ dependencies = [
 
 [[package]]
 name = "qlog"
-version = "0.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "321df7a3199d152be256a416096136191e88b7716f1e2e4c8c05b9f77ffb648b"
+version = "0.11.0"
+source = "git+https://github.com/cloudflare/quiche?rev=09ea4b244096a013071cfe2175bbf2945fb7f8d1#09ea4b244096a013071cfe2175bbf2945fb7f8d1"
 dependencies = [
  "serde",
  "serde_derive",
  "serde_json",
- "serde_with 1.999.999",
+ "serde_with",
  "smallvec",
 ]
 
@@ -5014,13 +5034,6 @@ dependencies = [
  "serde",
 ]
 
-[[package]]
-name = "serde_with"
-version = "1.999.999"
-dependencies = [
- "serde_with 3.0.0",
-]
-
 [[package]]
 name = "serde_with"
 version = "3.0.0"
@@ -124,9 +124,6 @@ memmap2 = { path = "build/rust/memmap2" }
 # Patch cfg-if 0.1 to 1.0
 cfg-if = { path = "build/rust/cfg-if" }
 
-# Patch serde_with 1.0 to 3.0
-serde_with = { path = "build/rust/serde_with" }
-
 # Patch redox_users to an empty crate
 redox_users = { path = "build/rust/redox_users" }
 
@@ -1,13 +0,0 @@
-[package]
-name = "serde_with"
-version = "1.999.999"
-edition = "2018"
-license = "MPL-2.0"
-
-[lib]
-path = "lib.rs"
-
-[dependencies.serde_with]
-version = "3"
-default-features = false
-features = ["macros"]
@@ -1,5 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-pub use serde_with::*;
@@ -2200,6 +2200,7 @@ void Http3Session::SetSecInfo() {
 // 0x00-0xff. (https://tools.ietf.org/html/draft-ietf-quic-tls_34#section-4.8)
 // Since telemetry does not allow more than 100 bucket, we use three diffrent
 // keys to map all alert codes.
+const uint32_t HTTP3_TELEMETRY_TRANSPORT_INTERNAL_ERROR = 15;
 const uint32_t HTTP3_TELEMETRY_TRANSPORT_END = 16;
 const uint32_t HTTP3_TELEMETRY_TRANSPORT_UNKNOWN = 17;
 const uint32_t HTTP3_TELEMETRY_TRANSPORT_CRYPTO_UNKNOWN = 18;
@@ -2281,7 +2282,7 @@ void Http3Session::CloseConnectionTelemetry(CloseError& aError, bool aClosing) {
   switch (aError.tag) {
     case CloseError::Tag::TransportInternalError:
       key = "transport_internal"_ns;
-      value = aError.transport_internal_error._0;
+      value = HTTP3_TELEMETRY_TRANSPORT_INTERNAL_ERROR;
       break;
     case CloseError::Tag::TransportInternalErrorOther:
      key = "transport_other"_ns;
@@ -9,16 +9,16 @@ license = "MPL-2.0"
 name = "neqo_glue"
 
 [dependencies]
-neqo-http3 = { tag = "v0.6.8", git = "https://github.com/mozilla/neqo" }
-neqo-transport = { tag = "v0.6.8", git = "https://github.com/mozilla/neqo" }
-neqo-common = { tag = "v0.6.8", git = "https://github.com/mozilla/neqo" }
-neqo-qpack = { tag = "v0.6.8", git = "https://github.com/mozilla/neqo" }
+neqo-http3 = { tag = "v0.7.0", git = "https://github.com/mozilla/neqo" }
+neqo-transport = { tag = "v0.7.0", git = "https://github.com/mozilla/neqo" }
+neqo-common = { tag = "v0.7.0", git = "https://github.com/mozilla/neqo" }
+neqo-qpack = { tag = "v0.7.0", git = "https://github.com/mozilla/neqo" }
 nserror = { path = "../../../xpcom/rust/nserror" }
 nsstring = { path = "../../../xpcom/rust/nsstring" }
 xpcom = { path = "../../../xpcom/rust/xpcom" }
 thin-vec = { version = "0.2.1", features = ["gecko-ffi"] }
 log = "0.4.0"
-qlog = "0.9.0"
+qlog = { git = "https://github.com/cloudflare/quiche", rev = "09ea4b244096a013071cfe2175bbf2945fb7f8d1" }
 libc = "0.2.0"
 static_prefs = { path = "../../../modules/libpref/init/static_prefs"}
 uuid = { version = "1.0", features = ["v4"] }
@@ -27,7 +27,7 @@ uuid = { version = "1.0", features = ["v4"] }
 winapi = {version = "0.3", features = ["ws2def"] }
 
 [dependencies.neqo-crypto]
-tag = "v0.6.8"
+tag = "v0.7.0"
 git = "https://github.com/mozilla/neqo"
 default-features = false
 features = ["gecko"]
@@ -5,7 +5,7 @@
 #[cfg(not(windows))]
 use libc::{AF_INET, AF_INET6};
 use neqo_common::event::Provider;
-use neqo_common::{self as common, qlog::NeqoQlog, qwarn, Datagram, Header, Role};
+use neqo_common::{self as common, qlog::NeqoQlog, qwarn, Datagram, Header, IpTos, Role};
 use neqo_crypto::{init, PRErrorCode};
 use neqo_http3::{
     features::extended_connect::SessionCloseReason, Error as Http3Error, Http3Client,
@@ -318,10 +318,15 @@ pub unsafe extern "C" fn neqo_http3conn_process_input(
         Ok(addr) => addr,
         Err(result) => return result,
     };
-    conn.conn.process_input(
-        Datagram::new(remote, conn.local_addr, (*packet).to_vec()),
-        get_current_or_last_output_time(&conn.last_output_time),
+    let d = Datagram::new(
+        remote,
+        conn.local_addr,
+        IpTos::default(),
+        None,
+        (*packet).to_vec(),
     );
+    conn.conn
+        .process_input(&d, get_current_or_last_output_time(&conn.last_output_time));
     return NS_OK;
 }
 
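As the hunk above shows, Datagram::new in neqo 0.7.0 takes two extra arguments: an IpTos marking and an optional value that this patch always passes as None. A minimal sketch of the new call shape, with placeholder addresses and payload (nothing here beyond what the hunk itself uses):

    use std::net::SocketAddr;

    use neqo_common::{Datagram, IpTos};

    // Sketch only: `remote`, `local`, and `packet` are placeholders, not values
    // from this patch. IpTos::default() and None mirror the hunk above.
    fn build_datagram(remote: SocketAddr, local: SocketAddr, packet: &[u8]) -> Datagram {
        Datagram::new(remote, local, IpTos::default(), None, packet.to_vec())
    }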
@@ -596,7 +601,7 @@ fn crypto_error_code(err: neqo_crypto::Error) -> u64 {
 // number.
 #[repr(C)]
 pub enum CloseError {
-    TransportInternalError(u16),
+    TransportInternalError,
     TransportInternalErrorOther(u16),
     TransportError(u64),
     CryptoError(u64),
@@ -610,7 +615,7 @@ pub enum CloseError {
 impl From<TransportError> for CloseError {
     fn from(error: TransportError) -> CloseError {
         match error {
-            TransportError::InternalError(c) => CloseError::TransportInternalError(c),
+            TransportError::InternalError => CloseError::TransportInternalError,
             TransportError::CryptoError(neqo_crypto::Error::EchRetry(_)) => CloseError::EchRetry,
             TransportError::CryptoError(c) => CloseError::CryptoError(crypto_error_code(c)),
             TransportError::CryptoAlert(c) => CloseError::CryptoAlert(c),
@@ -629,7 +634,8 @@ impl From<TransportError> for CloseError {
             | TransportError::InvalidToken
             | TransportError::KeysExhausted
             | TransportError::ApplicationError
-            | TransportError::NoAvailablePath => CloseError::TransportError(error.code()),
+            | TransportError::NoAvailablePath
+            | TransportError::CryptoBufferExceeded => CloseError::TransportError(error.code()),
             TransportError::EchRetry(_) => CloseError::EchRetry,
             TransportError::AckedUnsentPacket => CloseError::TransportInternalErrorOther(0),
             TransportError::ConnectionIdLimitExceeded => CloseError::TransportInternalErrorOther(1),
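Because TransportError::InternalError no longer carries an error code in neqo 0.7.0, the glue-side CloseError drops its payload as well, and the C++ telemetry (see the Http3Session hunk above) substitutes a fixed bucket. A hedged sketch of how the updated conversion behaves (it relies on the CloseError and TransportError types patched above being in scope; nothing new is defined here):

    // Sketch: assumes the From<TransportError> impl from the hunk above.
    fn internal_error_close() -> CloseError {
        // 0.7.0: InternalError is a unit variant, so no code is forwarded.
        CloseError::from(TransportError::InternalError)
    }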
@@ -6,10 +6,10 @@ edition = "2018"
 license = "MPL-2.0"
 
 [dependencies]
-neqo-transport = { tag = "v0.6.8", git = "https://github.com/mozilla/neqo" }
-neqo-common = { tag = "v0.6.8", git = "https://github.com/mozilla/neqo" }
-neqo-http3 = { tag = "v0.6.8", git = "https://github.com/mozilla/neqo" }
-neqo-qpack = { tag = "v0.6.8", git = "https://github.com/mozilla/neqo" }
+neqo-transport = { tag = "v0.7.0", git = "https://github.com/mozilla/neqo" }
+neqo-common = { tag = "v0.7.0", git = "https://github.com/mozilla/neqo" }
+neqo-http3 = { tag = "v0.7.0", git = "https://github.com/mozilla/neqo" }
+neqo-qpack = { tag = "v0.7.0", git = "https://github.com/mozilla/neqo" }
 mio = "0.6.17"
 mio-extras = "2.0.5"
 log = "0.4.0"
@@ -21,7 +21,7 @@ tokio = { version = "1", features = ["rt-multi-thread"] }
 mozilla-central-workspace-hack = { version = "0.1", features = ["http3server"], optional = true }
 
 [dependencies.neqo-crypto]
-tag = "v0.6.8"
+tag = "v0.7.0"
 git = "https://github.com/mozilla/neqo"
 default-features = false
 features = ["gecko"]
@@ -7,7 +7,7 @@
 #![deny(warnings)]
 
 use base64::prelude::*;
-use neqo_common::{event::Provider, qdebug, qinfo, qtrace, Datagram, Header};
+use neqo_common::{event::Provider, qdebug, qinfo, qtrace, Datagram, Header, IpTos};
 use neqo_crypto::{generate_ech_keys, init_db, AllowZeroRtt, AntiReplay};
 use neqo_http3::{
     Error, Http3OrWebTransportStream, Http3Parameters, Http3Server, Http3ServerEvent,
@@ -193,7 +193,7 @@ impl Http3TestServer {
 
 impl HttpServer for Http3TestServer {
     fn process(&mut self, dgram: Option<Datagram>) -> Output {
-        self.server.process(dgram, Instant::now())
+        self.server.process(dgram.as_ref(), Instant::now())
     }
 
     fn process_events(&mut self) {
@@ -633,7 +633,7 @@ impl HttpServer for Http3TestServer {
 
 impl HttpServer for Server {
     fn process(&mut self, dgram: Option<Datagram>) -> Output {
-        self.process(dgram, Instant::now())
+        self.process(dgram.as_ref(), Instant::now())
     }
 
     fn process_events(&mut self) {
@@ -874,7 +874,7 @@ impl Http3ProxyServer {
 
 impl HttpServer for Http3ProxyServer {
     fn process(&mut self, dgram: Option<Datagram>) -> Output {
-        self.server.process(dgram, Instant::now())
+        self.server.process(dgram.as_ref(), Instant::now())
     }
 
     fn process_events(&mut self) {
@@ -1063,7 +1063,13 @@ fn read_dgram(
         eprintln!("zero length datagram received?");
         Ok(None)
     } else {
-        Ok(Some(Datagram::new(remote_addr, *local_address, &buf[..sz])))
+        Ok(Some(Datagram::new(
+            remote_addr,
+            *local_address,
+            IpTos::default(),
+            None,
+            &buf[..sz],
+        )))
     }
 }
 
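In neqo 0.7.0 the server-side process() entry points take the incoming datagram by reference, which is why every call site above now passes dgram.as_ref(). A hedged sketch of the adapted call, assuming the same imports this file already uses (Output comes from neqo-transport):

    use std::time::Instant;

    use neqo_common::Datagram;
    use neqo_http3::Http3Server;
    use neqo_transport::Output;

    // Sketch: adapt an owned Option<Datagram> to the Option<&Datagram> that the
    // 0.7.0 server API expects, exactly as the hunks above do.
    fn drive(server: &mut Http3Server, dgram: Option<Datagram>) -> Output {
        server.process(dgram.as_ref(), Instant::now())
    }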
@@ -1530,6 +1530,16 @@ who = "Mike Hommey <mh+mozilla@glandium.org>"
 criteria = "safe-to-deploy"
 delta = "0.8.31 -> 0.8.32"
 
+[[audits.enum-map]]
+who = "Kershaw Chang <kershaw@mozilla.com>"
+criteria = "safe-to-deploy"
+version = "2.7.3"
+
+[[audits.enum-map-derive]]
+who = "Kershaw Chang <kershaw@mozilla.com>"
+criteria = "safe-to-deploy"
+version = "0.17.0"
+
 [[audits.enum-primitive-derive]]
 who = "Gabriele Svelto <gsvelto@mozilla.com>"
 criteria = "safe-to-deploy"
@@ -3066,6 +3076,17 @@ who = "Kershaw Chang <kershaw@mozilla.com>"
 criteria = "safe-to-deploy"
 version = "0.9.0"
 
+[[audits.qlog]]
+who = "Kershaw Chang <kershaw@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "0.9.0 -> 0.11.0"
+
+[[audits.qlog]]
+who = "Kershaw Chang <kershaw@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "0.11.0 -> 0.11.0@git:09ea4b244096a013071cfe2175bbf2945fb7f8d1"
+importable = false
+
 [[audits.quote]]
 who = "Nika Layzell <nika@thelayzells.com>"
 criteria = "safe-to-deploy"
@@ -165,6 +165,10 @@ notes = "This is a first-party crate which is entirely unrelated to the crates.i
 audit-as-crates-io = true
 notes = "This is a first-party crate which is also published to crates.io, but we should publish audits for it for the benefit of the ecosystem."
 
+[policy.qlog]
+audit-as-crates-io = true
+notes = "Use this revision (09ea4b244096a013071cfe2175bbf2945fb7f8d1) of qlog temporarily."
+
 [policy.rure]
 audit-as-crates-io = true
 notes = "Identical to upstream, but with cdylib and staticlib targets disabled to avoid unnecessary build artifacts and linker errors."
third_party/rust/enum-map-derive/.cargo-checksum.json (vendored, new file, 1 line)

{"files":{"Cargo.toml":"ebe51a5658f0e34f3f31cd0f4af2a90affbc5e562b6d4695fab1ae31348c2590","LICENSES/Apache-2.0.txt":"074e6e32c86a4c0ef8b3ed25b721ca23aca83df277cd88106ef7177c354615ff","LICENSES/CC0-1.0.txt":"a2010f343487d3f7618affe54f789f5487602331c0a8d03f49e9a7c547cf0499","LICENSES/MIT.txt":"b85dcd3e453d05982552c52b5fc9e0bdd6d23c6f8e844b984a88af32570b0cc0","README.md":"9ba5f04156a8fb4aeec6af6db17b6716c18dc73e1db59300340591f246f4558e","src/derive_enum.rs":"fc478b32e580dabfa31f71db2958faf05125e58b17bf420a881930d96e76ce11","src/derive_struct.rs":"ec7e4d1f44925713098713c8c77af87142f499999a812687d52306cc9255224f","src/lib.rs":"531dd501eced4ebf2733d525417adc9ef724e1c117ffb20be7453a79200c2988"},"package":"f282cfdfe92516eb26c2af8589c274c7c17681f5ecc03c18255fe741c6aa64eb"}
third_party/rust/enum-map-derive/Cargo.toml (vendored, new file, 54 lines)

# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2021"
rust-version = "1.61"
name = "enum-map-derive"
version = "0.17.0"
authors = ["Kamila Borowska <kamila@borowska.pw>"]
description = "Macros 1.1 implementation of #[derive(Enum)]"
readme = "README.md"
keywords = [
    "data-structure",
    "no_std",
    "enum",
]
categories = [
    "data-structures",
    "no-std",
]
license = "MIT OR Apache-2.0"
repository = "https://codeberg.org/xfix/enum-map"

[lib]
proc-macro = true

[dependencies.proc-macro2]
version = "1.0.60"

[dependencies.quote]
version = "1.0.7"

[dependencies.syn]
version = "2.0.0"
features = [
    "derive",
    "parsing",
    "printing",
    "proc-macro",
]
default-features = false

[dev-dependencies]

[badges.maintenance]
status = "passively-maintained"
third_party/rust/enum-map-derive/LICENSES/Apache-2.0.txt (vendored, new file, 73 lines: the standard Apache License 2.0 text)
third_party/rust/enum-map-derive/LICENSES/CC0-1.0.txt (vendored, new file, 121 lines: the standard CC0 1.0 Universal text)
third_party/rust/enum-map-derive/LICENSES/MIT.txt (vendored, new file, 9 lines: the standard MIT License text)
third_party/rust/enum-map-derive/README.md (vendored, new file, 10 lines)

<!--
SPDX-FileCopyrightText: 2022 Kamila Borowska <kamila@borowska.pw>

SPDX-License-Identifier: MIT OR Apache-2.0
-->

# enum-map-derive

This is a derive macro for `enum-map`. You don't need to specify it
in dependencies as `enum-map` crate re-exports it.
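The README above is terse, so here is a brief, hedged usage sketch of the enum-map crate that this derive backs (the Direction type is illustrative, not something from neqo or this patch):

    use enum_map::{Enum, EnumMap};

    // #[derive(Enum)] is what enum-map-derive implements; EnumMap can then be
    // indexed directly by variant.
    #[derive(Debug, Enum)]
    enum Direction {
        North,
        East,
        South,
        West,
    }

    fn main() {
        let mut counts: EnumMap<Direction, u32> = EnumMap::default();
        counts[Direction::East] += 1;
        assert_eq!(counts[Direction::East], 1);
        assert_eq!(counts[Direction::West], 0);
    }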
third_party/rust/enum-map-derive/src/derive_enum.rs (vendored, new file, 216 lines)

// SPDX-FileCopyrightText: 2021 - 2023 Kamila Borowska <kamila@borowska.pw>
// SPDX-FileCopyrightText: 2021 Bruno Corrêa Zimmermann <brunoczim@gmail.com>
//
// SPDX-License-Identifier: MIT OR Apache-2.0

use crate::type_length;
use proc_macro2::TokenStream;
use quote::{format_ident, quote, ToTokens};
use syn::{DataEnum, Fields, FieldsNamed, FieldsUnnamed, Ident, Variant};

pub fn generate(name: Ident, data_enum: DataEnum) -> TokenStream {
    let mut generator = EnumGenerator::empty();
    for variant in &data_enum.variants {
        generator.handle_variant(variant);
    }
    generator.finish(&name)
}

#[derive(Debug)]
struct Length {
    units: usize,
    opaque: TokenStream,
}

impl ToTokens for Length {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        let Self { units, opaque } = self;
        tokens.extend(quote! { (#units + #opaque) });
    }
}

/// Total length is the sum of each variant's length. To represent a variant, its number is added to
/// the sum of previous variant lengths.
#[derive(Debug)]
struct EnumGenerator {
    length: Length,
    from_usize_arms: TokenStream,
    into_usize_arms: TokenStream,
}

impl EnumGenerator {
    fn empty() -> Self {
        Self {
            length: Length {
                units: 0,
                opaque: quote! { 0 },
            },
            from_usize_arms: quote! {},
            into_usize_arms: quote! {},
        }
    }

    fn finish(&self, name: &Ident) -> TokenStream {
        let Self {
            length,
            from_usize_arms,
            into_usize_arms,
        } = self;

        quote! {
            #[automatically_derived]
            impl ::enum_map::Enum for #name {
                const LENGTH: ::enum_map::usize = #length;

                #[inline]
                fn from_usize(value: ::enum_map::usize) -> Self {
                    #from_usize_arms {
                        ::enum_map::out_of_bounds()
                    }
                }

                #[inline]
                fn into_usize(self) -> ::enum_map::usize {
                    match self {
                        #into_usize_arms
                    }
                }
            }

            #[automatically_derived]
            impl<V> ::enum_map::EnumArray<V> for #name {
                type Array = [V; #length];
            }
        }
    }

    fn handle_variant(&mut self, variant: &Variant) {
        match &variant.fields {
            Fields::Unit => self.handle_unit_variant(&variant.ident),
            Fields::Unnamed(fields) => self.handle_unnamed_variant(&variant.ident, fields),
            Fields::Named(fields) => self.handle_named_variant(&variant.ident, fields),
        }
    }

    /// Becomes simply `1` in counting, since this is the size of the unit.
    fn handle_unit_variant(&mut self, variant: &Ident) {
        let Self {
            length,
            from_usize_arms,
            into_usize_arms,
        } = self;
        *into_usize_arms = quote! { #into_usize_arms Self::#variant => #length, };
        *from_usize_arms = quote! {
            #from_usize_arms if value == #length {
                Self::#variant
            } else
        };
        self.length.units += 1;
    }

    /// Its size is the product of the sizes of its members. To represent this variant, one can
    /// think of this as representing a little-endian number. First member is simply added, but
    /// next members are multiplied before being added.
    fn handle_unnamed_variant(&mut self, variant: &Ident, fields: &FieldsUnnamed) {
        let Self {
            length,
            from_usize_arms,
            into_usize_arms,
        } = self;
        let mut expr_into = quote! { #length };
        let mut fields_length = quote! { 1usize };
        let mut params_from = quote! {};
        for (i, field) in fields.unnamed.iter().enumerate() {
            let ident = format_ident!("p{}", i);
            let ty = &field.ty;
            let field_length = type_length(ty);

            expr_into = quote! {
                (#expr_into + #fields_length * ::enum_map::Enum::into_usize(#ident))
            };

            params_from = quote! {
                #params_from <#ty as ::enum_map::Enum>::from_usize(
                    (value - #length) / #fields_length % #field_length
                ),
            };

            fields_length = quote! { (#fields_length * #field_length) };
        }

        *length = Length {
            units: 0,
            opaque: quote! { (#length + #fields_length) },
        };

        let from_arms = &from_usize_arms;
        *from_usize_arms = quote! {
            #from_arms if value < #length {
                Self::#variant(#params_from)
            } else
        };

        let mut params_into = quote! {};
        for i in 0..fields.unnamed.len() {
            let ident = format_ident!("p{}", i);
            params_into = quote! { #params_into #ident, };
        }

        *into_usize_arms = quote! {
            #into_usize_arms Self::#variant(#params_into) => #expr_into,
        };
    }

    /// Its size is the product of the sizes of its members. To represent this variant, one can
    /// think of this as representing a little-endian number. First member is simply added, but
    /// next members are multiplied before being added.
    fn handle_named_variant(&mut self, variant: &Ident, fields: &FieldsNamed) {
        let Self {
            length,
            from_usize_arms,
            into_usize_arms,
        } = self;
        let mut expr_into = quote! { #length };
        let mut fields_length = quote! { 1usize };
        let mut params_from = quote! {};

        for field in fields.named.iter() {
            let ident = field.ident.as_ref().unwrap();
            let ty = &field.ty;
            let field_length = type_length(ty);

            expr_into = quote! {
                (#expr_into + #fields_length * ::enum_map::Enum::into_usize(#ident))
            };

            params_from = quote! {
                #params_from #ident: <#ty as ::enum_map::Enum>::from_usize(
                    (value - #length) / #fields_length % #field_length
                ),
            };

            fields_length = quote! { (#fields_length * #field_length) };
        }

        *length = Length {
            units: 0,
            opaque: quote! { (#length + #fields_length) },
        };

        *from_usize_arms = quote! {
            #from_usize_arms if value < #length {
                Self::#variant { #params_from }
            } else
        };

        let mut params_into = quote! {};
        for field in fields.named.iter() {
            let ident = field.ident.as_ref().unwrap();
            params_into = quote! { #params_into #ident, };
        }

        *into_usize_arms = quote! {
            #into_usize_arms Self::#variant { #params_into } => #expr_into,
        };
    }
}
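The doc comments in derive_enum.rs describe a mixed-radix encoding: each variant claims a block of indices whose size is the product of its field lengths, offset by the combined length of the preceding variants. A hedged worked example using the published enum-map derive (the enum E is illustrative, not taken from the crate's own tests):

    use enum_map::Enum;

    #[derive(Enum)]
    enum E {
        A,
        B(bool),
        C { x: bool, y: bool },
    }

    fn main() {
        // LENGTH = 1 (A) + 2 (B) + 2*2 (C) = 7, per the scheme described above.
        assert_eq!(<E as Enum>::LENGTH, 7);
        assert_eq!(E::A.into_usize(), 0);
        assert_eq!(E::B(true).into_usize(), 2); // offset 1 + index 1
        assert_eq!(E::C { x: true, y: true }.into_usize(), 6); // offset 3 + 1 + 2*1
    }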
129
third_party/rust/enum-map-derive/src/derive_struct.rs
vendored
Normal file
129
third_party/rust/enum-map-derive/src/derive_struct.rs
vendored
Normal file
|
|
@ -0,0 +1,129 @@
|
||||||
|
// SPDX-FileCopyrightText: 2021 - 2022 Kamila Borowska <kamila@borowska.pw>
|
||||||
|
// SPDX-FileCopyrightText: 2021 Bruno Corrêa Zimmermann <brunoczim@gmail.com>
|
||||||
|
//
|
||||||
|
// SPDX-License-Identifier: MIT OR Apache-2.0
|
||||||
|
|
||||||
|
use crate::type_length;
|
||||||
|
use proc_macro2::TokenStream;
|
||||||
|
use quote::quote;
|
||||||
|
use syn::{DataStruct, Fields, FieldsNamed, FieldsUnnamed, Ident, Index};
|
||||||
|
|
||||||
|
pub fn generate(name: Ident, data_struct: DataStruct) -> TokenStream {
|
||||||
|
StructGenerator::from_fields(&data_struct.fields).finish(&name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Total length is the product of each member's length. To represent a struct, one can
|
||||||
|
/// think of this as representing a little-endian number. First member is simply added, but
|
||||||
|
/// next members are multiplied before being added.
|
||||||
|
#[derive(Debug)]
|
||||||
|
struct StructGenerator {
|
||||||
|
length: TokenStream,
|
||||||
|
from_usize: TokenStream,
|
||||||
|
into_usize: TokenStream,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl StructGenerator {
|
||||||
|
fn from_fields(fields: &Fields) -> Self {
|
||||||
|
match fields {
|
||||||
|
Fields::Unit => Self::from_unit_fields(),
|
||||||
|
Fields::Unnamed(fields_data) => Self::from_unnamed_fields(fields_data),
|
||||||
|
Fields::Named(fields_data) => Self::from_named_fields(fields_data),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_unit_fields() -> Self {
|
||||||
|
Self {
|
||||||
|
length: quote! { 1usize },
|
||||||
|
from_usize: quote! { Self },
|
||||||
|
into_usize: quote! { 0usize },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_unnamed_fields(fields: &FieldsUnnamed) -> Self {
|
||||||
|
let mut params_from = quote! {};
|
||||||
|
let mut into_usize = quote! { 0usize };
|
||||||
|
let mut length = quote! { 1usize };
|
||||||
|
for (i, field) in fields.unnamed.iter().enumerate() {
|
||||||
|
let ty = &field.ty;
|
||||||
|
let index_ident = Index::from(i);
|
||||||
|
let field_length = type_length(ty);
|
||||||
|
|
||||||
|
into_usize = quote! {
|
||||||
|
(#into_usize + #length * ::enum_map::Enum::into_usize(self.#index_ident))
|
||||||
|
};
|
||||||
|
|
||||||
|
params_from = quote! {
|
||||||
|
#params_from <#ty as ::enum_map::Enum>::from_usize(
|
||||||
|
value / #length % #field_length
|
||||||
|
),
|
||||||
|
};
|
||||||
|
|
||||||
|
length = quote! { (#length * #field_length) };
|
||||||
|
}
|
||||||
|
|
||||||
|
let from_usize = quote! { Self(#params_from) };
|
||||||
|
Self {
|
||||||
|
length,
|
||||||
|
from_usize,
|
||||||
|
into_usize,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_named_fields(fields: &FieldsNamed) -> Self {
|
||||||
|
let mut params_from = quote! {};
|
||||||
|
let mut into_usize = quote! { 0usize };
|
||||||
|
let mut length = quote! { 1usize };
|
||||||
|
for field in fields.named.iter() {
|
||||||
|
let ty = &field.ty;
|
||||||
|
let ident = field.ident.as_ref().unwrap();
|
||||||
|
let field_length = type_length(ty);
|
||||||
|
|
||||||
|
into_usize = quote! {
|
||||||
|
(#into_usize + #length * ::enum_map::Enum::into_usize(self.#ident))
|
||||||
|
};
|
||||||
|
|
||||||
|
params_from = quote! {
|
||||||
|
#params_from #ident: <#ty as ::enum_map::Enum>::from_usize(
|
||||||
|
value / #length % #field_length
|
||||||
|
),
|
||||||
|
};
|
||||||
|
|
||||||
|
length = quote! { (#field_length * #length) };
|
||||||
|
}
|
||||||
|
|
||||||
|
let from_usize = quote! { Self { #params_from } };
|
||||||
|
Self {
|
||||||
|
length,
|
||||||
|
from_usize,
|
||||||
|
into_usize,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn finish(&self, name: &Ident) -> TokenStream {
|
||||||
|
let length = &self.length;
|
||||||
|
let from_usize = &self.from_usize;
|
||||||
|
let into_usize = &self.into_usize;
|
||||||
|
|
||||||
|
quote! {
|
||||||
|
#[automatically_derived]
|
||||||
|
impl ::enum_map::Enum for #name {
|
||||||
|
const LENGTH: ::enum_map::usize = #length;
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn from_usize(value: ::enum_map::usize) -> Self {
|
||||||
|
#from_usize
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn into_usize(self) -> ::enum_map::usize {
|
||||||
|
#into_usize
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[automatically_derived]
|
||||||
|
impl<V> ::enum_map::EnumArray<V> for #name {
|
||||||
|
type Array = [V; #length];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
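Because the struct generator above handles product types, `#[derive(Enum)]` can also key a map by a struct. A small usage sketch, assuming `enum-map` 2.x as a dependency; `Speed` and `Key` are illustrative names rather than anything from this diff:

```rust
use enum_map::{Enum, EnumMap};

#[derive(Enum)]
enum Speed {
    Slow,
    Fast,
}

// 2 (bool) * 2 (Speed) = 4 possible keys, laid out as described above.
#[derive(Enum)]
struct Key(bool, Speed);

fn main() {
    let mut map: EnumMap<Key, u32> = EnumMap::default();
    map[Key(true, Speed::Fast)] = 7;

    assert_eq!(Key::LENGTH, 4);
    assert_eq!(Key(true, Speed::Fast).into_usize(), 3); // 1 + 2 * 1
    assert_eq!(map[Key(true, Speed::Fast)], 7);
    assert_eq!(map[Key(false, Speed::Slow)], 0);
}
```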
178  third_party/rust/enum-map-derive/src/lib.rs  vendored  Normal file
@@ -0,0 +1,178 @@
|
||||||
|
// SPDX-FileCopyrightText: 2017 - 2022 Kamila Borowska <kamila@borowska.pw>
|
||||||
|
// SPDX-FileCopyrightText: 2018 hcpl <hcpl.prog@gmail.com>
|
||||||
|
// SPDX-FileCopyrightText: 2019 mara <vmedea@protonmail.com>
|
||||||
|
// SPDX-FileCopyrightText: 2021 Bruno Corrêa Zimmermann <brunoczim@gmail.com>
|
||||||
|
// SPDX-FileCopyrightText: 2021 Dietrich <dietrich@teilgedanken.de>
|
||||||
|
//
|
||||||
|
// SPDX-License-Identifier: MIT OR Apache-2.0
|
||||||
|
|
||||||
|
//! Procedural macro implementing `#[derive(Enum)]`
|
||||||
|
//!
|
||||||
|
//! This is supposed to be used with the `enum-map` crate, which provides the
|
||||||
|
//! actual usage documentation.
|
||||||
|
|
||||||
|
mod derive_enum;
|
||||||
|
mod derive_struct;
|
||||||
|
|
||||||
|
use proc_macro2::TokenStream;
|
||||||
|
use quote::quote;
|
||||||
|
use syn::{Data, DeriveInput, Type};
|
||||||
|
|
||||||
|
/// Derive macro generating an implementation of trait `Enum`.
|
||||||
|
///
|
||||||
|
/// When using a derive, enum maps are maintained in the order in which
|
||||||
|
/// enum variants are declared. This is reflected in the value returned
|
||||||
|
/// by `Enum::into_usize`, by iterators over an enum map, as well as by the
|
||||||
|
/// `EnumMap::as_slice` method.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ## Enums Without Payload
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::Enum;
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, Debug, PartialEq, Eq)]
|
||||||
|
/// enum A {
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// D,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// assert_eq!(A::B.into_usize(), 0);
|
||||||
|
/// assert_eq!(A::C.into_usize(), 1);
|
||||||
|
/// assert_eq!(A::D.into_usize(), 2);
|
||||||
|
///
|
||||||
|
/// assert_eq!(A::from_usize(0), A::B);
|
||||||
|
/// assert_eq!(A::from_usize(1), A::C);
|
||||||
|
/// assert_eq!(A::from_usize(2), A::D);
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// ## Enums With Payload
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::Enum;
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, Debug, PartialEq, Eq)]
|
||||||
|
/// enum A {
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// D,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, Debug, PartialEq, Eq)]
|
||||||
|
/// enum X {
|
||||||
|
/// Y,
|
||||||
|
/// Z,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, Debug, PartialEq, Eq)]
|
||||||
|
/// enum Foo {
|
||||||
|
/// Bar(bool, A),
|
||||||
|
/// Empty,
|
||||||
|
/// Baz { fa: A, fx: X },
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// assert_eq!(Foo::Bar(false, A::B).into_usize(), 0);
|
||||||
|
/// assert_eq!(Foo::Bar(false, A::D).into_usize(), 4);
|
||||||
|
/// assert_eq!(Foo::Bar(true, A::B).into_usize(), 1);
|
||||||
|
/// assert_eq!(Foo::Bar(true, A::C).into_usize(), 3);
|
||||||
|
/// assert_eq!(Foo::Empty.into_usize(), 6);
|
||||||
|
/// assert_eq!(Foo::Baz { fa: A::B, fx: X::Y }.into_usize(), 7);
|
||||||
|
/// assert_eq!(Foo::Baz { fa: A::B, fx: X::Z }.into_usize(), 10);
|
||||||
|
/// assert_eq!(Foo::Baz { fa: A::D, fx: X::Y }.into_usize(), 9);
|
||||||
|
///
|
||||||
|
/// assert_eq!(Foo::from_usize(0), Foo::Bar(false, A::B));
|
||||||
|
/// assert_eq!(Foo::from_usize(4), Foo::Bar(false, A::D));
|
||||||
|
/// assert_eq!(Foo::from_usize(1), Foo::Bar(true, A::B));
|
||||||
|
/// assert_eq!(Foo::from_usize(3), Foo::Bar(true, A::C));
|
||||||
|
/// assert_eq!(Foo::from_usize(6), Foo::Empty);
|
||||||
|
/// assert_eq!(Foo::from_usize(7), Foo::Baz { fa: A::B, fx: X::Y });
|
||||||
|
/// assert_eq!(Foo::from_usize(10), Foo::Baz { fa: A::B, fx: X::Z });
|
||||||
|
/// assert_eq!(Foo::from_usize(9), Foo::Baz { fa: A::D, fx: X::Y });
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// ## Structs
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::Enum;
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, Debug, PartialEq, Eq)]
|
||||||
|
/// enum A {
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// D,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, Debug, PartialEq, Eq)]
|
||||||
|
/// enum X {
|
||||||
|
/// Y,
|
||||||
|
/// Z,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, Debug, PartialEq, Eq)]
|
||||||
|
/// struct Foo {
|
||||||
|
/// bar: bool,
|
||||||
|
/// baz: A,
|
||||||
|
/// end: X,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// assert_eq!(Foo { bar: false, baz: A::B, end: X::Y }.into_usize(), 0);
|
||||||
|
/// assert_eq!(Foo { bar: true, baz: A::B, end: X::Y }.into_usize(), 1);
|
||||||
|
/// assert_eq!(Foo { bar: false, baz: A::D, end: X::Y }.into_usize(), 4);
|
||||||
|
/// assert_eq!(Foo { bar: true, baz: A::C, end: X::Z }.into_usize(), 9);
|
||||||
|
///
|
||||||
|
/// assert_eq!(Foo::from_usize(0), Foo { bar: false, baz: A::B, end: X::Y });
|
||||||
|
/// assert_eq!(Foo::from_usize(1), Foo { bar: true, baz: A::B, end: X::Y });
|
||||||
|
/// assert_eq!(Foo::from_usize(4), Foo { bar: false, baz: A::D, end: X::Y });
|
||||||
|
/// assert_eq!(Foo::from_usize(9), Foo { bar: true, baz: A::C, end: X::Z });
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// ## Tuple Structs
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::Enum;
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, Debug, PartialEq, Eq)]
|
||||||
|
/// enum A {
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// D,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, Debug, PartialEq, Eq)]
|
||||||
|
/// enum X {
|
||||||
|
/// Y,
|
||||||
|
/// Z,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, Debug, PartialEq, Eq)]
|
||||||
|
/// struct Foo(bool, A, X);
|
||||||
|
///
|
||||||
|
/// assert_eq!(Foo(false, A::B, X::Y ).into_usize(), 0);
|
||||||
|
/// assert_eq!(Foo(true, A::B, X::Y ).into_usize(), 1);
|
||||||
|
/// assert_eq!(Foo(false, A::D, X::Y ).into_usize(), 4);
|
||||||
|
/// assert_eq!(Foo(true, A::C, X::Z ).into_usize(), 9);
|
||||||
|
///
|
||||||
|
/// assert_eq!(Foo::from_usize(0), Foo(false, A::B, X::Y));
|
||||||
|
/// assert_eq!(Foo::from_usize(1), Foo(true, A::B, X::Y));
|
||||||
|
/// assert_eq!(Foo::from_usize(4), Foo(false, A::D, X::Y));
|
||||||
|
/// assert_eq!(Foo::from_usize(9), Foo(true, A::C, X::Z));
|
||||||
|
#[proc_macro_derive(Enum)]
|
||||||
|
pub fn derive_enum_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||||
|
let input: DeriveInput = syn::parse(input).unwrap();
|
||||||
|
|
||||||
|
let result = match input.data {
|
||||||
|
Data::Enum(data_enum) => derive_enum::generate(input.ident, data_enum),
|
||||||
|
Data::Struct(data_struct) => derive_struct::generate(input.ident, data_struct),
|
||||||
|
_ => quote! { compile_error! {"#[derive(Enum)] is only defined for enums and structs"} },
|
||||||
|
};
|
||||||
|
|
||||||
|
result.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn type_length(ty: &Type) -> TokenStream {
|
||||||
|
quote! {
|
||||||
|
<#ty as ::enum_map::Enum>::LENGTH
|
||||||
|
}
|
||||||
|
}
|
||||||
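The ordering guarantee documented above (declaration order equals `into_usize` order equals storage order) can be checked with a few asserts. A sketch assuming `enum-map` 2.x as a dependency:

```rust
use enum_map::{enum_map, Enum};

#[derive(Enum)]
enum A {
    B,
    C,
    D,
}

fn main() {
    let map = enum_map! { A::B => 10, A::C => 20, A::D => 30 };

    // Declaration order is the storage order...
    assert_eq!(map.as_slice(), &[10, 20, 30]);
    // ...and it matches into_usize.
    assert_eq!(A::D.into_usize(), 2);
    assert_eq!(map[A::D], 30);
}
```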
1  third_party/rust/enum-map/.cargo-checksum.json  vendored  Normal file
@@ -0,0 +1 @@
|
||||||
|
{"files":{"CHANGELOG.md":"d64d088715bcbff31fb85a6694fbbaef2802ab19ff91d5f0d71ac715d1fa4417","Cargo.toml":"94a380dc0296261c3f80d1ef71a7c1355757bb67ca902367432084bde5b87233","LICENSES/Apache-2.0.txt":"074e6e32c86a4c0ef8b3ed25b721ca23aca83df277cd88106ef7177c354615ff","LICENSES/CC0-1.0.txt":"a2010f343487d3f7618affe54f789f5487602331c0a8d03f49e9a7c547cf0499","LICENSES/MIT.txt":"b85dcd3e453d05982552c52b5fc9e0bdd6d23c6f8e844b984a88af32570b0cc0","README.md":"eec9d4461de2e1e0f26251a98fd53feaad8582e6614a5a23be17a5b0c9491637","src/arbitrary.rs":"30e90bc431c4e74f756a619f432b30ccadc9b3288fc85f2d4bd160a374d0dfdb","src/enum_map_impls.rs":"9a03bf2500215e3c2dcb7e93cfc4d466bd767bdb9b83673b013f7ef50ac08d04","src/internal.rs":"ea03c0ca57eb52012e77dc3928ba5407109c5d24d373b7d7359ba037d890716f","src/iter.rs":"cc75658ea4c61ed63b35bf12c8f14534e36b2d3c9b1a29ab2efc028de0181d44","src/lib.rs":"3e34c9461dfb5b102d620b0196b30dab2d8d429e801e193f8e2e78adfef9039e","src/serde.rs":"f3a9919e2f6c0721a7e1fa386eb712875c5fce01d4f01d5d4775bc246d45ba90","tests/serde.rs":"98b788415b99c7b764ce2271260a4c38feeec9e539e8d8b01909b052ed41db85","tests/test.rs":"bfab8d9752b73de9085e0c59cc478ad09f87014a2c84b7b8bc16ecedf128b559"},"package":"6866f3bfdf8207509a033af1a75a7b08abda06bbaaeae6669323fd5a097df2e9"}
|
||||||
285  third_party/rust/enum-map/CHANGELOG.md  vendored  Normal file
@@ -0,0 +1,285 @@
|
||||||
|
<!--
|
||||||
|
SPDX-FileCopyrightText: 2018 - 2023 Kamila Borowska <kamila@borowska.pw>
|
||||||
|
SPDX-FileCopyrightText: 2021 Alex Sayers <alex@asayers.com>
|
||||||
|
|
||||||
|
SPDX-License-Identifier: MIT OR Apache-2.0
|
||||||
|
-->
|
||||||
|
|
||||||
|
# Version 2.7.3
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Fixed [a regression introduced in 2.7.2 that caused `#[derive(Enum)]` to
|
||||||
|
generate incorrect code when dealing with enums containing
|
||||||
|
fields](https://codeberg.org/xfix/enum-map/issues/112).
|
||||||
|
|
||||||
|
# Version 2.7.2
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Reduced RAM usage and improved compilation times when using `derive(Enum)`
|
||||||
|
for large enums with `overflow-checks` enabled.
|
||||||
|
|
||||||
|
# Version 2.7.1
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Updated author name.
|
||||||
|
|
||||||
|
# Version 2.7.0
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- Implemented `EnumMap::from_fn`.
|
||||||
|
|
||||||
|
# Version 2.6.3
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Updated the repository URL as the project was migrated from GitHub
|
||||||
|
to Codeberg.
|
||||||
|
|
||||||
|
- This project is now compliant with the REUSE Specification.
|
||||||
|
|
||||||
|
# Version 2.6.2
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Hide `out_of_bounds` reexport from documentation.
|
||||||
|
|
||||||
|
# Version 2.6.1
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Provide better panic message when providing out-of-bounds index
|
||||||
|
to `Enum::from_usize`.
|
||||||
|
|
||||||
|
# Version 2.6.0
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- `EnumMap::as_array` is now usable in const contexts.
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- This crate now follows "N minus two" MSRV policy. This means that
|
||||||
|
it supports the current Rust release, as well as the two before
|
||||||
|
that.
|
||||||
|
|
||||||
|
- Upgraded syn to 2.0.0.
|
||||||
|
|
||||||
|
# Version 2.5.0
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- Implemented `EnumMap::as_array` and `EnumMap::as_mut_array`
|
||||||
|
(implemented by [@Fuuzetsu](https://github.com/Fuuzetsu)).
|
||||||
|
|
||||||
|
- Implemented `PartialOrd` and `Ord` for `EnumMap` (implemented by
|
||||||
|
[@nicarran](https://github.com/nicarran)).
|
||||||
|
|
||||||
|
# Version 2.4.2
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Added license files to crate tarball.
|
||||||
|
- Added changelog to crate tarball.
|
||||||
|
|
||||||
|
# Version 2.4.1
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Improved performance of code generated for `from_usize` when
|
||||||
|
deriving `Enum`.
|
||||||
|
|
||||||
|
# Version 2.4.0
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- Implemented `Enum` for `()` (unit type) and `core::cmp::Ordering`
|
||||||
|
(implemented by [@phimuemue](https://github.com/phimuemue)).
|
||||||
|
|
||||||
|
- Implemented `EnumMap::into_array`.
|
||||||
|
|
||||||
|
# Version 2.3.0
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- `EnumMap::len` is now usable in const contexts.
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- `Enum` derive now can deal with re-definitions of `usize` and
|
||||||
|
`unimplemented`.
|
||||||
|
|
||||||
|
# Version 2.2.0
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- `EnumMap::from_array` is now usable in const contexts.
|
||||||
|
|
||||||
|
# Version 2.1.0
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- Implemented `DoubleEndedIterator` for `IntoIter`.
|
||||||
|
|
||||||
|
- Implemented `EnumMap::into_values`.
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Changed behavior of `IntoIter` so that it drops the rest of the elements
|
||||||
|
when one destructor panics.
|
||||||
|
|
||||||
|
# Version 2.0.3
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Optimized performance of `enum_map!` macro.
|
||||||
|
|
||||||
|
# Version 2.0.2
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Fixed safety problem when using `enum_map!` macro with enums that
|
||||||
|
incorrectly implemented `Enum` trait.
|
||||||
|
|
||||||
|
# Version 2.0.1
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Adjusted crate metadata to avoid lib.rs warnings.
|
||||||
|
|
||||||
|
# Version 2.0.0
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- Implemented `FromIterator` for `EnumMap` (implemented by @bit_network
|
||||||
|
on GitLab).
|
||||||
|
|
||||||
|
- Implemented `EnumMap::map`.
|
||||||
|
|
||||||
|
- Derives support product types in addition to sum types (implemented
|
||||||
|
by @bzim on GitLab).
|
||||||
|
|
||||||
|
- It's now possible to access enum length by accessing `LENGTH` in
|
||||||
|
`Enum` trait.
|
||||||
|
|
||||||
|
## Breaking changes
|
||||||
|
|
||||||
|
- `Enum` trait was split into two traits, `Enum` and `EnumArray`.
|
||||||
|
|
||||||
|
# Version 1.1.1
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Worked around a bug in Clippy that caused false positives when using
|
||||||
|
`use_self` lint for code that derived `Enum` trait.
|
||||||
|
|
||||||
|
# Version 1.1.0
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- Implemented `Arbitrary` for maps where the value type also implements
|
||||||
|
`Arbitrary`. (You have to enable the "arbitrary" feature.)
|
||||||
|
|
||||||
|
# Version 1.0.0
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- It's now possible to use `return` and `?` within `macro_rules!` macro.
|
||||||
|
|
||||||
|
- `Enum` trait is much simpler having two methods only.
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Removed previously deprecated features.
|
||||||
|
|
||||||
|
- Renamed `to_usize` to `into_usize` matching the naming convention
|
||||||
|
used in Rust programming language.
|
||||||
|
|
||||||
|
# Version 0.6.5
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Deprecated `EnumMap::is_empty` and `EnumMap::new`. `EnumMap::new` usages
|
||||||
|
can be replaced with `EnumMap::default`.
|
||||||
|
|
||||||
|
# Version 0.6.4
|
||||||
|
|
||||||
|
## Other changes
|
||||||
|
|
||||||
|
- Deprecated `EnumMap::as_ptr` and `EnumMap::as_mut_ptr`.
|
||||||
|
|
||||||
|
# Version 0.6.3
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- `Iter` and `Values` now implement `Clone` (added by @amanieu).
|
||||||
|
|
||||||
|
# Version 0.6.2.
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- Added `EnumMap#clear` method (added by @Riey, thanks :)).
|
||||||
|
|
||||||
|
# Version 0.6.0
|
||||||
|
|
||||||
|
## Incompatible changes
|
||||||
|
|
||||||
|
- Now requires Rust 1.36.
|
||||||
|
|
||||||
|
# Version 0.5.0
|
||||||
|
|
||||||
|
- Fixed the issue where an aliasing `From` trait implementation caused
|
||||||
|
compilation errors with `enum_map!` macro.
|
||||||
|
|
||||||
|
## Incompatible changes
|
||||||
|
|
||||||
|
- Now requires Rust 1.31.
|
||||||
|
|
||||||
|
# Version 0.4.1
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- Default `serde` features are disabled. This allows enabling serde feature when
|
||||||
|
compiling without `std`.
|
||||||
|
|
||||||
|
# Version 0.4.0
|
||||||
|
|
||||||
|
Change of `#[derive(EnumMap)]` to `#[derive(Enum)]` was supposed to appear in 0.3.0,
|
||||||
|
but it was forgotten about. This release fixes just that.
|
||||||
|
|
||||||
|
## Incompatible changes
|
||||||
|
|
||||||
|
- Changed `#[derive(EnumMap)]` to `#[derive(Enum)]` to match trait name.
|
||||||
|
|
||||||
|
# Version 0.3.1
|
||||||
|
|
||||||
|
- Updated README to use `#[derive(EnumMap)]` instead of `#[derive(Enum)]`.
|
||||||
|
|
||||||
|
# Version 0.3.0
|
||||||
|
|
||||||
|
## New features
|
||||||
|
|
||||||
|
- Implemented compact serde serialization for binary formats like bincode.
|
||||||
|
|
||||||
|
- Iterator traits with exception now implement `FusedIterator`.
|
||||||
|
|
||||||
|
## Incompatible changes
|
||||||
|
|
||||||
|
- Increased required Rust version to 1.26.0.
|
||||||
|
|
||||||
|
- Renamed `Internal` trait to `Enum`.
|
||||||
|
|
||||||
|
- Added new associated constant `POSSIBLE_VALUES` to `Enum` trait,
|
||||||
|
representing the number of possible values the type can have. Manual
|
||||||
|
implementations are required to provide it.
|
||||||
|
|
||||||
|
- Removed `Enum` implementation for `Option<T>`.
|
||||||
|
|
||||||
|
- Implemented compact serialization, for formats like `bincode`. This
|
||||||
|
makes it impossible to deserialize non-compact representation used by
|
||||||
|
enum-map 0.2.0.
|
||||||
|
|
||||||
|
- `values` method returns `Values<V>` as opposed to `slice::Iter<V>`.
|
||||||
65  third_party/rust/enum-map/Cargo.toml  vendored  Normal file
@@ -0,0 +1,65 @@
|
||||||
|
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||||
|
#
|
||||||
|
# When uploading crates to the registry Cargo will automatically
|
||||||
|
# "normalize" Cargo.toml files for maximal compatibility
|
||||||
|
# with all versions of Cargo and also rewrite `path` dependencies
|
||||||
|
# to registry (e.g., crates.io) dependencies.
|
||||||
|
#
|
||||||
|
# If you are reading this file be aware that the original Cargo.toml
|
||||||
|
# will likely look very different (and much more reasonable).
|
||||||
|
# See Cargo.toml.orig for the original contents.
|
||||||
|
|
||||||
|
[package]
|
||||||
|
edition = "2021"
|
||||||
|
rust-version = "1.61"
|
||||||
|
name = "enum-map"
|
||||||
|
version = "2.7.3"
|
||||||
|
authors = ["Kamila Borowska <kamila@borowska.pw>"]
|
||||||
|
description = "A map with C-like enum keys represented internally as an array"
|
||||||
|
documentation = "https://docs.rs/enum-map"
|
||||||
|
readme = "README.md"
|
||||||
|
keywords = [
|
||||||
|
"data-structure",
|
||||||
|
"no_std",
|
||||||
|
"enum",
|
||||||
|
]
|
||||||
|
categories = [
|
||||||
|
"data-structures",
|
||||||
|
"no-std",
|
||||||
|
]
|
||||||
|
license = "MIT OR Apache-2.0"
|
||||||
|
repository = "https://codeberg.org/xfix/enum-map"
|
||||||
|
|
||||||
|
[package.metadata.docs.rs]
|
||||||
|
features = [
|
||||||
|
"arbitrary",
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
|
[dependencies.arbitrary]
|
||||||
|
version = "1.0.0"
|
||||||
|
optional = true
|
||||||
|
|
||||||
|
[dependencies.enum-map-derive]
|
||||||
|
version = "0.17.0"
|
||||||
|
|
||||||
|
[dependencies.serde]
|
||||||
|
version = "1.0.16"
|
||||||
|
optional = true
|
||||||
|
default-features = false
|
||||||
|
|
||||||
|
[dev-dependencies.bincode]
|
||||||
|
version = "1.0.0"
|
||||||
|
|
||||||
|
[dev-dependencies.serde]
|
||||||
|
version = "1.0.103"
|
||||||
|
features = ["derive"]
|
||||||
|
|
||||||
|
[dev-dependencies.serde_json]
|
||||||
|
version = "1.0.2"
|
||||||
|
|
||||||
|
[dev-dependencies.serde_test]
|
||||||
|
version = "1.0.19"
|
||||||
|
|
||||||
|
[badges.maintenance]
|
||||||
|
status = "passively-maintained"
|
||||||
73  third_party/rust/enum-map/LICENSES/Apache-2.0.txt  vendored  Normal file
@@ -0,0 +1,73 @@
|
||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
121  third_party/rust/enum-map/LICENSES/CC0-1.0.txt  vendored  Normal file
@@ -0,0 +1,121 @@
|
||||||
|
Creative Commons Legal Code
|
||||||
|
|
||||||
|
CC0 1.0 Universal
|
||||||
|
|
||||||
|
CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
|
||||||
|
LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
|
||||||
|
ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
|
||||||
|
INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
|
||||||
|
REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
|
||||||
|
PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
|
||||||
|
THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
|
||||||
|
HEREUNDER.
|
||||||
|
|
||||||
|
Statement of Purpose
|
||||||
|
|
||||||
|
The laws of most jurisdictions throughout the world automatically confer
|
||||||
|
exclusive Copyright and Related Rights (defined below) upon the creator
|
||||||
|
and subsequent owner(s) (each and all, an "owner") of an original work of
|
||||||
|
authorship and/or a database (each, a "Work").
|
||||||
|
|
||||||
|
Certain owners wish to permanently relinquish those rights to a Work for
|
||||||
|
the purpose of contributing to a commons of creative, cultural and
|
||||||
|
scientific works ("Commons") that the public can reliably and without fear
|
||||||
|
of later claims of infringement build upon, modify, incorporate in other
|
||||||
|
works, reuse and redistribute as freely as possible in any form whatsoever
|
||||||
|
and for any purposes, including without limitation commercial purposes.
|
||||||
|
These owners may contribute to the Commons to promote the ideal of a free
|
||||||
|
culture and the further production of creative, cultural and scientific
|
||||||
|
works, or to gain reputation or greater distribution for their Work in
|
||||||
|
part through the use and efforts of others.
|
||||||
|
|
||||||
|
For these and/or other purposes and motivations, and without any
|
||||||
|
expectation of additional consideration or compensation, the person
|
||||||
|
associating CC0 with a Work (the "Affirmer"), to the extent that he or she
|
||||||
|
is an owner of Copyright and Related Rights in the Work, voluntarily
|
||||||
|
elects to apply CC0 to the Work and publicly distribute the Work under its
|
||||||
|
terms, with knowledge of his or her Copyright and Related Rights in the
|
||||||
|
Work and the meaning and intended legal effect of CC0 on those rights.
|
||||||
|
|
||||||
|
1. Copyright and Related Rights. A Work made available under CC0 may be
|
||||||
|
protected by copyright and related or neighboring rights ("Copyright and
|
||||||
|
Related Rights"). Copyright and Related Rights include, but are not
|
||||||
|
limited to, the following:
|
||||||
|
|
||||||
|
i. the right to reproduce, adapt, distribute, perform, display,
|
||||||
|
communicate, and translate a Work;
|
||||||
|
ii. moral rights retained by the original author(s) and/or performer(s);
|
||||||
|
iii. publicity and privacy rights pertaining to a person's image or
|
||||||
|
likeness depicted in a Work;
|
||||||
|
iv. rights protecting against unfair competition in regards to a Work,
|
||||||
|
subject to the limitations in paragraph 4(a), below;
|
||||||
|
v. rights protecting the extraction, dissemination, use and reuse of data
|
||||||
|
in a Work;
|
||||||
|
vi. database rights (such as those arising under Directive 96/9/EC of the
|
||||||
|
European Parliament and of the Council of 11 March 1996 on the legal
|
||||||
|
protection of databases, and under any national implementation
|
||||||
|
thereof, including any amended or successor version of such
|
||||||
|
directive); and
|
||||||
|
vii. other similar, equivalent or corresponding rights throughout the
|
||||||
|
world based on applicable law or treaty, and any national
|
||||||
|
implementations thereof.
|
||||||
|
|
||||||
|
2. Waiver. To the greatest extent permitted by, but not in contravention
|
||||||
|
of, applicable law, Affirmer hereby overtly, fully, permanently,
|
||||||
|
irrevocably and unconditionally waives, abandons, and surrenders all of
|
||||||
|
Affirmer's Copyright and Related Rights and associated claims and causes
|
||||||
|
of action, whether now known or unknown (including existing as well as
|
||||||
|
future claims and causes of action), in the Work (i) in all territories
|
||||||
|
worldwide, (ii) for the maximum duration provided by applicable law or
|
||||||
|
treaty (including future time extensions), (iii) in any current or future
|
||||||
|
medium and for any number of copies, and (iv) for any purpose whatsoever,
|
||||||
|
including without limitation commercial, advertising or promotional
|
||||||
|
purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each
|
||||||
|
member of the public at large and to the detriment of Affirmer's heirs and
|
||||||
|
successors, fully intending that such Waiver shall not be subject to
|
||||||
|
revocation, rescission, cancellation, termination, or any other legal or
|
||||||
|
equitable action to disrupt the quiet enjoyment of the Work by the public
|
||||||
|
as contemplated by Affirmer's express Statement of Purpose.
|
||||||
|
|
||||||
|
3. Public License Fallback. Should any part of the Waiver for any reason
|
||||||
|
be judged legally invalid or ineffective under applicable law, then the
|
||||||
|
Waiver shall be preserved to the maximum extent permitted taking into
|
||||||
|
account Affirmer's express Statement of Purpose. In addition, to the
|
||||||
|
extent the Waiver is so judged Affirmer hereby grants to each affected
|
||||||
|
person a royalty-free, non transferable, non sublicensable, non exclusive,
|
||||||
|
irrevocable and unconditional license to exercise Affirmer's Copyright and
|
||||||
|
Related Rights in the Work (i) in all territories worldwide, (ii) for the
|
||||||
|
maximum duration provided by applicable law or treaty (including future
|
||||||
|
time extensions), (iii) in any current or future medium and for any number
|
||||||
|
of copies, and (iv) for any purpose whatsoever, including without
|
||||||
|
limitation commercial, advertising or promotional purposes (the
|
||||||
|
"License"). The License shall be deemed effective as of the date CC0 was
|
||||||
|
applied by Affirmer to the Work. Should any part of the License for any
|
||||||
|
reason be judged legally invalid or ineffective under applicable law, such
|
||||||
|
partial invalidity or ineffectiveness shall not invalidate the remainder
|
||||||
|
of the License, and in such case Affirmer hereby affirms that he or she
|
||||||
|
will not (i) exercise any of his or her remaining Copyright and Related
|
||||||
|
Rights in the Work or (ii) assert any associated claims and causes of
|
||||||
|
action with respect to the Work, in either case contrary to Affirmer's
|
||||||
|
express Statement of Purpose.
|
||||||
|
|
||||||
|
4. Limitations and Disclaimers.
|
||||||
|
|
||||||
|
a. No trademark or patent rights held by Affirmer are waived, abandoned,
|
||||||
|
surrendered, licensed or otherwise affected by this document.
|
||||||
|
b. Affirmer offers the Work as-is and makes no representations or
|
||||||
|
warranties of any kind concerning the Work, express, implied,
|
||||||
|
statutory or otherwise, including without limitation warranties of
|
||||||
|
title, merchantability, fitness for a particular purpose, non
|
||||||
|
infringement, or the absence of latent or other defects, accuracy, or
|
||||||
|
the present or absence of errors, whether or not discoverable, all to
|
||||||
|
the greatest extent permissible under applicable law.
|
||||||
|
c. Affirmer disclaims responsibility for clearing rights of other persons
|
||||||
|
that may apply to the Work or any use thereof, including without
|
||||||
|
limitation any person's Copyright and Related Rights in the Work.
|
||||||
|
Further, Affirmer disclaims responsibility for obtaining any necessary
|
||||||
|
consents, permissions or other rights required for any use of the
|
||||||
|
Work.
|
||||||
|
d. Affirmer understands and acknowledges that Creative Commons is not a
|
||||||
|
party to this document and has no duty or obligation with respect to
|
||||||
|
this CC0 or use of the Work.
|
||||||
9  third_party/rust/enum-map/LICENSES/MIT.txt  vendored  Normal file
@@ -0,0 +1,9 @@
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) <year> <copyright holders>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
45  third_party/rust/enum-map/README.md  vendored  Normal file
@@ -0,0 +1,45 @@
|
||||||
|
<!--
|
||||||
|
SPDX-FileCopyrightText: 2017 - 2023 Kamila Borowska <kamila@borowska.pw>
|
||||||
|
|
||||||
|
SPDX-License-Identifier: MIT OR Apache-2.0
|
||||||
|
-->
|
||||||
|
|
||||||
|
# enum-map
|
||||||
|
|
||||||
|
A library providing an enum map, a type-safe enum-indexed array. It is
implemented using regular Rust arrays, so using it is as fast
as using regular Rust arrays.
|
||||||
|
|
||||||
|
This crate follows the "N minus two" MSRV policy. This means that it
|
||||||
|
supports the current Rust release, as well as the two before that.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[macro_use]
|
||||||
|
extern crate enum_map;
|
||||||
|
|
||||||
|
use enum_map::EnumMap;
|
||||||
|
|
||||||
|
#[derive(Debug, Enum)]
|
||||||
|
enum Example {
|
||||||
|
A,
|
||||||
|
B,
|
||||||
|
C,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
let mut map = enum_map! {
|
||||||
|
Example::A => 1,
|
||||||
|
Example::B => 2,
|
||||||
|
Example::C => 3,
|
||||||
|
};
|
||||||
|
map[Example::C] = 4;
|
||||||
|
|
||||||
|
assert_eq!(map[Example::A], 1);
|
||||||
|
|
||||||
|
for (key, &value) in &map {
|
||||||
|
println!("{:?} has {} as value.", key, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
28  third_party/rust/enum-map/src/arbitrary.rs  vendored  Normal file
@@ -0,0 +1,28 @@
|
||||||
|
// SPDX-FileCopyrightText: 2021 Bruno Corrêa Zimmermann <brunoczim@gmail.com>
|
||||||
|
// SPDX-FileCopyrightText: 2021 Kamila Borowska <kamila@borowska.pw>
|
||||||
|
//
|
||||||
|
// SPDX-License-Identifier: MIT OR Apache-2.0
|
||||||
|
|
||||||
|
use crate::{enum_map, EnumArray, EnumMap};
|
||||||
|
use arbitrary::{Arbitrary, Result, Unstructured};
|
||||||
|
|
||||||
|
/// Requires crate feature `"arbitrary"`
|
||||||
|
impl<'a, K: EnumArray<V>, V: Arbitrary<'a>> Arbitrary<'a> for EnumMap<K, V> {
|
||||||
|
fn arbitrary(u: &mut Unstructured<'a>) -> Result<EnumMap<K, V>> {
|
||||||
|
Ok(enum_map! {
|
||||||
|
_ => Arbitrary::arbitrary(u)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn size_hint(depth: usize) -> (usize, Option<usize>) {
|
||||||
|
if K::LENGTH == 0 {
|
||||||
|
(0, Some(0))
|
||||||
|
} else {
|
||||||
|
let (lo, hi) = V::size_hint(depth);
|
||||||
|
(
|
||||||
|
lo.saturating_mul(K::LENGTH),
|
||||||
|
hi.and_then(|hi| hi.checked_mul(K::LENGTH)),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
115  third_party/rust/enum-map/src/enum_map_impls.rs  vendored  Normal file
@@ -0,0 +1,115 @@
|
||||||
|
// SPDX-FileCopyrightText: 2017 - 2021 Kamila Borowska <kamila@borowska.pw>
|
||||||
|
// SPDX-FileCopyrightText: 2021 Bruno Corrêa Zimmermann <brunoczim@gmail.com>
|
||||||
|
// SPDX-FileCopyrightText: 2021 micycle
|
||||||
|
// SPDX-FileCopyrightText: 2023 Nicolas Carranza <nicarran@yandex.com>
|
||||||
|
//
|
||||||
|
// SPDX-License-Identifier: MIT OR Apache-2.0
|
||||||
|
|
||||||
|
use crate::{enum_map, EnumArray, EnumMap};
|
||||||
|
use core::fmt::{self, Debug, Formatter};
|
||||||
|
use core::hash::{Hash, Hasher};
|
||||||
|
use core::iter::{Extend, FromIterator};
|
||||||
|
use core::ops::{Index, IndexMut};
|
||||||
|
|
||||||
|
impl<K: EnumArray<V> + Debug, V: Debug> Debug for EnumMap<K, V> {
|
||||||
|
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||||
|
f.debug_map().entries(self).finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V> Extend<(K, V)> for EnumMap<K, V> {
|
||||||
|
fn extend<I: IntoIterator<Item = (K, V)>>(&mut self, iter: I) {
|
||||||
|
for (key, value) in iter {
|
||||||
|
self[key] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, K, V> Extend<(&'a K, &'a V)> for EnumMap<K, V>
|
||||||
|
where
|
||||||
|
K: EnumArray<V> + Copy,
|
||||||
|
V: Copy,
|
||||||
|
{
|
||||||
|
fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
|
||||||
|
self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K, V> FromIterator<(K, V)> for EnumMap<K, V>
|
||||||
|
where
|
||||||
|
Self: Default,
|
||||||
|
K: EnumArray<V>,
|
||||||
|
{
|
||||||
|
fn from_iter<I: IntoIterator<Item = (K, V)>>(iter: I) -> Self {
|
||||||
|
let mut map = EnumMap::default();
|
||||||
|
map.extend(iter);
|
||||||
|
map
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V> Index<K> for EnumMap<K, V> {
|
||||||
|
type Output = V;
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn index(&self, key: K) -> &V {
|
||||||
|
&self.as_slice()[key.into_usize()]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V> IndexMut<K> for EnumMap<K, V> {
|
||||||
|
#[inline]
|
||||||
|
fn index_mut(&mut self, key: K) -> &mut V {
|
||||||
|
&mut self.as_mut_slice()[key.into_usize()]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Implementations provided by derive attribute are too specific, and put requirements on K.
|
||||||
|
// This is caused by rust-lang/rust#26925.
|
||||||
|
impl<K: EnumArray<V>, V> Clone for EnumMap<K, V>
|
||||||
|
where
|
||||||
|
K::Array: Clone,
|
||||||
|
{
|
||||||
|
#[inline]
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
EnumMap {
|
||||||
|
array: self.array.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V> Copy for EnumMap<K, V> where K::Array: Copy {}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V: PartialEq> PartialEq for EnumMap<K, V> {
|
||||||
|
#[inline]
|
||||||
|
fn eq(&self, other: &Self) -> bool {
|
||||||
|
self.as_slice() == other.as_slice()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V: Eq> Eq for EnumMap<K, V> {}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V: Hash> Hash for EnumMap<K, V> {
|
||||||
|
#[inline]
|
||||||
|
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||||
|
self.as_slice().hash(state);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V: Default> Default for EnumMap<K, V> {
|
||||||
|
#[inline]
|
||||||
|
fn default() -> Self {
|
||||||
|
enum_map! { _ => V::default() }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V: PartialOrd> PartialOrd for EnumMap<K, V> {
|
||||||
|
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
|
||||||
|
self.as_slice().partial_cmp(other.as_slice())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V: Ord> Ord for EnumMap<K, V> {
|
||||||
|
fn cmp(&self, other: &Self) -> core::cmp::Ordering {
|
||||||
|
self.as_slice().cmp(other.as_slice())
|
||||||
|
}
|
||||||
|
}
|
||||||
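On the rust-lang/rust#26925 comment above: a derived `Clone` would bound `K` itself, while the hand-written impl only bounds `K::Array`. A self-contained sketch of that difference, with illustrative types that are not part of the crate:

```rust
use std::marker::PhantomData;

// A derived Clone on a generic wrapper requires *every* type parameter to be
// Clone, even one that only appears inside PhantomData.
#[derive(Clone)]
struct Derived<K, V> {
    values: Vec<V>,
    _key: PhantomData<K>,
}

struct Manual<K, V> {
    values: Vec<V>,
    _key: PhantomData<K>,
}

// Hand-written impl: only V needs Clone, K stays unconstrained -- the same
// idea EnumMap uses by bounding K::Array instead of K.
impl<K, V: Clone> Clone for Manual<K, V> {
    fn clone(&self) -> Self {
        Manual {
            values: self.values.clone(),
            _key: PhantomData,
        }
    }
}

struct NotClone;

fn main() {
    let manual = Manual::<NotClone, u8> {
        values: vec![1, 2, 3],
        _key: PhantomData,
    };
    let copy = manual.clone(); // fine: NotClone never needs to be Clone
    assert_eq!(copy.values, vec![1, 2, 3]);

    // A `Derived::<NotClone, u8>` value would not be cloneable, because the
    // derived impl is `impl<K: Clone, V: Clone> Clone for Derived<K, V>`.
}
```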
159  third_party/rust/enum-map/src/internal.rs  vendored  Normal file
@@ -0,0 +1,159 @@
|
||||||
|
// SPDX-FileCopyrightText: 2017 - 2023 Kamila Borowska <kamila@borowska.pw>
|
||||||
|
// SPDX-FileCopyrightText: 2021 Bruno Corrêa Zimmermann <brunoczim@gmail.com>
|
||||||
|
// SPDX-FileCopyrightText: 2022 philipp <descpl@yahoo.de>
|
||||||
|
//
|
||||||
|
// SPDX-License-Identifier: MIT OR Apache-2.0
|
||||||
|
|
||||||
|
use core::cmp::Ordering;
|
||||||
|
use core::convert::Infallible;
|
||||||
|
|
||||||
|
/// Enum mapping type.
|
||||||
|
///
|
||||||
|
/// This trait is implemented by `#[derive(Enum)]`.
|
||||||
|
///
|
||||||
|
/// This trait is also implemented by `bool` and `u8`. While `u8` is
|
||||||
|
/// strictly speaking not an actual enum, there are good reasons to consider
|
||||||
|
/// it like one, as an array keyed by `u8` is a relatively common pattern.
|
||||||
|
pub trait Enum: Sized {
|
||||||
|
/// Length of the enum.
|
||||||
|
const LENGTH: usize;
|
||||||
|
|
||||||
|
/// Takes a usize, and returns an element matching the `into_usize` function.
|
||||||
|
fn from_usize(value: usize) -> Self;
|
||||||
|
/// Returns a unique identifier for a value within the range `0..Array::LENGTH`.
|
||||||
|
fn into_usize(self) -> usize;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Trait associating enum with an array.
|
||||||
|
///
|
||||||
|
/// This exists due to limitations of Rust compiler that prevent arrays from using
|
||||||
|
/// associated constants in structures. The array length must match `LENGTH` of an
|
||||||
|
/// `Enum`.
|
||||||
|
pub trait EnumArray<V>: Enum {
|
||||||
|
/// Representation of an enum map for type `V`.
|
||||||
|
type Array: Array<V>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Array for enum-map storage.
|
||||||
|
///
|
||||||
|
/// This trait is intended for primitive array types (with fixed length).
|
||||||
|
///
|
||||||
|
/// # Safety
|
||||||
|
///
|
||||||
|
/// The array length needs to match actual storage.
|
||||||
|
pub unsafe trait Array<V> {
|
||||||
|
// This is necessary duplication because the length in Enum trait can be
|
||||||
|
// provided by user and may not be trustworthy for unsafe code.
|
||||||
|
const LENGTH: usize;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl<V, const N: usize> Array<V> for [V; N] {
|
||||||
|
const LENGTH: usize = N;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[inline]
|
||||||
|
pub fn out_of_bounds() -> ! {
|
||||||
|
panic!("index out of range for Enum::from_usize");
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Enum for bool {
|
||||||
|
const LENGTH: usize = 2;
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn from_usize(value: usize) -> Self {
|
||||||
|
match value {
|
||||||
|
0 => false,
|
||||||
|
1 => true,
|
||||||
|
_ => out_of_bounds(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[inline]
|
||||||
|
fn into_usize(self) -> usize {
|
||||||
|
usize::from(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> EnumArray<T> for bool {
|
||||||
|
type Array = [T; Self::LENGTH];
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Enum for () {
|
||||||
|
const LENGTH: usize = 1;
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn from_usize(value: usize) -> Self {
|
||||||
|
match value {
|
||||||
|
0 => (),
|
||||||
|
_ => out_of_bounds(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[inline]
|
||||||
|
fn into_usize(self) -> usize {
|
||||||
|
0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> EnumArray<T> for () {
|
||||||
|
type Array = [T; Self::LENGTH];
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Enum for u8 {
|
||||||
|
const LENGTH: usize = 256;
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn from_usize(value: usize) -> Self {
|
||||||
|
value.try_into().unwrap_or_else(|_| out_of_bounds())
|
||||||
|
}
|
||||||
|
#[inline]
|
||||||
|
fn into_usize(self) -> usize {
|
||||||
|
usize::from(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> EnumArray<T> for u8 {
|
||||||
|
type Array = [T; Self::LENGTH];
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Enum for Infallible {
|
||||||
|
const LENGTH: usize = 0;
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn from_usize(_: usize) -> Self {
|
||||||
|
out_of_bounds();
|
||||||
|
}
|
||||||
|
#[inline]
|
||||||
|
fn into_usize(self) -> usize {
|
||||||
|
match self {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> EnumArray<T> for Infallible {
|
||||||
|
type Array = [T; Self::LENGTH];
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Enum for Ordering {
|
||||||
|
const LENGTH: usize = 3;
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn from_usize(value: usize) -> Self {
|
||||||
|
match value {
|
||||||
|
0 => Ordering::Less,
|
||||||
|
1 => Ordering::Equal,
|
||||||
|
2 => Ordering::Greater,
|
||||||
|
_ => out_of_bounds(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[inline]
|
||||||
|
fn into_usize(self) -> usize {
|
||||||
|
match self {
|
||||||
|
Ordering::Less => 0,
|
||||||
|
Ordering::Equal => 1,
|
||||||
|
Ordering::Greater => 2,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> EnumArray<T> for Ordering {
|
||||||
|
type Array = [T; Self::LENGTH];
|
||||||
|
}
|
||||||
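For a manual (non-derive) implementation of the traits above, the `bool` impl can be mirrored directly. A sketch assuming `enum-map` 2.x as a dependency; `Tri` is an illustrative type, not part of the crate:

```rust
use enum_map::{Enum, EnumArray, EnumMap};

// A three-valued key type implemented by hand rather than via #[derive(Enum)].
struct Tri(u8); // invariant: 0, 1 or 2

impl Enum for Tri {
    const LENGTH: usize = 3;

    fn from_usize(value: usize) -> Self {
        assert!(value < Self::LENGTH, "index out of range for Enum::from_usize");
        Tri(value as u8)
    }

    fn into_usize(self) -> usize {
        usize::from(self.0)
    }
}

impl<V> EnumArray<V> for Tri {
    type Array = [V; Self::LENGTH];
}

fn main() {
    let mut map: EnumMap<Tri, &str> = EnumMap::default();
    map[Tri(2)] = "last";
    assert_eq!(map[Tri(0)], "");
    assert_eq!(map[Tri(2)], "last");
}
```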
415  third_party/rust/enum-map/src/iter.rs  vendored  Normal file
@@ -0,0 +1,415 @@
|
||||||
|
#![allow(clippy::module_name_repetitions)]
|
||||||
|
|
||||||
|
// SPDX-FileCopyrightText: 2017 - 2022 Kamila Borowska <kamila@borowska.pw>
|
||||||
|
// SPDX-FileCopyrightText: 2020 Amanieu d'Antras <amanieu@gmail.com>
|
||||||
|
// SPDX-FileCopyrightText: 2021 Bruno Corrêa Zimmermann <brunoczim@gmail.com>
|
||||||
|
//
|
||||||
|
// SPDX-License-Identifier: MIT OR Apache-2.0
|
||||||
|
|
||||||
|
use crate::{EnumArray, EnumMap};
|
||||||
|
use core::iter::{Enumerate, FusedIterator};
|
||||||
|
use core::marker::PhantomData;
|
||||||
|
use core::mem::ManuallyDrop;
|
||||||
|
use core::ops::Range;
|
||||||
|
use core::ptr;
|
||||||
|
use core::slice;
|
||||||
|
|
||||||
|
/// Immutable enum map iterator
|
||||||
|
///
|
||||||
|
/// This struct is created by `iter` method or `into_iter` on a reference
|
||||||
|
/// to `EnumMap`.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::{enum_map, Enum};
|
||||||
|
///
|
||||||
|
/// #[derive(Enum)]
|
||||||
|
/// enum Example {
|
||||||
|
/// A,
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let mut map = enum_map! { Example::A => 3, _ => 0 };
|
||||||
|
/// assert_eq!(map[Example::A], 3);
|
||||||
|
/// for (key, &value) in &map {
|
||||||
|
/// assert_eq!(value, match key {
|
||||||
|
/// Example::A => 3,
|
||||||
|
/// _ => 0,
|
||||||
|
/// });
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct Iter<'a, K, V: 'a> {
|
||||||
|
_phantom: PhantomData<fn() -> K>,
|
||||||
|
iterator: Enumerate<slice::Iter<'a, V>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, K: EnumArray<V>, V> Clone for Iter<'a, K, V> {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
Iter {
|
||||||
|
_phantom: PhantomData,
|
||||||
|
iterator: self.iterator.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, K: EnumArray<V>, V> Iterator for Iter<'a, K, V> {
|
||||||
|
type Item = (K, &'a V);
|
||||||
|
#[inline]
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
self.iterator
|
||||||
|
.next()
|
||||||
|
.map(|(index, item)| (K::from_usize(index), item))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
|
self.iterator.size_hint()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fold<B, F>(self, init: B, f: F) -> B
|
||||||
|
where
|
||||||
|
F: FnMut(B, Self::Item) -> B,
|
||||||
|
{
|
||||||
|
self.iterator
|
||||||
|
.map(|(index, item)| (K::from_usize(index), item))
|
||||||
|
.fold(init, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, K: EnumArray<V>, V> DoubleEndedIterator for Iter<'a, K, V> {
|
||||||
|
#[inline]
|
||||||
|
fn next_back(&mut self) -> Option<Self::Item> {
|
||||||
|
self.iterator
|
||||||
|
.next_back()
|
||||||
|
.map(|(index, item)| (K::from_usize(index), item))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, K: EnumArray<V>, V> ExactSizeIterator for Iter<'a, K, V> {}
|
||||||
|
|
||||||
|
impl<'a, K: EnumArray<V>, V> FusedIterator for Iter<'a, K, V> {}
|
||||||
|
|
||||||
|
impl<'a, K: EnumArray<V>, V> IntoIterator for &'a EnumMap<K, V> {
|
||||||
|
type Item = (K, &'a V);
|
||||||
|
type IntoIter = Iter<'a, K, V>;
|
||||||
|
#[inline]
|
||||||
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
|
Iter {
|
||||||
|
_phantom: PhantomData,
|
||||||
|
iterator: self.as_slice().iter().enumerate(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Mutable map iterator
|
||||||
|
///
|
||||||
|
/// This struct is created by `iter_mut` method or `into_iter` on a mutable
|
||||||
|
/// reference to `EnumMap`.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::{enum_map, Enum};
|
||||||
|
///
|
||||||
|
/// #[derive(Debug, Enum)]
|
||||||
|
/// enum Example {
|
||||||
|
/// A,
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let mut map = enum_map! { Example::A => 3, _ => 0 };
|
||||||
|
/// for (_, value) in &mut map {
|
||||||
|
/// *value += 1;
|
||||||
|
/// }
|
||||||
|
/// assert_eq!(map, enum_map! { Example::A => 4, _ => 1 });
|
||||||
|
/// ```
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct IterMut<'a, K, V: 'a> {
|
||||||
|
_phantom: PhantomData<fn() -> K>,
|
||||||
|
iterator: Enumerate<slice::IterMut<'a, V>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, K: EnumArray<V>, V> Iterator for IterMut<'a, K, V> {
|
||||||
|
type Item = (K, &'a mut V);
|
||||||
|
#[inline]
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
self.iterator
|
||||||
|
.next()
|
||||||
|
.map(|(index, item)| (K::from_usize(index), item))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
|
self.iterator.size_hint()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fold<B, F>(self, init: B, f: F) -> B
|
||||||
|
where
|
||||||
|
F: FnMut(B, Self::Item) -> B,
|
||||||
|
{
|
||||||
|
self.iterator
|
||||||
|
.map(|(index, item)| (K::from_usize(index), item))
|
||||||
|
.fold(init, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, K: EnumArray<V>, V> DoubleEndedIterator for IterMut<'a, K, V> {
|
||||||
|
#[inline]
|
||||||
|
fn next_back(&mut self) -> Option<Self::Item> {
|
||||||
|
self.iterator
|
||||||
|
.next_back()
|
||||||
|
.map(|(index, item)| (K::from_usize(index), item))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, K: EnumArray<V>, V> ExactSizeIterator for IterMut<'a, K, V> {}
|
||||||
|
|
||||||
|
impl<'a, K: EnumArray<V>, V> FusedIterator for IterMut<'a, K, V> {}
|
||||||
|
|
||||||
|
impl<'a, K: EnumArray<V>, V> IntoIterator for &'a mut EnumMap<K, V> {
|
||||||
|
type Item = (K, &'a mut V);
|
||||||
|
type IntoIter = IterMut<'a, K, V>;
|
||||||
|
#[inline]
|
||||||
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
|
IterMut {
|
||||||
|
_phantom: PhantomData,
|
||||||
|
iterator: self.as_mut_slice().iter_mut().enumerate(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A map iterator that moves out of map.
|
||||||
|
///
|
||||||
|
/// This struct is created by `into_iter` on `EnumMap`.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::{enum_map, Enum};
|
||||||
|
///
|
||||||
|
/// #[derive(Debug, Enum)]
|
||||||
|
/// enum Example {
|
||||||
|
/// A,
|
||||||
|
/// B,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let map = enum_map! { Example::A | Example::B => String::from("123") };
|
||||||
|
/// for (_, value) in map {
|
||||||
|
/// assert_eq!(value + "4", "1234");
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
pub struct IntoIter<K: EnumArray<V>, V> {
|
||||||
|
map: ManuallyDrop<EnumMap<K, V>>,
|
||||||
|
alive: Range<usize>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V> Iterator for IntoIter<K, V> {
|
||||||
|
type Item = (K, V);
|
||||||
|
fn next(&mut self) -> Option<(K, V)> {
|
||||||
|
let position = self.alive.next()?;
|
||||||
|
Some((K::from_usize(position), unsafe {
|
||||||
|
ptr::read(&self.map.as_slice()[position])
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
|
self.alive.size_hint()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V> DoubleEndedIterator for IntoIter<K, V> {
|
||||||
|
fn next_back(&mut self) -> Option<(K, V)> {
|
||||||
|
let position = self.alive.next_back()?;
|
||||||
|
Some((K::from_usize(position), unsafe {
|
||||||
|
ptr::read(&self.map.as_slice()[position])
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V> ExactSizeIterator for IntoIter<K, V> {}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V> FusedIterator for IntoIter<K, V> {}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V> Drop for IntoIter<K, V> {
|
||||||
|
#[inline]
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe {
|
||||||
|
ptr::drop_in_place(&mut self.map.as_mut_slice()[self.alive.clone()]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V> IntoIterator for EnumMap<K, V> {
|
||||||
|
type Item = (K, V);
|
||||||
|
type IntoIter = IntoIter<K, V>;
|
||||||
|
#[inline]
|
||||||
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
|
let len = self.len();
|
||||||
|
IntoIter {
|
||||||
|
map: ManuallyDrop::new(self),
|
||||||
|
alive: 0..len,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V> EnumMap<K, V> {
|
||||||
|
/// An iterator visiting all values. The iterator type is `&V`.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::enum_map;
|
||||||
|
///
|
||||||
|
/// let map = enum_map! { false => 3, true => 4 };
|
||||||
|
/// let mut values = map.values();
|
||||||
|
/// assert_eq!(values.next(), Some(&3));
|
||||||
|
/// assert_eq!(values.next(), Some(&4));
|
||||||
|
/// assert_eq!(values.next(), None);
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
pub fn values(&self) -> Values<V> {
|
||||||
|
Values(self.as_slice().iter())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An iterator visiting all values mutably. The iterator type is `&mut V`.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::enum_map;
|
||||||
|
///
|
||||||
|
/// let mut map = enum_map! { _ => 2 };
|
||||||
|
/// for value in map.values_mut() {
|
||||||
|
/// *value += 2;
|
||||||
|
/// }
|
||||||
|
/// assert_eq!(map[false], 4);
|
||||||
|
/// assert_eq!(map[true], 4);
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
pub fn values_mut(&mut self) -> ValuesMut<V> {
|
||||||
|
ValuesMut(self.as_mut_slice().iter_mut())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a consuming iterator visiting all the values. The map
|
||||||
|
/// cannot be used after calling this. The iterator element type
|
||||||
|
/// is `V`.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::enum_map;
|
||||||
|
///
|
||||||
|
/// let mut map = enum_map! { false => "hello", true => "goodbye" };
|
||||||
|
/// assert_eq!(map.into_values().collect::<Vec<_>>(), ["hello", "goodbye"]);
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
pub fn into_values(self) -> IntoValues<K, V> {
|
||||||
|
IntoValues {
|
||||||
|
inner: self.into_iter(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An iterator over the values of `EnumMap`.
|
||||||
|
///
|
||||||
|
/// This `struct` is created by the `values` method of `EnumMap`.
|
||||||
|
/// See its documentation for more.
|
||||||
|
pub struct Values<'a, V: 'a>(slice::Iter<'a, V>);
|
||||||
|
|
||||||
|
impl<'a, V> Clone for Values<'a, V> {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
Values(self.0.clone())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, V: 'a> Iterator for Values<'a, V> {
|
||||||
|
type Item = &'a V;
|
||||||
|
#[inline]
|
||||||
|
fn next(&mut self) -> Option<&'a V> {
|
||||||
|
self.0.next()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
|
self.0.size_hint()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, V: 'a> DoubleEndedIterator for Values<'a, V> {
|
||||||
|
#[inline]
|
||||||
|
fn next_back(&mut self) -> Option<&'a V> {
|
||||||
|
self.0.next_back()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, V: 'a> ExactSizeIterator for Values<'a, V> {}
|
||||||
|
|
||||||
|
impl<'a, V: 'a> FusedIterator for Values<'a, V> {}
|
||||||
|
|
||||||
|
/// A mutable iterator over the values of `EnumMap`.
|
||||||
|
///
|
||||||
|
/// This `struct` is created by the `values_mut` method of `EnumMap`.
|
||||||
|
/// See its documentation for more.
|
||||||
|
pub struct ValuesMut<'a, V: 'a>(slice::IterMut<'a, V>);
|
||||||
|
|
||||||
|
impl<'a, V: 'a> Iterator for ValuesMut<'a, V> {
|
||||||
|
type Item = &'a mut V;
|
||||||
|
#[inline]
|
||||||
|
fn next(&mut self) -> Option<&'a mut V> {
|
||||||
|
self.0.next()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
|
self.0.size_hint()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, V: 'a> DoubleEndedIterator for ValuesMut<'a, V> {
|
||||||
|
#[inline]
|
||||||
|
fn next_back(&mut self) -> Option<&'a mut V> {
|
||||||
|
self.0.next_back()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, V: 'a> ExactSizeIterator for ValuesMut<'a, V> {}
|
||||||
|
|
||||||
|
impl<'a, V: 'a> FusedIterator for ValuesMut<'a, V> {}
|
||||||
|
|
||||||
|
/// An owning iterator over the values of an `EnumMap`.
|
||||||
|
///
|
||||||
|
/// This `struct` is created by the `into_values` method of `EnumMap`.
|
||||||
|
/// See its documentation for more.
|
||||||
|
pub struct IntoValues<K: EnumArray<V>, V> {
|
||||||
|
inner: IntoIter<K, V>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K, V> Iterator for IntoValues<K, V>
|
||||||
|
where
|
||||||
|
K: EnumArray<V>,
|
||||||
|
{
|
||||||
|
type Item = V;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<V> {
|
||||||
|
Some(self.inner.next()?.1)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
|
self.inner.size_hint()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V> DoubleEndedIterator for IntoValues<K, V> {
|
||||||
|
fn next_back(&mut self) -> Option<V> {
|
||||||
|
Some(self.inner.next_back()?.1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K, V> ExactSizeIterator for IntoValues<K, V> where K: EnumArray<V> {}
|
||||||
|
|
||||||
|
impl<K, V> FusedIterator for IntoValues<K, V> where K: EnumArray<V> {}
|
||||||
509
third_party/rust/enum-map/src/lib.rs
vendored
Normal file
509
third_party/rust/enum-map/src/lib.rs
vendored
Normal file
|
|
@ -0,0 +1,509 @@
|
||||||
|
// SPDX-FileCopyrightText: 2017 - 2023 Kamila Borowska <kamila@borowska.pw>
|
||||||
|
// SPDX-FileCopyrightText: 2019 Riey <creeper844@gmail.com>
|
||||||
|
// SPDX-FileCopyrightText: 2021 Alex Sayers <alex@asayers.com>
|
||||||
|
// SPDX-FileCopyrightText: 2021 Bruno Corrêa Zimmermann <brunoczim@gmail.com>
|
||||||
|
// SPDX-FileCopyrightText: 2022 Cass Fridkin <cass@cloudflare.com>
|
||||||
|
// SPDX-FileCopyrightText: 2022 Mateusz Kowalczyk <fuuzetsu@fuuzetsu.co.uk>
|
||||||
|
//
|
||||||
|
// SPDX-License-Identifier: MIT OR Apache-2.0
|
||||||
|
|
||||||
|
//! An enum mapping type.
|
||||||
|
//!
|
||||||
|
//! It is implemented using an array type, so using it is as fast as using Rust
|
||||||
|
//! arrays.
|
||||||
|
//!
|
||||||
|
//! # Examples
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//! use enum_map::{enum_map, Enum, EnumMap};
|
||||||
|
//!
|
||||||
|
//! #[derive(Debug, Enum)]
|
||||||
|
//! enum Example {
|
||||||
|
//! A(bool),
|
||||||
|
//! B,
|
||||||
|
//! C,
|
||||||
|
//! }
|
||||||
|
//!
|
||||||
|
//! let mut map = enum_map! {
|
||||||
|
//! Example::A(false) => 0,
|
||||||
|
//! Example::A(true) => 1,
|
||||||
|
//! Example::B => 2,
|
||||||
|
//! Example::C => 3,
|
||||||
|
//! };
|
||||||
|
//! map[Example::C] = 4;
|
||||||
|
//!
|
||||||
|
//! assert_eq!(map[Example::A(true)], 1);
|
||||||
|
//!
|
||||||
|
//! for (key, &value) in &map {
|
||||||
|
//! println!("{:?} has {} as value.", key, value);
|
||||||
|
//! }
|
||||||
|
//! ```
|
||||||
|
|
||||||
|
#![no_std]
|
||||||
|
#![deny(missing_docs)]
|
||||||
|
#![warn(clippy::pedantic)]
|
||||||
|
|
||||||
|
#[cfg(feature = "arbitrary")]
|
||||||
|
mod arbitrary;
|
||||||
|
mod enum_map_impls;
|
||||||
|
mod internal;
|
||||||
|
mod iter;
|
||||||
|
#[cfg(feature = "serde")]
|
||||||
|
mod serde;
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub use core::mem::{self, ManuallyDrop, MaybeUninit};
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub use core::primitive::usize;
|
||||||
|
use core::slice;
|
||||||
|
#[doc(hidden)]
|
||||||
|
// unreachable needs to be exported for compatibility with older versions of enum-map-derive
|
||||||
|
pub use core::{panic, ptr, unreachable};
|
||||||
|
pub use enum_map_derive::Enum;
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub use internal::out_of_bounds;
|
||||||
|
use internal::Array;
|
||||||
|
pub use internal::{Enum, EnumArray};
|
||||||
|
pub use iter::{IntoIter, IntoValues, Iter, IterMut, Values, ValuesMut};
|
||||||
|
|
||||||
|
// SAFETY: initialized needs to represent number of initialized elements
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub struct Guard<'a, K, V>
|
||||||
|
where
|
||||||
|
K: EnumArray<V>,
|
||||||
|
{
|
||||||
|
array_mut: &'a mut MaybeUninit<K::Array>,
|
||||||
|
initialized: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K, V> Drop for Guard<'_, K, V>
|
||||||
|
where
|
||||||
|
K: EnumArray<V>,
|
||||||
|
{
|
||||||
|
fn drop(&mut self) {
|
||||||
|
// This is safe as arr[..len] is initialized due to
|
||||||
|
// Guard's type invariant.
|
||||||
|
unsafe {
|
||||||
|
ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.initialized).drop_in_place();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, K, V> Guard<'a, K, V>
|
||||||
|
where
|
||||||
|
K: EnumArray<V>,
|
||||||
|
{
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub fn as_mut_ptr(&mut self) -> *mut V {
|
||||||
|
self.array_mut.as_mut_ptr().cast::<V>()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[must_use]
|
||||||
|
pub fn new(array_mut: &'a mut MaybeUninit<K::Array>) -> Self {
|
||||||
|
Self {
|
||||||
|
array_mut,
|
||||||
|
initialized: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[must_use]
|
||||||
|
#[allow(clippy::unused_self)]
|
||||||
|
pub fn storage_length(&self) -> usize {
|
||||||
|
// SAFETY: We need to use LENGTH from K::Array, as K::LENGTH is
|
||||||
|
// untrustworthy.
|
||||||
|
K::Array::LENGTH
|
||||||
|
}
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[must_use]
|
||||||
|
pub fn get_key(&self) -> K {
|
||||||
|
K::from_usize(self.initialized)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
// Unsafe as it can write out of bounds.
|
||||||
|
pub unsafe fn push(&mut self, value: V) {
|
||||||
|
self.as_mut_ptr().add(self.initialized).write(value);
|
||||||
|
self.initialized += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub struct TypeEqualizer<'a, K, V>
|
||||||
|
where
|
||||||
|
K: EnumArray<V>,
|
||||||
|
{
|
||||||
|
pub enum_map: [EnumMap<K, V>; 0],
|
||||||
|
pub guard: Guard<'a, K, V>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Enum map constructor.
|
||||||
|
///
|
||||||
|
/// This macro allows to create a new enum map in a type safe way. It takes
|
||||||
|
/// a list of `,` separated pairs separated by `=>`. Left side is `|`
|
||||||
|
/// separated list of enum keys, or `_` to match all unmatched enum keys,
|
||||||
|
/// while right side is a value.
|
||||||
|
///
|
||||||
|
/// The iteration order when using this macro is not guaranteed to be
|
||||||
|
/// consistent. Future releases of this crate may change it, and this is not
|
||||||
|
/// considered to be a breaking change.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::{enum_map, Enum};
|
||||||
|
///
|
||||||
|
/// #[derive(Enum)]
|
||||||
|
/// enum Example {
|
||||||
|
/// A,
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// D,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let enum_map = enum_map! {
|
||||||
|
/// Example::A | Example::B => 1,
|
||||||
|
/// Example::C => 2,
|
||||||
|
/// _ => 3,
|
||||||
|
/// };
|
||||||
|
/// assert_eq!(enum_map[Example::A], 1);
|
||||||
|
/// assert_eq!(enum_map[Example::B], 1);
|
||||||
|
/// assert_eq!(enum_map[Example::C], 2);
|
||||||
|
/// assert_eq!(enum_map[Example::D], 3);
|
||||||
|
/// ```
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! enum_map {
|
||||||
|
{$($t:tt)*} => {{
|
||||||
|
let mut uninit = $crate::MaybeUninit::uninit();
|
||||||
|
let mut eq = $crate::TypeEqualizer {
|
||||||
|
enum_map: [],
|
||||||
|
guard: $crate::Guard::new(&mut uninit),
|
||||||
|
};
|
||||||
|
if false {
|
||||||
|
// Safe because this code is unreachable
|
||||||
|
unsafe { (&mut eq.enum_map).as_mut_ptr().read() }
|
||||||
|
} else {
|
||||||
|
for _ in 0..(&eq.guard).storage_length() {
|
||||||
|
struct __PleaseDoNotUseBreakWithoutLabel;
|
||||||
|
let _please_do_not_use_continue_without_label;
|
||||||
|
let value;
|
||||||
|
#[allow(unreachable_code)]
|
||||||
|
loop {
|
||||||
|
_please_do_not_use_continue_without_label = ();
|
||||||
|
value = match (&eq.guard).get_key() { $($t)* };
|
||||||
|
break __PleaseDoNotUseBreakWithoutLabel;
|
||||||
|
};
|
||||||
|
|
||||||
|
unsafe { (&mut eq.guard).push(value); }
|
||||||
|
}
|
||||||
|
$crate::mem::forget(eq);
|
||||||
|
// Safe because the array was fully initialized.
|
||||||
|
$crate::EnumMap::from_array(unsafe { uninit.assume_init() })
|
||||||
|
}
|
||||||
|
}};
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An enum mapping.
|
||||||
|
///
|
||||||
|
/// This internally uses an array which stores a value for each possible
|
||||||
|
/// enum value. To work, it requires implementation of internal (private,
|
||||||
|
/// although public due to macro limitations) trait which allows extracting
|
||||||
|
/// information about an enum, which can be automatically generated using
|
||||||
|
/// `#[derive(Enum)]` macro.
|
||||||
|
///
|
||||||
|
/// Additionally, `bool` and `u8` automatically derives from `Enum`. While
|
||||||
|
/// `u8` is not technically an enum, it's convenient to consider it like one.
|
||||||
|
/// In particular, [reverse-complement in benchmark game] could be using `u8`
|
||||||
|
/// as an enum.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::{enum_map, Enum, EnumMap};
|
||||||
|
///
|
||||||
|
/// #[derive(Enum)]
|
||||||
|
/// enum Example {
|
||||||
|
/// A,
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let mut map = EnumMap::default();
|
||||||
|
/// // new initializes map with default values
|
||||||
|
/// assert_eq!(map[Example::A], 0);
|
||||||
|
/// map[Example::A] = 3;
|
||||||
|
/// assert_eq!(map[Example::A], 3);
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// [reverse-complement in benchmark game]:
|
||||||
|
/// http://benchmarksgame.alioth.debian.org/u64q/program.php?test=revcomp&lang=rust&id=2
|
||||||
|
pub struct EnumMap<K: EnumArray<V>, V> {
|
||||||
|
array: K::Array,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EnumArray<V>, V: Default> EnumMap<K, V> {
|
||||||
|
/// Clear enum map with default values.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::{Enum, EnumMap};
|
||||||
|
///
|
||||||
|
/// #[derive(Enum)]
|
||||||
|
/// enum Example {
|
||||||
|
/// A,
|
||||||
|
/// B,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let mut enum_map = EnumMap::<_, String>::default();
|
||||||
|
/// enum_map[Example::B] = "foo".into();
|
||||||
|
/// enum_map.clear();
|
||||||
|
/// assert_eq!(enum_map[Example::A], "");
|
||||||
|
/// assert_eq!(enum_map[Example::B], "");
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
pub fn clear(&mut self) {
|
||||||
|
for v in self.as_mut_slice() {
|
||||||
|
*v = V::default();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::len_without_is_empty)]
|
||||||
|
impl<K: EnumArray<V>, V> EnumMap<K, V> {
|
||||||
|
/// Creates an enum map from array.
|
||||||
|
#[inline]
|
||||||
|
pub const fn from_array(array: K::Array) -> EnumMap<K, V> {
|
||||||
|
EnumMap { array }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create an enum map, where each value is the returned value from `cb`
|
||||||
|
/// using provided enum key.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # use enum_map_derive::*;
|
||||||
|
/// use enum_map::{enum_map, Enum, EnumMap};
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, PartialEq, Debug)]
|
||||||
|
/// enum Example {
|
||||||
|
/// A,
|
||||||
|
/// B,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let map = EnumMap::from_fn(|k| k == Example::A);
|
||||||
|
/// assert_eq!(map, enum_map! { Example::A => true, Example::B => false })
|
||||||
|
/// ```
|
||||||
|
pub fn from_fn<F>(mut cb: F) -> Self
|
||||||
|
where
|
||||||
|
F: FnMut(K) -> V,
|
||||||
|
{
|
||||||
|
enum_map! { k => cb(k) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns an iterator over enum map.
|
||||||
|
///
|
||||||
|
/// The iteration order is deterministic, and when using [macro@Enum] derive
|
||||||
|
/// it will be the order in which enum variants are declared.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::{enum_map, Enum};
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, PartialEq)]
|
||||||
|
/// enum E {
|
||||||
|
/// A,
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let map = enum_map! { E::A => 1, E::B => 2, E::C => 3};
|
||||||
|
/// assert!(map.iter().eq([(E::A, &1), (E::B, &2), (E::C, &3)]));
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
pub fn iter(&self) -> Iter<K, V> {
|
||||||
|
self.into_iter()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a mutable iterator over enum map.
|
||||||
|
#[inline]
|
||||||
|
pub fn iter_mut(&mut self) -> IterMut<K, V> {
|
||||||
|
self.into_iter()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns number of elements in enum map.
|
||||||
|
#[inline]
|
||||||
|
#[allow(clippy::unused_self)]
|
||||||
|
pub const fn len(&self) -> usize {
|
||||||
|
K::Array::LENGTH
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Swaps two indexes.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::enum_map;
|
||||||
|
///
|
||||||
|
/// let mut map = enum_map! { false => 0, true => 1 };
|
||||||
|
/// map.swap(false, true);
|
||||||
|
/// assert_eq!(map[false], 1);
|
||||||
|
/// assert_eq!(map[true], 0);
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
pub fn swap(&mut self, a: K, b: K) {
|
||||||
|
self.as_mut_slice().swap(a.into_usize(), b.into_usize());
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Consumes an enum map and returns the underlying array.
|
||||||
|
///
|
||||||
|
/// The order of elements is deterministic, and when using [macro@Enum]
|
||||||
|
/// derive it will be the order in which enum variants are declared.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::{enum_map, Enum};
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, PartialEq)]
|
||||||
|
/// enum E {
|
||||||
|
/// A,
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let map = enum_map! { E::A => 1, E::B => 2, E::C => 3};
|
||||||
|
/// assert_eq!(map.into_array(), [1, 2, 3]);
|
||||||
|
/// ```
|
||||||
|
pub fn into_array(self) -> K::Array {
|
||||||
|
self.array
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the underlying array.
|
||||||
|
///
|
||||||
|
/// The order of elements is deterministic, and when using [macro@Enum]
|
||||||
|
/// derive it will be the order in which enum variants are declared.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::{enum_map, Enum};
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, PartialEq)]
|
||||||
|
/// enum E {
|
||||||
|
/// A,
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let map = enum_map! { E::A => 1, E::B => 2, E::C => 3};
|
||||||
|
/// assert_eq!(map.as_array(), &[1, 2, 3]);
|
||||||
|
/// ```
|
||||||
|
pub const fn as_array(&self) -> &K::Array {
|
||||||
|
&self.array
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a mutable reference to the underlying array.
|
||||||
|
///
|
||||||
|
/// The order of elements is deterministic, and when using [macro@Enum]
|
||||||
|
/// derive it will be the order in which enum variants are declared.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::{enum_map, Enum};
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, PartialEq)]
|
||||||
|
/// enum E {
|
||||||
|
/// A,
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let mut map = enum_map! { E::A => 1, E::B => 2, E::C => 3};
|
||||||
|
/// map.as_mut_array()[1] = 42;
|
||||||
|
/// assert_eq!(map.as_array(), &[1, 42, 3]);
|
||||||
|
/// ```
|
||||||
|
pub fn as_mut_array(&mut self) -> &mut K::Array {
|
||||||
|
&mut self.array
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Converts an enum map to a slice representing values.
|
||||||
|
///
|
||||||
|
/// The order of elements is deterministic, and when using [macro@Enum]
|
||||||
|
/// derive it will be the order in which enum variants are declared.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::{enum_map, Enum};
|
||||||
|
///
|
||||||
|
/// #[derive(Enum, PartialEq)]
|
||||||
|
/// enum E {
|
||||||
|
/// A,
|
||||||
|
/// B,
|
||||||
|
/// C,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// let map = enum_map! { E::A => 1, E::B => 2, E::C => 3};
|
||||||
|
/// assert_eq!(map.as_slice(), &[1, 2, 3]);
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
pub fn as_slice(&self) -> &[V] {
|
||||||
|
unsafe { slice::from_raw_parts(ptr::addr_of!(self.array).cast(), K::Array::LENGTH) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Converts a mutable enum map to a mutable slice representing values.
|
||||||
|
#[inline]
|
||||||
|
pub fn as_mut_slice(&mut self) -> &mut [V] {
|
||||||
|
unsafe { slice::from_raw_parts_mut(ptr::addr_of_mut!(self.array).cast(), K::Array::LENGTH) }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns an enum map with function `f` applied to each element in order.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use enum_map::enum_map;
|
||||||
|
///
|
||||||
|
/// let a = enum_map! { false => 0, true => 1 };
|
||||||
|
/// let b = a.map(|_, x| f64::from(x) + 0.5);
|
||||||
|
/// assert_eq!(b, enum_map! { false => 0.5, true => 1.5 });
|
||||||
|
/// ```
|
||||||
|
pub fn map<F, T>(self, mut f: F) -> EnumMap<K, T>
|
||||||
|
where
|
||||||
|
F: FnMut(K, V) -> T,
|
||||||
|
K: EnumArray<T>,
|
||||||
|
{
|
||||||
|
struct DropOnPanic<K, V>
|
||||||
|
where
|
||||||
|
K: EnumArray<V>,
|
||||||
|
{
|
||||||
|
position: usize,
|
||||||
|
map: ManuallyDrop<EnumMap<K, V>>,
|
||||||
|
}
|
||||||
|
impl<K, V> Drop for DropOnPanic<K, V>
|
||||||
|
where
|
||||||
|
K: EnumArray<V>,
|
||||||
|
{
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe {
|
||||||
|
ptr::drop_in_place(&mut self.map.as_mut_slice()[self.position..]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let mut drop_protect = DropOnPanic {
|
||||||
|
position: 0,
|
||||||
|
map: ManuallyDrop::new(self),
|
||||||
|
};
|
||||||
|
enum_map! {
|
||||||
|
k => {
|
||||||
|
let value = unsafe { ptr::read(&drop_protect.map.as_slice()[drop_protect.position]) };
|
||||||
|
drop_protect.position += 1;
|
||||||
|
f(k, value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
98
third_party/rust/enum-map/src/serde.rs
vendored
Normal file
98
third_party/rust/enum-map/src/serde.rs
vendored
Normal file
|
|
@ -0,0 +1,98 @@
|
||||||
|
// SPDX-FileCopyrightText: 2017 - 2023 Kamila Borowska <kamila@borowska.pw>
|
||||||
|
// SPDX-FileCopyrightText: 2021 Bruno Corrêa Zimmermann <brunoczim@gmail.com>
|
||||||
|
//
|
||||||
|
// SPDX-License-Identifier: MIT OR Apache-2.0
|
||||||
|
|
||||||
|
use crate::{enum_map, EnumArray, EnumMap};
|
||||||
|
use core::fmt;
|
||||||
|
use core::marker::PhantomData;
|
||||||
|
use serde::de::{self, Deserialize, Deserializer, Error, MapAccess, SeqAccess};
|
||||||
|
use serde::ser::{Serialize, SerializeTuple, Serializer};
|
||||||
|
|
||||||
|
/// Requires crate feature `"serde"`
|
||||||
|
impl<K: EnumArray<V> + Serialize, V: Serialize> Serialize for EnumMap<K, V> {
|
||||||
|
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
|
||||||
|
if serializer.is_human_readable() {
|
||||||
|
serializer.collect_map(self)
|
||||||
|
} else {
|
||||||
|
let mut tup = serializer.serialize_tuple(self.len())?;
|
||||||
|
for value in self.values() {
|
||||||
|
tup.serialize_element(value)?;
|
||||||
|
}
|
||||||
|
tup.end()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Requires crate feature `"serde"`
|
||||||
|
impl<'de, K, V> Deserialize<'de> for EnumMap<K, V>
|
||||||
|
where
|
||||||
|
K: EnumArray<V> + EnumArray<Option<V>> + Deserialize<'de>,
|
||||||
|
V: Deserialize<'de>,
|
||||||
|
{
|
||||||
|
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
|
||||||
|
if deserializer.is_human_readable() {
|
||||||
|
deserializer.deserialize_map(HumanReadableVisitor(PhantomData))
|
||||||
|
} else {
|
||||||
|
deserializer.deserialize_tuple(K::LENGTH, CompactVisitor(PhantomData))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct HumanReadableVisitor<K, V>(PhantomData<(K, V)>);
|
||||||
|
|
||||||
|
impl<'de, K, V> de::Visitor<'de> for HumanReadableVisitor<K, V>
|
||||||
|
where
|
||||||
|
K: EnumArray<V> + EnumArray<Option<V>> + Deserialize<'de>,
|
||||||
|
V: Deserialize<'de>,
|
||||||
|
{
|
||||||
|
type Value = EnumMap<K, V>;
|
||||||
|
|
||||||
|
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
write!(formatter, "a map")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn visit_map<M: MapAccess<'de>>(self, mut access: M) -> Result<Self::Value, M::Error> {
|
||||||
|
let mut entries = EnumMap::default();
|
||||||
|
while let Some((key, value)) = access.next_entry()? {
|
||||||
|
entries[key] = Some(value);
|
||||||
|
}
|
||||||
|
for value in entries.values() {
|
||||||
|
value
|
||||||
|
.as_ref()
|
||||||
|
.ok_or_else(|| M::Error::custom("key not specified"))?;
|
||||||
|
}
|
||||||
|
Ok(enum_map! { key => entries[key].take().unwrap() })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct CompactVisitor<K, V>(PhantomData<(K, V)>);
|
||||||
|
|
||||||
|
impl<'de, K, V> de::Visitor<'de> for CompactVisitor<K, V>
|
||||||
|
where
|
||||||
|
K: EnumArray<V> + EnumArray<Option<V>> + Deserialize<'de>,
|
||||||
|
V: Deserialize<'de>,
|
||||||
|
{
|
||||||
|
type Value = EnumMap<K, V>;
|
||||||
|
|
||||||
|
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
write!(formatter, "a sequence")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn visit_seq<M: SeqAccess<'de>>(self, mut access: M) -> Result<Self::Value, M::Error> {
|
||||||
|
let mut entries = EnumMap::default();
|
||||||
|
let len = entries.len();
|
||||||
|
{
|
||||||
|
let mut iter = entries.values_mut();
|
||||||
|
while let Some(place) = iter.next() {
|
||||||
|
*place = Some(access.next_element()?.ok_or_else(|| {
|
||||||
|
M::Error::invalid_length(
|
||||||
|
len - iter.len() - 1,
|
||||||
|
&"a sequence with as many elements as there are variants",
|
||||||
|
)
|
||||||
|
})?);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(enum_map! { key => entries[key].take().unwrap() })
|
||||||
|
}
|
||||||
|
}
|
||||||
115
third_party/rust/enum-map/tests/serde.rs
vendored
Normal file
115
third_party/rust/enum-map/tests/serde.rs
vendored
Normal file
|
|
@ -0,0 +1,115 @@
|
||||||
|
#![cfg(feature = "serde")]
|
||||||
|
|
||||||
|
// SPDX-FileCopyrightText: 2017 - 2022 Kamila Borowska <kamila@borowska.pw>
|
||||||
|
// SPDX-FileCopyrightText: 2022 Cass Fridkin <cass@cloudflare.com>
|
||||||
|
//
|
||||||
|
// SPDX-License-Identifier: MIT OR Apache-2.0
|
||||||
|
|
||||||
|
use enum_map::{enum_map, Enum, EnumMap};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_test::{assert_de_tokens_error, assert_tokens, Compact, Configure, Token};
|
||||||
|
|
||||||
|
#[derive(Debug, Enum, Deserialize, Serialize)]
|
||||||
|
enum Example {
|
||||||
|
A,
|
||||||
|
B,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn serialization() {
|
||||||
|
let map = enum_map! { Example::A => 5, Example::B => 10 };
|
||||||
|
assert_tokens(
|
||||||
|
&map.readable(),
|
||||||
|
&[
|
||||||
|
Token::Map { len: Some(2) },
|
||||||
|
Token::UnitVariant {
|
||||||
|
name: "Example",
|
||||||
|
variant: "A",
|
||||||
|
},
|
||||||
|
Token::I32(5),
|
||||||
|
Token::UnitVariant {
|
||||||
|
name: "Example",
|
||||||
|
variant: "B",
|
||||||
|
},
|
||||||
|
Token::I32(10),
|
||||||
|
Token::MapEnd,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn compact_serialization() {
|
||||||
|
let map = enum_map! { Example::A => 5, Example::B => 10 };
|
||||||
|
assert_tokens(
|
||||||
|
&map.compact(),
|
||||||
|
&[
|
||||||
|
Token::Tuple { len: 2 },
|
||||||
|
Token::I32(5),
|
||||||
|
Token::I32(10),
|
||||||
|
Token::TupleEnd,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn invalid_compact_deserialization() {
|
||||||
|
assert_de_tokens_error::<Compact<EnumMap<bool, bool>>>(
|
||||||
|
&[Token::I32(4)],
|
||||||
|
"invalid type: integer `4`, expected a sequence",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn too_short_compact_deserialization() {
|
||||||
|
assert_de_tokens_error::<Compact<EnumMap<bool, bool>>>(
|
||||||
|
&[Token::Seq { len: None }, Token::Bool(true), Token::SeqEnd],
|
||||||
|
"invalid length 1, expected a sequence with as many elements as there are variants",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const JSON: &str = r#"{"A":5,"B":10}"#;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn json_serialization() {
|
||||||
|
let map = enum_map! { Example::A => 5, Example::B => 10 };
|
||||||
|
assert_eq!(serde_json::to_string(&map).unwrap(), String::from(JSON));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn json_deserialization() {
|
||||||
|
let example: EnumMap<Example, i32> = serde_json::from_str(JSON).unwrap();
|
||||||
|
assert_eq!(example, enum_map! { Example::A => 5, Example::B => 10 });
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn json_invalid_deserialization() {
|
||||||
|
let example: Result<EnumMap<Example, i32>, _> = serde_json::from_str(r"{}");
|
||||||
|
assert!(example.is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn json_invalid_type() {
|
||||||
|
let example: Result<EnumMap<Example, i32>, _> = serde_json::from_str("4");
|
||||||
|
assert!(example.is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn json_invalid_key() {
|
||||||
|
let example: Result<EnumMap<Example, i32>, _> =
|
||||||
|
serde_json::from_str(r#"{"a": 5, "b": 10, "c": 6}"#);
|
||||||
|
assert!(example.is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn bincode_serialization() {
|
||||||
|
let example = enum_map! { false => 3u8, true => 4u8 };
|
||||||
|
let serialized = bincode::serialize(&example).unwrap();
|
||||||
|
assert_eq!(example, bincode::deserialize(&serialized).unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn bincode_too_short_deserialization() {
|
||||||
|
assert!(
|
||||||
|
bincode::deserialize::<EnumMap<bool, bool>>(&bincode::serialize(&()).unwrap()).is_err()
|
||||||
|
);
|
||||||
|
}
|
||||||
696
third_party/rust/enum-map/tests/test.rs
vendored
Normal file
696
third_party/rust/enum-map/tests/test.rs
vendored
Normal file
|
|
@ -0,0 +1,696 @@
|
||||||
|
// SPDX-FileCopyrightText: 2018 - 2022 Kamila Borowska <kamila@borowska.pw>
|
||||||
|
// SPDX-FileCopyrightText: 2019 Riey <creeper844@gmail.com>
|
||||||
|
// SPDX-FileCopyrightText: 2020 Amanieu d'Antras <amanieu@gmail.com>
|
||||||
|
// SPDX-FileCopyrightText: 2021 Bruno Corrêa Zimmermann <brunoczim@gmail.com>
|
||||||
|
// SPDX-FileCopyrightText: 2021 micycle
|
||||||
|
// SPDX-FileCopyrightText: 2022 Cass Fridkin <cass@cloudflare.com>
|
||||||
|
//
|
||||||
|
// SPDX-License-Identifier: MIT OR Apache-2.0
|
||||||
|
|
||||||
|
#[macro_use]
|
||||||
|
extern crate enum_map;
|
||||||
|
|
||||||
|
use enum_map::{Enum, EnumArray, EnumMap, IntoIter};
|
||||||
|
|
||||||
|
use std::cell::{Cell, RefCell};
|
||||||
|
use std::collections::HashSet;
|
||||||
|
use std::convert::Infallible;
|
||||||
|
use std::marker::PhantomData;
|
||||||
|
use std::num::ParseIntError;
|
||||||
|
use std::panic::{catch_unwind, UnwindSafe};
|
||||||
|
|
||||||
|
trait From<T>: Sized {
|
||||||
|
fn from(_: T) -> Self {
|
||||||
|
unreachable!();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T, U> From<T> for U {}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, Enum, PartialEq)]
|
||||||
|
enum Example {
|
||||||
|
A,
|
||||||
|
B,
|
||||||
|
C,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_bool() {
|
||||||
|
let mut map = enum_map! { false => 24, true => 42 };
|
||||||
|
assert_eq!(map[false], 24);
|
||||||
|
assert_eq!(map[true], 42);
|
||||||
|
map[false] += 1;
|
||||||
|
assert_eq!(map[false], 25);
|
||||||
|
for (key, item) in &mut map {
|
||||||
|
if !key {
|
||||||
|
*item += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assert_eq!(map[false], 26);
|
||||||
|
assert_eq!(map[true], 42);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_clone() {
|
||||||
|
let map = enum_map! { false => 3, true => 5 };
|
||||||
|
assert_eq!(map.clone(), map);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_debug() {
|
||||||
|
let map = enum_map! { false => 3, true => 5 };
|
||||||
|
assert_eq!(format!("{:?}", map), "{false: 3, true: 5}");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_hash() {
|
||||||
|
let map = enum_map! { false => 3, true => 5 };
|
||||||
|
let mut set = HashSet::new();
|
||||||
|
set.insert(map);
|
||||||
|
assert!(set.contains(&map));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_clear() {
|
||||||
|
let mut map = enum_map! { false => 1, true => 2 };
|
||||||
|
map.clear();
|
||||||
|
assert_eq!(map[true], 0);
|
||||||
|
assert_eq!(map[false], 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn struct_of_enum() {
|
||||||
|
#[derive(Copy, Clone, Debug, Enum, PartialEq)]
|
||||||
|
struct Product {
|
||||||
|
example: Example,
|
||||||
|
is_done: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut map = enum_map! {
|
||||||
|
Product { example: Example::A, is_done: false } => "foo",
|
||||||
|
Product { example: Example::B, is_done: false } => "bar",
|
||||||
|
Product { example: Example::C, is_done: false } => "baz",
|
||||||
|
Product { example: Example::A, is_done: true } => "done foo",
|
||||||
|
Product { example: Example::B, is_done: true } => "bar done",
|
||||||
|
Product { example: Example::C, is_done: true } => "doooozne",
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
map[Product {
|
||||||
|
example: Example::B,
|
||||||
|
is_done: false
|
||||||
|
}],
|
||||||
|
"bar"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
map[Product {
|
||||||
|
example: Example::C,
|
||||||
|
is_done: false
|
||||||
|
}],
|
||||||
|
"baz"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
map[Product {
|
||||||
|
example: Example::B,
|
||||||
|
is_done: true
|
||||||
|
}],
|
||||||
|
"bar done"
|
||||||
|
);
|
||||||
|
|
||||||
|
map[Product {
|
||||||
|
example: Example::B,
|
||||||
|
is_done: true,
|
||||||
|
}] = "not really done";
|
||||||
|
assert_eq!(
|
||||||
|
map[Product {
|
||||||
|
example: Example::B,
|
||||||
|
is_done: false
|
||||||
|
}],
|
||||||
|
"bar"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
map[Product {
|
||||||
|
example: Example::C,
|
||||||
|
is_done: false
|
||||||
|
}],
|
||||||
|
"baz"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
map[Product {
|
||||||
|
example: Example::B,
|
||||||
|
is_done: true
|
||||||
|
}],
|
||||||
|
"not really done"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tuple_struct_of_enum() {
|
||||||
|
#[derive(Copy, Clone, Debug, Enum, PartialEq)]
|
||||||
|
struct Product(Example, bool);
|
||||||
|
|
||||||
|
let mut map = enum_map! {
|
||||||
|
Product(Example::A, false) => "foo",
|
||||||
|
Product(Example::B, false) => "bar",
|
||||||
|
Product(Example::C, false) => "baz",
|
||||||
|
Product(Example::A, true) => "done foo",
|
||||||
|
Product(Example::B, true) => "bar done",
|
||||||
|
Product(Example::C, true) => "doooozne",
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(map[Product(Example::B, false)], "bar");
|
||||||
|
assert_eq!(map[Product(Example::C, false)], "baz");
|
||||||
|
assert_eq!(map[Product(Example::B, true)], "bar done");
|
||||||
|
|
||||||
|
map[Product(Example::B, true)] = "not really done";
|
||||||
|
assert_eq!(map[Product(Example::B, false)], "bar");
|
||||||
|
assert_eq!(map[Product(Example::C, false)], "baz");
|
||||||
|
assert_eq!(map[Product(Example::B, true)], "not really done");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn discriminants() {
|
||||||
|
#[derive(Debug, Enum, PartialEq)]
|
||||||
|
enum Discriminants {
|
||||||
|
A = 2000,
|
||||||
|
B = 3000,
|
||||||
|
C = 1000,
|
||||||
|
}
|
||||||
|
let mut map = EnumMap::default();
|
||||||
|
map[Discriminants::A] = 3;
|
||||||
|
map[Discriminants::B] = 2;
|
||||||
|
map[Discriminants::C] = 1;
|
||||||
|
let mut pairs = map.iter();
|
||||||
|
assert_eq!(pairs.next(), Some((Discriminants::A, &3)));
|
||||||
|
assert_eq!(pairs.next(), Some((Discriminants::B, &2)));
|
||||||
|
assert_eq!(pairs.next(), Some((Discriminants::C, &1)));
|
||||||
|
assert_eq!(pairs.next(), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn extend() {
|
||||||
|
let mut map = enum_map! { _ => 0 };
|
||||||
|
map.extend(vec![(Example::A, 3)]);
|
||||||
|
map.extend(vec![(&Example::B, &4)]);
|
||||||
|
assert_eq!(
|
||||||
|
map,
|
||||||
|
enum_map! { Example::A => 3, Example::B => 4, Example::C => 0 }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn collect() {
|
||||||
|
let iter = vec![(Example::A, 5), (Example::B, 7)]
|
||||||
|
.into_iter()
|
||||||
|
.map(|(k, v)| (k, v + 1));
|
||||||
|
assert_eq!(
|
||||||
|
iter.collect::<EnumMap<_, _>>(),
|
||||||
|
enum_map! { Example::A => 6, Example::B => 8, Example::C => 0 }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn huge_enum() {
|
||||||
|
#[derive(Enum)]
|
||||||
|
enum Example {
|
||||||
|
A,
|
||||||
|
B,
|
||||||
|
C,
|
||||||
|
D,
|
||||||
|
E,
|
||||||
|
F,
|
||||||
|
G,
|
||||||
|
H,
|
||||||
|
I,
|
||||||
|
J,
|
||||||
|
K,
|
||||||
|
L,
|
||||||
|
M,
|
||||||
|
N,
|
||||||
|
O,
|
||||||
|
P,
|
||||||
|
Q,
|
||||||
|
R,
|
||||||
|
S,
|
||||||
|
T,
|
||||||
|
U,
|
||||||
|
V,
|
||||||
|
W,
|
||||||
|
X,
|
||||||
|
Y,
|
||||||
|
Z,
|
||||||
|
Aa,
|
||||||
|
Bb,
|
||||||
|
Cc,
|
||||||
|
Dd,
|
||||||
|
Ee,
|
||||||
|
Ff,
|
||||||
|
Gg,
|
||||||
|
Hh,
|
||||||
|
Ii,
|
||||||
|
Jj,
|
||||||
|
Kk,
|
||||||
|
Ll,
|
||||||
|
Mm,
|
||||||
|
Nn,
|
||||||
|
Oo,
|
||||||
|
Pp,
|
||||||
|
Qq,
|
||||||
|
Rr,
|
||||||
|
Ss,
|
||||||
|
Tt,
|
||||||
|
Uu,
|
||||||
|
Vv,
|
||||||
|
Ww,
|
||||||
|
Xx,
|
||||||
|
Yy,
|
||||||
|
Zz,
|
||||||
|
}
|
||||||
|
|
||||||
|
let map = enum_map! { _ => 2 };
|
||||||
|
assert_eq!(map[Example::Xx], 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn iterator_len() {
|
||||||
|
assert_eq!(
|
||||||
|
enum_map! { Example::A | Example::B | Example::C => 0 }
|
||||||
|
.iter()
|
||||||
|
.len(),
|
||||||
|
3
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn iter_mut_len() {
|
||||||
|
assert_eq!(
|
||||||
|
enum_map! { Example::A | Example::B | Example::C => 0 }
|
||||||
|
.iter_mut()
|
||||||
|
.len(),
|
||||||
|
3
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn into_iter_len() {
|
||||||
|
assert_eq!(enum_map! { Example::A | _ => 0 }.into_iter().len(), 3);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn iterator_next_back() {
|
||||||
|
assert_eq!(
|
||||||
|
enum_map! { Example::A => 1, Example::B => 2, Example::C => 3 }
|
||||||
|
.iter()
|
||||||
|
.next_back(),
|
||||||
|
Some((Example::C, &3))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn iter_mut_next_back() {
|
||||||
|
assert_eq!(
|
||||||
|
enum_map! { Example::A => 1, Example::B => 2, Example::C => 3 }
|
||||||
|
.iter_mut()
|
||||||
|
.next_back(),
|
||||||
|
Some((Example::C, &mut 3))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn into_iter() {
|
||||||
|
let mut iter = enum_map! { true => 5, false => 7 }.into_iter();
|
||||||
|
assert_eq!(iter.next(), Some((false, 7)));
|
||||||
|
assert_eq!(iter.next(), Some((true, 5)));
|
||||||
|
assert_eq!(iter.next(), None);
|
||||||
|
assert_eq!(iter.next(), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn into_iter_u8() {
|
||||||
|
assert_eq!(
|
||||||
|
enum_map! { i => i }.into_iter().collect::<Vec<_>>(),
|
||||||
|
(0..256).map(|x| (x as u8, x as u8)).collect::<Vec<_>>()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
struct DropReporter<'a> {
|
||||||
|
into: &'a RefCell<Vec<usize>>,
|
||||||
|
value: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Drop for DropReporter<'a> {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
self.into.borrow_mut().push(self.value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn into_iter_drop() {
|
||||||
|
let dropped = RefCell::new(Vec::default());
|
||||||
|
let mut a: IntoIter<Example, _> = enum_map! {
|
||||||
|
k => DropReporter {
|
||||||
|
into: &dropped,
|
||||||
|
value: k as usize,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
.into_iter();
|
||||||
|
assert_eq!(a.next().unwrap().0, Example::A);
|
||||||
|
assert_eq!(*dropped.borrow(), &[0]);
|
||||||
|
drop(a);
|
||||||
|
assert_eq!(*dropped.borrow(), &[0, 1, 2]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn into_iter_double_ended_iterator() {
|
||||||
|
let mut iter = enum_map! { 0 => 5, 255 => 7, _ => 0 }.into_iter();
|
||||||
|
assert_eq!(iter.next(), Some((0, 5)));
|
||||||
|
assert_eq!(iter.next_back(), Some((255, 7)));
|
||||||
|
assert_eq!(iter.next(), Some((1, 0)));
|
||||||
|
assert_eq!(iter.next_back(), Some((254, 0)));
|
||||||
|
assert!(iter.rev().eq((2..254).rev().map(|i| (i, 0))));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn values_rev_collect() {
|
||||||
|
assert_eq!(
|
||||||
|
vec![3, 2, 1],
|
||||||
|
enum_map! { Example::A => 1, Example::B => 2, Example::C => 3 }
|
||||||
|
.values()
|
||||||
|
.rev()
|
||||||
|
.cloned()
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn values_len() {
|
||||||
|
assert_eq!(enum_map! { false => 0, true => 1 }.values().len(), 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn into_values_rev_collect() {
|
||||||
|
assert_eq!(
|
||||||
|
vec![3, 2, 1],
|
||||||
|
enum_map! { Example::A => 1, Example::B => 2, Example::C => 3 }
|
||||||
|
.into_values()
|
||||||
|
.rev()
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn into_values_len() {
|
||||||
|
assert_eq!(enum_map! { false => 0, true => 1 }.into_values().len(), 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn values_mut_next_back() {
|
||||||
|
let mut map = enum_map! { false => 0, true => 1 };
|
||||||
|
assert_eq!(map.values_mut().next_back(), Some(&mut 1));
|
||||||
|
}
|
||||||
|
#[test]
|
||||||
|
fn test_u8() {
|
||||||
|
let mut map = enum_map! { b'a' => 4, _ => 0 };
|
||||||
|
map[b'c'] = 3;
|
||||||
|
assert_eq!(map[b'a'], 4);
|
||||||
|
assert_eq!(map[b'b'], 0);
|
||||||
|
assert_eq!(map[b'c'], 3);
|
||||||
|
assert_eq!(map.iter().next(), Some((0, &0)));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Enum)]
|
||||||
|
enum Void {}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn empty_map() {
|
||||||
|
let void: EnumMap<Void, Void> = enum_map! {};
|
||||||
|
assert_eq!(void.len(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
#[should_panic]
|
||||||
|
fn empty_value() {
|
||||||
|
let _void: EnumMap<bool, Void> = enum_map! { _ => unreachable!() };
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn empty_infallible_map() {
|
||||||
|
let void: EnumMap<Infallible, Infallible> = enum_map! {};
|
||||||
|
assert_eq!(void.len(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
enum X {
|
||||||
|
A(PhantomData<*const ()>),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Enum for X {
|
||||||
|
const LENGTH: usize = 1;
|
||||||
|
|
||||||
|
fn from_usize(arg: usize) -> X {
|
||||||
|
assert_eq!(arg, 0);
|
||||||
|
X::A(PhantomData)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn into_usize(self) -> usize {
|
||||||
|
0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<V> EnumArray<V> for X {
|
||||||
|
type Array = [V; Self::LENGTH];
|
||||||
|
}
|
||||||
|
|
||||||
|
fn assert_sync_send<T: Sync + Send>(_: T) {}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn assert_enum_map_does_not_copy_sync_send_dependency_of_keys() {
|
||||||
|
let mut map = enum_map! { X::A(PhantomData) => true };
|
||||||
|
assert_sync_send(map);
|
||||||
|
assert_sync_send(&map);
|
||||||
|
assert_sync_send(&mut map);
|
||||||
|
assert_sync_send(map.iter());
|
||||||
|
assert_sync_send(map.iter_mut());
|
||||||
|
assert_sync_send(map.into_iter());
|
||||||
|
assert!(map[X::A(PhantomData)]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_sum() {
|
||||||
|
assert_eq!(
|
||||||
|
enum_map! { i => u8::into(i) }
|
||||||
|
.iter()
|
||||||
|
.map(|(_, v)| v)
|
||||||
|
.sum::<u32>(),
|
||||||
|
32_640
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_sum_mut() {
|
||||||
|
assert_eq!(
|
||||||
|
enum_map! { i => u8::into(i) }
|
||||||
|
.iter_mut()
|
||||||
|
.map(|(_, &mut v)| -> u32 { v })
|
||||||
|
.sum::<u32>(),
|
||||||
|
32_640
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_iter_clone() {
|
||||||
|
struct S(u8);
|
||||||
|
let map = enum_map! {
|
||||||
|
Example::A => S(3),
|
||||||
|
Example::B => S(4),
|
||||||
|
Example::C => S(1),
|
||||||
|
};
|
||||||
|
let iter = map.iter();
|
||||||
|
assert_eq!(iter.clone().map(|(_, S(v))| v).sum::<u8>(), 8);
|
||||||
|
assert_eq!(iter.map(|(_, S(v))| v).sum::<u8>(), 8);
|
||||||
|
let values = map.values();
|
||||||
|
assert_eq!(values.clone().map(|S(v)| v).sum::<u8>(), 8);
|
||||||
|
assert_eq!(values.map(|S(v)| v).sum::<u8>(), 8);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn question_mark() -> Result<(), ParseIntError> {
|
||||||
|
let map = enum_map! { false => "2".parse()?, true => "5".parse()? };
|
||||||
|
    assert_eq!(map, enum_map! { false => 2, true => 5 });
    Ok(())
}

#[test]
fn question_mark_failure() {
    struct IncOnDrop<'a>(&'a Cell<i32>);

    impl Drop for IncOnDrop<'_> {
        fn drop(&mut self) {
            self.0.set(self.0.get() + 1);
        }
    }

    fn failible() -> Result<IncOnDrop<'static>, &'static str> {
        Err("ERROR!")
    }

    fn try_block(inc: &Cell<i32>) -> Result<(), &'static str> {
        enum_map! {
            32 => failible()?,
            _ => {
                IncOnDrop(inc)
            }
        };
        Ok(())
    }

    let value = Cell::new(0);
    assert_eq!(try_block(&value), Err("ERROR!"));
    assert_eq!(value.get(), 32);
}

#[test]
#[should_panic = "Intentional panic"]
fn map_panic() {
    let map: EnumMap<u8, String> = enum_map! { i => i.to_string() };
    map.map(|k, v| {
        if k == 2 {
            panic!("Intentional panic");
        }
        v + " modified"
    });
}

macro_rules! make_enum_map_macro_safety_test {
    ($a:tt $b:tt) => {
        // This is misuse of an API, however we need to test that to ensure safety
        // as we use unsafe code.
        enum E {
            A,
            B,
            C,
        }

        impl Enum for E {
            const LENGTH: usize = $a;

            fn from_usize(value: usize) -> E {
                match value {
                    0 => E::A,
                    1 => E::B,
                    2 => E::C,
                    _ => unimplemented!(),
                }
            }

            fn into_usize(self) -> usize {
                self as usize
            }
        }

        impl<V> EnumArray<V> for E {
            type Array = [V; $b];
        }

        let map: EnumMap<E, String> = enum_map! { _ => "Hello, world!".into() };
        map.into_iter();
    };
}

#[test]
fn enum_map_macro_safety_under() {
    make_enum_map_macro_safety_test!(2 3);
}

#[test]
fn enum_map_macro_safety_over() {
    make_enum_map_macro_safety_test!(3 2);
}

#[test]
fn drop_panic_into_iter() {
    struct DropHandler<'a>(&'a Cell<usize>);
    impl Drop for DropHandler<'_> {
        fn drop(&mut self) {
            self.0.set(self.0.get() + 1);
        }
    }
    impl UnwindSafe for DropHandler<'_> {}
    struct Storage<'a> {
        should_panic: bool,
        _drop_handler: DropHandler<'a>,
    }
    impl Drop for Storage<'_> {
        fn drop(&mut self) {
            if self.should_panic {
                panic!();
            }
        }
    }
    let cell = Cell::new(0);
    let map: EnumMap<Example, _> = enum_map! {
        v => Storage { should_panic: v == Example::B, _drop_handler: DropHandler(&cell) },
    };
    assert!(catch_unwind(|| {
        map.into_iter();
    })
    .is_err());
    assert_eq!(cell.get(), 3);
}

#[test]
fn test_const_enum_map_from_array() {
    const CONST_ENUM_MAP_FROM_ARRAY: EnumMap<bool, u32> = EnumMap::from_array([4, 8]);
    assert_eq!(
        CONST_ENUM_MAP_FROM_ARRAY,
        enum_map! { false => 4, true => 8 },
    );
}

#[test]
fn usize_override() {
    #[allow(non_camel_case_types, dead_code)]
    type usize = ();
    #[derive(Enum)]
    enum X {
        A,
        B,
    }
}

// Regression test for https://codeberg.org/xfix/enum-map/issues/112
#[test]
fn test_issue_112() {
    #[derive(Enum, PartialEq, Debug)]
    enum Inner {
        Inner1,
        Inner2,
    }

    #[derive(Enum, PartialEq, Debug)]
    enum Outer {
        A,
        B(Inner),
        C,
        D(Inner, Inner),
        E,
    }

    assert_eq!(Outer::A.into_usize(), 0);
    assert_eq!(Outer::A, Outer::from_usize(0));
    assert_eq!(Outer::B(Inner::Inner1).into_usize(), 1);
    assert_eq!(Outer::B(Inner::Inner1), Outer::from_usize(1));
    assert_eq!(Outer::B(Inner::Inner2).into_usize(), 2);
    assert_eq!(Outer::B(Inner::Inner2), Outer::from_usize(2));
    assert_eq!(Outer::C.into_usize(), 3);
    assert_eq!(Outer::C, Outer::from_usize(3));
    assert_eq!(Outer::D(Inner::Inner1, Inner::Inner1).into_usize(), 4);
    assert_eq!(Outer::D(Inner::Inner1, Inner::Inner1), Outer::from_usize(4));
    assert_eq!(Outer::D(Inner::Inner2, Inner::Inner1).into_usize(), 5);
    assert_eq!(Outer::D(Inner::Inner2, Inner::Inner1), Outer::from_usize(5));
    assert_eq!(Outer::D(Inner::Inner1, Inner::Inner2).into_usize(), 6);
    assert_eq!(Outer::D(Inner::Inner1, Inner::Inner2), Outer::from_usize(6));
    assert_eq!(Outer::D(Inner::Inner2, Inner::Inner2).into_usize(), 7);
    assert_eq!(Outer::D(Inner::Inner2, Inner::Inner2), Outer::from_usize(7));
    assert_eq!(Outer::E.into_usize(), 8);
    assert_eq!(Outer::E, Outer::from_usize(8));
}

third_party/rust/neqo-common/.cargo-checksum.json
@@ -1 +1 @@
{"files":{"Cargo.toml":"b29d3a82e784eaa91625dd36e843cd34800e6528573eb7684cb34989b9a6cbc3","build.rs":"a17b1bb1bd3de3fc958f72d4d1357f7bc4432faa26640c95b5fbfccf40579d67","src/codec.rs":"a317da96f2a2e70313f1a6ed4741261191cef690db25dc789973fd980aafc77f","src/datagram.rs":"742aa0f39ac24d63431b58e23ebf925e27ec42340e5911020475de5f7f457a6d","src/event.rs":"f60fee9f4b09ef47ff5e4bfa21c07e45ffd5873c292f2605f24d834070127d62","src/header.rs":"467b947f78bfe354d8bb51e8df0c2be69e75a45e2be688d81f0d268aa77c89ef","src/hrtime.rs":"fdb72b347c94eefb2fa0bdb669cca4549853d7f767ad7572941adf265f1621f9","src/incrdecoder.rs":"f3b6e964d02c34e7c8fc5f048b4f99cc3b0f2567cabb2e078f0b7894e1baa50e","src/lib.rs":"b1d5f72196b9e846fdc10b099468e3f437b3121b3b2d72727a1f55b5c77c455c","src/log.rs":"2713e29de2d4718b65ad1b0d922702629845b830b195a5b01b018dc395039a85","src/qlog.rs":"c9f4a32950d405fdfbb317c61a4089fa1b75c5de40698e851a62413ceac46c8a","src/timer.rs":"8da10e8300be0795367e2823d3ecf7ec46bcadbedfc28ed5a013794bcd73cfc7","tests/log.rs":"480b165b7907ec642c508b303d63005eee1427115d6973a349eaf6b2242ed18d"},"package":null}
{"files":{"Cargo.toml":"dbb5500f87df7aee6e680ac210ddb56b833aa82d6be5c407474de0895cee14e9","build.rs":"a17b1bb1bd3de3fc958f72d4d1357f7bc4432faa26640c95b5fbfccf40579d67","src/codec.rs":"8c14f09864b095e28ff52e7d96a12a6591fc9c4b20a9cafca6720d132c80efdc","src/datagram.rs":"1a7028d96a2e7385e94265de53189eb824b7cf12e0e2de5d67c3f3f8751b6043","src/event.rs":"4ef9e6f3f5168f2eacb7be982e062e743c64a64e809765d2139122839aa407e5","src/header.rs":"467b947f78bfe354d8bb51e8df0c2be69e75a45e2be688d81f0d268aa77c89ef","src/hrtime.rs":"d7c8849e9ec7a312878ea2bc28939717fa03969fb9aee259a4a516351ee37643","src/incrdecoder.rs":"577c32b9ace51f2daaf940be6d0c391c4f55cd42ef6848c68c1ffc970d8c57b5","src/lib.rs":"47c14084c6d475ebb855f3ed9302b31fa42780b93a816bf098c96987ffe33572","src/log.rs":"c68099eae0e9014be35173ac802165b128433d973390e1111c08df56e71df063","src/qlog.rs":"3f43dc4e5fdccb9d6ee74d9e7b3ff29da63e4eb9f631e4e35446e452d8ec7af6","src/timer.rs":"50a2de20933b7b5884337aded69e59e2523503481308f25de1bba1a11d505be8","src/tos.rs":"5b5a61c699266716afce2f5bda7c98151db3223ede41ce451c390863198e30a2","tests/log.rs":"480b165b7907ec642c508b303d63005eee1427115d6973a349eaf6b2242ed18d"},"package":null}

third_party/rust/neqo-common/Cargo.toml
@@ -11,30 +11,38 @@
[package]
edition = "2018"
-rust-version = "1.65.0"
+rust-version = "1.70.0"
name = "neqo-common"
-version = "0.6.8"
+version = "0.7.0"
authors = ["Bobby Holley <bobbyholley@gmail.com>"]
build = "build.rs"
license = "MIT OR Apache-2.0"

[dependencies]
-lazy_static = "1.3.0"
-qlog = "0.9.0"
+enum-map = "2.7"
+lazy_static = "1.4"

[dependencies.env_logger]
version = "0.10"
default-features = false

[dependencies.log]
-version = "0.4.0"
+version = "0.4"
default-features = false

+[dependencies.qlog]
+git = "https://github.com/cloudflare/quiche"
+rev = "09ea4b244096a013071cfe2175bbf2945fb7f8d1"
+
[dependencies.time]
-version = "=0.3.23"
+version = "0.3.23"
features = ["formatting"]

+[dev-dependencies.test-fixture]
+path = "../test-fixture"
+
[features]
+ci = []
deny-warnings = []

[target."cfg(windows)".dependencies.winapi]

third_party/rust/neqo-common/src/codec.rs
@@ -34,7 +34,9 @@ impl<'a> Decoder<'a> {
    }

    /// Skip n bytes.
+    ///
    /// # Panics
+    ///
    /// If the remaining quantity is less than `n`.
    pub fn skip(&mut self, n: usize) {
        assert!(self.remaining() >= n, "insufficient data");
@@ -90,7 +92,9 @@ impl<'a> Decoder<'a> {
    }

    /// Decodes an unsigned integer of length 1..=8.
+    ///
    /// # Panics
+    ///
    /// This panics if `n` is not in the range `1..=8`.
    pub fn decode_uint(&mut self, n: usize) -> Option<u64> {
        assert!(n > 0 && n <= 8);
@@ -198,7 +202,9 @@ pub struct Encoder {

impl Encoder {
    /// Static helper function for previewing the results of encoding without doing it.
+    ///
    /// # Panics
+    ///
    /// When `v` is too large.
    #[must_use]
    pub const fn varint_len(v: u64) -> usize {
@@ -212,7 +218,9 @@ impl Encoder {
    }

    /// Static helper to determine how long a varint-prefixed array encodes to.
+    ///
    /// # Panics
+    ///
    /// When `len` doesn't fit in a `u64`.
    #[must_use]
    pub fn vvec_len(len: usize) -> usize {
@@ -261,7 +269,9 @@ impl Encoder {
    }

    /// Don't use this except in testing.
+    ///
    /// # Panics
+    ///
    /// When `s` contains non-hex values or an odd number of values.
    #[must_use]
    pub fn from_hex(s: impl AsRef<str>) -> Self {
@@ -291,7 +301,9 @@ impl Encoder {
    }

    /// Encode an integer of any size up to u64.
+    ///
    /// # Panics
+    ///
    /// When `n` is outside the range `1..=8`.
    #[allow(clippy::cast_possible_truncation)]
    pub fn encode_uint<T: Into<u64>>(&mut self, n: usize, v: T) -> &mut Self {
@@ -304,7 +316,9 @@ impl Encoder {
    }

    /// Encode a QUIC varint.
+    ///
    /// # Panics
+    ///
    /// When `v >= 1<<62`.
    pub fn encode_varint<T: Into<u64>>(&mut self, v: T) -> &mut Self {
        let v = v.into();
@@ -319,7 +333,9 @@ impl Encoder {
    }

    /// Encode a vector in TLS style.
+    ///
    /// # Panics
+    ///
    /// When `v` is longer than 2^64.
    pub fn encode_vec(&mut self, n: usize, v: &[u8]) -> &mut Self {
        self.encode_uint(n, u64::try_from(v.as_ref().len()).unwrap())
@@ -327,7 +343,9 @@ impl Encoder {
    }

    /// Encode a vector in TLS style using a closure for the contents.
+    ///
    /// # Panics
+    ///
    /// When `f()` returns a length larger than `2^8n`.
    #[allow(clippy::cast_possible_truncation)]
    pub fn encode_vec_with<F: FnOnce(&mut Self)>(&mut self, n: usize, f: F) -> &mut Self {
@@ -343,7 +361,9 @@ impl Encoder {
    }

    /// Encode a vector with a varint length.
+    ///
    /// # Panics
+    ///
    /// When `v` is longer than 2^64.
    pub fn encode_vvec(&mut self, v: &[u8]) -> &mut Self {
        self.encode_varint(u64::try_from(v.as_ref().len()).unwrap())
@@ -351,7 +371,9 @@ impl Encoder {
    }

    /// Encode a vector with a varint length using a closure.
+    ///
    /// # Panics
+    ///
    /// When `f()` writes more than 2^62 bytes.
    #[allow(clippy::cast_possible_truncation)]
    pub fn encode_vvec_with<F: FnOnce(&mut Self)>(&mut self, f: F) -> &mut Self {
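
A quick, editor-added sketch (not part of the commit) of how the two static helpers documented above behave; it assumes only the `varint_len` and `vvec_len` signatures from these hunks, and the values follow the standard QUIC varint size classes:

use neqo_common::Encoder;

fn main() {
    // QUIC varints occupy 1, 2, 4 or 8 bytes depending on the value.
    assert_eq!(Encoder::varint_len(63), 1);
    assert_eq!(Encoder::varint_len(16_383), 2);
    assert_eq!(Encoder::varint_len(1_073_741_823), 4);
    assert_eq!(Encoder::varint_len((1u64 << 62) - 1), 8);
    // A varint-prefixed vector costs the length prefix plus the payload itself.
    assert_eq!(Encoder::vvec_len(10), 1 + 10);
}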

third_party/rust/neqo-common/src/datagram.rs
@@ -4,23 +4,32 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

-use std::net::SocketAddr;
-use std::ops::Deref;
+use std::{net::SocketAddr, ops::Deref};

-use crate::hex_with_len;
+use crate::{hex_with_len, IpTos};

-#[derive(PartialEq, Eq, Clone)]
+#[derive(Clone, PartialEq, Eq)]
pub struct Datagram {
    src: SocketAddr,
    dst: SocketAddr,
+    tos: IpTos,
+    ttl: Option<u8>,
    d: Vec<u8>,
}

impl Datagram {
-    pub fn new<V: Into<Vec<u8>>>(src: SocketAddr, dst: SocketAddr, d: V) -> Self {
+    pub fn new<V: Into<Vec<u8>>>(
+        src: SocketAddr,
+        dst: SocketAddr,
+        tos: IpTos,
+        ttl: Option<u8>,
+        d: V,
+    ) -> Self {
        Self {
            src,
            dst,
+            tos,
+            ttl,
            d: d.into(),
        }
    }
@@ -34,6 +43,16 @@ impl Datagram {
    pub fn destination(&self) -> SocketAddr {
        self.dst
    }
+
+    #[must_use]
+    pub fn tos(&self) -> IpTos {
+        self.tos
+    }
+
+    #[must_use]
+    pub fn ttl(&self) -> Option<u8> {
+        self.ttl
+    }
}

impl Deref for Datagram {
@@ -48,10 +67,25 @@ impl std::fmt::Debug for Datagram {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
-            "Datagram {:?}->{:?}: {}",
+            "Datagram {:?} TTL {:?} {:?}->{:?}: {}",
+            self.tos,
+            self.ttl,
            self.src,
            self.dst,
            hex_with_len(&self.d)
        )
    }
}
+
+#[cfg(test)]
+use test_fixture::datagram;
+
+#[test]
+fn fmt_datagram() {
+    let d = datagram([0; 1].to_vec());
+    assert_eq!(
+        format!("{d:?}"),
+        "Datagram IpTos(Cs0, NotEct) TTL Some(128) [fe80::1]:443->[fe80::1]:443: [1]: 00"
+            .to_string()
+    );
+}
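
For illustration (editor-added, not part of the commit), constructing a `Datagram` with the widened constructor above and reading the new fields back might look like this; the addresses, TTL, and payload are arbitrary, and the re-exports used here are the ones added to lib.rs further down:

use std::net::SocketAddr;

use neqo_common::{Datagram, IpTos, IpTosDscp, IpTosEcn};

fn main() {
    let src: SocketAddr = "127.0.0.1:4433".parse().unwrap();
    let dst: SocketAddr = "127.0.0.1:443".parse().unwrap();
    // The TOS byte combines a DSCP codepoint with an ECN codepoint.
    let tos = IpTos::from((IpTosDscp::Cs0, IpTosEcn::Ect0));
    let d = Datagram::new(src, dst, tos, Some(64), vec![0u8; 16]);
    assert_eq!(d.tos(), tos);
    assert_eq!(d.ttl(), Some(64));
    assert_eq!(d.len(), 16); // payload access via Deref
}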

third_party/rust/neqo-common/src/event.rs
@@ -4,8 +4,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

-use std::iter::Iterator;
-use std::marker::PhantomData;
+use std::{iter::Iterator, marker::PhantomData};

/// An event provider is able to generate a stream of events.
pub trait Provider {

third_party/rust/neqo-common/src/hrtime.rs
@@ -27,12 +27,12 @@ impl Period {
    const MIN: Period = Period(1);

    #[cfg(windows)]
-    fn as_uint(&self) -> UINT {
+    fn as_uint(self) -> UINT {
        UINT::from(self.0)
    }

    #[cfg(target_os = "macos")]
-    fn scaled(&self, scale: f64) -> f64 {
+    fn scaled(self, scale: f64) -> f64 {
        scale * f64::from(self.0)
    }
}
@@ -126,6 +126,7 @@ mod mac {
    }

    const THREAD_TIME_CONSTRAINT_POLICY: thread_policy_flavor_t = 2;
+    #[allow(clippy::cast_possible_truncation)]
    const THREAD_TIME_CONSTRAINT_POLICY_COUNT: mach_msg_type_number_t =
        (size_of::<thread_time_constraint_policy>() / size_of::<integer_t>())
            as mach_msg_type_number_t;
@@ -163,7 +164,7 @@ mod mac {
            thread_policy_set(
                pthread_mach_thread_np(pthread_self()),
                THREAD_TIME_CONSTRAINT_POLICY,
-                addr_of_mut!(policy) as _, // horror!
+                addr_of_mut!(policy).cast(), // horror!
                THREAD_TIME_CONSTRAINT_POLICY_COUNT,
            )
        };
@@ -180,6 +181,7 @@ mod mac {

    /// Create a realtime policy and set it.
    pub fn set_realtime(base: f64) {
+        #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
        let policy = thread_time_constraint_policy {
            period: base as u32, // Base interval
            computation: (base * 0.5) as u32,
@@ -198,7 +200,7 @@ mod mac {
            thread_policy_get(
                pthread_mach_thread_np(pthread_self()),
                THREAD_TIME_CONSTRAINT_POLICY,
-                addr_of_mut!(policy) as _, // horror!
+                addr_of_mut!(policy).cast(), // horror!
                &mut count,
                &mut get_default,
            )
@@ -292,14 +294,14 @@ impl Time {
            if let Some(p) = self.active {
                mac::set_realtime(p.scaled(self.scale));
            } else {
-                mac::set_thread_policy(self.deflt.clone());
+                mac::set_thread_policy(self.deflt);
            }
        }

        #[cfg(windows)]
        {
            if let Some(p) = self.active {
-                assert_eq!(0, unsafe { timeBeginPeriod(p.as_uint()) });
+                _ = unsafe { timeBeginPeriod(p.as_uint()) };
            }
        }
    }
@@ -309,7 +311,7 @@ impl Time {
        #[cfg(windows)]
        {
            if let Some(p) = self.active {
-                assert_eq!(0, unsafe { timeEndPeriod(p.as_uint()) });
+                _ = unsafe { timeEndPeriod(p.as_uint()) };
            }
        }
    }
@@ -370,14 +372,20 @@ impl Drop for Time {
    }
}

-#[cfg(test)]
+// Only run these tests in CI on platforms other than MacOS and Windows, where the timer
+// inaccuracies are too high to pass the tests.
+#[cfg(all(
+    test,
+    not(all(any(target_os = "macos", target_os = "windows"), feature = "ci"))
+))]
mod test {
-    use super::Time;
    use std::{
        thread::{sleep, spawn},
        time::{Duration, Instant},
    };

+    use super::Time;
+
    const ONE: Duration = Duration::from_millis(1);
    const ONE_AND_A_BIT: Duration = Duration::from_micros(1500);
    /// A limit for when high resolution timers are disabled.

third_party/rust/neqo-common/src/incrdecoder.rs
@@ -21,7 +21,9 @@ impl IncrementalDecoderUint {
    }

    /// Consume some data.
+    ///
    /// # Panics
+    ///
    /// Never, but this is not something the compiler can tell.
    pub fn consume(&mut self, dv: &mut Decoder) -> Option<u64> {
        if let Some(r) = &mut self.remaining {
@@ -87,7 +89,9 @@ impl IncrementalDecoderBuffer {
    }

    /// Consume some bytes from the decoder.
+    ///
    /// # Panics
+    ///
    /// Never; but rust doesn't know that.
    pub fn consume(&mut self, dv: &mut Decoder) -> Option<Vec<u8>> {
        let amount = min(self.remaining, dv.remaining());
@@ -109,7 +113,9 @@ pub struct IncrementalDecoderIgnore {

impl IncrementalDecoderIgnore {
    /// Make a new ignoring decoder.
+    ///
    /// # Panics
+    ///
    /// If the amount to ignore is zero.
    #[must_use]
    pub fn new(n: usize) -> Self {

third_party/rust/neqo-common/src/lib.rs
@@ -16,16 +16,20 @@ mod incrdecoder;
pub mod log;
pub mod qlog;
pub mod timer;
+pub mod tos;
-pub use self::codec::{Decoder, Encoder};
-pub use self::datagram::Datagram;
-pub use self::header::Header;
-pub use self::incrdecoder::{
-    IncrementalDecoderBuffer, IncrementalDecoderIgnore, IncrementalDecoderUint,
-};

use std::fmt::Write;

+use enum_map::Enum;
+
+pub use self::{
+    codec::{Decoder, Encoder},
+    datagram::Datagram,
+    header::Header,
+    incrdecoder::{IncrementalDecoderBuffer, IncrementalDecoderIgnore, IncrementalDecoderUint},
+    tos::{IpTos, IpTosDscp, IpTosEcn},
+};
+
#[must_use]
pub fn hex(buf: impl AsRef<[u8]>) -> String {
    let mut ret = String::with_capacity(buf.as_ref().len() * 2);
@@ -75,7 +79,7 @@ pub const fn const_min(a: usize, b: usize) -> usize {
    [a, b][(a >= b) as usize]
}

-#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+#[derive(Debug, PartialEq, Eq, Copy, Clone, Enum)]
/// Client or Server.
pub enum Role {
    Client,
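
The `Enum` derive added to `Role` above is what lets callers key an `enum_map::EnumMap` by role. An editor-added sketch of that pattern (assuming the `Server` variant alongside the `Client` one shown, and the standard `EnumMap` indexing API; the packet counter itself is hypothetical):

use enum_map::EnumMap;
use neqo_common::Role;

fn main() {
    // One counter per role, backed by a fixed-size array rather than a hash map.
    let mut packets: EnumMap<Role, usize> = EnumMap::default();
    packets[Role::Client] += 1;
    packets[Role::Server] += 2;
    assert_eq!(packets[Role::Client] + packets[Role::Server], 3);
}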

third_party/rust/neqo-common/src/log.rs
@@ -6,11 +6,10 @@

#![allow(clippy::module_name_repetitions)]

+use std::{io::Write, sync::Once, time::Instant};
+
use env_logger::Builder;
use lazy_static::lazy_static;
-use std::io::Write;
-use std::sync::Once;
-use std::time::Instant;

#[macro_export]
macro_rules! do_log {

third_party/rust/neqo-common/src/qlog.rs
@@ -31,6 +31,7 @@ pub struct NeqoQlogShared {

impl NeqoQlog {
    /// Create an enabled `NeqoQlog` configuration.
+    ///
    /// # Errors
    ///
    /// Will return `qlog::Error` if cannot write to the new log.
@@ -48,6 +49,11 @@ impl NeqoQlog {
        })
    }

+    #[must_use]
+    pub fn inner(&self) -> Rc<RefCell<Option<NeqoQlogShared>>> {
+        Rc::clone(&self.inner)
+    }
+
    /// Create a disabled `NeqoQlog` configuration.
    #[must_use]
    pub fn disabled() -> Self {
@@ -144,3 +150,39 @@ pub fn new_trace(role: Role) -> qlog::TraceSeq {
        }),
    }
}
+
+#[cfg(test)]
+mod test {
+    use qlog::events::Event;
+    use test_fixture::EXPECTED_LOG_HEADER;
+
+    const EV_DATA: qlog::events::EventData =
+        qlog::events::EventData::SpinBitUpdated(qlog::events::connectivity::SpinBitUpdated {
+            state: true,
+        });
+
+    const EXPECTED_LOG_EVENT: &str = concat!(
+        "\u{1e}",
+        r#"{"time":0.0,"name":"connectivity:spin_bit_updated","data":{"state":true}}"#,
+        "\n"
+    );
+
+    #[test]
+    fn new_neqo_qlog() {
+        let (_log, contents) = test_fixture::new_neqo_qlog();
+        assert_eq!(contents.to_string(), EXPECTED_LOG_HEADER);
+    }
+
+    #[test]
+    fn add_event() {
+        let (mut log, contents) = test_fixture::new_neqo_qlog();
+        log.add_event(|| Some(Event::with_time(1.1, EV_DATA)));
+        assert_eq!(
+            contents.to_string(),
+            format!(
+                "{EXPECTED_LOG_HEADER}{e}",
+                e = EXPECTED_LOG_EVENT.replace("\"time\":0.0,", "\"time\":1.1,")
+            )
+        );
+    }
+}
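
An editor-added sketch of the accessor introduced above: a disabled `NeqoQlog` carries no shared writer, so `inner()` hands back an `Rc` whose contents are `None`. Only the `disabled()` and `inner()` signatures shown in these hunks are assumed:

use neqo_common::qlog::NeqoQlog;

fn main() {
    // A disabled log ignores events; its shared inner state stays empty.
    let log = NeqoQlog::disabled();
    assert!(log.inner().borrow().is_none());
}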

third_party/rust/neqo-common/src/timer.rs
@@ -4,9 +4,11 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

-use std::convert::TryFrom;
-use std::mem;
-use std::time::{Duration, Instant};
+use std::{
+    convert::TryFrom,
+    mem,
+    time::{Duration, Instant},
+};

/// Internal structure for a timer item.
struct TimerItem<T> {
@@ -21,10 +23,10 @@ impl<T> TimerItem<T> {
}

/// A timer queue.
-/// This uses a classic timer wheel arrangement, with some characteristics that might be considered peculiar.
-/// Each slot in the wheel is sorted (complexity O(N) insertions, but O(logN) to find cut points).
-/// Time is relative, the wheel has an origin time and it is unable to represent times that are more than
-/// `granularity * capacity` past that time.
+/// This uses a classic timer wheel arrangement, with some characteristics that might be considered
+/// peculiar. Each slot in the wheel is sorted (complexity O(N) insertions, but O(logN) to find cut
+/// points). Time is relative, the wheel has an origin time and it is unable to represent times that
+/// are more than `granularity * capacity` past that time.
pub struct Timer<T> {
    items: Vec<Vec<TimerItem<T>>>,
    now: Instant,
@@ -34,7 +36,9 @@ pub struct Timer<T> {

impl<T> Timer<T> {
    /// Construct a new wheel at the given granularity, starting at the given time.
+    ///
    /// # Panics
+    ///
    /// When `capacity` is too large to fit in `u32` or `granularity` is zero.
    pub fn new(now: Instant, granularity: Duration, capacity: usize) -> Self {
        assert!(u32::try_from(capacity).is_ok());
@@ -109,7 +113,9 @@ impl<T> Timer<T> {
    }

    /// Asserts if the time given is in the past or too far in the future.
+    ///
    /// # Panics
+    ///
    /// When `time` is in the past relative to previous calls.
    pub fn add(&mut self, time: Instant, item: T) {
        assert!(time >= self.now);
@@ -241,9 +247,10 @@ impl<T> Timer<T> {

#[cfg(test)]
mod test {
-    use super::{Duration, Instant, Timer};
    use lazy_static::lazy_static;

+    use super::{Duration, Instant, Timer};
+
    lazy_static! {
        static ref NOW: Instant = Instant::now();
    }
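
As an editor-added illustration of the constructor documented above (not part of the commit): the wheel is created from an origin time, a slot granularity, and a slot count, which together bound how far ahead an item can be scheduled. The granularity, capacity, and item type here are arbitrary:

use std::time::{Duration, Instant};

use neqo_common::timer::Timer;

fn main() {
    let now = Instant::now();
    // 16 ms slots and 1024 of them: items may be scheduled up to ~16.4 s past `now`.
    let mut timer: Timer<&str> = Timer::new(now, Duration::from_millis(16), 1024);
    timer.add(now + Duration::from_millis(500), "idle-timeout");
}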

third_party/rust/neqo-common/src/tos.rs (new file)
@@ -0,0 +1,290 @@
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::fmt::Debug;

use enum_map::Enum;

/// ECN (Explicit Congestion Notification) codepoints mapped to the
/// lower 2 bits of the TOS field.
/// <https://www.iana.org/assignments/dscp-registry/dscp-registry.xhtml>
#[derive(Copy, Clone, PartialEq, Eq, Enum, Default, Debug)]
#[repr(u8)]
pub enum IpTosEcn {
    #[default]
    /// Not-ECT, Not ECN-Capable Transport, RFC3168
    NotEct = 0b00,

    /// ECT(1), ECN-Capable Transport(1), RFC8311 and RFC9331
    Ect1 = 0b01,

    /// ECT(0), ECN-Capable Transport(0), RFC3168
    Ect0 = 0b10,

    /// CE, Congestion Experienced, RFC3168
    Ce = 0b11,
}

impl From<IpTosEcn> for u8 {
    fn from(v: IpTosEcn) -> Self {
        v as u8
    }
}

impl From<u8> for IpTosEcn {
    fn from(v: u8) -> Self {
        match v & 0b11 {
            0b00 => IpTosEcn::NotEct,
            0b01 => IpTosEcn::Ect1,
            0b10 => IpTosEcn::Ect0,
            0b11 => IpTosEcn::Ce,
            _ => unreachable!(),
        }
    }
}

/// Diffserv Codepoints, mapped to the upper six bits of the TOS field.
/// <https://www.iana.org/assignments/dscp-registry/dscp-registry.xhtml>
#[derive(Copy, Clone, PartialEq, Eq, Enum, Default, Debug)]
#[repr(u8)]
pub enum IpTosDscp {
    #[default]
    /// Class Selector 0, RFC2474
    Cs0 = 0b0000_0000,

    /// Class Selector 1, RFC2474
    Cs1 = 0b0010_0000,

    /// Class Selector 2, RFC2474
    Cs2 = 0b0100_0000,

    /// Class Selector 3, RFC2474
    Cs3 = 0b0110_0000,

    /// Class Selector 4, RFC2474
    Cs4 = 0b1000_0000,

    /// Class Selector 5, RFC2474
    Cs5 = 0b1010_0000,

    /// Class Selector 6, RFC2474
    Cs6 = 0b1100_0000,

    /// Class Selector 7, RFC2474
    Cs7 = 0b1110_0000,

    /// Assured Forwarding 11, RFC2597
    Af11 = 0b0010_1000,

    /// Assured Forwarding 12, RFC2597
    Af12 = 0b0011_0000,

    /// Assured Forwarding 13, RFC2597
    Af13 = 0b0011_1000,

    /// Assured Forwarding 21, RFC2597
    Af21 = 0b0100_1000,

    /// Assured Forwarding 22, RFC2597
    Af22 = 0b0101_0000,

    /// Assured Forwarding 23, RFC2597
    Af23 = 0b0101_1000,

    /// Assured Forwarding 31, RFC2597
    Af31 = 0b0110_1000,

    /// Assured Forwarding 32, RFC2597
    Af32 = 0b0111_0000,

    /// Assured Forwarding 33, RFC2597
    Af33 = 0b0111_1000,

    /// Assured Forwarding 41, RFC2597
    Af41 = 0b1000_1000,

    /// Assured Forwarding 42, RFC2597
    Af42 = 0b1001_0000,

    /// Assured Forwarding 43, RFC2597
    Af43 = 0b1001_1000,

    /// Expedited Forwarding, RFC3246
    Ef = 0b1011_1000,

    /// Capacity-Admitted Traffic, RFC5865
    VoiceAdmit = 0b1011_0000,

    /// Lower-Effort, RFC8622
    Le = 0b0000_0100,
}

impl From<IpTosDscp> for u8 {
    fn from(v: IpTosDscp) -> Self {
        v as u8
    }
}

impl From<u8> for IpTosDscp {
    fn from(v: u8) -> Self {
        match v & 0b1111_1100 {
            0b0000_0000 => IpTosDscp::Cs0,
            0b0010_0000 => IpTosDscp::Cs1,
            0b0100_0000 => IpTosDscp::Cs2,
            0b0110_0000 => IpTosDscp::Cs3,
            0b1000_0000 => IpTosDscp::Cs4,
            0b1010_0000 => IpTosDscp::Cs5,
            0b1100_0000 => IpTosDscp::Cs6,
            0b1110_0000 => IpTosDscp::Cs7,
            0b0010_1000 => IpTosDscp::Af11,
            0b0011_0000 => IpTosDscp::Af12,
            0b0011_1000 => IpTosDscp::Af13,
            0b0100_1000 => IpTosDscp::Af21,
            0b0101_0000 => IpTosDscp::Af22,
            0b0101_1000 => IpTosDscp::Af23,
            0b0110_1000 => IpTosDscp::Af31,
            0b0111_0000 => IpTosDscp::Af32,
            0b0111_1000 => IpTosDscp::Af33,
            0b1000_1000 => IpTosDscp::Af41,
            0b1001_0000 => IpTosDscp::Af42,
            0b1001_1000 => IpTosDscp::Af43,
            0b1011_1000 => IpTosDscp::Ef,
            0b1011_0000 => IpTosDscp::VoiceAdmit,
            0b0000_0100 => IpTosDscp::Le,
            _ => unreachable!(),
        }
    }
}

/// The type-of-service field in an IP packet.
#[allow(clippy::module_name_repetitions)]
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct IpTos(u8);

impl From<IpTosEcn> for IpTos {
    fn from(v: IpTosEcn) -> Self {
        Self(u8::from(v))
    }
}
impl From<IpTosDscp> for IpTos {
    fn from(v: IpTosDscp) -> Self {
        Self(u8::from(v))
    }
}
impl From<(IpTosDscp, IpTosEcn)> for IpTos {
    fn from(v: (IpTosDscp, IpTosEcn)) -> Self {
        Self(u8::from(v.0) | u8::from(v.1))
    }
}
impl From<IpTos> for u8 {
    fn from(v: IpTos) -> Self {
        v.0
    }
}

impl Debug for IpTos {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_tuple("IpTos")
            .field(&IpTosDscp::from(self.0 & 0xfc))
            .field(&IpTosEcn::from(self.0 & 0x3))
            .finish()
    }
}

impl Default for IpTos {
    fn default() -> Self {
        (IpTosDscp::default(), IpTosEcn::default()).into()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn iptosecn_into_u8() {
        assert_eq!(u8::from(IpTosEcn::NotEct), 0b00);
        assert_eq!(u8::from(IpTosEcn::Ect1), 0b01);
        assert_eq!(u8::from(IpTosEcn::Ect0), 0b10);
        assert_eq!(u8::from(IpTosEcn::Ce), 0b11);
    }

    #[test]
    fn u8_into_iptosecn() {
        assert_eq!(IpTosEcn::from(0b00), IpTosEcn::NotEct);
        assert_eq!(IpTosEcn::from(0b01), IpTosEcn::Ect1);
        assert_eq!(IpTosEcn::from(0b10), IpTosEcn::Ect0);
        assert_eq!(IpTosEcn::from(0b11), IpTosEcn::Ce);
    }

    #[test]
    fn iptosdscp_into_u8() {
        assert_eq!(u8::from(IpTosDscp::Cs0), 0b0000_0000);
        assert_eq!(u8::from(IpTosDscp::Cs1), 0b0010_0000);
        assert_eq!(u8::from(IpTosDscp::Cs2), 0b0100_0000);
        assert_eq!(u8::from(IpTosDscp::Cs3), 0b0110_0000);
        assert_eq!(u8::from(IpTosDscp::Cs4), 0b1000_0000);
        assert_eq!(u8::from(IpTosDscp::Cs5), 0b1010_0000);
        assert_eq!(u8::from(IpTosDscp::Cs6), 0b1100_0000);
        assert_eq!(u8::from(IpTosDscp::Cs7), 0b1110_0000);
        assert_eq!(u8::from(IpTosDscp::Af11), 0b0010_1000);
        assert_eq!(u8::from(IpTosDscp::Af12), 0b0011_0000);
        assert_eq!(u8::from(IpTosDscp::Af13), 0b0011_1000);
        assert_eq!(u8::from(IpTosDscp::Af21), 0b0100_1000);
        assert_eq!(u8::from(IpTosDscp::Af22), 0b0101_0000);
        assert_eq!(u8::from(IpTosDscp::Af23), 0b0101_1000);
        assert_eq!(u8::from(IpTosDscp::Af31), 0b0110_1000);
        assert_eq!(u8::from(IpTosDscp::Af32), 0b0111_0000);
        assert_eq!(u8::from(IpTosDscp::Af33), 0b0111_1000);
        assert_eq!(u8::from(IpTosDscp::Af41), 0b1000_1000);
        assert_eq!(u8::from(IpTosDscp::Af42), 0b1001_0000);
        assert_eq!(u8::from(IpTosDscp::Af43), 0b1001_1000);
        assert_eq!(u8::from(IpTosDscp::Ef), 0b1011_1000);
        assert_eq!(u8::from(IpTosDscp::VoiceAdmit), 0b1011_0000);
        assert_eq!(u8::from(IpTosDscp::Le), 0b0000_0100);
    }

    #[test]
    fn u8_into_iptosdscp() {
        assert_eq!(IpTosDscp::from(0b0000_0000), IpTosDscp::Cs0);
        assert_eq!(IpTosDscp::from(0b0010_0000), IpTosDscp::Cs1);
        assert_eq!(IpTosDscp::from(0b0100_0000), IpTosDscp::Cs2);
        assert_eq!(IpTosDscp::from(0b0110_0000), IpTosDscp::Cs3);
        assert_eq!(IpTosDscp::from(0b1000_0000), IpTosDscp::Cs4);
        assert_eq!(IpTosDscp::from(0b1010_0000), IpTosDscp::Cs5);
        assert_eq!(IpTosDscp::from(0b1100_0000), IpTosDscp::Cs6);
        assert_eq!(IpTosDscp::from(0b1110_0000), IpTosDscp::Cs7);
        assert_eq!(IpTosDscp::from(0b0010_1000), IpTosDscp::Af11);
        assert_eq!(IpTosDscp::from(0b0011_0000), IpTosDscp::Af12);
        assert_eq!(IpTosDscp::from(0b0011_1000), IpTosDscp::Af13);
        assert_eq!(IpTosDscp::from(0b0100_1000), IpTosDscp::Af21);
        assert_eq!(IpTosDscp::from(0b0101_0000), IpTosDscp::Af22);
        assert_eq!(IpTosDscp::from(0b0101_1000), IpTosDscp::Af23);
        assert_eq!(IpTosDscp::from(0b0110_1000), IpTosDscp::Af31);
        assert_eq!(IpTosDscp::from(0b0111_0000), IpTosDscp::Af32);
        assert_eq!(IpTosDscp::from(0b0111_1000), IpTosDscp::Af33);
        assert_eq!(IpTosDscp::from(0b1000_1000), IpTosDscp::Af41);
        assert_eq!(IpTosDscp::from(0b1001_0000), IpTosDscp::Af42);
        assert_eq!(IpTosDscp::from(0b1001_1000), IpTosDscp::Af43);
        assert_eq!(IpTosDscp::from(0b1011_1000), IpTosDscp::Ef);
        assert_eq!(IpTosDscp::from(0b1011_0000), IpTosDscp::VoiceAdmit);
        assert_eq!(IpTosDscp::from(0b0000_0100), IpTosDscp::Le);
    }

    #[test]
    fn iptosecn_into_iptos() {
        let ecn = IpTosEcn::default();
        let iptos_ecn: IpTos = ecn.into();
        assert_eq!(u8::from(iptos_ecn), ecn as u8);
    }

    #[test]
    fn iptosdscp_into_iptos() {
        let dscp = IpTosDscp::default();
        let iptos_dscp: IpTos = dscp.into();
        assert_eq!(u8::from(iptos_dscp), dscp as u8);
    }
}
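
An editor-added sketch of how the pieces of this new module compose: the TOS byte is the DSCP bits OR'd with the ECN bits, and the `From` impls above split it back apart. Only conversions and variants defined in this file are used:

use neqo_common::{IpTos, IpTosDscp, IpTosEcn};

fn main() {
    // DSCP occupies the upper six bits of the byte, ECN the lower two.
    let tos = IpTos::from((IpTosDscp::Af41, IpTosEcn::Ect0));
    assert_eq!(u8::from(tos), 0b1000_1000 | 0b10);
    // The byte splits back into its two halves.
    assert_eq!(IpTosDscp::from(u8::from(tos)), IpTosDscp::Af41);
    assert_eq!(IpTosEcn::from(u8::from(tos)), IpTosEcn::Ect0);
    println!("{tos:?}"); // e.g. IpTos(Af41, Ect0)
}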

third_party/rust/neqo-crypto/.cargo-checksum.json
@@ -1 +1 @@
{"files":{"Cargo.toml":"efe17a5183bd81198a0cee488fde3f73b0aabc79b9275018805e67bb9de50f4a","TODO":"ac0f1c2ebcca03f5b3c0cc56c5aedbb030a4b511e438bc07a57361c789f91e9f","bindings/bindings.toml":"b1a04fda8f317d1cfb7dfdbe87f2887a825c054c61f4858d49f66b0429d8e27c","bindings/mozpkix.hpp":"77072c8bb0f6eb6bfe8cbadc111dcd92e0c79936d13f2e501aae1e5d289a6675","bindings/nspr_err.h":"2d5205d017b536c2d838bcf9bc4ec79f96dd50e7bb9b73892328781f1ee6629d","bindings/nspr_error.h":"e41c03c77b8c22046f8618832c9569fbcc7b26d8b9bbc35eea7168f35e346889","bindings/nspr_io.h":"085b289849ef0e77f88512a27b4d9bdc28252bd4d39c6a17303204e46ef45f72","bindings/nspr_time.h":"2e637fd338a5cf0fd3fb0070a47f474a34c2a7f4447f31b6875f5a9928d0a261","bindings/nss_ciphers.h":"95ec6344a607558b3c5ba8510f463b6295f3a2fb3f538a01410531045a5f62d1","bindings/nss_init.h":"ef49045063782fb612aff459172cc6a89340f15005808608ade5320ca9974310","bindings/nss_p11.h":"0b81e64fe6db49b2ecff94edd850be111ef99ec11220e88ceb1c67be90143a78","bindings/nss_secerr.h":"713e8368bdae5159af7893cfa517dabfe5103cede051dee9c9557c850a2defc6","bindings/nss_ssl.h":"af222fb957b989e392e762fa2125c82608a0053aff4fb97e556691646c88c335","bindings/nss_sslerr.h":"24b97f092183d8486f774cdaef5030d0249221c78343570d83a4ee5b594210ae","bindings/nss_sslopt.h":"b7807eb7abdad14db6ad7bc51048a46b065a0ea65a4508c95a12ce90e59d1eea","build.rs":"973971b2e1acd73efdb0166d5a4b07aa6ce1cf74fd02dc66d69fd2a74e521e48","src/aead.rs":"fd4c6669ad978cc4f7027b8cfac1228e84db288b294fdb0558e730bc1515136d","src/aead_fuzzing.rs":"20de62c7b23a048030a4ac7c7ef7ce53e31289f375a24ccb7e5c36c3625a62c1","src/agent.rs":"3b189ab5150f54e0416b60c95904456cdcf379f4eb564feb7c2c7bc2b8729cad","src/agentio.rs":"bce4c3dfcfa433209a409ac0c0752f8c95ab37bb6239a42f99b83858e8747bd1","src/auth.rs":"ced1a18f691894984244088020ea25dc1ee678603317f0c7dfc8b8842fa750b4","src/cert.rs":"2a353b3230e3bd0c9ff91620a2e43c1dcaf493b1ff7997e6b51f8af589b925c1","src/constants.rs":"998e77bee88197a240032c1bfbddcff417a25ba82e576a0d2fe18ee9b63cefc7","src/ech.rs":"f67072f51dc52f7444c1983ba404b3db7366d84e9a798421d978ab244b6b57e9","src/err.rs":"8b2156f280c8823b2f9767bdd98fcf5c20ddc2e650301be005d1b96e15256f4a","src/exp.rs":"cec59d61fc95914f9703d2fb6490a8507af993c9db710dde894f2f8fd38123c7","src/ext.rs":"4b3bc6a9eb8f4d466e627281902f2385549531ea627a76d9b8d223f212c04284","src/hkdf.rs":"a53a414bc53dd080a9cb716e604aa73b81df80a8ef4f7406d54264e078e8a867","src/hp.rs":"7f98ef9b79262a3628f0bfdc6acd33d3c0ef4c20931b62c46ec0adcf2a337497","src/lib.rs":"59fbfc50f27addbdeac0dfd3e34f652eb51e2a3056ce11c587ff0da35b453843","src/once.rs":"b9850384899a1a016e839743d3489c0d4d916e1973746ef8c89872105d7d9736","src/p11.rs":"c2f70d8772c816761fb089000f1249af25754c7faf3c02dfd9e18bbfcbc6cbf6","src/prio.rs":"e5e169296c0ac69919c59fb6c1f8bd6bf079452eaa13d75da0edd41d435d3f6f","src/replay.rs":"ad019f543c36d217f7e4ec23cd46117c17aaca75d56147bfc80c286f208659d2","src/result.rs":"0587cbb6aace71a7f9765ef7c01dcd9f73a49dcc6331e1d8fe4de2aef6ca65b6","src/secrets.rs":"076b72410af797e48c528287159181acf30d5cf88c1213ad539d3446aecf7388","src/selfencrypt.rs":"6f39a62424048790e9a0b4660ed15644f09927b926b1dfa6d5e4695921d2e082","src/ssl.rs":"b16a5f154353a1cac1b5f887f25b8f934d60f9847e1e8e0e87ddaf980b64f530","src/time.rs":"88647eb712825cacf5b551e2d26abcad57b343fe3fbbaa3a94de75955247083c","tests/aead.rs":"5ee882f316c49b0217273583cb7c12396da444d7cdb60e354b40aac38fc70a63","tests/agent.rs":"101a905a1168d139d9b0a5598036eaf3e7b2c110644e01200a6a7ba0a65aa51f","tests/ext.rs":"6ad47ca33c55aa7e775b6dd4bab55144d17ac0aba3b8fdbc203ece6f11851a82","tests/ha
ndshake.rs":"d5d542203d9f322e39391fe2748b2de38ee11da1b5577cd81b179a8613e424a7","tests/hkdf.rs":"47830c1ea58a02d100522bdde6fabc02bb447ccb85affa0cdc44bc25da1be32a","tests/hp.rs":"ec8b38b5421e52bee18da3417e0e52afe270dbe2d365ba8c3956f878ea7aa2d2","tests/init.rs":"fc9e392b1efa0d8efb28952f73ffc05e5348e7b2b69207b60e375c3888a252a2","tests/selfencrypt.rs":"1125c858ec4e0a6994f34d162aa066cb003c61b324f268529ea04bcb641347cb"},"package":null}
{"files":{"Cargo.toml":"7f7348b55033e19bbe51b07ee50313c87237fe09b56b338af9ab24e00aab32c6","bindings/bindings.toml":"0660c1661318b8a5094834c2f1bb12266287ef467307f66947eff7762528f70a","bindings/mozpkix.hpp":"77072c8bb0f6eb6bfe8cbadc111dcd92e0c79936d13f2e501aae1e5d289a6675","bindings/nspr_err.h":"2d5205d017b536c2d838bcf9bc4ec79f96dd50e7bb9b73892328781f1ee6629d","bindings/nspr_error.h":"e41c03c77b8c22046f8618832c9569fbcc7b26d8b9bbc35eea7168f35e346889","bindings/nspr_io.h":"085b289849ef0e77f88512a27b4d9bdc28252bd4d39c6a17303204e46ef45f72","bindings/nspr_time.h":"2e637fd338a5cf0fd3fb0070a47f474a34c2a7f4447f31b6875f5a9928d0a261","bindings/nss_ciphers.h":"95ec6344a607558b3c5ba8510f463b6295f3a2fb3f538a01410531045a5f62d1","bindings/nss_init.h":"ef49045063782fb612aff459172cc6a89340f15005808608ade5320ca9974310","bindings/nss_p11.h":"0b81e64fe6db49b2ecff94edd850be111ef99ec11220e88ceb1c67be90143a78","bindings/nss_secerr.h":"713e8368bdae5159af7893cfa517dabfe5103cede051dee9c9557c850a2defc6","bindings/nss_ssl.h":"af222fb957b989e392e762fa2125c82608a0053aff4fb97e556691646c88c335","bindings/nss_sslerr.h":"24b97f092183d8486f774cdaef5030d0249221c78343570d83a4ee5b594210ae","bindings/nss_sslopt.h":"b7807eb7abdad14db6ad7bc51048a46b065a0ea65a4508c95a12ce90e59d1eea","build.rs":"e712c16cb830a83eb4ea1f50dd341a4c30e1cce95d8c45af97030bc8ad0ae829","src/aead.rs":"b7cda4b89298cfd122cd2e1e94c462840e966c60f4832eb441106563ac332e00","src/aead_fuzzing.rs":"c3e590572314e0bb3fafa13dac3c831358b8a7b5570fe9cfe592752fce8cbdee","src/agent.rs":"c4fe47f9f5b0af20e3418da2e2ddce0ac2ca9665c0502115904f66a554e486ee","src/agentio.rs":"847ac63f6406e33bf20a861cadbfe6301ffa15bd73a5291298ffa93511b87dd5","src/auth.rs":"ced1a18f691894984244088020ea25dc1ee678603317f0c7dfc8b8842fa750b4","src/cert.rs":"6fc09012f994300ff4a7951bf8981aa266220521f58b8ff0989fee6dc1f27df9","src/constants.rs":"f22bf16bd8cb539862cb1e47138dbba79e93fe738f4b907e465891326f98883c","src/ech.rs":"58b7e0a1d2d52c59889cf8b735902577f7c3df93dfb89c72af2646b7aef29f39","src/err.rs":"fca0222167883231a5e0a569a593f44214501819adf5aadf814be27891c87c24","src/exp.rs":"cec59d61fc95914f9703d2fb6490a8507af993c9db710dde894f2f8fd38123c7","src/ext.rs":"c6ab9aefbbca531466dea938d853b1e42ed51816238afe400b20dbdb0111690b","src/hkdf.rs":"8e6cc5dce0f36efa4e13f5a24e2879bdbf10fb9a2b7dc8f13692e47d8959cdc8","src/hp.rs":"62ec073d99cf8bf3a123838c7d9b51bfdf68887148961f6307288e8dd56ac711","src/lib.rs":"40d9ac97c307c8161c2bf48156cc82377f81ad6e709f99cfd7dc0131dc192f86","src/once.rs":"b9850384899a1a016e839743d3489c0d4d916e1973746ef8c89872105d7d9736","src/p11.rs":"6c0f2f1b18e9bf9088a5ca5bdc99e789bb42234f7d2fe24d0b463bc957cb84a2","src/prio.rs":"e5e169296c0ac69919c59fb6c1f8bd6bf079452eaa13d75da0edd41d435d3f6f","src/replay.rs":"1ff4a12f6135ef2c42aef2b0947e26fd6241cd4b359020245608046452a7fcb0","src/result.rs":"0587cbb6aace71a7f9765ef7c01dcd9f73a49dcc6331e1d8fe4de2aef6ca65b6","src/secrets.rs":"4ffaa66f25df47dadf042063bff5953effa7bf2f4920cafe827757d6a659cb58","src/selfencrypt.rs":"4d2f4a6ea0fc94502130413ab5e2ea82612228f38a96a1865bf7d2b3f440620e","src/ssl.rs":"c83baa5518b81dd06f2e4072ea3c2d666ccdeb8b1ff6e3746eea9f1af47023a6","src/time.rs":"9204f3a384fb9dd2c3816c88666ad61ac3538f9e2f028954e81fd335a1479070","tests/aead.rs":"efdb92a060ca1957d890da1604513369559cb43195ee54149ed3ab47958dad59","tests/agent.rs":"0e55354595ae5f0e1ab83731564da57ba88a296e00692147c47df7067a0f416a","tests/ext.rs":"54657b45bd86d2561bb0f548736bc6f141bb664a5b043506f428422919ab95d4","tests/handshake.rs":"40701bc22f16d1ba9b9bd9683738e52b96faafee4119f7057437dae705f78
67a","tests/hkdf.rs":"4160978b96505c1f1b7d6c4b5f43536ff7bd791c8746f9546c9fbc0fce5cf1c7","tests/hp.rs":"8eeee21a439e0f991145dff07b01283ae39ccd4b8dac4d011d43a464f73db670","tests/init.rs":"fc9e392b1efa0d8efb28952f73ffc05e5348e7b2b69207b60e375c3888a252a2","tests/selfencrypt.rs":"6edd0914b8466d79ecfb569c6d86995fd364b0dc71be2a0554e82f736ebd6b7c"},"package":null}

third_party/rust/neqo-crypto/Cargo.toml
@@ -11,15 +11,15 @@
[package]
edition = "2018"
-rust-version = "1.65.0"
+rust-version = "1.70.0"
name = "neqo-crypto"
-version = "0.6.8"
+version = "0.7.0"
authors = ["Martin Thomson <mt@lowentropy.net>"]
build = "build.rs"
license = "MIT OR Apache-2.0"

[dependencies.log]
-version = "0.4.0"
+version = "~0.4.17"
default-features = false

[dependencies.neqo-common]
@@ -29,12 +29,12 @@ path = "../neqo-common"
path = "../test-fixture"

[build-dependencies]
-serde = "1.0"
-serde_derive = "1.0"
-toml = "0.5"
+serde = "1.0.195"
+serde_derive = "1.0.195"
+toml = "0.5.11"

[build-dependencies.bindgen]
-version = "0.64"
+version = "0.69.1"
features = ["runtime"]
default-features = false

third_party/rust/neqo-crypto/TODO (deleted)
@@ -1,4 +0,0 @@
-early data - API in place for inspection, but depends on resumption
-handle panics more gracefully for extension handlers
-client certificates
-read/write - probably never

third_party/rust/neqo-crypto/bindings/bindings.toml
@@ -49,6 +49,7 @@ functions = [
    "SSL_PeerSignedCertTimestamps",
    "SSL_PeerStapledOCSPResponses",
    "SSL_ResetHandshake",
+    "SSL_SendAdditionalKeyShares",
    "SSL_SetNextProtoNego",
    "SSL_SetURL",
    "SSL_VersionRangeSet",

third_party/rust/neqo-crypto/build.rs
@@ -7,13 +7,15 @@
#![cfg_attr(feature = "deny-warnings", deny(warnings))]
#![warn(clippy::pedantic)]

+use std::{
+    collections::HashMap,
+    env, fs,
+    path::{Path, PathBuf},
+    process::Command,
+};
+
use bindgen::Builder;
use serde_derive::Deserialize;
-use std::collections::HashMap;
-use std::env;
-use std::fs;
-use std::path::{Path, PathBuf};
-use std::process::Command;

const BINDINGS_DIR: &str = "bindings";
const BINDINGS_CONFIG: &str = "bindings.toml";
@@ -60,7 +62,8 @@ fn is_debug() -> bool {
// On windows, this doesn't just work, you have to set LIBCLANG_PATH.
// Rather than download the 400Mb+ files, like gecko does, let's just reuse their work.
fn setup_clang() {
-    if env::consts::OS != "windows" {
+    // If this isn't Windows, or we're in CI, then we don't need to do anything.
+    if env::consts::OS != "windows" || env::var("GITHUB_WORKFLOW").unwrap() == "CI" {
        return;
    }
    println!("rerun-if-env-changed=LIBCLANG_PATH");
@@ -130,6 +133,11 @@ fn nss_dir() -> PathBuf {
}

fn get_bash() -> PathBuf {
+    // If BASH is set, use that.
+    if let Ok(bash) = env::var("BASH") {
+        return PathBuf::from(bash);
+    }
+
    // When running under MOZILLABUILD, we need to make sure not to invoke
    // another instance of bash that might be sitting around (like WSL).
    match env::var("MOZILLABUILD") {
@@ -257,7 +265,7 @@ fn build_bindings(base: &str, bindings: &Bindings, flags: &[String], gecko: bool
        builder = builder.clang_arg("-DANDROID");
    }
    if bindings.cplusplus {
-        builder = builder.clang_args(&["-x", "c++", "-std=c++11"]);
+        builder = builder.clang_args(&["-x", "c++", "-std=c++14"]);
    }
}

third_party/rust/neqo-crypto/src/aead.rs
@@ -4,6 +4,14 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

+use std::{
+    convert::{TryFrom, TryInto},
+    fmt,
+    ops::{Deref, DerefMut},
+    os::raw::{c_char, c_uint},
+    ptr::null_mut,
+};
+
use crate::{
    constants::{Cipher, Version},
    err::Res,
@@ -13,14 +21,6 @@ use crate::{
    ssl::{self, PRUint16, PRUint64, PRUint8, SSLAeadContext},
};

-use std::{
-    convert::{TryFrom, TryInto},
-    fmt,
-    ops::{Deref, DerefMut},
-    os::raw::{c_char, c_uint},
-    ptr::null_mut,
-};

experimental_api!(SSL_MakeAead(
    version: PRUint16,
    cipher: PRUint16,
@@ -62,6 +62,7 @@ impl RealAead {
    /// Create a new AEAD based on the indicated TLS version and cipher suite.
    ///
    /// # Errors
+    ///
    /// Returns `Error` when the supporting NSS functions fail.
    pub fn new(
        _fuzzing: bool,
@@ -107,6 +108,7 @@ impl RealAead {
    /// the value provided in `Aead::expansion`.
    ///
    /// # Errors
+    ///
    /// If the input can't be protected or any input is too large for NSS.
    pub fn encrypt<'a>(
        &self,
@@ -139,6 +141,7 @@ impl RealAead {
    /// the final result will be shorter.
    ///
    /// # Errors
+    ///
    /// If the input isn't authenticated or any input is too large for NSS.
    pub fn decrypt<'a>(
        &self,
|
|
||||||
15 third_party/rust/neqo-crypto/src/aead_fuzzing.rs vendored

@@ -4,12 +4,15 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use crate::constants::{Cipher, Version};
-use crate::err::{sec::SEC_ERROR_BAD_DATA, Error, Res};
-use crate::p11::SymKey;
-use crate::RealAead;
 use std::fmt;

+use crate::{
+    constants::{Cipher, Version},
+    err::{sec::SEC_ERROR_BAD_DATA, Error, Res},
+    p11::SymKey,
+    RealAead,
+};
+
 pub const FIXED_TAG_FUZZING: &[u8] = &[0x0a; 16];

 pub struct FuzzingAead {
@@ -76,8 +79,8 @@ impl FuzzingAead {
         let len_encrypted = input.len() - FIXED_TAG_FUZZING.len();
         // Check that:
         // 1) expansion is all zeros and
-        // 2) if the encrypted data is also supplied that at least some values
-        // are no zero (otherwise padding will be interpreted as a valid packet)
+        // 2) if the encrypted data is also supplied that at least some values are no zero
+        //    (otherwise padding will be interpreted as a valid packet)
         if &input[len_encrypted..] == FIXED_TAG_FUZZING
             && (len_encrypted == 0 || input[..len_encrypted].iter().any(|x| *x != 0x0))
         {

77 third_party/rust/neqo-crypto/src/agent.rs vendored

@@ -4,6 +4,21 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+use std::{
+    cell::RefCell,
+    convert::TryFrom,
+    ffi::{CStr, CString},
+    mem::{self, MaybeUninit},
+    ops::{Deref, DerefMut},
+    os::raw::{c_uint, c_void},
+    pin::Pin,
+    ptr::{null, null_mut},
+    rc::Rc,
+    time::Instant,
+};
+
+use neqo_common::{hex_snip_middle, hex_with_len, qdebug, qinfo, qtrace, qwarn};
+
 pub use crate::{
     agentio::{as_c_void, Record, RecordList},
     cert::CertificateInfo,
@@ -25,19 +40,6 @@ use crate::{
     ssl::{self, PRBool},
     time::{Time, TimeHolder},
 };
-use neqo_common::{hex_snip_middle, hex_with_len, qdebug, qinfo, qtrace, qwarn};
-use std::{
-    cell::RefCell,
-    convert::TryFrom,
-    ffi::{CStr, CString},
-    mem::{self, MaybeUninit},
-    ops::{Deref, DerefMut},
-    os::raw::{c_uint, c_void},
-    pin::Pin,
-    ptr::{null, null_mut},
-    rc::Rc,
-    time::Instant,
-};

 /// The maximum number of tickets to remember for a given connection.
 const MAX_TICKETS: usize = 4;
@@ -157,6 +159,7 @@ impl SecretAgentPreInfo {
     }

     /// # Panics
+    ///
     /// If `usize` is less than 32 bits and the value is too large.
     #[must_use]
     pub fn max_early_data(&self) -> usize {
@@ -183,6 +186,7 @@ impl SecretAgentPreInfo {
     /// which contains a valid ECH configuration.
     ///
     /// # Errors
+    ///
     /// When the public name is not valid UTF-8. (Note: names should be ASCII.)
     pub fn ech_public_name(&self) -> Res<Option<&str>> {
         if self.info.valuesSet & ssl::ssl_preinfo_ech == 0 || self.info.echPublicName.is_null() {
@@ -395,6 +399,7 @@ impl SecretAgent {
     /// Default configuration.
     ///
     /// # Errors
+    ///
     /// If `set_version_range` fails.
     fn configure(&mut self, grease: bool) -> Res<()> {
         self.set_version_range(TLS_VERSION_1_3, TLS_VERSION_1_3)?;
@@ -411,6 +416,7 @@ impl SecretAgent {
     /// Set the versions that are supported.
     ///
     /// # Errors
+    ///
     /// If the range of versions isn't supported.
     pub fn set_version_range(&mut self, min: Version, max: Version) -> Res<()> {
         let range = ssl::SSLVersionRange { min, max };
@@ -420,6 +426,7 @@ impl SecretAgent {
     /// Enable a set of ciphers. Note that the order of these is not respected.
     ///
     /// # Errors
+    ///
     /// If NSS can't enable or disable ciphers.
     pub fn set_ciphers(&mut self, ciphers: &[Cipher]) -> Res<()> {
         if self.state != HandshakeState::New {
@@ -447,6 +454,7 @@ impl SecretAgent {
     /// Set key exchange groups.
     ///
     /// # Errors
+    ///
     /// If the underlying API fails (which shouldn't happen).
     pub fn set_groups(&mut self, groups: &[Group]) -> Res<()> {
         // SSLNamedGroup is a different size to Group, so copy one by one.
@@ -461,9 +469,21 @@ impl SecretAgent {
         })
     }

+    /// Set the number of additional key shares that will be sent in the client hello
+    ///
+    /// # Errors
+    ///
+    /// If the underlying API fails (which shouldn't happen).
+    pub fn send_additional_key_shares(&mut self, count: usize) -> Res<()> {
+        secstatus_to_res(unsafe {
+            ssl::SSL_SendAdditionalKeyShares(self.fd, c_uint::try_from(count)?)
+        })
+    }
+
     /// Set TLS options.
     ///
     /// # Errors
+    ///
     /// Returns an error if the option or option value is invalid; i.e., never.
     pub fn set_option(&mut self, opt: ssl::Opt, value: bool) -> Res<()> {
         opt.set(self.fd, value)

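The hunk above adds `send_additional_key_shares` next to the existing `set_groups`. As an illustrative sketch only, not part of the vendored diff: a caller that wants to offer the hybrid Xyber768 group (the `TLS_GRP_KEM_XYBER768D00` constant added in constants.rs below) plus a classical fallback might combine the two calls as follows. It assumes `Client` dereferences to `SecretAgent` and that the `TLS_GRP_*` constants are exported from `neqo_crypto::constants`, as in the upstream crate.

use neqo_crypto::{
    constants::{TLS_GRP_EC_X25519, TLS_GRP_KEM_XYBER768D00},
    Client,
};

fn configure_hybrid_key_shares(client: &mut Client) {
    // Offer the hybrid KEM group first, with X25519 as a fallback.
    client
        .set_groups(&[TLS_GRP_KEM_XYBER768D00, TLS_GRP_EC_X25519])
        .expect("groups are supported by NSS");
    // Ask NSS to send a key share for the second group as well, so the server
    // can pick either group without an extra round trip.
    client
        .send_additional_key_shares(1)
        .expect("count fits in a c_uint");
}
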
@@ -472,6 +492,7 @@
     /// Enable 0-RTT.
     ///
     /// # Errors
+    ///
     /// See `set_option`.
     pub fn enable_0rtt(&mut self) -> Res<()> {
         self.set_option(ssl::Opt::EarlyData, true)
@@ -480,6 +501,7 @@
     /// Disable the `EndOfEarlyData` message.
     ///
     /// # Errors
+    ///
     /// See `set_option`.
     pub fn disable_end_of_early_data(&mut self) -> Res<()> {
         self.set_option(ssl::Opt::SuppressEndOfEarlyData, true)
@@ -493,8 +515,11 @@
     /// 255 octets in length.
     ///
     /// # Errors
+    ///
     /// This should always panic rather than return an error.
+    ///
     /// # Panics
+    ///
     /// If any of the provided `protocols` are more than 255 bytes long.
     ///
     /// [RFC7301]: https://datatracker.ietf.org/doc/html/rfc7301
@@ -539,11 +564,12 @@

     /// Install an extension handler.
     ///
-    /// This can be called multiple times with different values for `ext`. The handler is provided as
-    /// `Rc<RefCell<dyn T>>` so that the caller is able to hold a reference to the handler and later
-    /// access any state that it accumulates.
+    /// This can be called multiple times with different values for `ext`. The handler is provided
+    /// as `Rc<RefCell<dyn T>>` so that the caller is able to hold a reference to the handler
+    /// and later access any state that it accumulates.
     ///
     /// # Errors
+    ///
     /// When the extension handler can't be successfully installed.
     pub fn extension_handler(
         &mut self,
@@ -587,6 +613,7 @@
     /// Calling this function collects all the relevant information.
     ///
     /// # Errors
+    ///
     /// When the underlying socket functions fail.
     pub fn preinfo(&self) -> Res<SecretAgentPreInfo> {
         SecretAgentPreInfo::new(self.fd)
@@ -605,7 +632,9 @@
     }

     /// Call this function to mark the peer as authenticated.
+    ///
     /// # Panics
+    ///
     /// If the handshake doesn't need to be authenticated.
     pub fn authenticated(&mut self, status: AuthenticationStatus) {
         assert!(self.state.authentication_needed());
@@ -654,6 +683,7 @@
     /// function if you want to proceed, because this will mark the certificate as OK.
     ///
     /// # Errors
+    ///
     /// When the handshake fails this returns an error.
     pub fn handshake(&mut self, now: Instant, input: &[u8]) -> Res<Vec<u8>> {
         self.now.set(now)?;
@@ -690,6 +720,7 @@
     /// If you send data from multiple epochs, you might end up being sad.
     ///
     /// # Errors
+    ///
     /// When the handshake fails this returns an error.
     pub fn handshake_raw(&mut self, now: Instant, input: Option<Record>) -> Res<RecordList> {
         self.now.set(now)?;
@@ -717,6 +748,7 @@
     }

     /// # Panics
+    ///
     /// If setup fails.
     #[allow(unknown_lints, clippy::branches_sharing_code)]
     pub fn close(&mut self) {
@@ -822,6 +854,7 @@ impl Client {
     /// Create a new client agent.
     ///
     /// # Errors
+    ///
     /// Errors returned if the socket can't be created or configured.
     pub fn new(server_name: impl Into<String>, grease: bool) -> Res<Self> {
         let server_name = server_name.into();
@@ -911,6 +944,7 @@ impl Client {
     /// Enable resumption, using a token previously provided.
     ///
     /// # Errors
+    ///
     /// Error returned when the resumption token is invalid or
     /// the socket is not able to use the value.
     pub fn enable_resumption(&mut self, token: impl AsRef<[u8]>) -> Res<()> {
@@ -934,6 +968,7 @@ impl Client {
     /// ECH greasing. When that is done, there is no need to look for `EchRetry`
     ///
     /// # Errors
+    ///
     /// Error returned when the configuration is invalid.
     pub fn enable_ech(&mut self, ech_config_list: impl AsRef<[u8]>) -> Res<()> {
         let config = ech_config_list.as_ref();
@@ -986,7 +1021,8 @@ pub enum ZeroRttCheckResult {
     Fail,
 }

-/// A `ZeroRttChecker` is used by the agent to validate the application token (as provided by `send_ticket`)
+/// A `ZeroRttChecker` is used by the agent to validate the application token (as provided by
+/// `send_ticket`)
 pub trait ZeroRttChecker: std::fmt::Debug + std::marker::Unpin {
     fn check(&self, token: &[u8]) -> ZeroRttCheckResult;
 }
@@ -1027,6 +1063,7 @@ impl Server {
     /// Create a new server agent.
     ///
     /// # Errors
+    ///
     /// Errors returned when NSS fails.
     pub fn new(certificates: &[impl AsRef<str>]) -> Res<Self> {
         let mut agent = SecretAgent::new()?;
@@ -1080,7 +1117,8 @@ impl Server {
                 ssl::SSLHelloRetryRequestAction::ssl_hello_retry_reject_0rtt
             }
             ZeroRttCheckResult::HelloRetryRequest(tok) => {
-                // Don't bother propagating errors from this, because it should be caught in testing.
+                // Don't bother propagating errors from this, because it should be caught in
+                // testing.
                 assert!(tok.len() <= usize::try_from(retry_token_max).unwrap());
                 let slc = std::slice::from_raw_parts_mut(retry_token, tok.len());
                 slc.copy_from_slice(&tok);
@@ -1094,6 +1132,7 @@ impl Server {
     /// via the Deref implementation on Server.
     ///
     /// # Errors
+    ///
     /// Returns an error if the underlying NSS functions fail.
     pub fn enable_0rtt(
         &mut self,
@@ -1121,6 +1160,7 @@ impl Server {
     /// The records that are sent are captured and returned.
     ///
     /// # Errors
+    ///
     /// If NSS is unable to send a ticket, or if this agent is incorrectly configured.
     pub fn send_ticket(&mut self, now: Instant, extra: &[u8]) -> Res<RecordList> {
         self.agent.now.set(now)?;
@@ -1136,6 +1176,7 @@ impl Server {
     /// Enable encrypted client hello (ECH).
     ///
     /// # Errors
+    ///
     /// Fails when NSS cannot create a key pair.
     pub fn enable_ech(
         &mut self,

29 third_party/rust/neqo-crypto/src/agentio.rs vendored

@@ -4,21 +4,24 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use crate::constants::{ContentType, Epoch};
-use crate::err::{nspr, Error, PR_SetError, Res};
-use crate::prio;
-use crate::ssl;
+use std::{
+    cmp::min,
+    convert::{TryFrom, TryInto},
+    fmt, mem,
+    ops::Deref,
+    os::raw::{c_uint, c_void},
+    pin::Pin,
+    ptr::{null, null_mut},
+    vec::Vec,
+};

 use neqo_common::{hex, hex_with_len, qtrace};
-use std::cmp::min;
-use std::convert::{TryFrom, TryInto};
-use std::fmt;
-use std::mem;
-use std::ops::Deref;
-use std::os::raw::{c_uint, c_void};
-use std::pin::Pin;
-use std::ptr::{null, null_mut};
-use std::vec::Vec;
+
+use crate::{
+    constants::{ContentType, Epoch},
+    err::{nspr, Error, PR_SetError, Res},
+    prio, ssl,
+};

 // Alias common types.
 type PrFd = *mut prio::PRFileDesc;

22 third_party/rust/neqo-crypto/src/cert.rs vendored

@@ -4,18 +4,22 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use crate::err::secstatus_to_res;
-use crate::p11::{CERTCertListNode, CERT_GetCertificateDer, CertList, Item, SECItem, SECItemArray};
-use crate::ssl::{
-    PRFileDesc, SSL_PeerCertificateChain, SSL_PeerSignedCertTimestamps,
-    SSL_PeerStapledOCSPResponses,
+use std::{
+    convert::TryFrom,
+    ptr::{addr_of, NonNull},
+    slice,
 };

 use neqo_common::qerror;

-use std::convert::TryFrom;
-use std::ptr::{addr_of, NonNull};
-use std::slice;
+use crate::{
+    err::secstatus_to_res,
+    p11::{CERTCertListNode, CERT_GetCertificateDer, CertList, Item, SECItem, SECItemArray},
+    ssl::{
+        PRFileDesc, SSL_PeerCertificateChain, SSL_PeerSignedCertTimestamps,
+        SSL_PeerStapledOCSPResponses,
+    },
+};

 pub struct CertificateInfo {
     certs: CertList,

third_party/rust/neqo-crypto/src/constants.rs vendored

@@ -62,6 +62,7 @@ remap_enum! {
         TLS_GRP_EC_SECP384R1 = ssl_grp_ec_secp384r1,
         TLS_GRP_EC_SECP521R1 = ssl_grp_ec_secp521r1,
         TLS_GRP_EC_X25519 = ssl_grp_ec_curve25519,
+        TLS_GRP_KEM_XYBER768D00 = ssl_grp_kem_xyber768d00,
     }
 }

23 third_party/rust/neqo-crypto/src/ech.rs vendored

@@ -4,6 +4,15 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+use std::{
+    convert::TryFrom,
+    ffi::CString,
+    os::raw::{c_char, c_uint},
+    ptr::{addr_of_mut, null_mut},
+};
+
+use neqo_common::qtrace;
+
 use crate::{
     err::{ssl::SSL_ERROR_ECH_RETRY_WITH_ECH, Error, Res},
     experimental_api,
@@ -13,14 +22,6 @@ use crate::{
     },
     ssl::{PRBool, PRFileDesc},
 };
-use neqo_common::qtrace;
-
-use std::{
-    convert::TryFrom,
-    ffi::CString,
-    os::raw::{c_char, c_uint},
-    ptr::{addr_of_mut, null_mut},
-};

 pub use crate::{
     p11::{HpkeAeadId as AeadId, HpkeKdfId as KdfId, HpkeKemId as KemId},
     ssl::HpkeSymmetricSuite as SymmetricSuite,
@@ -89,8 +90,11 @@ pub fn convert_ech_error(fd: *mut PRFileDesc, err: Error) -> Error {
 /// Generate a key pair for encrypted client hello (ECH).
 ///
 /// # Errors
+///
 /// When NSS fails to generate a key pair or when the KEM is not supported.
+///
 /// # Panics
+///
 /// When underlying types aren't large enough to hold keys. So never.
 pub fn generate_keys() -> Res<(PrivateKey, PublicKey)> {
     let slot = Slot::internal()?;
@@ -109,6 +113,7 @@ pub fn generate_keys() -> Res<(PrivateKey, PublicKey)> {

     // If we have tracing on, try to ensure that key data can be read.
     let insensitive_secret_ptr = if log::log_enabled!(log::Level::Trace) {
+        #[allow(clippy::useless_conversion)] // TODO: Remove when we bump the MSRV to 1.74.0.
         unsafe {
             p11::PK11_GenerateKeyPairWithOpFlags(
                 *slot,
@@ -126,6 +131,7 @@ pub fn generate_keys() -> Res<(PrivateKey, PublicKey)> {
     };
     assert_eq!(insensitive_secret_ptr.is_null(), public_ptr.is_null());
     let secret_ptr = if insensitive_secret_ptr.is_null() {
+        #[allow(clippy::useless_conversion)] // TODO: Remove when we bump the MSRV to 1.74.0.
         unsafe {
             p11::PK11_GenerateKeyPairWithOpFlags(
                 *slot,
@@ -151,6 +157,7 @@ pub fn generate_keys() -> Res<(PrivateKey, PublicKey)> {
 /// Encode a configuration for encrypted client hello (ECH).
 ///
 /// # Errors
+///
 /// When NSS fails to generate a valid configuration encoding (i.e., unlikely).
 pub fn encode_config(config: u8, public_name: &str, pk: &PublicKey) -> Res<Vec<u8>> {
     // A sensible fixed value for the maximum length of a name.

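As an illustrative sketch only, not part of the vendored diff: the `generate_keys` and `encode_config` signatures documented above pair with `Client::enable_ech` from agent.rs to set up encrypted client hello. The `neqo_crypto::ech` module path and the config id of 1 are assumptions made for the example.

use neqo_crypto::{ech, Client};

fn ech_setup() {
    neqo_crypto::init();
    // Server side: create an ECH key pair and publish the encoded configuration.
    let (_sk, pk) = ech::generate_keys().expect("NSS generates a key pair");
    let config_list = ech::encode_config(1, "public.example", &pk).expect("encoding succeeds");
    // Client side: accept the configuration and use the public name in the outer hello.
    let mut client = Client::new("private.example", true).expect("client created");
    client.enable_ech(&config_list).expect("configuration is valid");
}
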
14 third_party/rust/neqo-crypto/src/err.rs vendored

@@ -7,8 +7,7 @@
 #![allow(dead_code)]
 #![allow(clippy::upper_case_acronyms)]

-use std::os::raw::c_char;
-use std::str::Utf8Error;
+use std::{os::raw::c_char, str::Utf8Error};

 use crate::ssl::{SECStatus, SECSuccess};

@@ -19,9 +18,7 @@ mod codes {
     include!(concat!(env!("OUT_DIR"), "/nss_sslerr.rs"));
     include!(concat!(env!("OUT_DIR"), "/mozpkix.rs"));
 }
-pub use codes::mozilla_pkix_ErrorCode as mozpkix;
-pub use codes::SECErrorCodes as sec;
-pub use codes::SSLErrorCodes as ssl;
+pub use codes::{mozilla_pkix_ErrorCode as mozpkix, SECErrorCodes as sec, SSLErrorCodes as ssl};
 pub mod nspr {
     include!(concat!(env!("OUT_DIR"), "/nspr_err.rs"));
 }
@@ -137,10 +134,13 @@ pub fn is_blocked(result: &Res<()>) -> bool {

 #[cfg(test)]
 mod tests {
-    use crate::err::{self, is_blocked, secstatus_to_res, Error, PRErrorCode, PR_SetError};
-    use crate::ssl::{SECFailure, SECSuccess};
     use test_fixture::fixture_init;

+    use crate::{
+        err::{self, is_blocked, secstatus_to_res, Error, PRErrorCode, PR_SetError},
+        ssl::{SECFailure, SECSuccess},
+    };
+
     fn set_error_code(code: PRErrorCode) {
         // This code doesn't work without initializing NSS first.
         fixture_init();

21 third_party/rust/neqo-crypto/src/ext.rs vendored

@@ -4,6 +4,14 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+use std::{
+    cell::RefCell,
+    convert::TryFrom,
+    os::raw::{c_uint, c_void},
+    pin::Pin,
+    rc::Rc,
+};
+
 use crate::{
     agentio::as_c_void,
     constants::{Extension, HandshakeMessage, TLS_HS_CLIENT_HELLO, TLS_HS_ENCRYPTED_EXTENSIONS},
@@ -13,13 +21,6 @@ use crate::{
         SSLExtensionHandler, SSLExtensionWriter, SSLHandshakeType,
     },
 };
-use std::{
-    cell::RefCell,
-    convert::TryFrom,
-    os::raw::{c_uint, c_void},
-    pin::Pin,
-    rc::Rc,
-};

 experimental_api!(SSL_InstallExtensionHooks(
     fd: *mut PRFileDesc,
@@ -74,7 +75,7 @@ impl ExtensionTracker {
         f(&mut *rc.borrow_mut())
     }

-    #[allow(clippy::cast_possible_truncation)]
+    #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
     unsafe extern "C" fn extension_writer(
         _fd: *mut PRFileDesc,
         message: SSLHandshakeType::Type,
@@ -105,7 +106,7 @@ impl ExtensionTracker {
         arg: *mut c_void,
     ) -> SECStatus {
         let d = std::slice::from_raw_parts(data, len as usize);
-        #[allow(clippy::cast_possible_truncation)]
+        #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
         Self::wrap_handler_call(arg, |handler| {
             // Cast is safe here because the message type is always part of the enum
             match handler.handle(message as HandshakeMessage, d) {
@@ -121,11 +122,13 @@ impl ExtensionTracker {
     /// Use the provided handler to manage an extension. This is quite unsafe.
     ///
     /// # Safety
+    ///
     /// The holder of this `ExtensionTracker` needs to ensure that it lives at
     /// least as long as the file descriptor, as NSS provides no way to remove
     /// an extension handler once it is configured.
     ///
     /// # Errors
+    ///
     /// If the underlying NSS API fails to register a handler.
     pub unsafe fn new(
         fd: *mut PRFileDesc,

17 third_party/rust/neqo-crypto/src/hkdf.rs vendored

@@ -4,6 +4,12 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+use std::{
+    convert::TryFrom,
+    os::raw::{c_char, c_uint},
+    ptr::null_mut,
+};
+
 use crate::{
     constants::{
         Cipher, Version, TLS_AES_128_GCM_SHA256, TLS_AES_256_GCM_SHA384,
@@ -16,12 +22,6 @@ use crate::{
     },
 };

-use std::{
-    convert::TryFrom,
-    os::raw::{c_char, c_uint},
-    ptr::null_mut,
-};
-
 experimental_api!(SSL_HkdfExtract(
     version: Version,
     cipher: Cipher,
@@ -54,6 +54,7 @@ fn key_size(version: Version, cipher: Cipher) -> Res<usize> {
 /// Generate a random key of the right size for the given suite.
 ///
 /// # Errors
+///
 /// Only if NSS fails.
 pub fn generate_key(version: Version, cipher: Cipher) -> Res<SymKey> {
     import_key(version, &random(key_size(version, cipher)?))
@@ -62,12 +63,14 @@ pub fn generate_key(version: Version, cipher: Cipher) -> Res<SymKey> {
 /// Import a symmetric key for use with HKDF.
 ///
 /// # Errors
+///
 /// Errors returned if the key buffer is an incompatible size or the NSS functions fail.
 pub fn import_key(version: Version, buf: &[u8]) -> Res<SymKey> {
     if version != TLS_VERSION_1_3 {
         return Err(Error::UnsupportedVersion);
     }
     let slot = Slot::internal()?;
+    #[allow(clippy::useless_conversion)] // TODO: Remove when we bump the MSRV to 1.74.0.
     let key_ptr = unsafe {
         PK11_ImportDataKey(
             *slot,
@@ -84,6 +87,7 @@ pub fn import_key(version: Version, buf: &[u8]) -> Res<SymKey> {
 /// Extract a PRK from the given salt and IKM using the algorithm defined in RFC 5869.
 ///
 /// # Errors
+///
 /// Errors returned if inputs are too large or the NSS functions fail.
 pub fn extract(
     version: Version,
@@ -103,6 +107,7 @@ pub fn extract(
 /// Expand a PRK using the HKDF-Expand-Label function defined in RFC 8446.
 ///
 /// # Errors
+///
 /// Errors returned if inputs are too large or the NSS functions fail.
 pub fn expand_label(
     version: Version,

28 third_party/rust/neqo-crypto/src/hp.rs vendored

@@ -4,6 +4,15 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+use std::{
+    cell::RefCell,
+    convert::TryFrom,
+    fmt::{self, Debug},
+    os::raw::{c_char, c_int, c_uint},
+    ptr::{addr_of_mut, null, null_mut},
+    rc::Rc,
+};
+
 use crate::{
     constants::{
         Cipher, Version, TLS_AES_128_GCM_SHA256, TLS_AES_256_GCM_SHA384,
@@ -16,14 +25,6 @@ use crate::{
         CK_CHACHA20_PARAMS, CK_MECHANISM_TYPE,
     },
 };
-use std::{
-    cell::RefCell,
-    convert::TryFrom,
-    fmt::{self, Debug},
-    os::raw::{c_char, c_int, c_uint},
-    ptr::{addr_of_mut, null, null_mut},
-    rc::Rc,
-};

 experimental_api!(SSL_HkdfExpandLabelWithMech(
     version: Version,
@@ -62,8 +63,11 @@ impl HpKey {
     /// QUIC-specific API for extracting a header-protection key.
     ///
     /// # Errors
+    ///
     /// Errors if HKDF fails or if the label is too long to fit in a `c_uint`.
+    ///
     /// # Panics
+    ///
     /// When `cipher` is not known to this code.
     #[allow(clippy::cast_sign_loss)] // Cast for PK11_GetBlockSize is safe.
     pub fn extract(version: Version, cipher: Cipher, prk: &SymKey, label: &str) -> Res<Self> {
@@ -72,6 +76,7 @@ impl HpKey {
         let l = label.as_bytes();
         let mut secret: *mut PK11SymKey = null_mut();

+        #[allow(clippy::useless_conversion)] // TODO: Remove when we bump the MSRV to 1.74.0.
         let (mech, key_size) = match cipher {
             TLS_AES_128_GCM_SHA256 => (CK_MECHANISM_TYPE::from(CKM_AES_ECB), 16),
             TLS_AES_256_GCM_SHA384 => (CK_MECHANISM_TYPE::from(CKM_AES_ECB), 32),
@@ -99,6 +104,8 @@ impl HpKey {

         let res = match cipher {
             TLS_AES_128_GCM_SHA256 | TLS_AES_256_GCM_SHA384 => {
+                // TODO: Remove when we bump the MSRV to 1.74.0.
+                #[allow(clippy::useless_conversion)]
                 let context_ptr = unsafe {
                     PK11_CreateContextBySymKey(
                         mech,
@@ -138,9 +145,12 @@ impl HpKey {
     /// Generate a header protection mask for QUIC.
     ///
     /// # Errors
+    ///
     /// An error is returned if the NSS functions fail; a sample of the
     /// wrong size is the obvious cause.
+    ///
     /// # Panics
+    ///
     /// When the mechanism for our key is not supported.
     pub fn mask(&self, sample: &[u8]) -> Res<Vec<u8>> {
         let mut output = vec![0_u8; self.block_size()];
@@ -171,6 +181,8 @@ impl HpKey {
         };
         let mut output_len: c_uint = 0;
         let mut param_item = Item::wrap_struct(&params);
+        // TODO: Remove when we bump the MSRV to 1.74.0.
+        #[allow(clippy::useless_conversion)]
         secstatus_to_res(unsafe {
             PK11_Encrypt(
                 **key,

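The `extract` and `mask` signatures touched above take a TLS version, cipher, an HKDF PRK, and a label, and then derive a header-protection mask from a short sample. A minimal sketch of that flow, not part of the diff: it assumes `HpKey` is reachable as `neqo_crypto::hp::HpKey`, that a freshly generated HKDF key can stand in for a real traffic secret, and that the AES-based mechanisms expect a 16-byte sample.

use neqo_crypto::{
    constants::{TLS_AES_128_GCM_SHA256, TLS_VERSION_1_3},
    hkdf,
    hp::HpKey,
};

fn header_protection_mask() {
    neqo_crypto::init();
    // A random HKDF key stands in for the real traffic secret here.
    let prk = hkdf::generate_key(TLS_VERSION_1_3, TLS_AES_128_GCM_SHA256).expect("key generated");
    let hp = HpKey::extract(TLS_VERSION_1_3, TLS_AES_128_GCM_SHA256, &prk, "quic hp")
        .expect("HKDF expand succeeds");
    // Assumed 16-byte sample size for the AES ciphers.
    let mask = hp.mask(&[0; 16]).expect("sample is the right size");
    assert!(!mask.is_empty());
}
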
32 third_party/rust/neqo-crypto/src/lib.rs vendored

@@ -37,15 +37,19 @@ pub mod selfencrypt;
 mod ssl;
 mod time;

+use std::{
+    ffi::CString,
+    path::{Path, PathBuf},
+    ptr::null,
+};
+
 #[cfg(not(feature = "fuzzing"))]
 pub use self::aead::RealAead as Aead;
-
-#[cfg(feature = "fuzzing")]
-pub use self::aead_fuzzing::FuzzingAead as Aead;
-
 #[cfg(feature = "fuzzing")]
 pub use self::aead::RealAead;
+#[cfg(feature = "fuzzing")]
+pub use self::aead_fuzzing::FuzzingAead as Aead;
+use self::once::OnceResult;
 pub use self::{
     agent::{
         Agent, AllowZeroRtt, Client, HandshakeState, Record, RecordList, ResumptionToken,
@@ -66,15 +70,7 @@ pub use self::{
     ssl::Opt,
 };

-use self::once::OnceResult;
-
-use std::{
-    ffi::CString,
-    path::{Path, PathBuf},
-    ptr::null,
-};
-
-const MINIMUM_NSS_VERSION: &str = "3.74";
+const MINIMUM_NSS_VERSION: &str = "3.97";

 #[allow(non_upper_case_globals, clippy::redundant_static_lifetimes)]
 #[allow(clippy::upper_case_acronyms)]
@@ -119,8 +115,11 @@ fn version_check() {
     );
 }

-/// Initialize NSS. This only executes the initialization routines once, so if there is any chance that
+/// Initialize NSS. This only executes the initialization routines once, so if there is any chance
+/// that
+///
 /// # Panics
+///
 /// When NSS initialization fails.
 pub fn init() {
     // Set time zero.
@@ -153,7 +152,9 @@ fn enable_ssl_trace() {
 }

 /// Initialize with a database.
+///
 /// # Panics
+///
 /// If NSS cannot be initialized.
 pub fn init_db<P: Into<PathBuf>>(dir: P) {
     time::init();
@@ -196,6 +197,7 @@ pub fn init_db<P: Into<PathBuf>>(dir: P) {
 }

 /// # Panics
+///
 /// If NSS isn't initialized.
 pub fn assert_initialized() {
     unsafe {

21 third_party/rust/neqo-crypto/src/p11.rs vendored

@@ -9,8 +9,6 @@
 #![allow(non_camel_case_types)]
 #![allow(non_snake_case)]

-use crate::err::{secstatus_to_res, Error, Res};
-use neqo_common::hex_with_len;
 use std::{
     convert::TryFrom,
     mem,
@@ -19,6 +17,10 @@ use std::{
     ptr::null_mut,
 };

+use neqo_common::hex_with_len;
+
+use crate::err::{secstatus_to_res, Error, Res};
+
 #[allow(clippy::upper_case_acronyms)]
 #[allow(clippy::unreadable_literal)]
 #[allow(unknown_lints, clippy::borrow_as_ptr)]
@@ -39,6 +41,7 @@ macro_rules! scoped_ptr {
             /// Create a new instance of `$scoped` from a pointer.
             ///
             /// # Errors
+            ///
             /// When passed a null pointer generates an error.
             pub fn from_ptr(ptr: *mut $target) -> Result<Self, $crate::err::Error> {
                 if ptr.is_null() {
@@ -80,8 +83,11 @@ impl PublicKey {
     /// Get the HPKE serialization of the public key.
     ///
     /// # Errors
+    ///
     /// When the key cannot be exported, which can be because the type is not supported.
+    ///
     /// # Panics
+    ///
     /// When keys are too large to fit in `c_uint/usize`. So only on programming error.
     pub fn key_data(&self) -> Res<Vec<u8>> {
         let mut buf = vec![0; 100];
@@ -124,12 +130,16 @@ impl PrivateKey {
     /// Get the bits of the private key.
     ///
     /// # Errors
+    ///
     /// When the key cannot be exported, which can be because the type is not supported
     /// or because the key data cannot be extracted from the PKCS#11 module.
+    ///
     /// # Panics
+    ///
     /// When the values are too large to fit. So never.
     pub fn key_data(&self) -> Res<Vec<u8>> {
         let mut key_item = Item::make_empty();
+        #[allow(clippy::useless_conversion)] // TODO: Remove when we bump the MSRV to 1.74.0.
         secstatus_to_res(unsafe {
             PK11_ReadRawAttribute(
                 PK11ObjectType::PK11_TypePrivKey,
@@ -187,6 +197,7 @@ impl SymKey {
     /// You really don't want to use this.
     ///
     /// # Errors
+    ///
     /// Internal errors in case of failures in NSS.
     pub fn as_bytes(&self) -> Res<&[u8]> {
         secstatus_to_res(unsafe { PK11_ExtractKeyValue(self.ptr) })?;
@@ -268,6 +279,7 @@ impl Item {
     /// content that is referenced there.
     ///
     /// # Safety
+    ///
     /// This dereferences two pointers. It doesn't get much less safe.
     pub unsafe fn into_vec(self) -> Vec<u8> {
         let b = self.ptr.as_ref().unwrap();
@@ -279,7 +291,9 @@ impl Item {
 }

 /// Generate a randomized buffer.
+///
 /// # Panics
+///
 /// When `size` is too large or NSS fails.
 #[must_use]
 pub fn random(size: usize) -> Vec<u8> {
@@ -293,9 +307,10 @@ pub fn random(size: usize) -> Vec<u8> {

 #[cfg(test)]
 mod test {
-    use super::random;
     use test_fixture::fixture_init;

+    use super::random;
+
     #[test]
     fn randomness() {
         fixture_init();

12 third_party/rust/neqo-crypto/src/replay.rs vendored

@@ -4,11 +4,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use crate::{
-    err::Res,
-    ssl::PRFileDesc,
-    time::{Interval, PRTime, Time},
-};
 use std::{
     convert::{TryFrom, TryInto},
     ops::{Deref, DerefMut},
@@ -17,6 +12,12 @@ use std::{
     time::{Duration, Instant},
 };

+use crate::{
+    err::Res,
+    ssl::PRFileDesc,
+    time::{Interval, PRTime, Time},
+};
+
 // This is an opaque struct in NSS.
 #[allow(clippy::upper_case_acronyms)]
 #[allow(clippy::empty_enum)]
@@ -55,6 +56,7 @@ impl AntiReplay {
     /// See the documentation in NSS for advice on how to set these values.
     ///
     /// # Errors
+    ///
     /// Returns an error if `now` is in the past relative to our baseline or
     /// NSS is unable to generate an anti-replay context.
     pub fn new(now: Instant, window: Duration, k: usize, bits: usize) -> Res<Self> {

6 third_party/rust/neqo-crypto/src/secrets.rs vendored

@@ -4,6 +4,10 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+use std::{os::raw::c_void, pin::Pin};
+
+use neqo_common::qdebug;
+
 use crate::{
     agentio::as_c_void,
     constants::Epoch,
@@ -11,8 +15,6 @@ use crate::{
     p11::{PK11SymKey, PK11_ReferenceSymKey, SymKey},
     ssl::{PRFileDesc, SSLSecretCallback, SSLSecretDirection},
 };
-use neqo_common::qdebug;
-use std::{os::raw::c_void, pin::Pin};

 experimental_api!(SSL_SecretCallback(
     fd: *mut PRFileDesc,

20
third_party/rust/neqo-crypto/src/selfencrypt.rs
vendored
20
third_party/rust/neqo-crypto/src/selfencrypt.rs
vendored
|
|
@ -4,14 +4,17 @@
|
||||||
// option. This file may not be copied, modified, or distributed
|
// option. This file may not be copied, modified, or distributed
|
||||||
// except according to those terms.
|
// except according to those terms.
|
||||||
|
|
||||||
use crate::constants::{Cipher, Version};
|
use std::mem;
|
||||||
use crate::err::{Error, Res};
|
|
||||||
use crate::p11::{random, SymKey};
|
|
||||||
use crate::{hkdf, Aead};
|
|
||||||
|
|
||||||
use neqo_common::{hex, qinfo, qtrace, Encoder};
|
use neqo_common::{hex, qinfo, qtrace, Encoder};
|
||||||
|
|
||||||
use std::mem;
|
use crate::{
|
||||||
|
constants::{Cipher, Version},
|
||||||
|
err::{Error, Res},
|
||||||
|
hkdf,
|
||||||
|
p11::{random, SymKey},
|
||||||
|
Aead,
|
||||||
|
};
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct SelfEncrypt {
|
pub struct SelfEncrypt {
|
||||||
|
|
@ -27,6 +30,7 @@ impl SelfEncrypt {
|
||||||
const SALT_LENGTH: usize = 16;
|
const SALT_LENGTH: usize = 16;
|
||||||
|
|
||||||
/// # Errors
|
/// # Errors
|
||||||
|
///
|
||||||
/// Failure to generate a new HKDF key using NSS results in an error.
|
/// Failure to generate a new HKDF key using NSS results in an error.
|
||||||
pub fn new(version: Version, cipher: Cipher) -> Res<Self> {
|
pub fn new(version: Version, cipher: Cipher) -> Res<Self> {
|
||||||
let key = hkdf::generate_key(version, cipher)?;
|
let key = hkdf::generate_key(version, cipher)?;
|
||||||
|
|
@ -46,9 +50,11 @@ impl SelfEncrypt {
|
||||||
Aead::new(false, self.version, self.cipher, &secret, "neqo self")
|
Aead::new(false, self.version, self.cipher, &secret, "neqo self")
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Rotate keys. This causes any previous key that is being held to be replaced by the current key.
|
/// Rotate keys. This causes any previous key that is being held to be replaced by the current
|
||||||
|
/// key.
|
||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
|
///
|
||||||
/// Failure to generate a new HKDF key using NSS results in an error.
|
/// Failure to generate a new HKDF key using NSS results in an error.
|
||||||
pub fn rotate(&mut self) -> Res<()> {
|
pub fn rotate(&mut self) -> Res<()> {
|
||||||
let new_key = hkdf::generate_key(self.version, self.cipher)?;
|
let new_key = hkdf::generate_key(self.version, self.cipher)?;
|
||||||
|
|
@ -65,6 +71,7 @@ impl SelfEncrypt {
|
||||||
/// caller is responsible for carrying the AAD as appropriate.
|
/// caller is responsible for carrying the AAD as appropriate.
|
||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
|
///
|
||||||
/// Failure to protect using NSS AEAD APIs produces an error.
|
/// Failure to protect using NSS AEAD APIs produces an error.
|
||||||
pub fn seal(&self, aad: &[u8], plaintext: &[u8]) -> Res<Vec<u8>> {
|
pub fn seal(&self, aad: &[u8], plaintext: &[u8]) -> Res<Vec<u8>> {
|
||||||
// Format is:
|
// Format is:
|
||||||
|
|
@ -117,6 +124,7 @@ impl SelfEncrypt {
|
||||||
/// Open the protected `ciphertext`.
|
/// Open the protected `ciphertext`.
|
||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
|
///
|
||||||
/// Returns an error when the self-encrypted object is invalid;
|
/// Returns an error when the self-encrypted object is invalid;
|
||||||
/// when the keys have been rotated; or when NSS fails.
|
/// when the keys have been rotated; or when NSS fails.
|
||||||
#[allow(clippy::similar_names)] // aad is similar to aead
|
#[allow(clippy::similar_names)] // aad is similar to aead
|
||||||
|
|
|
||||||
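The doc comments touched above spell out the `SelfEncrypt` contract: `new` and `rotate` fail only if NSS cannot generate an HKDF key, `seal` protects a payload under the current key, and `open` rejects data once the key it was sealed under has been rotated away. A minimal round-trip sketch under those docs; the `open` signature is assumed to mirror `seal`, since it is not visible in this hunk:

use neqo_crypto::{
    constants::{TLS_AES_128_GCM_SHA256, TLS_VERSION_1_3},
    init,
    selfencrypt::SelfEncrypt,
    Res,
};

fn self_encrypt_roundtrip() -> Res<()> {
    init(); // NSS must be initialized before any key operations.
    let mut se = SelfEncrypt::new(TLS_VERSION_1_3, TLS_AES_128_GCM_SHA256)?;
    let sealed = se.seal(b"aad", b"plaintext")?;
    // Assumed signature: open(&self, aad: &[u8], ciphertext: &[u8]) -> Res<Vec<u8>>.
    let opened = se.open(b"aad", &sealed)?;
    assert_eq!(opened, b"plaintext");
    // `rotate` keeps only one previous key, so after two rotations the
    // original sealing key is gone and `open` should report an error.
    se.rotate()?;
    se.rotate()?;
    assert!(se.open(b"aad", &sealed).is_err());
    Ok(())
}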
third_party/rust/neqo-crypto/src/ssl.rs (vendored; 8 lines changed)

@@ -15,11 +15,13 @@
     clippy::borrow_as_ptr
 )]
 
-use crate::constants::Epoch;
-use crate::err::{secstatus_to_res, Res};
-
 use std::os::raw::{c_uint, c_void};
 
+use crate::{
+    constants::Epoch,
+    err::{secstatus_to_res, Res},
+};
+
 include!(concat!(env!("OUT_DIR"), "/nss_ssl.rs"));
 mod SSLOption {
     include!(concat!(env!("OUT_DIR"), "/nss_sslopt.rs"));
third_party/rust/neqo-crypto/src/time.rs (vendored; 19 lines changed)

@@ -6,13 +6,6 @@
 
 #![allow(clippy::upper_case_acronyms)]
 
-use crate::{
-    agentio::as_c_void,
-    err::{Error, Res},
-    once::OnceResult,
-    ssl::{PRFileDesc, SSLTimeFunc},
-};
-
 use std::{
     boxed::Box,
     convert::{TryFrom, TryInto},
@@ -22,6 +15,13 @@ use std::{
     time::{Duration, Instant},
 };
 
+use crate::{
+    agentio::as_c_void,
+    err::{Error, Res},
+    once::OnceResult,
+    ssl::{PRFileDesc, SSLTimeFunc},
+};
+
 include!(concat!(env!("OUT_DIR"), "/nspr_time.rs"));
 
 experimental_api!(SSL_SetTimeFunc(
@@ -207,13 +207,14 @@ impl Default for TimeHolder {
 
 #[cfg(test)]
 mod test {
-    use super::{get_base, init, Interval, PRTime, Time};
-    use crate::err::Res;
     use std::{
         convert::{TryFrom, TryInto},
         time::{Duration, Instant},
     };
 
+    use super::{get_base, init, Interval, PRTime, Time};
+    use crate::err::Res;
+
     #[test]
     fn convert_stable() {
         init();
third_party/rust/neqo-crypto/tests/aead.rs (vendored; 7 lines changed)

@@ -2,9 +2,10 @@
 #![warn(clippy::pedantic)]
 #![cfg(not(feature = "fuzzing"))]
 
-use neqo_crypto::constants::{Cipher, TLS_AES_128_GCM_SHA256, TLS_VERSION_1_3};
-use neqo_crypto::hkdf;
-use neqo_crypto::Aead;
+use neqo_crypto::{
+    constants::{Cipher, TLS_AES_128_GCM_SHA256, TLS_VERSION_1_3},
+    hkdf, Aead,
+};
 use test_fixture::fixture_init;
 
 const AAD: &[u8] = &[
third_party/rust/neqo-crypto/tests/agent.rs (vendored; 72 lines changed)

@@ -1,20 +1,21 @@
 #![cfg_attr(feature = "deny-warnings", deny(warnings))]
 #![warn(clippy::pedantic)]
 
+use std::boxed::Box;
+
 use neqo_crypto::{
     generate_ech_keys, AuthenticationStatus, Client, Error, HandshakeState, SecretAgentPreInfo,
     Server, ZeroRttCheckResult, ZeroRttChecker, TLS_AES_128_GCM_SHA256,
-    TLS_CHACHA20_POLY1305_SHA256, TLS_GRP_EC_SECP256R1, TLS_VERSION_1_3,
+    TLS_CHACHA20_POLY1305_SHA256, TLS_GRP_EC_SECP256R1, TLS_GRP_EC_X25519, TLS_VERSION_1_3,
 };
 
-use std::boxed::Box;
-
 mod handshake;
+use test_fixture::{fixture_init, now};
+
 use crate::handshake::{
     connect, connect_fail, forward_records, resumption_setup, PermissiveZeroRttChecker, Resumption,
     ZERO_RTT_TOKEN_DATA,
 };
-use test_fixture::{fixture_init, now};
 
 #[test]
 fn make_client() {
@@ -155,6 +156,48 @@ fn chacha_client() {
     );
 }
 
+#[test]
+fn server_prefers_first_client_share() {
+    fixture_init();
+    let mut client = Client::new("server.example", true).expect("should create client");
+    let mut server = Server::new(&["key"]).expect("should create server");
+    server
+        .set_groups(&[TLS_GRP_EC_X25519, TLS_GRP_EC_SECP256R1])
+        .expect("groups set");
+    client
+        .set_groups(&[TLS_GRP_EC_X25519, TLS_GRP_EC_SECP256R1])
+        .expect("groups set");
+    client
+        .send_additional_key_shares(1)
+        .expect("should set additional key share count");
+
+    connect(&mut client, &mut server);
+
+    assert_eq!(client.info().unwrap().key_exchange(), TLS_GRP_EC_X25519);
+    assert_eq!(server.info().unwrap().key_exchange(), TLS_GRP_EC_X25519);
+}
+
+#[test]
+fn server_prefers_second_client_share() {
+    fixture_init();
+    let mut client = Client::new("server.example", true).expect("should create client");
+    let mut server = Server::new(&["key"]).expect("should create server");
+    server
+        .set_groups(&[TLS_GRP_EC_SECP256R1, TLS_GRP_EC_X25519])
+        .expect("groups set");
+    client
+        .set_groups(&[TLS_GRP_EC_X25519, TLS_GRP_EC_SECP256R1])
+        .expect("groups set");
+    client
+        .send_additional_key_shares(1)
+        .expect("should set additional key share count");
+
+    connect(&mut client, &mut server);
+
+    assert_eq!(client.info().unwrap().key_exchange(), TLS_GRP_EC_SECP256R1);
+    assert_eq!(server.info().unwrap().key_exchange(), TLS_GRP_EC_SECP256R1);
+}
+
 #[test]
 fn p256_server() {
     fixture_init();
@@ -170,6 +213,27 @@ fn p256_server() {
     assert_eq!(server.info().unwrap().key_exchange(), TLS_GRP_EC_SECP256R1);
 }
 
+#[test]
+fn p256_server_hrr() {
+    fixture_init();
+    let mut client = Client::new("server.example", true).expect("should create client");
+    let mut server = Server::new(&["key"]).expect("should create server");
+    server
+        .set_groups(&[TLS_GRP_EC_SECP256R1])
+        .expect("groups set");
+    client
+        .set_groups(&[TLS_GRP_EC_X25519, TLS_GRP_EC_SECP256R1])
+        .expect("groups set");
+    client
+        .send_additional_key_shares(0)
+        .expect("should set additional key share count");
+
+    connect(&mut client, &mut server);
+
+    assert_eq!(client.info().unwrap().key_exchange(), TLS_GRP_EC_SECP256R1);
+    assert_eq!(server.info().unwrap().key_exchange(), TLS_GRP_EC_SECP256R1);
+}
+
 #[test]
 fn alpn() {
     fixture_init();
third_party/rust/neqo-crypto/tests/ext.rs (vendored; 12 lines changed)

@@ -1,11 +1,13 @@
 #![cfg_attr(feature = "deny-warnings", deny(warnings))]
 #![warn(clippy::pedantic)]
 
-use neqo_crypto::constants::{HandshakeMessage, TLS_HS_CLIENT_HELLO, TLS_HS_ENCRYPTED_EXTENSIONS};
-use neqo_crypto::ext::{ExtensionHandler, ExtensionHandlerResult, ExtensionWriterResult};
-use neqo_crypto::{Client, Server};
-use std::cell::RefCell;
-use std::rc::Rc;
+use std::{cell::RefCell, rc::Rc};
+
+use neqo_crypto::{
+    constants::{HandshakeMessage, TLS_HS_CLIENT_HELLO, TLS_HS_ENCRYPTED_EXTENSIONS},
+    ext::{ExtensionHandler, ExtensionHandlerResult, ExtensionWriterResult},
+    Client, Server,
+};
 use test_fixture::fixture_init;
 
 mod handshake;
@@ -1,12 +1,12 @@
 #![allow(dead_code)]
 
+use std::{mem, time::Instant};
+
 use neqo_common::qinfo;
 use neqo_crypto::{
     AntiReplay, AuthenticationStatus, Client, HandshakeState, RecordList, Res, ResumptionToken,
     SecretAgent, Server, ZeroRttCheckResult, ZeroRttChecker,
 };
-use std::mem;
-use std::time::Instant;
 use test_fixture::{anti_replay, fixture_init, now};
 
 /// Consume records until the handshake state changes.
third_party/rust/neqo-crypto/tests/hkdf.rs (vendored; 10 lines changed)

@@ -1,11 +1,13 @@
 #![cfg_attr(feature = "deny-warnings", deny(warnings))]
 #![warn(clippy::pedantic)]
 
-use neqo_crypto::constants::{
-    Cipher, TLS_AES_128_GCM_SHA256, TLS_AES_256_GCM_SHA384, TLS_CHACHA20_POLY1305_SHA256,
-    TLS_VERSION_1_3,
-};
-use neqo_crypto::{hkdf, SymKey};
+use neqo_crypto::{
+    constants::{
+        Cipher, TLS_AES_128_GCM_SHA256, TLS_AES_256_GCM_SHA384, TLS_CHACHA20_POLY1305_SHA256,
+        TLS_VERSION_1_3,
+    },
+    hkdf, SymKey,
+};
 use test_fixture::fixture_init;
 
 const SALT: &[u8] = &[
third_party/rust/neqo-crypto/tests/hp.rs (vendored; 3 lines changed)

@@ -1,6 +1,8 @@
 #![cfg_attr(feature = "deny-warnings", deny(warnings))]
 #![warn(clippy::pedantic)]
 
+use std::mem;
+
 use neqo_crypto::{
     constants::{
         Cipher, TLS_AES_128_GCM_SHA256, TLS_AES_256_GCM_SHA384, TLS_CHACHA20_POLY1305_SHA256,
@@ -9,7 +11,6 @@ use neqo_crypto::{
     hkdf,
     hp::HpKey,
 };
-use std::mem;
 use test_fixture::fixture_init;
 
 fn make_hp(cipher: Cipher) -> HpKey {
@@ -2,8 +2,12 @@
 #![warn(clippy::pedantic)]
 #![cfg(not(feature = "fuzzing"))]
 
-use neqo_crypto::constants::{TLS_AES_128_GCM_SHA256, TLS_VERSION_1_3};
-use neqo_crypto::{init, selfencrypt::SelfEncrypt, Error};
+use neqo_crypto::{
+    constants::{TLS_AES_128_GCM_SHA256, TLS_VERSION_1_3},
+    init,
+    selfencrypt::SelfEncrypt,
+    Error,
+};
 
 #[test]
 fn se_create() {
@@ -1 +1 @@
(single-line JSON checksum map for the vendored crate files, listing per-file digests; regenerated to match the updated v0.7.0 sources)
third_party/rust/neqo-http3/Cargo.toml (vendored; 19 lines changed)

@@ -11,22 +11,21 @@
 
 [package]
 edition = "2018"
-rust-version = "1.65.0"
+rust-version = "1.70.0"
 name = "neqo-http3"
-version = "0.6.8"
+version = "0.7.0"
 authors = ["Dragana Damjanovic <dragana.damjano@gmail.com>"]
 license = "MIT OR Apache-2.0"
 
 [dependencies]
 enumset = "1.1.2"
-lazy_static = "1.3.0"
-qlog = "0.9.0"
-sfv = "0.9.1"
-smallvec = "1.0.0"
-url = "2.0"
+lazy_static = "1.4"
+sfv = "0.9.3"
+smallvec = "1.11.1"
+url = "2.5"
 
 [dependencies.log]
-version = "0.4.0"
+version = "0.4.17"
 default-features = false
 
 [dependencies.neqo-common]
@@ -41,6 +40,10 @@ path = "./../neqo-qpack"
 [dependencies.neqo-transport]
 path = "./../neqo-transport"
 
+[dependencies.qlog]
+git = "https://github.com/cloudflare/quiche"
+rev = "09ea4b244096a013071cfe2175bbf2945fb7f8d1"
+
 [dev-dependencies.test-fixture]
 path = "../test-fixture"
@@ -4,10 +4,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use crate::Res;
 use neqo_common::qtrace;
 use neqo_transport::{Connection, StreamId};
 
+use crate::Res;
+
 #[derive(Debug, PartialEq, Eq)]
 pub enum BufferedStream {
     Uninitialized,
@@ -36,6 +37,7 @@ impl BufferedStream {
     }
 
     /// # Panics
+    ///
     /// If the `BufferedStream` is initialized more than one it will panic.
     pub fn init(&mut self, stream_id: StreamId) {
         debug_assert!(&Self::Uninitialized == self);
@@ -46,6 +48,7 @@ impl BufferedStream {
     }
 
     /// # Panics
+    ///
     /// This functon cannot be called before the `BufferedStream` is initialized.
     pub fn buffer(&mut self, to_buf: &[u8]) {
         if let Self::Initialized { buf, .. } = self {
@@ -56,6 +59,7 @@ impl BufferedStream {
     }
 
     /// # Errors
+    ///
     /// Returns `neqo_transport` errors.
     pub fn send_buffer(&mut self, conn: &mut Connection) -> Res<usize> {
         let label = ::neqo_common::log_subject!(::log::Level::Debug, self);
@@ -76,6 +80,7 @@ impl BufferedStream {
     }
 
     /// # Errors
+    ///
     /// Returns `neqo_transport` errors.
     pub fn send_atomic(&mut self, conn: &mut Connection, to_send: &[u8]) -> Res<bool> {
         // First try to send anything that is in the buffer.
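The `# Panics` and `# Errors` notes in this file pin down the `BufferedStream` lifecycle: `init` must run exactly once, `buffer` only works after `init`, and `send_buffer`/`send_atomic` pass through `neqo_transport` errors. A rough sketch of a caller written as if it lived inside this crate; the function itself is illustrative, not part of neqo-http3:

use neqo_transport::{Connection, StreamId};

use crate::{BufferedStream, Res};

// Queue a payload on a freshly assigned stream and flush as much as the
// transport will currently accept, returning the number of bytes sent.
fn queue_and_flush(conn: &mut Connection, stream_id: StreamId, payload: &[u8]) -> Res<usize> {
    let mut stream = BufferedStream::Uninitialized;
    stream.init(stream_id); // calling init a second time would hit the panic documented above
    stream.buffer(payload); // only valid after init, per the # Panics note
    stream.send_buffer(conn) // surfaces neqo_transport errors, per the # Errors note
}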
third_party/rust/neqo-http3/src/client_events.rs (vendored; 17 lines changed)

@@ -6,19 +6,18 @@
 
 #![allow(clippy::module_name_repetitions)]
 
-use crate::connection::Http3State;
-use crate::settings::HSettingType;
-use crate::{
-    features::extended_connect::{ExtendedConnectEvents, ExtendedConnectType, SessionCloseReason},
-    CloseType, Http3StreamInfo, HttpRecvStreamEvents, RecvStreamEvents, SendStreamEvents,
-};
+use std::{cell::RefCell, collections::VecDeque, rc::Rc};
+
 use neqo_common::{event::Provider as EventProvider, Header};
 use neqo_crypto::ResumptionToken;
 use neqo_transport::{AppError, StreamId, StreamType};
 
-use std::cell::RefCell;
-use std::collections::VecDeque;
-use std::rc::Rc;
+use crate::{
+    connection::Http3State,
+    features::extended_connect::{ExtendedConnectEvents, ExtendedConnectType, SessionCloseReason},
+    settings::HSettingType,
+    CloseType, Http3StreamInfo, HttpRecvStreamEvents, RecvStreamEvents, SendStreamEvents,
+};
 
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub enum WebTransportEvent {
@@ -4,9 +4,10 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use std::cmp::min;
+
 use neqo_qpack::QpackSettings;
 use neqo_transport::ConnectionParameters;
-use std::cmp::min;
 
 const QPACK_MAX_TABLE_SIZE_DEFAULT: u64 = 65536;
 const QPACK_TABLE_SIZE_LIMIT: u64 = (1 << 30) - 1;
@@ -53,6 +54,7 @@ impl Http3Parameters {
     }
 
     /// # Panics
+    ///
     /// The table size must be smaller than 1 << 30 by the spec.
     #[must_use]
     pub fn max_table_size_encoder(mut self, mut max_table: u64) -> Self {
@@ -68,6 +70,7 @@ impl Http3Parameters {
     }
 
     /// # Panics
+    ///
     /// The table size must be smaller than 1 << 30 by the spec.
     #[must_use]
     pub fn max_table_size_decoder(mut self, mut max_table: u64) -> Self {
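The `# Panics` notes added here restate the QPACK bound the file already encodes as `QPACK_TABLE_SIZE_LIMIT`: dynamic table capacities must stay below `1 << 30`, and larger values panic in the builder. A small sketch of the two builder calls these docs describe, assuming `Http3Parameters` offers a `Default` construction the way builder types in this crate usually do:

use neqo_http3::Http3Parameters;

// Keep both capacities well under (1 << 30) - 1; values at or above the
// limit panic, per the doc comments above.
fn qpack_params() -> Http3Parameters {
    Http3Parameters::default()
        .max_table_size_encoder(64 * 1024)
        .max_table_size_decoder(64 * 1024)
}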
third_party/rust/neqo-http3/src/connection.rs (vendored; 119 lines changed)

@@ -6,41 +6,43 @@
 
 #![allow(clippy::module_name_repetitions)]
 
-use crate::control_stream_local::ControlStreamLocal;
-use crate::control_stream_remote::ControlStreamRemote;
-use crate::features::extended_connect::{
-    webtransport_session::WebTransportSession,
-    webtransport_streams::{WebTransportRecvStream, WebTransportSendStream},
-    ExtendedConnectEvents, ExtendedConnectFeature, ExtendedConnectType,
-};
-use crate::frames::HFrame;
-use crate::push_controller::PushController;
-use crate::qpack_decoder_receiver::DecoderRecvStream;
-use crate::qpack_encoder_receiver::EncoderRecvStream;
-use crate::recv_message::{RecvMessage, RecvMessageInfo};
-use crate::request_target::{AsRequestTarget, RequestTarget};
-use crate::send_message::SendMessage;
-use crate::settings::{HSettingType, HSettings, HttpZeroRttChecker};
-use crate::stream_type_reader::NewStreamHeadReader;
-use crate::{
-    client_events::Http3ClientEvents, CloseType, Http3Parameters, Http3StreamType,
-    HttpRecvStreamEvents, NewStreamType, Priority, PriorityHandler, ReceiveOutput, RecvStream,
-    RecvStreamEvents, SendStream, SendStreamEvents,
-};
+use std::{
+    cell::RefCell,
+    collections::{BTreeSet, HashMap},
+    fmt::Debug,
+    mem,
+    rc::Rc,
+};
 
 use neqo_common::{qdebug, qerror, qinfo, qtrace, qwarn, Decoder, Header, MessageType, Role};
-use neqo_qpack::decoder::QPackDecoder;
-use neqo_qpack::encoder::QPackEncoder;
+use neqo_qpack::{decoder::QPackDecoder, encoder::QPackEncoder};
 use neqo_transport::{
     streams::SendOrder, AppError, Connection, ConnectionError, DatagramTracking, State, StreamId,
     StreamType, ZeroRttState,
 };
-use std::cell::RefCell;
-use std::collections::{BTreeSet, HashMap};
-use std::fmt::Debug;
-use std::mem;
-use std::rc::Rc;
 
-use crate::{Error, Res};
+use crate::{
+    client_events::Http3ClientEvents,
+    control_stream_local::ControlStreamLocal,
+    control_stream_remote::ControlStreamRemote,
+    features::extended_connect::{
+        webtransport_session::WebTransportSession,
+        webtransport_streams::{WebTransportRecvStream, WebTransportSendStream},
+        ExtendedConnectEvents, ExtendedConnectFeature, ExtendedConnectType,
+    },
+    frames::HFrame,
+    push_controller::PushController,
+    qpack_decoder_receiver::DecoderRecvStream,
+    qpack_encoder_receiver::EncoderRecvStream,
+    recv_message::{RecvMessage, RecvMessageInfo},
+    request_target::{AsRequestTarget, RequestTarget},
+    send_message::SendMessage,
+    settings::{HSettingType, HSettings, HttpZeroRttChecker},
+    stream_type_reader::NewStreamHeadReader,
+    CloseType, Error, Http3Parameters, Http3StreamType, HttpRecvStreamEvents, NewStreamType,
+    Priority, PriorityHandler, ReceiveOutput, RecvStream, RecvStreamEvents, Res, SendStream,
+    SendStreamEvents,
+};
 
 pub(crate) struct RequestDescription<'b, 't, T>
 where
@@ -79,8 +81,8 @@ enum Http3RemoteSettingsState {
 /// - `ZeroRtt`: 0-RTT has been enabled and is active
 /// - Connected
 /// - GoingAway(StreamId): The connection has received a `GOAWAY` frame
-/// - Closing(ConnectionError): The connection is closed. The closing has been initiated by this
-/// end of the connection, e.g., the `CONNECTION_CLOSE` frame has been sent. In this state, the
+/// - Closing(ConnectionError): The connection is closed. The closing has been initiated by this end
+/// of the connection, e.g., the `CONNECTION_CLOSE` frame has been sent. In this state, the
 /// connection waits a certain amount of time to retransmit the `CONNECTION_CLOSE` frame if
 /// needed.
 /// - Closed(ConnectionError): This is the final close state: closing has been initialized by the
@@ -384,7 +386,8 @@ impl Http3Connection {
         Ok(())
     }
 
-    /// Inform a `HttpConnection` that a stream has data to send and that `send` should be called for the stream.
+    /// Inform a `HttpConnection` that a stream has data to send and that `send` should be called
+    /// for the stream.
     pub fn stream_has_pending_data(&mut self, stream_id: StreamId) {
        self.streams_with_pending_data.insert(stream_id);
    }
@@ -502,8 +505,8 @@ impl Http3Connection {
     /// stream and unidi stream that are still do not have a type.
     /// The function cannot handle:
     /// 1) a `Push(_)`, `Htttp` or `WebTransportStream(_)` stream
-    /// 2) frames `MaxPushId`, `PriorityUpdateRequest`, `PriorityUpdateRequestPush` or `Goaway`
-    /// must be handled by `Http3Client`/`Server`.
+    /// 2) frames `MaxPushId`, `PriorityUpdateRequest`, `PriorityUpdateRequestPush` or `Goaway` must
+    /// be handled by `Http3Client`/`Server`.
     /// The function returns `ReceiveOutput`.
     pub fn handle_stream_readable(
         &mut self,
@@ -579,8 +582,8 @@ impl Http3Connection {
         Ok(())
     }
 
-    /// This is called when `neqo_transport::Connection` state has been change to take proper actions in
-    /// the HTTP3 layer.
+    /// This is called when `neqo_transport::Connection` state has been change to take proper
+    /// actions in the HTTP3 layer.
     pub fn handle_state_change(&mut self, conn: &mut Connection, state: &State) -> Res<bool> {
         qdebug!([self], "Handle state change {:?}", state);
         match state {
@@ -626,7 +629,8 @@ impl Http3Connection {
         }
     }
 
-    /// This is called when 0RTT has been reset to clear `send_streams`, `recv_streams` and settings.
+    /// This is called when 0RTT has been reset to clear `send_streams`, `recv_streams` and
+    /// settings.
    pub fn handle_zero_rtt_rejected(&mut self) -> Res<()> {
        if self.state == Http3State::ZeroRtt {
            self.state = Http3State::Initializing;
@@ -774,16 +778,16 @@ impl Http3Connection {
     /// This function will not handle the output of the function completely, but only
     /// handle the indication that a stream is closed. There are 2 cases:
     /// - an error occurred or
-    /// - the stream is done, i.e. the second value in `output` tuple is true if
-    /// the stream is done and can be removed from the `recv_streams`
+    /// - the stream is done, i.e. the second value in `output` tuple is true if the stream is done
+    /// and can be removed from the `recv_streams`
     /// How it is handling `output`:
     /// - if the stream is done, it removes the stream from `recv_streams`
     /// - if the stream is not done and there is no error, return `output` and the caller will
     /// handle it.
     /// - in case of an error:
-    /// - if it is only a stream error and the stream is not critical, send `STOP_SENDING`
-    /// frame, remove the stream from `recv_streams` and inform the listener that the stream
-    /// has been reset.
+    /// - if it is only a stream error and the stream is not critical, send `STOP_SENDING` frame,
+    /// remove the stream from `recv_streams` and inform the listener that the stream has been
+    /// reset.
     /// - otherwise this is a connection error. In this case, propagate the error to the caller
     /// that will handle it properly.
     fn handle_stream_manipulation_output<U>(
@@ -861,7 +865,8 @@ impl Http3Connection {
     }
 
     fn create_bidi_transport_stream(&self, conn: &mut Connection) -> Res<StreamId> {
-        // Requests cannot be created when a connection is in states: Initializing, GoingAway, Closing and Closed.
+        // Requests cannot be created when a connection is in states: Initializing, GoingAway,
+        // Closing and Closed.
         match self.state() {
             Http3State::GoingAway(..) | Http3State::Closing(..) | Http3State::Closed(..) => {
                 return Err(Error::AlreadyClosed)
@@ -927,8 +932,9 @@ impl Http3Connection {
             )),
         );
 
-        // Call immediately send so that at least headers get sent. This will make Firefox faster, since
-        // it can send request body immediately in most cases and does not need to do a complete process loop.
+        // Call immediately send so that at least headers get sent. This will make Firefox faster,
+        // since it can send request body immediately in most cases and does not need to do
+        // a complete process loop.
        self.send_streams
            .get_mut(&stream_id)
            .ok_or(Error::InvalidStreamId)?
@@ -936,11 +942,13 @@ impl Http3Connection {
         Ok(())
     }
 
-    /// Stream data are read directly into a buffer supplied as a parameter of this function to avoid copying
-    /// data.
+    /// Stream data are read directly into a buffer supplied as a parameter of this function to
+    /// avoid copying data.
+    ///
     /// # Errors
-    /// It returns an error if a stream does not exist or an error happens while reading a stream, e.g.
-    /// early close, protocol error, etc.
+    ///
+    /// It returns an error if a stream does not exist or an error happens while reading a stream,
+    /// e.g. early close, protocol error, etc.
     pub fn read_data(
         &mut self,
         conn: &mut Connection,
@@ -1004,7 +1012,9 @@ impl Http3Connection {
     }
 
     /// Set the stream `SendOrder`.
+    ///
     /// # Errors
+    ///
     /// Returns `InvalidStreamId` if the stream id doesn't exist
     pub fn stream_set_sendorder(
         conn: &mut Connection,
@@ -1018,7 +1028,9 @@ impl Http3Connection {
     /// Set the stream Fairness. Fair streams will share bandwidth with other
     /// streams of the same sendOrder group (or the unordered group). Unfair streams
     /// will give bandwidth preferentially to the lowest streamId with data to send.
+    ///
     /// # Errors
+    ///
     /// Returns `InvalidStreamId` if the stream id doesn't exist
     pub fn stream_set_fairness(
         conn: &mut Connection,
@@ -1088,8 +1100,8 @@ impl Http3Connection {
            .send_streams
            .get_mut(&stream_id)
            .ok_or(Error::InvalidStreamId)?;
-        // The following function may return InvalidStreamId from the transport layer if the stream has been closed
-        // already. It is ok to ignore it here.
+        // The following function may return InvalidStreamId from the transport layer if the stream
+        // has been closed already. It is ok to ignore it here.
        mem::drop(send_stream.close(conn));
        if send_stream.done() {
            self.remove_send_stream(stream_id, conn);
@@ -1184,7 +1196,8 @@ impl Http3Connection {
            .is_ok()
        {
            mem::drop(self.stream_close_send(conn, stream_id));
-            // TODO issue 1294: add a timer to clean up the recv_stream if the peer does not do that in a short time.
+            // TODO issue 1294: add a timer to clean up the recv_stream if the peer does not
+            // do that in a short time.
            self.streams_with_pending_data.insert(stream_id);
        } else {
            self.cancel_fetch(stream_id, Error::HttpRequestRejected.code(), conn)?;
@@ -1571,8 +1584,8 @@ impl Http3Connection {
 
        for id in recv {
            qtrace!("Remove the extended connect sub receiver stream {}", id);
-            // Use CloseType::ResetRemote so that an event will be sent. CloseType::LocalError would have
-            // the same effect.
+            // Use CloseType::ResetRemote so that an event will be sent. CloseType::LocalError would
+            // have the same effect.
            if let Some(mut s) = self.recv_streams.remove(&id) {
                mem::drop(s.reset(CloseType::ResetRemote(Error::HttpRequestCancelled.code())));
            }
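Most hunks in this file repeat two mechanical edits: over-long doc and block comments are rewrapped to a 100-column limit, and a bare `///` separator is inserted between a `# Errors` or `# Panics` heading and the sentence that follows it. In miniature, using an illustrative function rather than one from the crate, the before and after look like this:

// Before: heading and explanation run together, and the line overshoots 100 columns.
/// # Errors
/// It returns an error if a stream does not exist or an error happens while reading a stream, e.g. early close.
fn read_data_before() {}

// After: a blank `///` follows the heading and the text wraps at 100 columns.
/// # Errors
///
/// It returns an error if a stream does not exist or an error happens while reading a stream,
/// e.g. early close.
fn read_data_after() {}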
third_party/rust/neqo-http3/src/connection_client.rs (vendored; 730 lines changed)

(Diff not shown: suppressed because it is too large.)
@@ -4,21 +4,22 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use crate::connection::{Http3Connection, Http3State, WebTransportSessionAcceptAction};
-use crate::frames::HFrame;
-use crate::recv_message::{RecvMessage, RecvMessageInfo};
-use crate::send_message::SendMessage;
-use crate::server_connection_events::{Http3ServerConnEvent, Http3ServerConnEvents};
-use crate::{
-    Error, Http3Parameters, Http3StreamType, NewStreamType, Priority, PriorityHandler,
-    ReceiveOutput, Res,
-};
+use std::{rc::Rc, time::Instant};
+
 use neqo_common::{event::Provider, qdebug, qinfo, qtrace, Header, MessageType, Role};
 use neqo_transport::{
     AppError, Connection, ConnectionEvent, DatagramTracking, StreamId, StreamType,
 };
-use std::rc::Rc;
-use std::time::Instant;
+
+use crate::{
+    connection::{Http3Connection, Http3State, WebTransportSessionAcceptAction},
+    frames::HFrame,
+    recv_message::{RecvMessage, RecvMessageInfo},
+    send_message::SendMessage,
+    server_connection_events::{Http3ServerConnEvent, Http3ServerConnEvents},
+    Error, Http3Parameters, Http3StreamType, NewStreamType, Priority, PriorityHandler,
+    ReceiveOutput, Res,
+};
 
 #[derive(Debug)]
 pub struct Http3ServerHandler {
@@ -48,12 +49,15 @@ impl Http3ServerHandler {
     }
 
     /// Supply a response for a request.
+    ///
     /// # Errors
+    ///
     /// `InvalidStreamId` if the stream does not exist,
     /// `AlreadyClosed` if the stream has already been closed.
-    /// `TransportStreamDoesNotExist` if the transport stream does not exist (this may happen if `process_output`
-    /// has not been called when needed, and HTTP3 layer has not picked up the info that the stream has been closed.)
-    /// `InvalidInput` if an empty buffer has been supplied.
+    /// `TransportStreamDoesNotExist` if the transport stream does not exist (this may happen if
+    /// `process_output` has not been called when needed, and HTTP3 layer has not picked up the
+    /// info that the stream has been closed.) `InvalidInput` if an empty buffer has been
+    /// supplied.
     pub(crate) fn send_data(
         &mut self,
         stream_id: StreamId,
@@ -89,7 +93,9 @@ impl Http3ServerHandler {
     }
 
     /// This is called when application is done sending a request.
+    ///
     /// # Errors
+    ///
     /// An error will be returned if stream does not exist.
     pub fn stream_close_send(&mut self, stream_id: StreamId, conn: &mut Connection) -> Res<()> {
         qinfo!([self], "Close sending side stream={}.", stream_id);
@@ -101,7 +107,9 @@ impl Http3ServerHandler {
 
     /// An application may reset a stream(request).
     /// Both sides, sending and receiving side, will be closed.
+    ///
     /// # Errors
+    ///
     /// An error will be return if a stream does not exist.
     pub fn cancel_fetch(
         &mut self,
@@ -154,11 +162,14 @@ impl Http3ServerHandler {
     }
 
     /// Close `WebTransport` cleanly
+    ///
     /// # Errors
+    ///
     /// `InvalidStreamId` if the stream does not exist,
-    /// `TransportStreamDoesNotExist` if the transport stream does not exist (this may happen if `process_output`
-    /// has not been called when needed, and HTTP3 layer has not picked up the info that the stream has been closed.)
-    /// `InvalidInput` if an empty buffer has been supplied.
+    /// `TransportStreamDoesNotExist` if the transport stream does not exist (this may happen if
+    /// `process_output` has not been called when needed, and HTTP3 layer has not picked up the
+    /// info that the stream has been closed.) `InvalidInput` if an empty buffer has been
+    /// supplied.
     pub fn webtransport_close_session(
         &mut self,
         conn: &mut Connection,
@@ -354,7 +365,7 @@ impl Http3ServerHandler {
             }
             HFrame::PriorityUpdatePush { element_id, priority } => {
                 // TODO: check if the element_id references a promised push stream or
                 // is greater than the maximum Push ID.
                 self.events.priority_update(StreamId::from(element_id), priority);
                 Ok(())
             }
@@ -383,11 +394,13 @@ impl Http3ServerHandler {
         }
     }
 
-    /// Response data are read directly into a buffer supplied as a parameter of this function to avoid copying
-    /// data.
+    /// Response data are read directly into a buffer supplied as a parameter of this function to
+    /// avoid copying data.
+    ///
     /// # Errors
-    /// It returns an error if a stream does not exist or an error happen while reading a stream, e.g.
-    /// early close, protocol error, etc.
+    ///
+    /// It returns an error if a stream does not exist or an error happen while reading a stream,
+    /// e.g. early close, protocol error, etc.
     pub fn read_data(
         &mut self,
         conn: &mut Connection,
@@ -4,12 +4,15 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use crate::frames::HFrame;
-use crate::{BufferedStream, Http3StreamType, RecvStream, Res};
+use std::{
+    collections::{HashMap, VecDeque},
+    convert::TryFrom,
+};
 
 use neqo_common::{qtrace, Encoder};
 use neqo_transport::{Connection, StreamId, StreamType};
-use std::collections::{HashMap, VecDeque};
-use std::convert::TryFrom;
+
+use crate::{frames::HFrame, BufferedStream, Http3StreamType, RecvStream, Res};
 
 pub const HTTP3_UNI_STREAM_TYPE_CONTROL: u64 = 0x0;
@@ -4,12 +4,16 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use crate::frames::{FrameReader, HFrame, StreamReaderConnectionWrapper};
-use crate::{CloseType, Error, Http3StreamType, ReceiveOutput, RecvStream, Res, Stream};
 use neqo_common::qdebug;
 use neqo_transport::{Connection, StreamId};
 
-/// The remote control stream is responsible only for reading frames. The frames are handled by `Http3Connection`.
+use crate::{
+    frames::{FrameReader, HFrame, StreamReaderConnectionWrapper},
+    CloseType, Error, Http3StreamType, ReceiveOutput, RecvStream, Res, Stream,
+};
+
+/// The remote control stream is responsible only for reading frames. The frames are handled by
+/// `Http3Connection`.
 #[derive(Debug)]
 pub(crate) struct ControlStreamRemote {
     stream_id: StreamId,
@@ -9,15 +9,19 @@
 pub(crate) mod webtransport_session;
 pub(crate) mod webtransport_streams;
 
-use crate::client_events::Http3ClientEvents;
-use crate::features::NegotiationState;
-use crate::settings::{HSettingType, HSettings};
-use crate::{CloseType, Http3StreamInfo, Http3StreamType};
+use std::fmt::Debug;
+
 use neqo_common::Header;
 use neqo_transport::{AppError, StreamId};
-use std::fmt::Debug;
 pub(crate) use webtransport_session::WebTransportSession;
 
+use crate::{
+    client_events::Http3ClientEvents,
+    features::NegotiationState,
+    settings::{HSettingType, HSettings},
+    CloseType, Http3StreamInfo, Http3StreamType,
+};
+
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub enum SessionCloseReason {
     Error(AppError),
@@ -4,13 +4,17 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use crate::features::extended_connect::tests::webtransport::{
-    wt_default_parameters, WtTest, DATAGRAM_SIZE,
-};
-use crate::{Error, Http3Parameters, WebTransportRequest};
+use std::convert::TryFrom;
+
 use neqo_common::Encoder;
 use neqo_transport::Error as TransportError;
-use std::convert::TryFrom;
+
+use crate::{
+    features::extended_connect::tests::webtransport::{
+        wt_default_parameters, WtTest, DATAGRAM_SIZE,
+    },
+    Error, Http3Parameters, WebTransportRequest,
+};
 
 const DGRAM: &[u8] = &[0, 100];
@@ -8,7 +8,15 @@ mod datagrams;
 mod negotiation;
 mod sessions;
 mod streams;
 
+use std::{cell::RefCell, rc::Rc, time::Duration};
+
 use neqo_common::event::Provider;
+use neqo_crypto::AuthenticationStatus;
+use neqo_transport::{ConnectionParameters, StreamId, StreamType};
+use test_fixture::{
+    addr, anti_replay, fixture_init, now, CountingConnectionIdGenerator, DEFAULT_ALPN_H3,
+    DEFAULT_KEYS, DEFAULT_SERVER_NAME,
+};
 
 use crate::{
     features::extended_connect::SessionCloseReason, Error, Header, Http3Client, Http3ClientEvent,
@@ -16,16 +24,6 @@ use crate::{
     RecvStreamStats, SendStreamStats, WebTransportEvent, WebTransportRequest,
     WebTransportServerEvent, WebTransportSessionAcceptAction,
 };
-use neqo_crypto::AuthenticationStatus;
-use neqo_transport::{ConnectionParameters, StreamId, StreamType};
-use std::cell::RefCell;
-use std::rc::Rc;
-use std::time::Duration;
-
-use test_fixture::{
-    addr, anti_replay, fixture_init, now, CountingConnectionIdGenerator, DEFAULT_ALPN_H3,
-    DEFAULT_KEYS, DEFAULT_SERVER_NAME,
-};
 
 const DATAGRAM_SIZE: u64 = 1200;
@@ -64,8 +62,8 @@ pub fn default_http3_server(server_params: Http3Parameters) -> Http3Server {
 fn exchange_packets(client: &mut Http3Client, server: &mut Http3Server) {
     let mut out = None;
     loop {
-        out = client.process(out, now()).dgram();
-        out = server.process(out, now()).dgram();
+        out = client.process(out.as_ref(), now()).dgram();
+        out = server.process(out.as_ref(), now()).dgram();
         if out.is_none() {
             break;
         }
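The switch in this hunk, and in the hunks that follow, from `out.dgram()` / `out` to `out.as_dgram_ref()` / `out.as_ref()` is consistent with the updated `process` call taking the incoming datagram by reference rather than by value. A minimal, self-contained sketch of that driver-loop pattern follows; `Datagram`, `Output`, and `Endpoint` are stand-in types for illustration, not the neqo API.

// Sketch only: stand-in types showing why call sites move from passing the
// datagram by value to passing `Option<&Datagram>`.
struct Datagram(Vec<u8>);

struct Output(Option<Datagram>);

impl Output {
    // Old-style call sites took the datagram out by value.
    fn dgram(self) -> Option<Datagram> {
        self.0
    }

    // New-style call sites borrow it instead.
    #[allow(dead_code)]
    fn as_dgram_ref(&self) -> Option<&Datagram> {
        self.0.as_ref()
    }
}

struct Endpoint;

impl Endpoint {
    // Assumed shape: input is borrowed, output is owned.
    fn process(&mut self, _input: Option<&Datagram>) -> Output {
        Output(None) // a real endpoint would emit packets here
    }
}

fn exchange(client: &mut Endpoint, server: &mut Endpoint) {
    let mut out: Option<Datagram> = None;
    loop {
        // Borrow the previous output and feed it into the other side.
        out = client.process(out.as_ref()).dgram();
        out = server.process(out.as_ref()).dgram();
        if out.is_none() {
            break;
        }
    }
}

fn main() {
    exchange(&mut Endpoint, &mut Endpoint);
}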
@@ -78,28 +76,28 @@ fn connect_with(client: &mut Http3Client, server: &mut Http3Server) {
     let out = client.process(None, now());
     assert_eq!(client.state(), Http3State::Initializing);
 
-    let out = server.process(out.dgram(), now());
-    let out = client.process(out.dgram(), now());
-    let out = server.process(out.dgram(), now());
+    let out = server.process(out.as_dgram_ref(), now());
+    let out = client.process(out.as_dgram_ref(), now());
+    let out = server.process(out.as_dgram_ref(), now());
     assert!(out.as_dgram_ref().is_none());
 
     let authentication_needed = |e| matches!(e, Http3ClientEvent::AuthenticationNeeded);
     assert!(client.events().any(authentication_needed));
     client.authenticated(AuthenticationStatus::Ok, now());
 
-    let out = client.process(out.dgram(), now());
+    let out = client.process(out.as_dgram_ref(), now());
     let connected = |e| matches!(e, Http3ClientEvent::StateChange(Http3State::Connected));
     assert!(client.events().any(connected));
 
     assert_eq!(client.state(), Http3State::Connected);
 
     // Exchange H3 setttings
-    let out = server.process(out.dgram(), now());
-    let out = client.process(out.dgram(), now());
-    let out = server.process(out.dgram(), now());
-    let out = client.process(out.dgram(), now());
-    let out = server.process(out.dgram(), now());
-    std::mem::drop(client.process(out.dgram(), now()));
+    let out = server.process(out.as_dgram_ref(), now());
+    let out = client.process(out.as_dgram_ref(), now());
+    let out = server.process(out.as_dgram_ref(), now());
+    let out = client.process(out.as_dgram_ref(), now());
+    let out = server.process(out.as_dgram_ref(), now());
+    std::mem::drop(client.process(out.as_dgram_ref(), now()));
 }
 
 fn connect(
@@ -201,10 +199,10 @@ impl WtTest {
         let mut now = now();
         loop {
             now += RTT / 2;
-            out = self.client.process(out, now).dgram();
+            out = self.client.process(out.as_ref(), now).dgram();
             let client_none = out.is_none();
             now += RTT / 2;
-            out = self.server.process(out, now).dgram();
+            out = self.server.process(out.as_ref(), now).dgram();
             if client_none && out.is_none() {
                 break;
             }
@@ -4,17 +4,19 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use std::time::Duration;
+
+use neqo_common::{event::Provider, Encoder};
+use neqo_crypto::AuthenticationStatus;
+use neqo_transport::{Connection, ConnectionError, StreamType};
+use test_fixture::{default_server_h3, now};
+
 use super::{connect, default_http3_client, default_http3_server, exchange_packets};
 use crate::{
     settings::{HSetting, HSettingType, HSettings},
     Error, HFrame, Http3Client, Http3ClientEvent, Http3Parameters, Http3Server, Http3State,
     WebTransportEvent,
 };
-use neqo_common::{event::Provider, Encoder};
-use neqo_crypto::AuthenticationStatus;
-use neqo_transport::{Connection, ConnectionError, StreamType};
-use std::time::Duration;
-use test_fixture::{default_server_h3, now};
 
 fn check_wt_event(client: &mut Http3Client, wt_enable_client: bool, wt_enable_server: bool) {
     let wt_event = client.events().find_map(|e| {
@@ -86,7 +88,7 @@ fn zero_rtt(
     // exchange token
     let out = server.process(None, now());
     // We do not have a token so we need to wait for a resumption token timer to trigger.
-    std::mem::drop(client.process(out.dgram(), now() + Duration::from_millis(250)));
+    std::mem::drop(client.process(out.as_dgram_ref(), now() + Duration::from_millis(250)));
     assert_eq!(client.state(), Http3State::Connected);
     let token = client
         .events()
@@ -234,8 +236,8 @@ fn zero_rtt_wt_settings() {
 fn exchange_packets2(client: &mut Http3Client, server: &mut Connection) {
     let mut out = None;
     loop {
-        out = client.process(out, now()).dgram();
-        out = server.process(out, now()).dgram();
+        out = client.process(out.as_ref(), now()).dgram();
+        out = server.process(out.as_ref(), now()).dgram();
         if out.is_none() {
             break;
         }
@@ -4,19 +4,25 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use crate::features::extended_connect::tests::webtransport::{
-    default_http3_client, default_http3_server, wt_default_parameters, WtTest,
-};
-use crate::{
-    features::extended_connect::SessionCloseReason, frames::WebTransportFrame, Error, Header,
-    Http3ClientEvent, Http3OrWebTransportStream, Http3Server, Http3ServerEvent, Http3State,
-    Priority, WebTransportEvent, WebTransportServerEvent, WebTransportSessionAcceptAction,
-};
+use std::mem;
+
 use neqo_common::{event::Provider, Encoder};
 use neqo_transport::StreamType;
-use std::mem;
 use test_fixture::now;
 
+use crate::{
+    features::extended_connect::{
+        tests::webtransport::{
+            default_http3_client, default_http3_server, wt_default_parameters, WtTest,
+        },
+        SessionCloseReason,
+    },
+    frames::WebTransportFrame,
+    Error, Header, Http3ClientEvent, Http3OrWebTransportStream, Http3Server, Http3ServerEvent,
+    Http3State, Priority, WebTransportEvent, WebTransportServerEvent,
+    WebTransportSessionAcceptAction,
+};
+
 #[test]
 fn wt_session() {
     let mut wt = WtTest::new();
@@ -419,18 +425,18 @@ fn wt_close_session_cannot_be_sent_at_once() {
         Err(Error::InvalidStreamId)
     );
 
-    let out = wt.server.process(None, now()).dgram();
-    let out = wt.client.process(out, now()).dgram();
+    let out = wt.server.process(None, now());
+    let out = wt.client.process(out.as_dgram_ref(), now());
 
     // Client has not received the full CloseSession frame and it can create more streams.
     let unidi_client = wt.create_wt_stream_client(wt_session.stream_id(), StreamType::UniDi);
 
-    let out = wt.server.process(out, now()).dgram();
-    let out = wt.client.process(out, now()).dgram();
-    let out = wt.server.process(out, now()).dgram();
-    let out = wt.client.process(out, now()).dgram();
-    let out = wt.server.process(out, now()).dgram();
-    let _out = wt.client.process(out, now()).dgram();
+    let out = wt.server.process(out.as_dgram_ref(), now());
+    let out = wt.client.process(out.as_dgram_ref(), now());
+    let out = wt.server.process(out.as_dgram_ref(), now());
+    let out = wt.client.process(out.as_dgram_ref(), now());
+    let out = wt.server.process(out.as_dgram_ref(), now());
+    let _out = wt.client.process(out.as_dgram_ref(), now());
 
     wt.check_events_after_closing_session_client(
         &[],
@@ -4,11 +4,15 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use crate::features::extended_connect::tests::webtransport::WtTest;
-use crate::{features::extended_connect::SessionCloseReason, Error};
-use neqo_transport::StreamType;
 use std::mem;
 
+use neqo_transport::StreamType;
+
+use crate::{
+    features::extended_connect::{tests::webtransport::WtTest, SessionCloseReason},
+    Error,
+};
+
 #[test]
 fn wt_client_stream_uni() {
     const BUF_CLIENT: &[u8] = &[0; 10];
@@ -287,13 +291,17 @@ fn wt_server_stream_bidi_stop_sending() {
 // 1) Both sides of a bidirectional client stream are opened.
 // 2) A client unidirectional stream is opened.
 // 3) A client unidirectional stream has been closed and both sides consumed the closing info.
-// 4) A client unidirectional stream has been closed, but only the server has consumed the closing info.
-// 5) A client unidirectional stream has been closed, but only the client has consum the closing info.
+// 4) A client unidirectional stream has been closed, but only the server has consumed the closing
+// info.
+// 5) A client unidirectional stream has been closed, but only the client has consum the closing
+// info.
 // 6) Both sides of a bidirectional server stream are opened.
 // 7) A server unidirectional stream is opened.
 // 8) A server unidirectional stream has been closed and both sides consumed the closing info.
-// 9) A server unidirectional stream has been closed, but only the server has consumed the closing info.
-// 10) A server unidirectional stream has been closed, but only the client has consumed the closing info.
+// 9) A server unidirectional stream has been closed, but only the server has consumed the closing
+// info.
+// 10) A server unidirectional stream has been closed, but only the client has consumed the closing
+// info.
 // 11) Both sides of a bidirectional stream have been closed and consumed by both sides.
 // 12) Both sides of a bidirectional stream have been closed, but not consumed by both sides.
 // 13) Multiples open streams
@@ -6,6 +6,12 @@
 
 #![allow(clippy::module_name_repetitions)]
 
+use std::{any::Any, cell::RefCell, collections::BTreeSet, mem, rc::Rc};
+
+use neqo_common::{qtrace, Encoder, Header, MessageType, Role};
+use neqo_qpack::{QPackDecoder, QPackEncoder};
+use neqo_transport::{streams::SendOrder, Connection, DatagramTracking, StreamId};
+
 use super::{ExtendedConnectEvents, ExtendedConnectType, SessionCloseReason};
 use crate::{
     frames::{FrameReader, StreamReaderRecvStreamWrapper, WebTransportFrame},
@@ -15,14 +21,6 @@ use crate::{
     HttpRecvStreamEvents, Priority, PriorityHandler, ReceiveOutput, RecvStream, RecvStreamEvents,
     Res, SendStream, SendStreamEvents, Stream,
 };
-use neqo_common::{qtrace, Encoder, Header, MessageType, Role};
-use neqo_qpack::{QPackDecoder, QPackEncoder};
-use neqo_transport::{streams::SendOrder, Connection, DatagramTracking, StreamId};
-use std::any::Any;
-use std::cell::RefCell;
-use std::collections::BTreeSet;
-use std::mem;
-use std::rc::Rc;
 
 #[derive(Debug, PartialEq)]
 enum SessionState {
@@ -100,6 +98,7 @@ impl WebTransportSession {
     }
 
     /// # Panics
+    ///
     /// This function is only called with `RecvStream` and `SendStream` that also implement
     /// the http specific functions and `http_stream()` will never return `None`.
     #[must_use]
@@ -134,8 +133,11 @@ impl WebTransportSession {
     }
 
     /// # Errors
+    ///
     /// The function can only fail if supplied headers are not valid http headers.
+    ///
     /// # Panics
+    ///
     /// `control_stream_send` implements the http specific functions and `http_stream()`
     /// will never return `None`.
     pub fn send_request(&mut self, headers: &[Header], conn: &mut Connection) -> Res<()> {
@@ -220,6 +222,7 @@ impl WebTransportSession {
     }
 
     /// # Panics
+    ///
     /// This cannot panic because headers are checked before this function called.
     pub fn maybe_check_headers(&mut self) {
         if SessionState::Negotiating != self.state {
@@ -335,6 +338,7 @@ impl WebTransportSession {
     }
 
     /// # Errors
+    ///
     /// It may return an error if the frame is not correctly decoded.
     pub fn read_control_stream(&mut self, conn: &mut Connection) -> Res<()> {
         let (f, fin) = self
@@ -373,8 +377,9 @@ impl WebTransportSession {
     }
 
     /// # Errors
-    /// Return an error if the stream was closed on the transport layer, but that information is not yet
-    /// consumed on the http/3 layer.
+    ///
+    /// Return an error if the stream was closed on the transport layer, but that information is not
+    /// yet consumed on the http/3 layer.
     pub fn close_session(&mut self, conn: &mut Connection, error: u32, message: &str) -> Res<()> {
         self.state = SessionState::Done;
         let close_frame = WebTransportFrame::CloseSession {
@@ -399,6 +404,7 @@ impl WebTransportSession {
     }
 
     /// # Errors
+    ///
     /// Returns an error if the datagram exceeds the remote datagram size limit.
     pub fn send_datagram(
         &self,
@@ -4,15 +4,16 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use std::{cell::RefCell, rc::Rc};
+
+use neqo_common::Encoder;
+use neqo_transport::{Connection, RecvStreamStats, SendStreamStats, StreamId};
+
 use super::WebTransportSession;
 use crate::{
     CloseType, Http3StreamInfo, Http3StreamType, ReceiveOutput, RecvStream, RecvStreamEvents, Res,
     SendStream, SendStreamEvents, Stream,
 };
-use neqo_common::Encoder;
-use neqo_transport::{Connection, RecvStreamStats, SendStreamStats, StreamId};
-use std::cell::RefCell;
-use std::rc::Rc;
 
 pub const WEBTRANSPORT_UNI_STREAM: u64 = 0x54;
 pub const WEBTRANSPORT_STREAM: u64 = 0x41;
15 third_party/rust/neqo-http3/src/features/mod.rs vendored
@@ -4,23 +4,24 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use std::{fmt::Debug, mem};
+
+use neqo_common::qtrace;
+
 use crate::{
     client_events::Http3ClientEvents,
     settings::{HSettingType, HSettings},
 };
-use neqo_common::qtrace;
-use std::fmt::Debug;
-use std::mem;
 
 pub mod extended_connect;
 
 /// States:
 /// - `Disable` - it is not turned on for this connection.
-/// - `Negotiating` - the feature is enabled locally, but settings from the peer
-///   have not been received yet.
+/// - `Negotiating` - the feature is enabled locally, but settings from the peer have not been
+///   received yet.
 /// - `Negotiated` - the settings have been received and both sides support the feature.
-/// - `NegotiationFailed` - the settings have been received and the peer does not
-///   support the feature.
+/// - `NegotiationFailed` - the settings have been received and the peer does not support the
+///   feature.
 #[derive(Debug)]
 pub enum NegotiationState {
     Disabled,
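The four states described in the doc comment above form a small negotiation state machine. The sketch below models the transitions with a stand-in enum; the real `NegotiationState` in neqo-http3 carries additional data, so this only illustrates the documented behaviour.

// Sketch only: a simplified model of the documented states.
#[derive(Debug, PartialEq)]
enum State {
    Disabled,
    Negotiating,
    Negotiated,
    NegotiationFailed,
}

impl State {
    /// Apply the peer's settings: only a feature that is still negotiating moves,
    /// and it moves to `Negotiated` or `NegotiationFailed` depending on whether
    /// the peer advertised support.
    fn handle_settings(&mut self, peer_supports_feature: bool) {
        if *self == State::Negotiating {
            *self = if peer_supports_feature {
                State::Negotiated
            } else {
                State::NegotiationFailed
            };
        }
    }
}

fn main() {
    let mut s = State::Negotiating;
    s.handle_settings(true);
    assert_eq!(s, State::Negotiated);

    let mut d = State::Disabled;
    d.handle_settings(true);
    assert_eq!(d, State::Disabled); // a disabled feature never negotiates
}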
@@ -4,12 +4,13 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use crate::{frames::reader::FrameDecoder, settings::HSettings, Error, Priority, Res};
+use std::{fmt::Debug, io::Write};
+
 use neqo_common::{Decoder, Encoder};
 use neqo_crypto::random;
 use neqo_transport::StreamId;
-use std::fmt::Debug;
-use std::io::Write;
+
+use crate::{frames::reader::FrameDecoder, settings::HSettings, Error, Priority, Res};
 
 pub(crate) type HFrameType = u64;
20 third_party/rust/neqo-http3/src/frames/reader.rs vendored
@@ -6,34 +6,39 @@
 
 #![allow(clippy::module_name_repetitions)]
 
-use crate::{Error, RecvStream, Res};
+use std::{convert::TryFrom, fmt::Debug};
+
 use neqo_common::{
     hex_with_len, qtrace, Decoder, IncrementalDecoderBuffer, IncrementalDecoderIgnore,
     IncrementalDecoderUint,
 };
 use neqo_transport::{Connection, StreamId};
-use std::convert::TryFrom;
-use std::fmt::Debug;
+
+use crate::{Error, RecvStream, Res};
 
 const MAX_READ_SIZE: usize = 4096;
 
 pub(crate) trait FrameDecoder<T> {
     fn is_known_type(frame_type: u64) -> bool;
     /// # Errors
+    ///
     /// Returns `HttpFrameUnexpected` if frames is not alowed, i.e. is a `H3_RESERVED_FRAME_TYPES`.
     fn frame_type_allowed(_frame_type: u64) -> Res<()> {
         Ok(())
     }
 
     /// # Errors
+    ///
     /// If a frame cannot be properly decoded.
     fn decode(frame_type: u64, frame_len: u64, data: Option<&[u8]>) -> Res<Option<T>>;
 }
 
 pub(crate) trait StreamReader {
     /// # Errors
+    ///
     /// An error may happen while reading a stream, e.g. early close, protocol error, etc.
-    /// Return an error if the stream was closed on the transport layer, but that information is not yet
-    /// consumed on the http/3 layer.
+    /// Return an error if the stream was closed on the transport layer, but that information is not
+    /// yet consumed on the http/3 layer.
     fn read_data(&mut self, buf: &mut [u8]) -> Res<(usize, bool)>;
 }
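For illustration, here is one possible implementor of the `StreamReader` trait shown in the hunk above, reading from an in-memory buffer. `Res` is a stand-in for the crate's result alias, and the boolean in the return value is assumed to mean that the stream has finished; this is a sketch, not code from the crate.

// Sketch only: `Res` stands in for neqo-http3's result alias.
type Res<T> = Result<T, ()>;

trait StreamReader {
    /// Returns (bytes read, stream finished).
    fn read_data(&mut self, buf: &mut [u8]) -> Res<(usize, bool)>;
}

/// An in-memory reader: hands out bytes from a fixed buffer and reports `fin`
/// once everything has been consumed.
struct VecReader {
    data: Vec<u8>,
    pos: usize,
}

impl StreamReader for VecReader {
    fn read_data(&mut self, buf: &mut [u8]) -> Res<(usize, bool)> {
        let remaining = &self.data[self.pos..];
        let n = remaining.len().min(buf.len());
        buf[..n].copy_from_slice(&remaining[..n]);
        self.pos += n;
        Ok((n, self.pos == self.data.len()))
    }
}

fn main() -> Res<()> {
    let mut reader = VecReader { data: vec![1, 2, 3, 4, 5], pos: 0 };
    let mut buf = [0u8; 2];
    loop {
        let (n, fin) = reader.read_data(&mut buf)?;
        println!("read {n} bytes, fin = {fin}");
        if fin {
            break Ok(());
        }
    }
}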
@@ -50,6 +55,7 @@ impl<'a> StreamReaderConnectionWrapper<'a> {
 
 impl<'a> StreamReader for StreamReaderConnectionWrapper<'a> {
     /// # Errors
+    ///
     /// An error may happen while reading a stream, e.g. early close, protocol error, etc.
     fn read_data(&mut self, buf: &mut [u8]) -> Res<(usize, bool)> {
         let res = self.conn.stream_recv(self.stream_id, buf)?;
@@ -70,6 +76,7 @@ impl<'a> StreamReaderRecvStreamWrapper<'a> {
 
 impl<'a> StreamReader for StreamReaderRecvStreamWrapper<'a> {
     /// # Errors
+    ///
     /// An error may happen while reading a stream, e.g. early close, protocol error, etc.
     fn read_data(&mut self, buf: &mut [u8]) -> Res<(usize, bool)> {
         self.recv_stream.read_data(self.conn, buf)
@@ -146,7 +153,9 @@ impl FrameReader {
     }
 
     /// returns true if quic stream was closed.
+    ///
     /// # Errors
+    ///
     /// May return `HttpFrame` if a frame cannot be decoded.
     /// and `TransportStreamDoesNotExist` if `stream_recv` fails.
     pub fn receive<T: FrameDecoder<T>>(
@@ -186,6 +195,7 @@ impl FrameReader {
     }
 
     /// # Errors
+    ///
     /// May return `HttpFrame` if a frame cannot be decoded.
     fn consume<T: FrameDecoder<T>>(&mut self, mut input: Decoder) -> Res<Option<T>> {
         match &mut self.state {
@@ -4,15 +4,16 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use neqo_common::{Decoder, Encoder};
+use neqo_transport::StreamId;
+use test_fixture::fixture_init;
+
 use super::enc_dec_hframe;
 use crate::{
     frames::HFrame,
     settings::{HSetting, HSettingType, HSettings},
     Priority,
 };
-use neqo_common::{Decoder, Encoder};
-use neqo_transport::StreamId;
-use test_fixture::fixture_init;
 
 #[test]
 fn test_data_frame() {
Some files were not shown because too many files have changed in this diff.