Dev merge gfmul and XEX tasks #3
1769 changed files with 398681 additions and 20 deletions
Cargo.toml:
@@ -6,5 +6,12 @@ edition = "2021"
 [dependencies]
 anyhow = "1.0.90"
 base64 = "0.22.1"
+openssl = "0.10.68"
 serde = { version = "1.0.210", features = ["derive"] }
 serde_json = "1.0"
New cargo source-replacement config pointing crates.io at the vendor/ directory:
+[source.crates-io]
+replace-with = "vendored-sources"
+
+[source.vendored-sources]
+directory = "vendor"
Task dispatch module (src/tasks):
@@ -1,17 +1,13 @@
-use std::{
-    collections::HashMap,
-    fmt::format,
-    io::{self, Error, ErrorKind},
-};
+use std::collections::HashMap;

 use crate::utils::parse::{Responses, Testcase, Testcases};
 use tasks01::{
     block2poly::block2poly,
-    poly2block::{self, poly2block},
+    poly2block::{poly2block},
+    sea128::sea128,
 };

 use anyhow::{anyhow, Result};
-use serde::{Deserialize, Serialize};
 use serde_json::{json, Value};

 mod tasks01;
@@ -37,6 +33,11 @@ pub fn task_deploy(testcase: &Testcase) -> Result<Value> {
             let json = json!({"coefficients" : result});
             Ok(json)
         }
+        "sea128" => {
+            let result = sea128(args)?;
+            let json = json!({"output" : result});
+            Ok(json)
+        }
         _ => Err(anyhow!("Fatal. No compatible action found")),
     }
 }
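For reviewers skimming only this hunk: the new arm takes the testcase's arguments and wraps the base64 string returned by sea128 into an output field. The sketch below is assembled from pieces of this diff (the sample key and input come from the tests further down); it is illustrative, not code that exists in the repo.

```rust
// Sketch: what the new "sea128" arm does for one testcase, assuming
// sea128 from tasks01::sea128 is in scope.
use serde_json::json;

fn demo() -> anyhow::Result<serde_json::Value> {
    let args = json!({
        "mode": "encrypt",
        "key": "istDASeincoolerKEYrofg==",
        "input": "yv66vvrO263eyviIiDNEVQ=="
    });
    let result = sea128(&args)?; // -> "D5FDo3iVBoBN9gVi9/MSKQ==" per the tests below
    Ok(json!({ "output": result }))
}
```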
@@ -87,4 +88,26 @@ mod tests {
             serde_json::to_value(expected).unwrap()
         );
     }
+
+    #[test]
+    fn test_task_sea128_task_full() {
+        let json = fs::read_to_string("src/test_json/sea128.json").unwrap();
+        let parsed = parse_json(json).unwrap();
+
+        let expected = json!({
+            "responses": {
+                "b856d760-023d-4b00-bad2-15d2b6da22fe": {
+                    "output": "D5FDo3iVBoBN9gVi9/MSKQ=="
+                },
+                "254eaee7-05fd-4e0d-8292-9b658a852245": {
+                    "output": "yv66vvrO263eyviIiDNEVQ=="
+                }
+            }
+        });
+
+        assert_eq!(
+            serde_json::to_value(task_distrubute(&parsed)).unwrap(),
+            serde_json::to_value(expected).unwrap()
+        );
+    }
 }
src/tasks/tasks01/block2poly.rs:
@@ -1,15 +1,13 @@
-use std::{str::Bytes, string};
-
-use crate::utils::poly::{self, block_2_number, get_coefficients};
+use crate::utils::poly::{b64_2_num, get_coefficients};
 use anyhow::Result;
-use base64::prelude::*;
 use serde_json::Value;

 pub fn block2poly(val: &Value) -> Result<Vec<u8>> {
     // Convert JSON data in to a u128
     // TODO: Transfer decoding into own function?
     let string: String = serde_json::from_value(val["block"].clone())?;
-    let number: u128 = block_2_number(string)?;
+    let number = b64_2_num(&string)?;

     let coefficients: Vec<u8> = get_coefficients(number);
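As a reading aid for this rename: the decode path is base64 block, to u128, to coefficient list. Below is a minimal sketch using the helper names from this diff (b64_2_num, get_coefficients) and the base64 block that appears in the poly.rs tests; illustrative only, not code from the repo.

```rust
// Sketch of the block2poly pipeline after the rename to b64_2_num.
use crate::utils::poly::{b64_2_num, get_coefficients};

fn demo() -> anyhow::Result<()> {
    let block = String::from("ARIAAAAAAAAAAAAAAAAAgA==");
    let number = b64_2_num(&block)?;                       // base64 -> u128
    let coefficients: Vec<u8> = get_coefficients(number);  // u128 -> coefficient list
    eprintln!("{number} -> {coefficients:?}");
    Ok(())
}
```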
src/tasks/tasks01/gfmul.rs:
@@ -2,7 +2,7 @@ use anyhow::Result;
 use base64::prelude::*;
 use serde_json::Value;

-use crate::utils::poly::{block_2_number, coefficient_to_binary};
+use crate::utils::poly::{b64_2_num, coefficient_to_binary};

 pub fn gfmul(args: &Value) -> Result<String> {
     eprintln!("{args}");
@@ -11,8 +11,8 @@ pub fn gfmul(args: &Value) -> Result<String> {
     let red_poly_num: u128 = coefficient_to_binary(reduction_polynomial_coeffs);
     //eprintln!("{:?}", serde_json::from_value(args["a"].clone())?);

-    let mut poly1: u128 = block_2_number(serde_json::from_value(args["a"].clone())?)?;
-    let poly2: u128 = block_2_number(serde_json::from_value(args["b"].clone())?)?;
+    let mut poly1: u128 = b64_2_num(&serde_json::from_value(args["a"].clone())?)?;
+    let poly2: u128 = b64_2_num(&serde_json::from_value(args["b"].clone())?)?;
     eprintln!("poly1 is: {}", poly1);
     eprintln!("poly2 is: {}", poly2);
     /* Begin of magic algorithm
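For reviewers who have not seen the rest of this file: once both operands are u128 values, the usual way to multiply in GF(2^128) is shift-and-reduce (presumably the "magic algorithm" the comment refers to). The sketch below is a generic version that treats bit i as the coefficient of x^i; the task's own block encoding and bit order may differ, so treat it as an illustration, not as the repo's implementation.

```rust
/// Generic shift-and-reduce multiplication in GF(2^128) modulo
/// x^128 + x^7 + x^2 + x + 1, with bit i holding the coefficient of x^i.
/// Illustrative only; the bit/byte order used by this task may differ.
fn gf128_mul(mut a: u128, mut b: u128) -> u128 {
    let mut acc: u128 = 0;
    while b != 0 {
        if b & 1 == 1 {
            acc ^= a; // add the current multiple of a (addition in GF(2) is XOR)
        }
        let carry = a >> 127; // bit that falls out when multiplying a by x
        a <<= 1;
        if carry == 1 {
            a ^= 0x87; // x^128 is congruent to x^7 + x^2 + x + 1
        }
        b >>= 1; // move on to the next coefficient of b
    }
    acc
}
```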
src/tasks/tasks01/mod.rs:
@@ -1,3 +1,4 @@
 pub mod block2poly;
 pub mod gfmul;
 pub mod poly2block;
+pub mod sea128;
src/tasks/tasks01/poly2block.rs:
@@ -1,4 +1,4 @@
-use crate::utils::poly::{self, coefficient_to_binary};
+use crate::utils::poly::{self};
 use base64::prelude::*;
 use serde_json::Value;

src/tasks/tasks01/sea128.rs (new file, 61 lines):
@@ -0,0 +1,61 @@
+use anyhow::{anyhow, Result};
+use base64::prelude::*;
+use serde_json::Value;
+
+use crate::utils::ciphers::{sea_128_decrypt, sea_128_encrypt};
+
+pub fn sea128(args: &Value) -> Result<String> {
+    let key_string: String = serde_json::from_value(args["key"].clone())?;
+    //let key: &[u8] = b64_2_num(key_string)?.to_ne_bytes();
+    let key = BASE64_STANDARD.decode(key_string)?;
+    //eprintln!("{:?}", key);
+    let input_string: String = serde_json::from_value(args["input"].clone())?;
+    //let plaintexts: &[u8] = &b64_2_num(plaintexts_string)?.to_ne_bytes();
+    let input = BASE64_STANDARD.decode(input_string)?;
+    let xor_val: u128 = 0xc0ffeec0ffeec0ffeec0ffeec0ffee11;
+
+    let mode: String = serde_json::from_value(args["mode"].clone())?;
+    match mode.as_str() {
+        "encrypt" => {
+            //eprintln!("{:?}", plaintexts);
+
+            let output = BASE64_STANDARD.encode(sea_128_encrypt(&key, &input)?);
+
+            Ok(output)
+        }
+        "decrypt" => {
+            let output = BASE64_STANDARD.encode(sea_128_decrypt(&key, &input)?);
+
+            Ok(output)
+        }
+        _ => Err(anyhow!("Failure. no valid mode detected")),
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use std::fs;
+
+    use anyhow::Result;
+    use serde_json::json;
+    // Note this useful idiom: importing names from outer (for mod tests) scope.
+    use super::*;
+
+    #[test]
+    fn test_sea128_encrypt() -> Result<()> {
+        let args = json!({"mode" : "encrypt", "key" : "istDASeincoolerKEYrofg==", "input" : "yv66vvrO263eyviIiDNEVQ=="});
+
+        assert_eq!(sea128(&args)?, "D5FDo3iVBoBN9gVi9/MSKQ==");
+
+        Ok(())
+    }
+
+    #[test]
+    fn test_sea128_decrypt() -> Result<()> {
+        let args = json!({"mode" : "decrypt", "key" : "istDASeincoolerKEYrofg==", "input" : "D5FDo3iVBoBN9gVi9/MSKQ=="});
+
+        assert_eq!(sea128(&args)?, "yv66vvrO263eyviIiDNEVQ==");
+
+        Ok(())
+    }
+}
src/test_json/sea128.json (new file, 20 lines):
@@ -0,0 +1,20 @@
+{
+    "testcases": {
+        "b856d760-023d-4b00-bad2-15d2b6da22fe": {
+            "action": "sea128",
+            "arguments": {
+                "mode": "encrypt",
+                "key": "istDASeincoolerKEYrofg==",
+                "input": "yv66vvrO263eyviIiDNEVQ=="
+            }
+        },
+        "254eaee7-05fd-4e0d-8292-9b658a852245": {
+            "action": "sea128",
+            "arguments": {
+                "mode": "decrypt",
+                "key": "istDASeincoolerKEYrofg==",
+                "input": "D5FDo3iVBoBN9gVi9/MSKQ=="
+            }
+        }
+    }
+}
src/utils/ciphers.rs (new file, 59 lines):
@@ -0,0 +1,59 @@
+use anyhow::Result;
+use openssl::symm::{Cipher, Crypter, Mode};
+
+use super::math::xor_bytes;
+
+pub fn aes_128_encrypt(key: &Vec<u8>, input: &Vec<u8>) -> Result<Vec<u8>> {
+    let mut encrypter = Crypter::new(Cipher::aes_128_ecb(), Mode::Encrypt, &key, None)?;
+    encrypter.pad(false);
+
+    let mut ciphertext = [0; 32].to_vec();
+
+    let mut count = encrypter.update(input, &mut ciphertext)?;
+    count += encrypter.finalize(&mut ciphertext)?;
+    ciphertext.truncate(count);
+
+    //eprintln!("{:?}", &ciphertext[..]);
+
+    Ok(ciphertext)
+}
+
+pub fn aes_128_decrypt(key: &Vec<u8>, input: &Vec<u8>) -> Result<Vec<u8>> {
+    let mut decrypter = Crypter::new(Cipher::aes_128_ecb(), Mode::Decrypt, key, None)?;
+    decrypter.pad(false);
+
+    let mut plaintext = [0; 32].to_vec();
+
+    let mut count = decrypter.update(input, &mut plaintext)?;
+    count += decrypter.finalize(&mut plaintext)?;
+    plaintext.truncate(count);
+
+    let mut bytes: [u8; 16] = [0u8; 16];
+    bytes.copy_from_slice(&plaintext);
+    let number: u128 = <u128>::from_be_bytes(bytes);
+
+    Ok(plaintext)
+}
+
+pub fn sea_128_encrypt(key: &Vec<u8>, input: &Vec<u8>) -> Result<Vec<u8>> {
+    let xor_val: u128 = 0xc0ffeec0ffeec0ffeec0ffeec0ffee11;
+    let sea128_out = xor_bytes(
+        &aes_128_encrypt(key, input)?,
+        xor_val.to_be_bytes().to_vec(),
+    )?;
+    Ok(sea128_out)
+}
+
+pub fn sea_128_decrypt(key: &Vec<u8>, input: &Vec<u8>) -> Result<Vec<u8>> {
+    let xor_val: u128 = 0xc0ffeec0ffeec0ffeec0ffeec0ffee11;
+
+    let intermediate = xor_bytes(input, xor_val.to_be_bytes().to_vec())?;
+    Ok(aes_128_decrypt(&key, &intermediate)?)
+}
+
+/*
+ * let mut bytes: [u8; 16] = [0u8; 16];
+bytes.copy_from_slice(&ciphertext);
+let number: u128 = <u128>::from_be_bytes(bytes);
+
+* */
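As the two sea_128_* functions show, SEA-128 here is AES-128-ECB without padding whose block is XORed with the constant 0xc0ffeec0ffeec0ffeec0ffeec0ffee11. A minimal round-trip check built on the helpers added in this file, assuming they are in scope (for example via use crate::utils::ciphers::*); key and message are arbitrary single blocks.

```rust
#[cfg(test)]
mod sea128_round_trip {
    use super::*; // assumes this sits next to sea_128_encrypt / sea_128_decrypt

    // Encrypt-then-decrypt must give the original block back, since
    // decrypt undoes the XOR with the constant and then AES-128-ECB.
    #[test]
    fn round_trip_single_block() -> anyhow::Result<()> {
        let key = vec![0x11u8; 16]; // arbitrary 16-byte key
        let msg = vec![0x22u8; 16]; // exactly one AES block (padding is disabled)

        let ct = sea_128_encrypt(&key, &msg)?;
        assert_eq!(sea_128_decrypt(&key, &ct)?, msg);
        Ok(())
    }
}
```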
src/utils/math.rs (new file, 9 lines):
@@ -0,0 +1,9 @@
+use anyhow::Result;
+
+pub fn xor_bytes(vec1: &Vec<u8>, mut vec2: Vec<u8>) -> Result<Vec<u8>> {
+    for (byte1, byte2) in vec1.iter().zip(vec2.iter_mut()) {
+        *byte2 ^= byte1;
+    }
+
+    Ok(vec2)
+}
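One behavioural detail of xor_bytes worth noting: zip() stops at the shorter input, so only the overlapping prefix is XORed and the (possibly longer) second vector is returned otherwise unchanged. A small illustration, not code from the repo:

```rust
// Assumes xor_bytes from this file is in scope.
fn demo() {
    let a = vec![0b1010_1010u8, 0xFF];
    let b = vec![0b0101_0101u8, 0x0F, 0x77];
    // first two bytes are XORed, the third byte of b passes through untouched
    assert_eq!(xor_bytes(&a, b).unwrap(), vec![0xFF, 0xF0, 0x77]);
}
```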
src/utils/mod.rs:
@@ -1,2 +1,4 @@
+pub mod ciphers;
+pub mod math;
 pub mod parse;
 pub mod poly;
src/utils/poly.rs:
@@ -1,7 +1,7 @@
 use anyhow::Result;
 use base64::prelude::*;
-use serde_json::Value;
-use std::{fmt::format, str::FromStr, u128, u8};
+use std::{str::FromStr, u128, u8};

 pub fn get_alpha_rep(num: u128) -> String {
     let powers: Vec<u8> = get_coefficients(num);
@@ -21,8 +21,7 @@ pub fn get_alpha_rep(num: u128) -> String {
     alpha_rep
 }

-pub fn block_2_number(string: String) -> Result<u128> {
-    //let string: String = serde_json::from_value(val["block"].clone())?;
+pub fn b64_2_num(string: &String) -> Result<u128> {
     let decoded: Vec<u8> = BASE64_STANDARD.decode(string)?;

     let mut bytes: [u8; 16] = [0u8; 16];
@@ -67,6 +66,8 @@ pub fn coefficient_to_binary(coefficients: Vec<u8>) -> u128 {

 #[cfg(test)]
 mod tests {
+    use crate::utils::poly::b64_2_num;
+    use anyhow::Result;
     // Note this useful idiom: importing names from outer (for mod tests) scope.
     use super::*;

@@ -101,4 +102,17 @@ mod tests {
             "ARIAAAAAAAAAAAAAAAAAgA=="
         );
     }
+
+    #[test]
+    fn test_b64_2_num() -> Result<()> {
+        let b64_payload: String = String::from_str("juMqbhnlBwAAAAAAAAAAAA==")?;
+        assert_eq!(
+            b64_2_num(&b64_payload)?,
+            2222222222222222,
+            "Error: Value was: {}",
+            b64_2_num(&b64_payload)?
+        );
+
+        Ok(())
+    }
 }
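One observation on the new test, since the body of b64_2_num is not shown in this hunk: the expected value 2222222222222222 is exactly what you get by decoding "juMqbhnlBwAAAAAAAAAAAA==" (bytes 8e e3 2a 6e 19 e5 07 00 ...) and reading the 16 bytes little-endian, so the test is consistent with a least-significant-byte-first interpretation of the block. A standalone check of that arithmetic, using only base64 and u128, no project code:

```rust
// Verifies the constant in test_b64_2_num: the decoded bytes, read
// little-endian, equal 2222222222222222 (0x0007_E519_6E2A_E38E).
use base64::prelude::*;

fn main() {
    let decoded = BASE64_STANDARD
        .decode("juMqbhnlBwAAAAAAAAAAAA==")
        .expect("valid base64");
    let mut bytes = [0u8; 16];
    bytes.copy_from_slice(&decoded);
    assert_eq!(u128::from_le_bytes(bytes), 2_222_222_222_222_222);
}
```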
The further additions shown here vendor the anyhow 1.0.90 crate under vendor/ (matching the vendored-sources config above); each entry is a new file copied from the upstream crate by the vendoring step:

vendor/anyhow/.cargo-checksum.json   (1 line, vendored)
vendor/anyhow/Cargo.toml             (127 lines, vendored)
vendor/anyhow/LICENSE-APACHE         (176 lines, vendored)
vendor/anyhow/LICENSE-MIT            (23 lines, vendored)
vendor/anyhow/README.md              (181 lines, vendored)
vendor/anyhow/build.rs               (192 lines, vendored)
vendor/anyhow/build/probe.rs         (35 lines, vendored)
vendor/anyhow/rust-toolchain.toml    (2 lines, vendored)
vendor/anyhow/src/backtrace.rs       (411 lines, vendored)
|
||||||
|
};
|
||||||
|
|
||||||
|
Backtrace { inner }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn status(&self) -> BacktraceStatus {
|
||||||
|
match self.inner {
|
||||||
|
Inner::Unsupported => BacktraceStatus::Unsupported,
|
||||||
|
Inner::Disabled => BacktraceStatus::Disabled,
|
||||||
|
Inner::Captured(_) => BacktraceStatus::Captured,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Display for Backtrace {
|
||||||
|
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
let capture = match &self.inner {
|
||||||
|
Inner::Unsupported => return fmt.write_str("unsupported backtrace"),
|
||||||
|
Inner::Disabled => return fmt.write_str("disabled backtrace"),
|
||||||
|
Inner::Captured(c) => c.force(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let full = fmt.alternate();
|
||||||
|
let (frames, style) = if full {
|
||||||
|
(&capture.frames[..], PrintFmt::Full)
|
||||||
|
} else {
|
||||||
|
(&capture.frames[capture.actual_start..], PrintFmt::Short)
|
||||||
|
};
|
||||||
|
|
||||||
|
// When printing paths we try to strip the cwd if it exists,
|
||||||
|
// otherwise we just print the path as-is. Note that we also only do
|
||||||
|
// this for the short format, because if it's full we presumably
|
||||||
|
// want to print everything.
|
||||||
|
let cwd = env::current_dir();
|
||||||
|
let mut print_path = move |fmt: &mut fmt::Formatter, path: BytesOrWideString| {
|
||||||
|
output_filename(fmt, path, style, cwd.as_ref().ok())
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut f = BacktraceFmt::new(fmt, style, &mut print_path);
|
||||||
|
f.add_context()?;
|
||||||
|
for frame in frames {
|
||||||
|
let mut f = f.frame();
|
||||||
|
if frame.symbols.is_empty() {
|
||||||
|
f.print_raw(frame.frame.ip(), None, None, None)?;
|
||||||
|
} else {
|
||||||
|
for symbol in frame.symbols.iter() {
|
||||||
|
f.print_raw_with_column(
|
||||||
|
frame.frame.ip(),
|
||||||
|
symbol.name.as_ref().map(|b| SymbolName::new(b)),
|
||||||
|
symbol.filename.as_ref().map(|b| match b {
|
||||||
|
BytesOrWide::Bytes(w) => BytesOrWideString::Bytes(w),
|
||||||
|
BytesOrWide::Wide(w) => BytesOrWideString::Wide(w),
|
||||||
|
}),
|
||||||
|
symbol.lineno,
|
||||||
|
symbol.colno,
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
f.finish()?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct LazilyResolvedCapture {
|
||||||
|
sync: Once,
|
||||||
|
capture: UnsafeCell<Capture>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LazilyResolvedCapture {
|
||||||
|
fn new(capture: Capture) -> Self {
|
||||||
|
LazilyResolvedCapture {
|
||||||
|
sync: Once::new(),
|
||||||
|
capture: UnsafeCell::new(capture),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn force(&self) -> &Capture {
|
||||||
|
self.sync.call_once(|| {
|
||||||
|
// Safety: This exclusive reference can't overlap with any
|
||||||
|
// others. `Once` guarantees callers will block until this
|
||||||
|
// closure returns. `Once` also guarantees only a single caller
|
||||||
|
// will enter this closure.
|
||||||
|
unsafe { &mut *self.capture.get() }.resolve();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Safety: This shared reference can't overlap with the exclusive
|
||||||
|
// reference above.
|
||||||
|
unsafe { &*self.capture.get() }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Safety: Access to the inner value is synchronized using a thread-safe
|
||||||
|
// `Once`. So long as `Capture` is `Sync`, `LazilyResolvedCapture` is too
|
||||||
|
unsafe impl Sync for LazilyResolvedCapture where Capture: Sync {}
|
||||||
|
|
||||||
|
impl Capture {
|
||||||
|
fn resolve(&mut self) {
|
||||||
|
// If we're already resolved, nothing to do!
|
||||||
|
if self.resolved {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
self.resolved = true;
|
||||||
|
|
||||||
|
for frame in self.frames.iter_mut() {
|
||||||
|
let symbols = &mut frame.symbols;
|
||||||
|
let frame = &frame.frame;
|
||||||
|
backtrace::resolve_frame(frame, |symbol| {
|
||||||
|
symbols.push(BacktraceSymbol {
|
||||||
|
name: symbol.name().map(|m| m.as_bytes().to_vec()),
|
||||||
|
filename: symbol.filename_raw().map(|b| match b {
|
||||||
|
BytesOrWideString::Bytes(b) => BytesOrWide::Bytes(b.to_owned()),
|
||||||
|
BytesOrWideString::Wide(b) => BytesOrWide::Wide(b.to_owned()),
|
||||||
|
}),
|
||||||
|
lineno: symbol.lineno(),
|
||||||
|
colno: symbol.colno(),
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prints the filename of the backtrace frame.
|
||||||
|
fn output_filename(
|
||||||
|
fmt: &mut fmt::Formatter,
|
||||||
|
bows: BytesOrWideString,
|
||||||
|
print_fmt: PrintFmt,
|
||||||
|
cwd: Option<&PathBuf>,
|
||||||
|
) -> fmt::Result {
|
||||||
|
let file: Cow<Path> = match bows {
|
||||||
|
#[cfg(unix)]
|
||||||
|
BytesOrWideString::Bytes(bytes) => {
|
||||||
|
use std::os::unix::ffi::OsStrExt;
|
||||||
|
Path::new(std::ffi::OsStr::from_bytes(bytes)).into()
|
||||||
|
}
|
||||||
|
#[cfg(not(unix))]
|
||||||
|
BytesOrWideString::Bytes(bytes) => {
|
||||||
|
Path::new(std::str::from_utf8(bytes).unwrap_or("<unknown>")).into()
|
||||||
|
}
|
||||||
|
#[cfg(windows)]
|
||||||
|
BytesOrWideString::Wide(wide) => {
|
||||||
|
use std::os::windows::ffi::OsStringExt;
|
||||||
|
Cow::Owned(std::ffi::OsString::from_wide(wide).into())
|
||||||
|
}
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
BytesOrWideString::Wide(_wide) => Path::new("<unknown>").into(),
|
||||||
|
};
|
||||||
|
if print_fmt == PrintFmt::Short && file.is_absolute() {
|
||||||
|
if let Some(cwd) = cwd {
|
||||||
|
if let Ok(stripped) = file.strip_prefix(&cwd) {
|
||||||
|
if let Some(s) = stripped.to_str() {
|
||||||
|
return write!(fmt, ".{}{}", path::MAIN_SEPARATOR, s);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Display::fmt(&file.display(), fmt)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn _assert_send_sync() {
|
||||||
|
fn assert<T: Send + Sync>() {}
|
||||||
|
assert::<Backtrace>();
|
||||||
|
}
|
||||||
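
// For reference: the RUST_LIB_BACKTRACE / RUST_BACKTRACE gate implemented by
// `Backtrace::enabled()` above, as a minimal standalone sketch. Illustrative
// only; the helper name is invented and this is not part of the vendored crate.
use std::env;

fn lib_backtrace_enabled() -> bool {
    // RUST_LIB_BACKTRACE takes precedence; otherwise fall back to RUST_BACKTRACE.
    // Any value other than "0" enables capture.
    match env::var_os("RUST_LIB_BACKTRACE") {
        Some(s) => s != "0",
        None => match env::var_os("RUST_BACKTRACE") {
            Some(s) => s != "0",
            None => false,
        },
    }
}

fn main() {
    println!("backtrace capture enabled: {}", lib_backtrace_enabled());
}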

vendor/anyhow/src/chain.rs (vendored, new file, 102 lines)
@ -0,0 +1,102 @@

use self::ChainState::*;
use crate::StdError;

#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
use alloc::vec::{self, Vec};

#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
pub(crate) use crate::Chain;

#[cfg(all(not(feature = "std"), anyhow_no_core_error))]
pub(crate) struct Chain<'a> {
    state: ChainState<'a>,
}

#[derive(Clone)]
pub(crate) enum ChainState<'a> {
    Linked {
        next: Option<&'a (dyn StdError + 'static)>,
    },
    #[cfg(any(feature = "std", not(anyhow_no_core_error)))]
    Buffered {
        rest: vec::IntoIter<&'a (dyn StdError + 'static)>,
    },
}

impl<'a> Chain<'a> {
    #[cold]
    pub fn new(head: &'a (dyn StdError + 'static)) -> Self {
        Chain {
            state: ChainState::Linked { next: Some(head) },
        }
    }
}

impl<'a> Iterator for Chain<'a> {
    type Item = &'a (dyn StdError + 'static);

    fn next(&mut self) -> Option<Self::Item> {
        match &mut self.state {
            Linked { next } => {
                let error = (*next)?;
                *next = error.source();
                Some(error)
            }
            #[cfg(any(feature = "std", not(anyhow_no_core_error)))]
            Buffered { rest } => rest.next(),
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.len();
        (len, Some(len))
    }
}

#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
impl DoubleEndedIterator for Chain<'_> {
    fn next_back(&mut self) -> Option<Self::Item> {
        match &mut self.state {
            Linked { mut next } => {
                let mut rest = Vec::new();
                while let Some(cause) = next {
                    next = cause.source();
                    rest.push(cause);
                }
                let mut rest = rest.into_iter();
                let last = rest.next_back();
                self.state = Buffered { rest };
                last
            }
            Buffered { rest } => rest.next_back(),
        }
    }
}

impl ExactSizeIterator for Chain<'_> {
    fn len(&self) -> usize {
        match &self.state {
            Linked { mut next } => {
                let mut len = 0;
                while let Some(cause) = next {
                    next = cause.source();
                    len += 1;
                }
                len
            }
            #[cfg(any(feature = "std", not(anyhow_no_core_error)))]
            Buffered { rest } => rest.len(),
        }
    }
}

#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
impl Default for Chain<'_> {
    fn default() -> Self {
        Chain {
            state: ChainState::Buffered {
                rest: Vec::new().into_iter(),
            },
        }
    }
}
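
// For reference: the `Chain` iterator above backs `anyhow::Error::chain()`,
// which walks an error together with all of its sources. A minimal usage
// sketch; illustrative only, not part of the vendored crate.
use anyhow::{anyhow, Context, Result};

fn inner() -> Result<()> {
    Err(anyhow!("root cause"))
}

fn outer() -> Result<()> {
    inner().context("while doing the outer step")
}

fn main() {
    if let Err(err) = outer() {
        // Prints "while doing the outer step" followed by "root cause".
        for (i, cause) in err.chain().enumerate() {
            eprintln!("{i}: {cause}");
        }
    }
}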

vendor/anyhow/src/context.rs (vendored, new file, 193 lines)
@ -0,0 +1,193 @@

use crate::error::ContextError;
use crate::{Context, Error, StdError};
use core::convert::Infallible;
use core::fmt::{self, Debug, Display, Write};

#[cfg(error_generic_member_access)]
use core::error::Request;

mod ext {
    use super::*;

    pub trait StdError {
        fn ext_context<C>(self, context: C) -> Error
        where
            C: Display + Send + Sync + 'static;
    }

    #[cfg(any(feature = "std", not(anyhow_no_core_error)))]
    impl<E> StdError for E
    where
        E: crate::StdError + Send + Sync + 'static,
    {
        fn ext_context<C>(self, context: C) -> Error
        where
            C: Display + Send + Sync + 'static,
        {
            let backtrace = backtrace_if_absent!(&self);
            Error::from_context(context, self, backtrace)
        }
    }

    impl StdError for Error {
        fn ext_context<C>(self, context: C) -> Error
        where
            C: Display + Send + Sync + 'static,
        {
            self.context(context)
        }
    }
}

impl<T, E> Context<T, E> for Result<T, E>
where
    E: ext::StdError + Send + Sync + 'static,
{
    fn context<C>(self, context: C) -> Result<T, Error>
    where
        C: Display + Send + Sync + 'static,
    {
        // Not using map_err to save 2 useless frames off the captured backtrace
        // in ext_context.
        match self {
            Ok(ok) => Ok(ok),
            Err(error) => Err(error.ext_context(context)),
        }
    }

    fn with_context<C, F>(self, context: F) -> Result<T, Error>
    where
        C: Display + Send + Sync + 'static,
        F: FnOnce() -> C,
    {
        match self {
            Ok(ok) => Ok(ok),
            Err(error) => Err(error.ext_context(context())),
        }
    }
}

/// ```
/// # type T = ();
/// #
/// use anyhow::{Context, Result};
///
/// fn maybe_get() -> Option<T> {
/// # const IGNORE: &str = stringify! {
///     ...
/// # };
/// # unimplemented!()
/// }
///
/// fn demo() -> Result<()> {
///     let t = maybe_get().context("there is no T")?;
/// # const IGNORE: &str = stringify! {
///     ...
/// # };
/// # unimplemented!()
/// }
/// ```
impl<T> Context<T, Infallible> for Option<T> {
    fn context<C>(self, context: C) -> Result<T, Error>
    where
        C: Display + Send + Sync + 'static,
    {
        // Not using ok_or_else to save 2 useless frames off the captured
        // backtrace.
        match self {
            Some(ok) => Ok(ok),
            None => Err(Error::from_display(context, backtrace!())),
        }
    }

    fn with_context<C, F>(self, context: F) -> Result<T, Error>
    where
        C: Display + Send + Sync + 'static,
        F: FnOnce() -> C,
    {
        match self {
            Some(ok) => Ok(ok),
            None => Err(Error::from_display(context(), backtrace!())),
        }
    }
}

impl<C, E> Debug for ContextError<C, E>
where
    C: Display,
    E: Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Error")
            .field("context", &Quoted(&self.context))
            .field("source", &self.error)
            .finish()
    }
}

impl<C, E> Display for ContextError<C, E>
where
    C: Display,
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.context, f)
    }
}

impl<C, E> StdError for ContextError<C, E>
where
    C: Display,
    E: StdError + 'static,
{
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        Some(&self.error)
    }

    #[cfg(error_generic_member_access)]
    fn provide<'a>(&'a self, request: &mut Request<'a>) {
        StdError::provide(&self.error, request);
    }
}

impl<C> StdError for ContextError<C, Error>
where
    C: Display,
{
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        Some(unsafe { crate::ErrorImpl::error(self.error.inner.by_ref()) })
    }

    #[cfg(error_generic_member_access)]
    fn provide<'a>(&'a self, request: &mut Request<'a>) {
        Error::provide(&self.error, request);
    }
}

struct Quoted<C>(C);

impl<C> Debug for Quoted<C>
where
    C: Display,
{
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_char('"')?;
        Quoted(&mut *formatter).write_fmt(format_args!("{}", self.0))?;
        formatter.write_char('"')?;
        Ok(())
    }
}

impl Write for Quoted<&mut fmt::Formatter<'_>> {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        Display::fmt(&s.escape_debug(), self.0)
    }
}

pub(crate) mod private {
    use super::*;

    pub trait Sealed {}

    impl<T, E> Sealed for Result<T, E> where E: ext::StdError {}
    impl<T> Sealed for Option<T> {}
}
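
// For reference: these impls provide the public `Context` extension trait used
// as `.context(...)` / `.with_context(...)` on `Result` and `Option`. A minimal
// usage sketch; illustrative only, and the file name is made up.
use anyhow::{Context, Result};
use std::fs;

fn read_config(path: &str) -> Result<String> {
    // Wraps the io::Error in an anyhow::Error carrying the extra message,
    // while keeping the original error reachable via `source()`.
    fs::read_to_string(path).with_context(|| format!("failed to read {path}"))
}

fn main() -> Result<()> {
    let cfg = read_config("config.json").context("startup failed")?;
    println!("{} bytes of config", cfg.len());
    Ok(())
}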

vendor/anyhow/src/ensure.rs (vendored, new file, 919 lines)
@ -0,0 +1,919 @@

use crate::Error;
use alloc::string::String;
use core::fmt::{self, Debug, Write};
use core::mem::MaybeUninit;
use core::ptr;
use core::slice;
use core::str;

#[doc(hidden)]
pub trait BothDebug {
    fn __dispatch_ensure(self, msg: &'static str) -> Error;
}

impl<A, B> BothDebug for (A, B)
where
    A: Debug,
    B: Debug,
{
    fn __dispatch_ensure(self, msg: &'static str) -> Error {
        render(msg, &self.0, &self.1)
    }
}

#[doc(hidden)]
pub trait NotBothDebug {
    fn __dispatch_ensure(self, msg: &'static str) -> Error;
}

impl<A, B> NotBothDebug for &(A, B) {
    fn __dispatch_ensure(self, msg: &'static str) -> Error {
        Error::msg(msg)
    }
}

struct Buf {
    bytes: [MaybeUninit<u8>; 40],
    written: usize,
}

impl Buf {
    fn new() -> Self {
        Buf {
            bytes: [MaybeUninit::uninit(); 40],
            written: 0,
        }
    }

    fn as_str(&self) -> &str {
        unsafe {
            str::from_utf8_unchecked(slice::from_raw_parts(
                self.bytes.as_ptr().cast::<u8>(),
                self.written,
            ))
        }
    }
}

impl Write for Buf {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        if s.bytes().any(|b| b == b' ' || b == b'\n') {
            return Err(fmt::Error);
        }

        let remaining = self.bytes.len() - self.written;
        if s.len() > remaining {
            return Err(fmt::Error);
        }

        unsafe {
            ptr::copy_nonoverlapping(
                s.as_ptr(),
                self.bytes.as_mut_ptr().add(self.written).cast::<u8>(),
                s.len(),
            );
        }
        self.written += s.len();
        Ok(())
    }
}

fn render(msg: &'static str, lhs: &dyn Debug, rhs: &dyn Debug) -> Error {
    let mut lhs_buf = Buf::new();
    if fmt::write(&mut lhs_buf, format_args!("{:?}", lhs)).is_ok() {
        let mut rhs_buf = Buf::new();
        if fmt::write(&mut rhs_buf, format_args!("{:?}", rhs)).is_ok() {
            let lhs_str = lhs_buf.as_str();
            let rhs_str = rhs_buf.as_str();
            // "{msg} ({lhs} vs {rhs})"
            let len = msg.len() + 2 + lhs_str.len() + 4 + rhs_str.len() + 1;
            let mut string = String::with_capacity(len);
            string.push_str(msg);
            string.push_str(" (");
            string.push_str(lhs_str);
            string.push_str(" vs ");
            string.push_str(rhs_str);
            string.push(')');
            return Error::msg(string);
        }
    }
    Error::msg(msg)
}
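
// For reference: `Buf` and `render()` above produce the "{msg} ({lhs} vs {rhs})"
// message that anyhow's `ensure!` macro uses when a binary comparison fails.
// A minimal usage sketch; illustrative only, not part of the vendored crate.
use anyhow::{ensure, Result};

fn check_block(block: &[u8]) -> Result<()> {
    // With no custom message, a failed comparison is rendered via `render()`
    // above, e.g. "Condition failed: `block.len() == 16` (15 vs 16)".
    ensure!(block.len() == 16);
    // With a custom message, only the message is used.
    ensure!(!block.is_empty(), "block must not be empty");
    Ok(())
}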
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! __parse_ensure {
|
||||||
|
(atom () $bail:tt $fuel:tt {($($rhs:tt)+) ($($lhs:tt)+) $op:tt} $dup:tt $(,)?) => {
|
||||||
|
$crate::__fancy_ensure!($($lhs)+, $op, $($rhs)+)
|
||||||
|
};
|
||||||
|
|
||||||
|
// low precedence control flow constructs
|
||||||
|
|
||||||
|
(0 $stack:tt ($($bail:tt)*) $fuel:tt $parse:tt $dup:tt return $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt ($($bail:tt)*) $fuel:tt $parse:tt $dup:tt break $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt ($($bail:tt)*) $fuel:tt $parse:tt $dup:tt continue $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt ($($bail:tt)*) $fuel:tt $parse:tt $dup:tt yield $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt ($($bail:tt)*) $fuel:tt $parse:tt $dup:tt move $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// unary operators
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($deref:tt $($dup:tt)*) * $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $deref) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($not:tt $($dup:tt)*) ! $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $not) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($neg:tt $($dup:tt)*) - $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $neg) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($let:tt $($dup:tt)*) let $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $let) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($lifetime:tt $colon:tt $($dup:tt)*) $label:lifetime : $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $lifetime $colon) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $mut:tt $($dup:tt)*) &mut $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $and $mut) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $($dup:tt)*) & $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $and) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($andand:tt $mut:tt $($dup:tt)*) &&mut $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $andand $mut) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($andand:tt $($dup:tt)*) && $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $andand) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// control flow constructs
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($if:tt $($dup:tt)*) if $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 (cond $stack) $bail ($($fuel)*) {($($buf)* $if) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($match:tt $($dup:tt)*) match $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 (cond $stack) $bail ($($fuel)*) {($($buf)* $match) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($while:tt $($dup:tt)*) while $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 (cond $stack) $bail ($($fuel)*) {($($buf)* $while) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($for:tt $($dup:tt)*) for $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat (cond $stack) $bail ($($fuel)*) {($($buf)* $for) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom (cond $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($brace:tt $($dup:tt)*) {$($block:tt)*} $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(cond $stack $bail ($($fuel)*) {($($buf)* $brace) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(cond $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($else:tt $if:tt $($dup:tt)*) else if $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 (cond $stack) $bail ($($fuel)*) {($($buf)* $else $if) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(cond $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($else:tt $brace:tt $($dup:tt)*) else {$($block:tt)*} $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $else $brace) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(cond $stack:tt $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) $parse $dup $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// atomic expressions
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($paren:tt $($dup:tt)*) ($($content:tt)*) $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $paren) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bracket:tt $($dup:tt)*) [$($array:tt)*] $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $bracket) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($brace:tt $($dup:tt)*) {$($block:tt)*} $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $brace) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($loop:tt $block:tt $($dup:tt)*) loop {$($body:tt)*} $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $loop $block) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($async:tt $block:tt $($dup:tt)*) async {$($body:tt)*} $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $async $block) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($async:tt $move:tt $block:tt $($dup:tt)*) async move {$($body:tt)*} $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $async $move $block) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($unsafe:tt $block:tt $($dup:tt)*) unsafe {$($body:tt)*} $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $unsafe $block) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($const:tt $block:tt $($dup:tt)*) const {$($body:tt)*} $($rest:tt)*) => {
|
||||||
|
// TODO: this is mostly useless due to https://github.com/rust-lang/rust/issues/86730
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $const $block) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($literal:tt $($dup:tt)*) $lit:literal $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $literal) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// path expressions
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $ident:tt $($dup:tt)*) :: $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(epath (atom $stack) $bail ($($fuel)*) {($($buf)* $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($ident:tt $($dup:tt)*) $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(epath (atom $stack) $bail ($($fuel)*) {($($buf)* $ident) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($langle:tt $($dup:tt)*) < $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type (qpath (epath (atom $stack))) $bail ($($fuel)*) {($($buf)* $langle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(epath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $langle:tt $($dup:tt)*) :: < $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(generic (epath $stack) $bail ($($fuel)*) {($($buf)* $colons $langle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(epath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $langle:tt $($dup:tt)*) :: << $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type (qpath (tpath (arglist (epath $stack)))) $bail ($($fuel)*) {($($buf)* $colons $langle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(epath $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt :: <- - $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(epath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $larrow:tt $($dup:tt)*) :: <- $lit:literal $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(generic (epath $stack) $bail ($($fuel)*) {($($buf)* $colons $larrow) $($parse)*} ($($dup)*) $($dup)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(epath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $ident:tt $($dup:tt)*) :: $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(epath $stack $bail ($($fuel)*) {($($buf)* $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(epath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bang:tt $args:tt $($dup:tt)*) ! ($($mac:tt)*) $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bang $args) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(epath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bang:tt $args:tt $($dup:tt)*) ! [$($mac:tt)*] $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bang $args) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(epath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bang:tt $args:tt $($dup:tt)*) ! {$($mac:tt)*} $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bang $args) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(epath (split ($pop:ident $stack:tt)) $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop (split $stack) $bail ($($fuel)*) $parse $dup $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(epath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) $parse $dup $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// trailer expressions
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($paren:tt $($dup:tt)*) ($($call:tt)*) $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $paren) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bracket:tt $($dup:tt)*) [$($index:tt)*] $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $bracket) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($brace:tt $($dup:tt)*) {$($init:tt)*} $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $brace) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($question:tt $($dup:tt)*) ? $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $question) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($dot:tt $ident:tt $colons:tt $langle:tt $($dup:tt)*) . $i:ident :: < $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(generic (atom $stack) $bail ($($fuel)*) {($($buf)* $dot $ident $colons $langle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($dot:tt $ident:tt $colons:tt $langle:tt $($dup:tt)*) . $i:ident :: << $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type (qpath (tpath (arglist (atom $stack)))) $bail ($($fuel)*) {($($buf)* $dot $ident $colons $langle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt . $i:ident :: <- - $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($dot:tt $ident:tt $colons:tt $larrow:tt $($dup:tt)*) . $i:ident :: <- $lit:literal $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(generic (atom $stack) $bail ($($fuel)*) {($($buf)* $dot $ident $colons $larrow) $($parse)*} ($($dup)*) $($dup)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($dot:tt $field:tt $($dup:tt)*) . $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $dot $field) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt . - $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($dot:tt $index:tt $($dup:tt)*) . $lit:literal $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $dot $index) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($as:tt $($dup:tt)*) as $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type (atom $stack) $bail ($($fuel)*) {($($buf)* $as) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// types
|
||||||
|
|
||||||
|
(type ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bracket:tt $($dup:tt)*) [$($content:tt)*] $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bracket) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($paren:tt $($dup:tt)*) ($($content:tt)*) $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $paren) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($star:tt $const:tt $($dup:tt)*) *const $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $star $const) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($star:tt $mut:tt $($dup:tt)*) *mut $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $star $mut) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $lifetime:tt $mut:tt $($dup:tt)*) & $l:lifetime mut $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and $lifetime $mut) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $mut:tt $($dup:tt)*) & mut $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and $mut) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $lifetime:tt $($dup:tt)*) & $l:lifetime $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and $lifetime) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $($dup:tt)*) & $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $lifetime:tt $mut:tt $($dup:tt)*) && $l:lifetime mut $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and $lifetime $mut) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $mut:tt $($dup:tt)*) && mut $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and $mut) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $lifetime:tt $($dup:tt)*) && $l:lifetime $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and $lifetime) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $($dup:tt)*) && $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt unsafe extern - $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($unsafe:tt $(extern $($abi:literal)?)? fn $($dup:tt)*) unsafe $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $unsafe) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt extern - $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($extern:tt $abi:tt fn $($dup:tt)*) extern $lit:literal $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $extern $abi) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($extern:tt fn $($dup:tt)*) extern $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $extern) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($fn:tt $paren:tt $arrow:tt $($dup:tt)*) fn ($($args:tt)*) -> $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $fn $paren $arrow) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($fn:tt $paren:tt $($dup:tt)*) fn ($($args:tt)*) $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $fn $paren) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($impl:tt $($dup:tt)*) impl $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $impl) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($dyn:tt $($dup:tt)*) dyn $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $dyn) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($wild:tt $($dup:tt)*) _ $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $wild) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($never:tt $($dup:tt)*) ! $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $never) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($for:tt $langle:tt $($dup:tt)*) for < $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(generic (type $stack) $bail ($($fuel)*) {($($buf)* $for $langle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// path types
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $ident:tt $($dup:tt)*) :: $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(tpath $stack $bail ($($fuel)*) {($($buf)* $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($ident:tt $($dup:tt)*) $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(tpath $stack $bail ($($fuel)*) {($($buf)* $ident) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($langle:tt $($dup:tt)*) < $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type (qpath (tpath $stack)) $bail ($($fuel)*) {($($buf)* $langle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($langle:tt $($dup:tt)*) < $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(generic (tpath $stack) $bail ($($fuel)*) {($($buf)* $langle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($langle:tt $($dup:tt)*) << $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type (qpath (tpath (arglist (tpath $stack)))) $bail ($($fuel)*) {($($buf)* $langle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt <- - $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($larrow:tt $($dup:tt)*) <- $lit:literal $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(generic (tpath $stack) $bail ($($fuel)*) {($($buf)* $larrow) $($parse)*} ($($dup)*) $($dup)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $langle:tt $($dup:tt)*) :: < $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(generic (tpath $stack) $bail ($($fuel)*) {($($buf)* $colons $langle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $langle:tt $($dup:tt)*) :: << $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type (qpath (tpath (arglist (tpath $stack)))) $bail ($($fuel)*) {($($buf)* $colons $langle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt :: <- - $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $larrow:tt $($dup:tt)*) :: <- $lit:literal $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(generic (tpath $stack) $bail ($($fuel)*) {($($buf)* $colons $larrow) $($parse)*} ($($dup)*) $($dup)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $ident:tt $($dup:tt)*) :: $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(tpath $stack $bail ($($fuel)*) {($($buf)* $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($paren:tt $arrow:tt $($dup:tt)*) ($($args:tt)*) -> $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $paren $arrow) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($paren:tt $($dup:tt)*) ($($args:tt)*) $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(object $stack $bail ($($fuel)*) {($($buf)* $paren) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $paren:tt $arrow:tt $($dup:tt)*) :: ($($args:tt)*) -> $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $colons $paren $arrow) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $paren:tt $($dup:tt)*) :: ($($args:tt)*) $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(object $stack $bail ($($fuel)*) {($($buf)* $colons $paren) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bang:tt $args:tt $($dup:tt)*) ! ($($mac:tt)*) $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bang $args) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bang:tt $args:tt $($dup:tt)*) ! [$($mac:tt)*] $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bang $args) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bang:tt $args:tt $($dup:tt)*) ! {$($mac:tt)*} $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bang $args) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(object $stack $bail ($($fuel)*) $parse $dup $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// qualified paths
|
||||||
|
|
||||||
|
(qpath (split ($pop:ident $stack:tt)) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $colons:tt $ident:tt $($dup:tt)*) >> :: $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $rangle $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(qpath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $colons:tt $ident:tt $($dup:tt)*) > :: $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $rangle $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(qpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($as:tt $($dup:tt)*) as $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type (qpath $stack) $bail ($($fuel)*) {($($buf)* $as) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// trait objects
|
||||||
|
|
||||||
|
(object (arglist $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($plus:tt $colons:tt $ident:tt $($dup:tt)*) + :: $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(tpath (arglist $stack) $bail ($($fuel)*) {($($buf)* $plus $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(object (arglist $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($plus:tt $ident:tt $($dup:tt)*) + $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(tpath (arglist $stack) $bail ($($fuel)*) {($($buf)* $plus $ident) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(object (split ($pop:ident $stack:tt)) $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop (split $stack) $bail ($($fuel)*) $parse $dup $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(object ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) $parse $dup $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// angle bracketed generic arguments
|
||||||
|
|
||||||
|
(generic (split ($pop:ident $stack:tt)) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $($dup:tt)*) >> $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $rangle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(generic ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $($dup:tt)*) > $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $rangle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(generic ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $($dup:tt)*) >> $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop (split $stack) $bail ($($fuel)*) {($($buf)*) $($parse)*} ($rangle $($rest)*) $rangle $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(generic $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt - - $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(generic $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($neg:tt $($dup:tt)*) - $lit:literal $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(generic $stack $bail ($($fuel)*) {($($buf)* $neg) $($parse)*} ($($dup)*) $($dup)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(generic $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt - $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(generic $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($literal:tt $($dup:tt)*) $lit:literal $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(arglist $stack $bail ($($fuel)*) {($($buf)* $literal) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(generic $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($brace:tt $($dup:tt)*) {$($block:tt)*} $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(arglist $stack $bail ($($fuel)*) {($($buf)* $brace) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(generic $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($lifetime:tt $($dup:tt)*) $l:lifetime $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(arglist $stack $bail ($($fuel)*) {($($buf)* $lifetime) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(generic $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($assoc:tt $eq:tt $($dup:tt)*) $ident:ident = $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type (arglist $stack) $bail ($($fuel)*) {($($buf)* $assoc $eq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(generic $stack:tt $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type (arglist $stack) $bail ($($fuel)*) $parse $dup $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(arglist $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($comma:tt $($dup:tt)*) , $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(generic $stack $bail ($($fuel)*) {($($buf)* $comma) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(arglist (split ($pop:ident $stack:tt)) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $($dup:tt)*) >> $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)*) $rangle $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(arglist ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $($dup:tt)*) > $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $rangle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(arglist ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $($dup:tt)*) >> $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!($pop (split $stack) $bail ($($fuel)*) {($($buf)*) $($parse)*} ($rangle $($rest)*) $rangle $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// patterns
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($pipe:tt $($dup:tt)*) | $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $pipe) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($eq:tt $($dup:tt)*) = $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $eq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($in:tt $($dup:tt)*) in $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $in) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($ref:tt $($dup:tt)*) ref $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $ref) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($mut:tt $($dup:tt)*) mut $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $mut) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($at:tt $($dup:tt)*) @ $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $at) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt - - $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($neg:tt $($dup:tt)*) - $lit:literal $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $neg) $($parse)*} ($($dup)*) $($dup)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt - $($rest:tt)*) => {
|
||||||
|
$crate::__fallback_ensure!($($bail)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($literal:tt $($dup:tt)*) $lit:literal $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $literal) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($range:tt $($dup:tt)*) .. $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $range) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($range:tt $($dup:tt)*) ..= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $range) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $($dup:tt)*) & $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $and) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($andand:tt $($dup:tt)*) && $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $andand) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($paren:tt $($dup:tt)*) ($($content:tt)*) $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $paren) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bracket:tt $($dup:tt)*) [$($content:tt)*] $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $bracket) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($brace:tt $($dup:tt)*) {$($content:tt)*} $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $brace) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($wild:tt $($dup:tt)*) _ $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $wild) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $ident:tt $($dup:tt)*) :: $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(epath (pat $stack) $bail ($($fuel)*) {($($buf)* $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($ident:tt $($dup:tt)*) $i:ident $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(epath (pat $stack) $bail ($($fuel)*) {($($buf)* $ident) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($langle:tt $($dup:tt)*) < $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(type (qpath (epath (pat $stack))) $bail ($($fuel)*) {($($buf)* $langle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// comparison binary operators
|
||||||
|
|
||||||
|
(atom () $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($eq:tt $($dup:tt)*) == $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)*) $eq} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($eq:tt $($dup:tt)*) == $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $eq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom () $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($le:tt $($dup:tt)*) <= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)*) $le} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($le:tt $($dup:tt)*) <= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $le) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom () $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($lt:tt $($dup:tt)*) < $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)*) $lt} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($lt:tt $($dup:tt)*) < $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $lt) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom () $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($ne:tt $($dup:tt)*) != $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)*) $ne} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($ne:tt $($dup:tt)*) != $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $ne) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom () $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($ge:tt $($dup:tt)*) >= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)*) $ge} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($ge:tt $($dup:tt)*) >= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $ge) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom (split ()) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt >> $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)* > ) > } ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom () $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($gt:tt $($dup:tt)*) > $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)*) $gt} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom (split $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($rangle:tt $($dup:tt)*) >> $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $rangle) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($gt:tt $($dup:tt)*) > $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $gt) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// high precedence binary operators
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($add:tt $($dup:tt)*) + $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $add) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($sub:tt $($dup:tt)*) - $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $sub) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($mul:tt $($dup:tt)*) * $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $mul) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($div:tt $($dup:tt)*) / $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $div) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rem:tt $($dup:tt)*) % $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $rem) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bitxor:tt $($dup:tt)*) ^ $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $bitxor) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bitand:tt $($dup:tt)*) & $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $bitand) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bitor:tt $($dup:tt)*) | $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $bitor) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($shl:tt $($dup:tt)*) << $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $shl) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($shr:tt $($dup:tt)*) >> $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $shr) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
// low precedence binary operators
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $($dup:tt)*) && $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $and) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($or:tt $($dup:tt)*) || $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $or) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($assign:tt $($dup:tt)*) = $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $assign) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($addeq:tt $($dup:tt)*) += $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $addeq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($subeq:tt $($dup:tt)*) -= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $subeq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($muleq:tt $($dup:tt)*) *= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $muleq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($diveq:tt $($dup:tt)*) /= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $diveq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($remeq:tt $($dup:tt)*) %= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $remeq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bitxoreq:tt $($dup:tt)*) ^= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $bitxoreq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bitandeq:tt $($dup:tt)*) &= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $bitandeq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bitoreq:tt $($dup:tt)*) |= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $bitoreq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($shleq:tt $($dup:tt)*) <<= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $shleq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};
|
||||||
|
|
||||||
|
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($shreq:tt $($dup:tt)*) >>= $($rest:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $shreq) $($parse)*} ($($rest)*) $($rest)*)
|
||||||
|
};

    // unrecognized expression

    ($state:tt $stack:tt ($($bail:tt)*) $($rest:tt)*) => {
        $crate::__fallback_ensure!($($bail)*)
    };
}
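
// The arms above form a fuel-bounded token muncher: `ensure!` (defined in
// macros.rs below) seeds the call with a long list of `~` tokens, and every
// recursive arm matches `(~$($fuel:tt)*)` and recurses with `($($fuel)*)`,
// burning one `~` per step. Once the fuel runs out only the fallback arm can
// match, so recursion depth stays bounded even on pathological input.
// A minimal standalone sketch of the same pattern (hypothetical, not part of
// anyhow) follows.
macro_rules! count_idents_bounded {
    // Out of fuel: stop recursing and return whatever was counted so far.
    (() ($count:expr) $($rest:tt)*) => { $count };
    // No input left: done.
    (($($fuel:tt)*) ($count:expr)) => { $count };
    // Consume one identifier and one unit of fuel, then recurse.
    ((~$($fuel:tt)*) ($count:expr) $ident:ident $($rest:tt)*) => {
        count_idents_bounded!(($($fuel)*) ($count + 1) $($rest)*)
    };
}

#[cfg(test)]
#[test]
fn fuel_bounds_recursion_sketch() {
    // Three units of fuel are enough to count three identifiers.
    assert_eq!(count_idents_bounded!((~ ~ ~) (0) a b c), 3);
}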

#[doc(hidden)]
#[macro_export]
macro_rules! __fancy_ensure {
    ($lhs:expr, $op:tt, $rhs:expr) => {
        match (&$lhs, &$rhs) {
            (lhs, rhs) => {
                if !(lhs $op rhs) {
                    #[allow(unused_imports)]
                    use $crate::__private::{BothDebug, NotBothDebug};
                    return Err((lhs, rhs).__dispatch_ensure(
                        $crate::__private::concat!(
                            "Condition failed: `",
                            $crate::__private::stringify!($lhs),
                            " ",
                            $crate::__private::stringify!($op),
                            " ",
                            $crate::__private::stringify!($rhs),
                            "`",
                        ),
                    ));
                }
            }
        }
    };
}

#[doc(hidden)]
#[macro_export]
macro_rules! __fallback_ensure {
    ($cond:expr $(,)?) => {
        if $crate::__private::not($cond) {
            return $crate::__private::Err($crate::Error::msg(
                $crate::__private::concat!("Condition failed: `", $crate::__private::stringify!($cond), "`")
            ));
        }
    };
    ($cond:expr, $msg:literal $(,)?) => {
        if $crate::__private::not($cond) {
            return $crate::__private::Err($crate::__anyhow!($msg));
        }
    };
    ($cond:expr, $err:expr $(,)?) => {
        if $crate::__private::not($cond) {
            return $crate::__private::Err($crate::__anyhow!($err));
        }
    };
    ($cond:expr, $fmt:expr, $($arg:tt)*) => {
        if $crate::__private::not($cond) {
            return $crate::__private::Err($crate::__anyhow!($fmt, $($arg)*));
        }
    };
}
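
// Rough usage sketch (hypothetical, not part of anyhow): when `__parse_ensure!`
// recognizes a comparison, a failed `ensure!` reports both operand values via
// `__fancy_ensure!`; conditions the parser cannot handle drop to
// `__fallback_ensure!`, which only stringifies the expression.
#[cfg(test)]
mod ensure_message_sketch {
    use alloc::string::ToString;

    fn check(x: i32, y: i32) -> crate::Result<()> {
        crate::ensure!(x == y);
        Ok(())
    }

    #[test]
    fn failed_comparison_mentions_both_values() {
        let message = check(1, 2).unwrap_err().to_string();
        // Expected to look roughly like "Condition failed: `x == y` (1 vs 2)".
        assert!(message.contains("x == y"));
    }
}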

vendor/anyhow/src/error.rs (vendored, new file, 1027 lines)
File diff suppressed because it is too large

vendor/anyhow/src/fmt.rs (vendored, new file, 158 lines)
@ -0,0 +1,158 @@

use crate::chain::Chain;
use crate::error::ErrorImpl;
use crate::ptr::Ref;
use core::fmt::{self, Debug, Write};

impl ErrorImpl {
    pub(crate) unsafe fn display(this: Ref<Self>, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", unsafe { Self::error(this) })?;

        if f.alternate() {
            let chain = unsafe { Self::chain(this) };
            for cause in chain.skip(1) {
                write!(f, ": {}", cause)?;
            }
        }

        Ok(())
    }

    pub(crate) unsafe fn debug(this: Ref<Self>, f: &mut fmt::Formatter) -> fmt::Result {
        let error = unsafe { Self::error(this) };

        if f.alternate() {
            return Debug::fmt(error, f);
        }

        write!(f, "{}", error)?;

        if let Some(cause) = error.source() {
            write!(f, "\n\nCaused by:")?;
            let multiple = cause.source().is_some();
            for (n, error) in Chain::new(cause).enumerate() {
                writeln!(f)?;
                let mut indented = Indented {
                    inner: f,
                    number: if multiple { Some(n) } else { None },
                    started: false,
                };
                write!(indented, "{}", error)?;
            }
        }

        #[cfg(any(std_backtrace, feature = "backtrace"))]
        {
            use crate::backtrace::BacktraceStatus;
            use alloc::string::ToString;

            let backtrace = unsafe { Self::backtrace(this) };
            if let BacktraceStatus::Captured = backtrace.status() {
                let mut backtrace = backtrace.to_string();
                write!(f, "\n\n")?;
                if backtrace.starts_with("stack backtrace:") {
                    // Capitalize to match "Caused by:"
                    backtrace.replace_range(0..1, "S");
                } else {
                    // "stack backtrace:" prefix was removed in
                    // https://github.com/rust-lang/backtrace-rs/pull/286
                    writeln!(f, "Stack backtrace:")?;
                }
                backtrace.truncate(backtrace.trim_end().len());
                write!(f, "{}", backtrace)?;
            }
        }

        Ok(())
    }
}

struct Indented<'a, D> {
    inner: &'a mut D,
    number: Option<usize>,
    started: bool,
}

impl<T> Write for Indented<'_, T>
where
    T: Write,
{
    fn write_str(&mut self, s: &str) -> fmt::Result {
        for (i, line) in s.split('\n').enumerate() {
            if !self.started {
                self.started = true;
                match self.number {
                    Some(number) => write!(self.inner, "{: >5}: ", number)?,
                    None => self.inner.write_str("    ")?,
                }
            } else if i > 0 {
                self.inner.write_char('\n')?;
                if self.number.is_some() {
                    self.inner.write_str("       ")?;
                } else {
                    self.inner.write_str("    ")?;
                }
            }

            self.inner.write_str(line)?;
        }

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use alloc::string::String;

    #[test]
    fn one_digit() {
        let input = "verify\nthis";
        let expected = "    2: verify\n       this";
        let mut output = String::new();

        Indented {
            inner: &mut output,
            number: Some(2),
            started: false,
        }
        .write_str(input)
        .unwrap();

        assert_eq!(expected, output);
    }

    #[test]
    fn two_digits() {
        let input = "verify\nthis";
        let expected = "   12: verify\n       this";
        let mut output = String::new();

        Indented {
            inner: &mut output,
            number: Some(12),
            started: false,
        }
        .write_str(input)
        .unwrap();

        assert_eq!(expected, output);
    }

    #[test]
    fn no_digits() {
        let input = "verify\nthis";
        let expected = "    verify\n    this";
        let mut output = String::new();

        Indented {
            inner: &mut output,
            number: None,
            started: false,
        }
        .write_str(input)
        .unwrap();

        assert_eq!(expected, output);
    }
}

vendor/anyhow/src/kind.rs (vendored, new file, 121 lines)
@ -0,0 +1,121 @@

// Tagged dispatch mechanism for resolving the behavior of `anyhow!($expr)`.
//
// When anyhow! is given a single expr argument to turn into anyhow::Error, we
// want the resulting Error to pick up the input's implementation of source()
// and backtrace() if it has a std::error::Error impl, otherwise require nothing
// more than Display and Debug.
//
// Expressed in terms of specialization, we want something like:
//
//     trait AnyhowNew {
//         fn new(self) -> Error;
//     }
//
//     impl<T> AnyhowNew for T
//     where
//         T: Display + Debug + Send + Sync + 'static,
//     {
//         default fn new(self) -> Error {
//             /* no std error impl */
//         }
//     }
//
//     impl<T> AnyhowNew for T
//     where
//         T: std::error::Error + Send + Sync + 'static,
//     {
//         fn new(self) -> Error {
//             /* use std error's source() and backtrace() */
//         }
//     }
//
// Since specialization is not stable yet, instead we rely on autoref behavior
// of method resolution to perform tagged dispatch. Here we have two traits
// AdhocKind and TraitKind that both have an anyhow_kind() method. AdhocKind is
// implemented whether or not the caller's type has a std error impl, while
// TraitKind is implemented only when a std error impl does exist. The ambiguity
// is resolved by AdhocKind requiring an extra autoref so that it has lower
// precedence.
//
// The anyhow! macro will set up the call in this form:
//
//     #[allow(unused_imports)]
//     use $crate::__private::{AdhocKind, TraitKind};
//     let error = $msg;
//     (&error).anyhow_kind().new(error)

use crate::Error;
use core::fmt::{Debug, Display};

#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
use crate::StdError;
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
use alloc::boxed::Box;

pub struct Adhoc;

#[doc(hidden)]
pub trait AdhocKind: Sized {
    #[inline]
    fn anyhow_kind(&self) -> Adhoc {
        Adhoc
    }
}

impl<T> AdhocKind for &T where T: ?Sized + Display + Debug + Send + Sync + 'static {}

impl Adhoc {
    #[cold]
    pub fn new<M>(self, message: M) -> Error
    where
        M: Display + Debug + Send + Sync + 'static,
    {
        Error::from_adhoc(message, backtrace!())
    }
}

pub struct Trait;

#[doc(hidden)]
pub trait TraitKind: Sized {
    #[inline]
    fn anyhow_kind(&self) -> Trait {
        Trait
    }
}

impl<E> TraitKind for E where E: Into<Error> {}

impl Trait {
    #[cold]
    pub fn new<E>(self, error: E) -> Error
    where
        E: Into<Error>,
    {
        error.into()
    }
}

#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
pub struct Boxed;

#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
#[doc(hidden)]
pub trait BoxedKind: Sized {
    #[inline]
    fn anyhow_kind(&self) -> Boxed {
        Boxed
    }
}

#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
impl BoxedKind for Box<dyn StdError + Send + Sync> {}

#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
impl Boxed {
    #[cold]
    pub fn new(self, error: Box<dyn StdError + Send + Sync>) -> Error {
        let backtrace = backtrace_if_absent!(&*error);
        Error::from_boxed(error, backtrace)
    }
}
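
// A minimal standalone sketch (hypothetical, not part of anyhow) of the
// autoref-based tagged dispatch described above: the preferred trait is
// implemented on the value type, the fallback on `&T`, so the fallback needs
// one extra autoref and loses whenever the preferred impl applies.
#[cfg(test)]
mod autoref_dispatch_sketch {
    trait ByValue {
        fn kind(&self) -> &'static str {
            "by value"
        }
    }
    trait ByRef {
        fn kind(&self) -> &'static str {
            "by ref (fallback)"
        }
    }

    struct Special;
    struct Plain;

    // `Special` opts into the preferred path; everything else only reaches the
    // blanket fallback, which sits one more autoref away.
    impl ByValue for Special {}
    impl<T> ByRef for &T {}

    #[test]
    fn picks_the_less_autorefd_impl() {
        assert_eq!((&Special).kind(), "by value");
        assert_eq!((&Plain).kind(), "by ref (fallback)");
    }
}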

vendor/anyhow/src/lib.rs (vendored, new file, 732 lines)
@ -0,0 +1,732 @@

//! [![github]](https://github.com/dtolnay/anyhow) [![crates-io]](https://crates.io/crates/anyhow) [![docs-rs]](https://docs.rs/anyhow)
|
||||||
|
//!
|
||||||
|
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
|
||||||
|
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
|
||||||
|
//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
|
||||||
|
//!
|
||||||
|
//! <br>
|
||||||
|
//!
|
||||||
|
//! This library provides [`anyhow::Error`][Error], a trait object based error
|
||||||
|
//! type for easy idiomatic error handling in Rust applications.
|
||||||
|
//!
|
||||||
|
//! <br>
|
||||||
|
//!
|
||||||
|
//! # Details
|
||||||
|
//!
|
||||||
|
//! - Use `Result<T, anyhow::Error>`, or equivalently `anyhow::Result<T>`, as
|
||||||
|
//! the return type of any fallible function.
|
||||||
|
//!
|
||||||
|
//! Within the function, use `?` to easily propagate any error that implements
|
||||||
|
//! the [`std::error::Error`] trait.
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//! # pub trait Deserialize {}
|
||||||
|
//! #
|
||||||
|
//! # mod serde_json {
|
||||||
|
//! # use super::Deserialize;
|
||||||
|
//! # use std::io;
|
||||||
|
//! #
|
||||||
|
//! # pub fn from_str<T: Deserialize>(json: &str) -> io::Result<T> {
|
||||||
|
//! # unimplemented!()
|
||||||
|
//! # }
|
||||||
|
//! # }
|
||||||
|
//! #
|
||||||
|
//! # struct ClusterMap;
|
||||||
|
//! #
|
||||||
|
//! # impl Deserialize for ClusterMap {}
|
||||||
|
//! #
|
||||||
|
//! use anyhow::Result;
|
||||||
|
//!
|
||||||
|
//! fn get_cluster_info() -> Result<ClusterMap> {
|
||||||
|
//! let config = std::fs::read_to_string("cluster.json")?;
|
||||||
|
//! let map: ClusterMap = serde_json::from_str(&config)?;
|
||||||
|
//! Ok(map)
|
||||||
|
//! }
|
||||||
|
//! #
|
||||||
|
//! # fn main() {}
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! - Attach context to help the person troubleshooting the error understand
|
||||||
|
//! where things went wrong. A low-level error like "No such file or
|
||||||
|
//! directory" can be annoying to debug without more context about what higher
|
||||||
|
//! level step the application was in the middle of.
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//! # struct It;
|
||||||
|
//! #
|
||||||
|
//! # impl It {
|
||||||
|
//! # fn detach(&self) -> Result<()> {
|
||||||
|
//! # unimplemented!()
|
||||||
|
//! # }
|
||||||
|
//! # }
|
||||||
|
//! #
|
||||||
|
//! use anyhow::{Context, Result};
|
||||||
|
//!
|
||||||
|
//! fn main() -> Result<()> {
|
||||||
|
//! # return Ok(());
|
||||||
|
//! #
|
||||||
|
//! # const _: &str = stringify! {
|
||||||
|
//! ...
|
||||||
|
//! # };
|
||||||
|
//! #
|
||||||
|
//! # let it = It;
|
||||||
|
//! # let path = "./path/to/instrs.json";
|
||||||
|
//! #
|
||||||
|
//! it.detach().context("Failed to detach the important thing")?;
|
||||||
|
//!
|
||||||
|
//! let content = std::fs::read(path)
|
||||||
|
//! .with_context(|| format!("Failed to read instrs from {}", path))?;
|
||||||
|
//! #
|
||||||
|
//! # const _: &str = stringify! {
|
||||||
|
//! ...
|
||||||
|
//! # };
|
||||||
|
//! #
|
||||||
|
//! # Ok(())
|
||||||
|
//! }
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! ```console
|
||||||
|
//! Error: Failed to read instrs from ./path/to/instrs.json
|
||||||
|
//!
|
||||||
|
//! Caused by:
|
||||||
|
//! No such file or directory (os error 2)
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! - Downcasting is supported and can be by value, by shared reference, or by
|
||||||
|
//! mutable reference as needed.
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//! # use anyhow::anyhow;
|
||||||
|
//! # use std::fmt::{self, Display};
|
||||||
|
//! # use std::task::Poll;
|
||||||
|
//! #
|
||||||
|
//! # #[derive(Debug)]
|
||||||
|
//! # enum DataStoreError {
|
||||||
|
//! # Censored(()),
|
||||||
|
//! # }
|
||||||
|
//! #
|
||||||
|
//! # impl Display for DataStoreError {
|
||||||
|
//! # fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
//! # unimplemented!()
|
||||||
|
//! # }
|
||||||
|
//! # }
|
||||||
|
//! #
|
||||||
|
//! # impl std::error::Error for DataStoreError {}
|
||||||
|
//! #
|
||||||
|
//! # const REDACTED_CONTENT: () = ();
|
||||||
|
//! #
|
||||||
|
//! # let error = anyhow!("...");
|
||||||
|
//! # let root_cause = &error;
|
||||||
|
//! #
|
||||||
|
//! # let ret =
|
||||||
|
//! // If the error was caused by redaction, then return a
|
||||||
|
//! // tombstone instead of the content.
|
||||||
|
//! match root_cause.downcast_ref::<DataStoreError>() {
|
||||||
|
//! Some(DataStoreError::Censored(_)) => Ok(Poll::Ready(REDACTED_CONTENT)),
|
||||||
|
//! None => Err(error),
|
||||||
|
//! }
|
||||||
|
//! # ;
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! - If using Rust ≥ 1.65, a backtrace is captured and printed with the
|
||||||
|
//! error if the underlying error type does not already provide its own. In
|
||||||
|
//! order to see backtraces, they must be enabled through the environment
|
||||||
|
//! variables described in [`std::backtrace`]:
|
||||||
|
//!
|
||||||
|
//! - If you want panics and errors to both have backtraces, set
|
||||||
|
//! `RUST_BACKTRACE=1`;
|
||||||
|
//! - If you want only errors to have backtraces, set `RUST_LIB_BACKTRACE=1`;
|
||||||
|
//! - If you want only panics to have backtraces, set `RUST_BACKTRACE=1` and
|
||||||
|
//! `RUST_LIB_BACKTRACE=0`.
|
||||||
|
//!
|
||||||
|
//! [`std::backtrace`]: https://doc.rust-lang.org/std/backtrace/index.html#environment-variables
|
||||||
|
//!
|
||||||
|
//! - Anyhow works with any error type that has an impl of `std::error::Error`,
|
||||||
|
//! including ones defined in your crate. We do not bundle a `derive(Error)`
|
||||||
|
//! macro but you can write the impls yourself or use a standalone macro like
|
||||||
|
//! [thiserror].
|
||||||
|
//!
|
||||||
|
//! [thiserror]: https://github.com/dtolnay/thiserror
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//! use thiserror::Error;
|
||||||
|
//!
|
||||||
|
//! #[derive(Error, Debug)]
|
||||||
|
//! pub enum FormatError {
|
||||||
|
//! #[error("Invalid header (expected {expected:?}, got {found:?})")]
|
||||||
|
//! InvalidHeader {
|
||||||
|
//! expected: String,
|
||||||
|
//! found: String,
|
||||||
|
//! },
|
||||||
|
//! #[error("Missing attribute: {0}")]
|
||||||
|
//! MissingAttribute(String),
|
||||||
|
//! }
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! - One-off error messages can be constructed using the `anyhow!` macro, which
|
||||||
|
//! supports string interpolation and produces an `anyhow::Error`.
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//! # use anyhow::{anyhow, Result};
|
||||||
|
//! #
|
||||||
|
//! # fn demo() -> Result<()> {
|
||||||
|
//! # let missing = "...";
|
||||||
|
//! return Err(anyhow!("Missing attribute: {}", missing));
|
||||||
|
//! # Ok(())
|
||||||
|
//! # }
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! A `bail!` macro is provided as a shorthand for the same early return.
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//! # use anyhow::{bail, Result};
|
||||||
|
//! #
|
||||||
|
//! # fn demo() -> Result<()> {
|
||||||
|
//! # let missing = "...";
|
||||||
|
//! bail!("Missing attribute: {}", missing);
|
||||||
|
//! # Ok(())
|
||||||
|
//! # }
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! <br>
|
||||||
|
//!
|
||||||
|
//! # No-std support
|
||||||
|
//!
|
||||||
|
//! In no_std mode, almost all of the same API is available and works the same
|
||||||
|
//! way. To depend on Anyhow in no_std mode, disable our default enabled "std"
|
||||||
|
//! feature in Cargo.toml. A global allocator is required.
|
||||||
|
//!
|
||||||
|
//! ```toml
|
||||||
|
//! [dependencies]
|
||||||
|
//! anyhow = { version = "1.0", default-features = false }
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! With versions of Rust older than 1.81, no_std mode may require an additional
|
||||||
|
//! `.map_err(Error::msg)` when working with a non-Anyhow error type inside a
|
||||||
|
//! function that returns Anyhow's error type, as the trait that `?`-based error
|
||||||
|
//! conversions are defined by is only available in std in those old versions.
|
||||||
|
|
||||||
|
#![doc(html_root_url = "https://docs.rs/anyhow/1.0.90")]
|
||||||
|
#![cfg_attr(error_generic_member_access, feature(error_generic_member_access))]
|
||||||
|
#![no_std]
|
||||||
|
#![deny(dead_code, unused_imports, unused_mut)]
|
||||||
|
#![cfg_attr(
|
||||||
|
not(anyhow_no_unsafe_op_in_unsafe_fn_lint),
|
||||||
|
deny(unsafe_op_in_unsafe_fn)
|
||||||
|
)]
|
||||||
|
#![cfg_attr(anyhow_no_unsafe_op_in_unsafe_fn_lint, allow(unused_unsafe))]
|
||||||
|
#![allow(
|
||||||
|
clippy::doc_markdown,
|
||||||
|
clippy::enum_glob_use,
|
||||||
|
clippy::explicit_auto_deref,
|
||||||
|
clippy::extra_unused_type_parameters,
|
||||||
|
clippy::incompatible_msrv,
|
||||||
|
clippy::let_underscore_untyped,
|
||||||
|
clippy::missing_errors_doc,
|
||||||
|
clippy::missing_panics_doc,
|
||||||
|
clippy::module_name_repetitions,
|
||||||
|
clippy::must_use_candidate,
|
||||||
|
clippy::needless_doctest_main,
|
||||||
|
clippy::needless_lifetimes,
|
||||||
|
clippy::new_ret_no_self,
|
||||||
|
clippy::redundant_else,
|
||||||
|
clippy::return_self_not_must_use,
|
||||||
|
clippy::struct_field_names,
|
||||||
|
clippy::unused_self,
|
||||||
|
clippy::used_underscore_binding,
|
||||||
|
clippy::wildcard_imports,
|
||||||
|
clippy::wrong_self_convention
|
||||||
|
)]
|
||||||
|
|
||||||
|
#[cfg(all(
|
||||||
|
anyhow_nightly_testing,
|
||||||
|
feature = "std",
|
||||||
|
not(error_generic_member_access)
|
||||||
|
))]
|
||||||
|
compile_error!("Build script probe failed to compile.");
|
||||||
|
|
||||||
|
extern crate alloc;
|
||||||
|
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
extern crate std;
|
||||||
|
|
||||||
|
#[macro_use]
|
||||||
|
mod backtrace;
|
||||||
|
mod chain;
|
||||||
|
mod context;
|
||||||
|
mod ensure;
|
||||||
|
mod error;
|
||||||
|
mod fmt;
|
||||||
|
mod kind;
|
||||||
|
mod macros;
|
||||||
|
mod ptr;
|
||||||
|
mod wrapper;
|
||||||
|
|
||||||
|
use crate::error::ErrorImpl;
|
||||||
|
use crate::ptr::Own;
|
||||||
|
use core::fmt::Display;
|
||||||
|
|
||||||
|
#[cfg(all(not(feature = "std"), anyhow_no_core_error))]
|
||||||
|
use core::fmt::Debug;
|
||||||
|
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
use std::error::Error as StdError;
|
||||||
|
|
||||||
|
#[cfg(not(any(feature = "std", anyhow_no_core_error)))]
|
||||||
|
use core::error::Error as StdError;
|
||||||
|
|
||||||
|
#[cfg(all(not(feature = "std"), anyhow_no_core_error))]
|
||||||
|
trait StdError: Debug + Display {
|
||||||
|
fn source(&self) -> Option<&(dyn StdError + 'static)> {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[doc(no_inline)]
|
||||||
|
pub use anyhow as format_err;
|
||||||
|
|
||||||
|
/// The `Error` type, a wrapper around a dynamic error type.
|
||||||
|
///
|
||||||
|
/// `Error` works a lot like `Box<dyn std::error::Error>`, but with these
|
||||||
|
/// differences:
|
||||||
|
///
|
||||||
|
/// - `Error` requires that the error is `Send`, `Sync`, and `'static`.
|
||||||
|
/// - `Error` guarantees that a backtrace is available, even if the underlying
|
||||||
|
/// error type does not provide one.
|
||||||
|
/// - `Error` is represented as a narrow pointer — exactly one word in
|
||||||
|
/// size instead of two.
|
||||||
|
///
|
||||||
|
/// <br>
|
||||||
|
///
|
||||||
|
/// # Display representations
|
||||||
|
///
|
||||||
|
/// When you print an error object using "{}" or to_string(), only the outermost
|
||||||
|
/// underlying error or context is printed, not any of the lower level causes.
|
||||||
|
/// This is exactly as if you had called the Display impl of the error from
|
||||||
|
/// which you constructed your anyhow::Error.
|
||||||
|
///
|
||||||
|
/// ```console
|
||||||
|
/// Failed to read instrs from ./path/to/instrs.json
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// To print causes as well using anyhow's default formatting of causes, use the
|
||||||
|
/// alternate selector "{:#}".
|
||||||
|
///
|
||||||
|
/// ```console
|
||||||
|
/// Failed to read instrs from ./path/to/instrs.json: No such file or directory (os error 2)
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// The Debug format "{:?}" includes your backtrace if one was captured. Note
|
||||||
|
/// that this is the representation you get by default if you return an error
|
||||||
|
/// from `fn main` instead of printing it explicitly yourself.
|
||||||
|
///
|
||||||
|
/// ```console
|
||||||
|
/// Error: Failed to read instrs from ./path/to/instrs.json
|
||||||
|
///
|
||||||
|
/// Caused by:
|
||||||
|
/// No such file or directory (os error 2)
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// and if there is a backtrace available:
|
||||||
|
///
|
||||||
|
/// ```console
|
||||||
|
/// Error: Failed to read instrs from ./path/to/instrs.json
|
||||||
|
///
|
||||||
|
/// Caused by:
|
||||||
|
/// No such file or directory (os error 2)
|
||||||
|
///
|
||||||
|
/// Stack backtrace:
|
||||||
|
/// 0: <E as anyhow::context::ext::StdError>::ext_context
|
||||||
|
/// at /git/anyhow/src/backtrace.rs:26
|
||||||
|
/// 1: core::result::Result<T,E>::map_err
|
||||||
|
/// at /git/rustc/src/libcore/result.rs:596
|
||||||
|
/// 2: anyhow::context::<impl anyhow::Context<T,E> for core::result::Result<T,E>>::with_context
|
||||||
|
/// at /git/anyhow/src/context.rs:58
|
||||||
|
/// 3: testing::main
|
||||||
|
/// at src/main.rs:5
|
||||||
|
/// 4: std::rt::lang_start
|
||||||
|
/// at /git/rustc/src/libstd/rt.rs:61
|
||||||
|
/// 5: main
|
||||||
|
/// 6: __libc_start_main
|
||||||
|
/// 7: _start
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// To see a conventional struct-style Debug representation, use "{:#?}".
|
||||||
|
///
|
||||||
|
/// ```console
|
||||||
|
/// Error {
|
||||||
|
/// context: "Failed to read instrs from ./path/to/instrs.json",
|
||||||
|
/// source: Os {
|
||||||
|
/// code: 2,
|
||||||
|
/// kind: NotFound,
|
||||||
|
/// message: "No such file or directory",
|
||||||
|
/// },
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// If none of the built-in representations are appropriate and you would prefer
|
||||||
|
/// to render the error and its cause chain yourself, it can be done something
|
||||||
|
/// like this:
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use anyhow::{Context, Result};
|
||||||
|
///
|
||||||
|
/// fn main() {
|
||||||
|
/// if let Err(err) = try_main() {
|
||||||
|
/// eprintln!("ERROR: {}", err);
|
||||||
|
/// err.chain().skip(1).for_each(|cause| eprintln!("because: {}", cause));
|
||||||
|
/// std::process::exit(1);
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// fn try_main() -> Result<()> {
|
||||||
|
/// # const IGNORE: &str = stringify! {
|
||||||
|
/// ...
|
||||||
|
/// # };
|
||||||
|
/// # Ok(())
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
#[repr(transparent)]
|
||||||
|
pub struct Error {
|
||||||
|
inner: Own<ErrorImpl>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Iterator of a chain of source errors.
|
||||||
|
///
|
||||||
|
/// This type is the iterator returned by [`Error::chain`].
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use anyhow::Error;
|
||||||
|
/// use std::io;
|
||||||
|
///
|
||||||
|
/// pub fn underlying_io_error_kind(error: &Error) -> Option<io::ErrorKind> {
|
||||||
|
/// for cause in error.chain() {
|
||||||
|
/// if let Some(io_error) = cause.downcast_ref::<io::Error>() {
|
||||||
|
/// return Some(io_error.kind());
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
/// None
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct Chain<'a> {
|
||||||
|
state: crate::chain::ChainState<'a>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// `Result<T, Error>`
|
||||||
|
///
|
||||||
|
/// This is a reasonable return type to use throughout your application but also
|
||||||
|
/// for `fn main`; if you do, failures will be printed along with any
|
||||||
|
/// [context][Context] and a backtrace if one was captured.
|
||||||
|
///
|
||||||
|
/// `anyhow::Result` may be used with one *or* two type parameters.
|
||||||
|
///
|
||||||
|
/// ```rust
|
||||||
|
/// use anyhow::Result;
|
||||||
|
///
|
||||||
|
/// # const IGNORE: &str = stringify! {
|
||||||
|
/// fn demo1() -> Result<T> {...}
|
||||||
|
/// // ^ equivalent to std::result::Result<T, anyhow::Error>
|
||||||
|
///
|
||||||
|
/// fn demo2() -> Result<T, OtherError> {...}
|
||||||
|
/// // ^ equivalent to std::result::Result<T, OtherError>
|
||||||
|
/// # };
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # pub trait Deserialize {}
|
||||||
|
/// #
|
||||||
|
/// # mod serde_json {
|
||||||
|
/// # use super::Deserialize;
|
||||||
|
/// # use std::io;
|
||||||
|
/// #
|
||||||
|
/// # pub fn from_str<T: Deserialize>(json: &str) -> io::Result<T> {
|
||||||
|
/// # unimplemented!()
|
||||||
|
/// # }
|
||||||
|
/// # }
|
||||||
|
/// #
|
||||||
|
/// # #[derive(Debug)]
|
||||||
|
/// # struct ClusterMap;
|
||||||
|
/// #
|
||||||
|
/// # impl Deserialize for ClusterMap {}
|
||||||
|
/// #
|
||||||
|
/// use anyhow::Result;
|
||||||
|
///
|
||||||
|
/// fn main() -> Result<()> {
|
||||||
|
/// # return Ok(());
|
||||||
|
/// let config = std::fs::read_to_string("cluster.json")?;
|
||||||
|
/// let map: ClusterMap = serde_json::from_str(&config)?;
|
||||||
|
/// println!("cluster info: {:#?}", map);
|
||||||
|
/// Ok(())
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
pub type Result<T, E = Error> = core::result::Result<T, E>;
|
||||||
|
|
||||||
|
/// Provides the `context` method for `Result`.
|
||||||
|
///
|
||||||
|
/// This trait is sealed and cannot be implemented for types outside of
|
||||||
|
/// `anyhow`.
|
||||||
|
///
|
||||||
|
/// <br>
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use anyhow::{Context, Result};
|
||||||
|
/// use std::fs;
|
||||||
|
/// use std::path::PathBuf;
|
||||||
|
///
|
||||||
|
/// pub struct ImportantThing {
|
||||||
|
/// path: PathBuf,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// impl ImportantThing {
|
||||||
|
/// # const IGNORE: &'static str = stringify! {
|
||||||
|
/// pub fn detach(&mut self) -> Result<()> {...}
|
||||||
|
/// # };
|
||||||
|
/// # fn detach(&mut self) -> Result<()> {
|
||||||
|
/// # unimplemented!()
|
||||||
|
/// # }
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// pub fn do_it(mut it: ImportantThing) -> Result<Vec<u8>> {
|
||||||
|
/// it.detach().context("Failed to detach the important thing")?;
|
||||||
|
///
|
||||||
|
/// let path = &it.path;
|
||||||
|
/// let content = fs::read(path)
|
||||||
|
/// .with_context(|| format!("Failed to read instrs from {}", path.display()))?;
|
||||||
|
///
|
||||||
|
/// Ok(content)
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// When printed, the outermost context would be printed first and the lower
|
||||||
|
/// level underlying causes would be enumerated below.
|
||||||
|
///
|
||||||
|
/// ```console
|
||||||
|
/// Error: Failed to read instrs from ./path/to/instrs.json
|
||||||
|
///
|
||||||
|
/// Caused by:
|
||||||
|
/// No such file or directory (os error 2)
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// Refer to the [Display representations] documentation for other forms in
|
||||||
|
/// which this context chain can be rendered.
|
||||||
|
///
|
||||||
|
/// [Display representations]: Error#display-representations
|
||||||
|
///
|
||||||
|
/// <br>
|
||||||
|
///
|
||||||
|
/// # Effect on downcasting
|
||||||
|
///
|
||||||
|
/// After attaching context of type `C` onto an error of type `E`, the resulting
|
||||||
|
/// `anyhow::Error` may be downcast to `C` **or** to `E`.
|
||||||
|
///
|
||||||
|
/// That is, in codebases that rely on downcasting, Anyhow's context supports
|
||||||
|
/// both of the following use cases:
|
||||||
|
///
|
||||||
|
/// - **Attaching context whose type is insignificant onto errors whose type
|
||||||
|
/// is used in downcasts.**
|
||||||
|
///
|
||||||
|
/// In other error libraries whose context is not designed this way, it can
|
||||||
|
/// be risky to introduce context to existing code because new context might
|
||||||
|
/// break existing working downcasts. In Anyhow, any downcast that worked
|
||||||
|
/// before adding context will continue to work after you add a context, so
|
||||||
|
/// you should freely add human-readable context to errors wherever it would
|
||||||
|
/// be helpful.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # use anyhow::bail;
|
||||||
|
/// # use thiserror::Error;
|
||||||
|
/// #
|
||||||
|
/// # #[derive(Error, Debug)]
|
||||||
|
/// # #[error("???")]
|
||||||
|
/// # struct SuspiciousError;
|
||||||
|
/// #
|
||||||
|
/// # fn helper() -> Result<()> {
|
||||||
|
/// # bail!(SuspiciousError);
|
||||||
|
/// # }
|
||||||
|
/// #
|
||||||
|
/// use anyhow::{Context, Result};
|
||||||
|
///
|
||||||
|
/// fn do_it() -> Result<()> {
|
||||||
|
/// helper().context("Failed to complete the work")?;
|
||||||
|
/// # const IGNORE: &str = stringify! {
|
||||||
|
/// ...
|
||||||
|
/// # };
|
||||||
|
/// # unreachable!()
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// fn main() {
|
||||||
|
/// let err = do_it().unwrap_err();
|
||||||
|
/// if let Some(e) = err.downcast_ref::<SuspiciousError>() {
|
||||||
|
/// // If helper() returned SuspiciousError, this downcast will
|
||||||
|
/// // correctly succeed even with the context in between.
|
||||||
|
/// # return;
|
||||||
|
/// }
|
||||||
|
/// # panic!("expected downcast to succeed");
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// - **Attaching context whose type is used in downcasts onto errors whose
|
||||||
|
/// type is insignificant.**
|
||||||
|
///
|
||||||
|
/// Some codebases prefer to use machine-readable context to categorize
|
||||||
|
/// lower level errors in a way that will be actionable to higher levels of
|
||||||
|
/// the application.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # use anyhow::bail;
|
||||||
|
/// # use thiserror::Error;
|
||||||
|
/// #
|
||||||
|
/// # #[derive(Error, Debug)]
|
||||||
|
/// # #[error("???")]
|
||||||
|
/// # struct HelperFailed;
|
||||||
|
/// #
|
||||||
|
/// # fn helper() -> Result<()> {
|
||||||
|
/// # bail!("no such file or directory");
|
||||||
|
/// # }
|
||||||
|
/// #
|
||||||
|
/// use anyhow::{Context, Result};
|
||||||
|
///
|
||||||
|
/// fn do_it() -> Result<()> {
|
||||||
|
/// helper().context(HelperFailed)?;
|
||||||
|
/// # const IGNORE: &str = stringify! {
|
||||||
|
/// ...
|
||||||
|
/// # };
|
||||||
|
/// # unreachable!()
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// fn main() {
|
||||||
|
/// let err = do_it().unwrap_err();
|
||||||
|
/// if let Some(e) = err.downcast_ref::<HelperFailed>() {
|
||||||
|
/// // If helper failed, this downcast will succeed because
|
||||||
|
/// // HelperFailed is the context that has been attached to
|
||||||
|
/// // that error.
|
||||||
|
/// # return;
|
||||||
|
/// }
|
||||||
|
/// # panic!("expected downcast to succeed");
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
pub trait Context<T, E>: context::private::Sealed {
|
||||||
|
/// Wrap the error value with additional context.
|
||||||
|
fn context<C>(self, context: C) -> Result<T, Error>
|
||||||
|
where
|
||||||
|
C: Display + Send + Sync + 'static;
|
||||||
|
|
||||||
|
/// Wrap the error value with additional context that is evaluated lazily
|
||||||
|
/// only once an error does occur.
|
||||||
|
fn with_context<C, F>(self, f: F) -> Result<T, Error>
|
||||||
|
where
|
||||||
|
C: Display + Send + Sync + 'static,
|
||||||
|
F: FnOnce() -> C;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Equivalent to Ok::<_, anyhow::Error>(value).
|
||||||
|
///
|
||||||
|
/// This simplifies creation of an anyhow::Result in places where type inference
|
||||||
|
/// cannot deduce the `E` type of the result — without needing to write
|
||||||
|
/// `Ok::<_, anyhow::Error>(value)`.
|
||||||
|
///
|
||||||
|
/// One might think that `anyhow::Result::Ok(value)` would work in such cases
|
||||||
|
/// but it does not.
|
||||||
|
///
|
||||||
|
/// ```console
|
||||||
|
/// error[E0282]: type annotations needed for `std::result::Result<i32, E>`
|
||||||
|
/// --> src/main.rs:11:13
|
||||||
|
/// |
|
||||||
|
/// 11 | let _ = anyhow::Result::Ok(1);
|
||||||
|
/// | - ^^^^^^^^^^^^^^^^^^ cannot infer type for type parameter `E` declared on the enum `Result`
|
||||||
|
/// | |
|
||||||
|
/// | consider giving this pattern the explicit type `std::result::Result<i32, E>`, where the type parameter `E` is specified
|
||||||
|
/// ```
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub fn Ok<T>(t: T) -> Result<T> {
|
||||||
|
Result::Ok(t)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Not public API. Referenced by macro-generated code.
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub mod __private {
|
||||||
|
use self::not::Bool;
|
||||||
|
use crate::Error;
|
||||||
|
use alloc::fmt;
|
||||||
|
use core::fmt::Arguments;
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub use crate::ensure::{BothDebug, NotBothDebug};
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub use alloc::format;
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub use core::result::Result::Err;
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub use core::{concat, format_args, stringify};
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub mod kind {
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub use crate::kind::{AdhocKind, TraitKind};
|
||||||
|
|
||||||
|
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub use crate::kind::BoxedKind;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[inline]
|
||||||
|
#[cold]
|
||||||
|
pub fn format_err(args: Arguments) -> Error {
|
||||||
|
#[cfg(anyhow_no_fmt_arguments_as_str)]
|
||||||
|
let fmt_arguments_as_str = None::<&str>;
|
||||||
|
#[cfg(not(anyhow_no_fmt_arguments_as_str))]
|
||||||
|
let fmt_arguments_as_str = args.as_str();
|
||||||
|
|
||||||
|
if let Some(message) = fmt_arguments_as_str {
|
||||||
|
// anyhow!("literal"), can downcast to &'static str
|
||||||
|
Error::msg(message)
|
||||||
|
} else {
|
||||||
|
// anyhow!("interpolate {var}"), can downcast to String
|
||||||
|
Error::msg(fmt::format(args))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[inline]
|
||||||
|
#[cold]
|
||||||
|
#[must_use]
|
||||||
|
pub fn must_use(error: Error) -> Error {
|
||||||
|
error
|
||||||
|
}
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[inline]
|
||||||
|
pub fn not(cond: impl Bool) -> bool {
|
||||||
|
cond.not()
|
||||||
|
}
|
||||||
|
|
||||||
|
mod not {
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub trait Bool {
|
||||||
|
fn not(self) -> bool;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Bool for bool {
|
||||||
|
#[inline]
|
||||||
|
fn not(self) -> bool {
|
||||||
|
!self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Bool for &bool {
|
||||||
|
#[inline]
|
||||||
|
fn not(self) -> bool {
|
||||||
|
!*self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
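
// Brief consumer-side sketch (hypothetical, not part of this file): the
// `anyhow::Ok` helper defined above pins the error type to `anyhow::Error`,
// so `?` works inside a closure without writing `Ok::<_, anyhow::Error>(...)`.
fn anyhow_ok_usage_sketch() -> anyhow::Result<i32> {
    // The closure's return type is inferred as Result<i32, anyhow::Error>.
    let n = (|| anyhow::Ok("42".parse::<i32>()?))()?;
    Ok(n)
}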

vendor/anyhow/src/macros.rs (vendored, new file, 242 lines)
@ -0,0 +1,242 @@

/// Return early with an error.
|
||||||
|
///
|
||||||
|
/// This macro is equivalent to
|
||||||
|
/// <code>return Err([anyhow!($args\...)][anyhow!])</code>.
|
||||||
|
///
|
||||||
|
/// The surrounding function's or closure's return value is required to be
|
||||||
|
/// <code>Result<_, [anyhow::Error][crate::Error]></code>.
|
||||||
|
///
|
||||||
|
/// [anyhow!]: crate::anyhow
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # use anyhow::{bail, Result};
|
||||||
|
/// #
|
||||||
|
/// # fn has_permission(user: usize, resource: usize) -> bool {
|
||||||
|
/// # true
|
||||||
|
/// # }
|
||||||
|
/// #
|
||||||
|
/// # fn main() -> Result<()> {
|
||||||
|
/// # let user = 0;
|
||||||
|
/// # let resource = 0;
|
||||||
|
/// #
|
||||||
|
/// if !has_permission(user, resource) {
|
||||||
|
/// bail!("permission denied for accessing {}", resource);
|
||||||
|
/// }
|
||||||
|
/// # Ok(())
|
||||||
|
/// # }
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # use anyhow::{bail, Result};
|
||||||
|
/// # use thiserror::Error;
|
||||||
|
/// #
|
||||||
|
/// # const MAX_DEPTH: usize = 1;
|
||||||
|
/// #
|
||||||
|
/// #[derive(Error, Debug)]
|
||||||
|
/// enum ScienceError {
|
||||||
|
/// #[error("recursion limit exceeded")]
|
||||||
|
/// RecursionLimitExceeded,
|
||||||
|
/// # #[error("...")]
|
||||||
|
/// # More = (stringify! {
|
||||||
|
/// ...
|
||||||
|
/// # }, 1).1,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// # fn main() -> Result<()> {
|
||||||
|
/// # let depth = 0;
|
||||||
|
/// #
|
||||||
|
/// if depth > MAX_DEPTH {
|
||||||
|
/// bail!(ScienceError::RecursionLimitExceeded);
|
||||||
|
/// }
|
||||||
|
/// # Ok(())
|
||||||
|
/// # }
|
||||||
|
/// ```
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! bail {
|
||||||
|
($msg:literal $(,)?) => {
|
||||||
|
return $crate::__private::Err($crate::__anyhow!($msg))
|
||||||
|
};
|
||||||
|
($err:expr $(,)?) => {
|
||||||
|
return $crate::__private::Err($crate::__anyhow!($err))
|
||||||
|
};
|
||||||
|
($fmt:expr, $($arg:tt)*) => {
|
||||||
|
return $crate::__private::Err($crate::__anyhow!($fmt, $($arg)*))
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! __ensure {
|
||||||
|
($ensure:item) => {
|
||||||
|
/// Return early with an error if a condition is not satisfied.
|
||||||
|
///
|
||||||
|
/// This macro is equivalent to
|
||||||
|
/// <code>if !$cond { return Err([anyhow!($args\...)][anyhow!]); }</code>.
|
||||||
|
///
|
||||||
|
/// The surrounding function's or closure's return value is required to be
|
||||||
|
/// <code>Result<_, [anyhow::Error][crate::Error]></code>.
|
||||||
|
///
|
||||||
|
/// Analogously to `assert!`, `ensure!` takes a condition and exits the function
|
||||||
|
/// if the condition fails. Unlike `assert!`, `ensure!` returns an `Error`
|
||||||
|
/// rather than panicking.
|
||||||
|
///
|
||||||
|
/// [anyhow!]: crate::anyhow
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # use anyhow::{ensure, Result};
|
||||||
|
/// #
|
||||||
|
/// # fn main() -> Result<()> {
|
||||||
|
/// # let user = 0;
|
||||||
|
/// #
|
||||||
|
/// ensure!(user == 0, "only user 0 is allowed");
|
||||||
|
/// # Ok(())
|
||||||
|
/// # }
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # use anyhow::{ensure, Result};
|
||||||
|
/// # use thiserror::Error;
|
||||||
|
/// #
|
||||||
|
/// # const MAX_DEPTH: usize = 1;
|
||||||
|
/// #
|
||||||
|
/// #[derive(Error, Debug)]
|
||||||
|
/// enum ScienceError {
|
||||||
|
/// #[error("recursion limit exceeded")]
|
||||||
|
/// RecursionLimitExceeded,
|
||||||
|
/// # #[error("...")]
|
||||||
|
/// # More = (stringify! {
|
||||||
|
/// ...
|
||||||
|
/// # }, 1).1,
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// # fn main() -> Result<()> {
|
||||||
|
/// # let depth = 0;
|
||||||
|
/// #
|
||||||
|
/// ensure!(depth <= MAX_DEPTH, ScienceError::RecursionLimitExceeded);
|
||||||
|
/// # Ok(())
|
||||||
|
/// # }
|
||||||
|
/// ```
|
||||||
|
$ensure
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(doc)]
|
||||||
|
__ensure![
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! ensure {
|
||||||
|
($cond:expr $(,)?) => {
|
||||||
|
if !$cond {
|
||||||
|
return $crate::__private::Err($crate::Error::msg(
|
||||||
|
$crate::__private::concat!("Condition failed: `", $crate::__private::stringify!($cond), "`")
|
||||||
|
));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
($cond:expr, $msg:literal $(,)?) => {
|
||||||
|
if !$cond {
|
||||||
|
return $crate::__private::Err($crate::__anyhow!($msg));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
($cond:expr, $err:expr $(,)?) => {
|
||||||
|
if !$cond {
|
||||||
|
return $crate::__private::Err($crate::__anyhow!($err));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
($cond:expr, $fmt:expr, $($arg:tt)*) => {
|
||||||
|
if !$cond {
|
||||||
|
return $crate::__private::Err($crate::__anyhow!($fmt, $($arg)*));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
];
|
||||||
|
|
||||||
|
#[cfg(not(doc))]
|
||||||
|
__ensure![
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! ensure {
|
||||||
|
($($tt:tt)*) => {
|
||||||
|
$crate::__parse_ensure!(
|
||||||
|
/* state */ 0
|
||||||
|
/* stack */ ()
|
||||||
|
/* bail */ ($($tt)*)
|
||||||
|
/* fuel */ (~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~)
|
||||||
|
/* parse */ {()}
|
||||||
|
/* dup */ ($($tt)*)
|
||||||
|
/* rest */ $($tt)*
|
||||||
|
)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Construct an ad-hoc error from a string or existing non-`anyhow` error
|
||||||
|
/// value.
|
||||||
|
///
|
||||||
|
/// This evaluates to an [`Error`][crate::Error]. It can take either just a
|
||||||
|
/// string, or a format string with arguments. It also can take any custom type
|
||||||
|
/// which implements `Debug` and `Display`.
|
||||||
|
///
|
||||||
|
/// If called with a single argument whose type implements `std::error::Error`
|
||||||
|
/// (in addition to `Debug` and `Display`, which are always required), then that
|
||||||
|
/// Error impl's `source` is preserved as the `source` of the resulting
|
||||||
|
/// `anyhow::Error`.
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # type V = ();
|
||||||
|
/// #
|
||||||
|
/// use anyhow::{anyhow, Result};
|
||||||
|
///
|
||||||
|
/// fn lookup(key: &str) -> Result<V> {
|
||||||
|
/// if key.len() != 16 {
|
||||||
|
/// return Err(anyhow!("key length must be 16 characters, got {:?}", key));
|
||||||
|
/// }
|
||||||
|
///
|
||||||
|
/// // ...
|
||||||
|
/// # Ok(())
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! anyhow {
|
||||||
|
($msg:literal $(,)?) => {
|
||||||
|
$crate::__private::must_use({
|
||||||
|
let error = $crate::__private::format_err($crate::__private::format_args!($msg));
|
||||||
|
error
|
||||||
|
})
|
||||||
|
};
|
||||||
|
($err:expr $(,)?) => {
|
||||||
|
$crate::__private::must_use({
|
||||||
|
use $crate::__private::kind::*;
|
||||||
|
let error = match $err {
|
||||||
|
error => (&error).anyhow_kind().new(error),
|
||||||
|
};
|
||||||
|
error
|
||||||
|
})
|
||||||
|
};
|
||||||
|
($fmt:expr, $($arg:tt)*) => {
|
||||||
|
$crate::Error::msg($crate::__private::format!($fmt, $($arg)*))
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Not public API. This is used in the implementation of some of the other
|
||||||
|
// macros, in which the must_use call is not needed because the value is known
|
||||||
|
// to be used.
|
||||||
|
#[doc(hidden)]
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! __anyhow {
|
||||||
|
($msg:literal $(,)?) => ({
|
||||||
|
let error = $crate::__private::format_err($crate::__private::format_args!($msg));
|
||||||
|
error
|
||||||
|
});
|
||||||
|
($err:expr $(,)?) => ({
|
||||||
|
use $crate::__private::kind::*;
|
||||||
|
let error = match $err {
|
||||||
|
error => (&error).anyhow_kind().new(error),
|
||||||
|
};
|
||||||
|
error
|
||||||
|
});
|
||||||
|
($fmt:expr, $($arg:tt)*) => {
|
||||||
|
$crate::Error::msg($crate::__private::format!($fmt, $($arg)*))
|
||||||
|
};
|
||||||
|
}
|
||||||
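For a quick sense of how the three macros defined in this vendored file fit together, here is a small hedged sketch; the helper function and its messages are made up for illustration and are not taken from this repository:

```rust
use anyhow::{anyhow, bail, ensure, Result};

// Hypothetical validation helper, for illustration only.
fn parse_block_len(input: &str) -> Result<usize> {
    // ensure!: return early with an error if a condition is not satisfied.
    ensure!(!input.is_empty(), "input must not be empty");

    let len: usize = input
        .trim()
        .parse()
        // anyhow!: build an ad-hoc Error from a format string.
        .map_err(|e| anyhow!("not a number ({e}): {input:?}"))?;

    if len % 16 != 0 {
        // bail!: shorthand for `return Err(anyhow!(...))`.
        bail!("block length {len} is not a multiple of 16");
    }
    Ok(len)
}

fn main() {
    assert!(parse_block_len("32").is_ok());
    assert!(parse_block_len("33").is_err());
}
```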
199 vendor/anyhow/src/ptr.rs vendored Normal file
@@ -0,0 +1,199 @@
(vendored anyhow source: the internal `Own<T>`, `Ref<'a, T>`, and `Mut<'a, T>` raw-pointer wrappers plus the `CastTo` helper trait that forces turbofish on `.cast::<U>()`)
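The `Own`/`Ref`/`Mut` wrappers summarised above are internal plumbing, but the core ownership trick is easy to isolate. A simplified, non-generic sketch of the same `Box::into_raw`/`Box::from_raw` round trip (names are illustrative, not anyhow's API):

```rust
use std::ptr::NonNull;

// Simplified stand-in for `Own<T>`, fixed to `String` for brevity.
struct OwnedPtr {
    ptr: NonNull<String>,
}

impl OwnedPtr {
    fn new(value: Box<String>) -> Self {
        // Box::into_raw never returns null, so new_unchecked is sound here.
        OwnedPtr {
            ptr: unsafe { NonNull::new_unchecked(Box::into_raw(value)) },
        }
    }

    // Safety: may only be called once; it takes back ownership of the allocation.
    unsafe fn boxed(self) -> Box<String> {
        unsafe { Box::from_raw(self.ptr.as_ptr()) }
    }
}

fn main() {
    let own = OwnedPtr::new(Box::new("hello".to_string()));
    let back = unsafe { own.boxed() }; // the allocation is freed when `back` drops
    assert_eq!(*back, "hello");
}
```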
84 vendor/anyhow/src/wrapper.rs vendored Normal file
@@ -0,0 +1,84 @@
(vendored anyhow source: the `MessageError`, `DisplayError`, and `BoxedError` newtype wrappers that adapt message values and boxed errors to `std::error::Error`)
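The wrapper types summarised above all follow the same newtype idea: route `Debug`/`Display` through the wrapped value so that a message-like payload can act as a `std::error::Error`. A minimal sketch of that pattern under illustrative names (not the vendored types themselves):

```rust
use std::error::Error as StdError;
use std::fmt::{self, Debug, Display};

// `DisplayError`-style adapter: any `Display` value becomes an error,
// with `Display` used for both Debug and Display output.
#[repr(transparent)]
struct DisplayAsError<M>(M);

impl<M: Display> Debug for DisplayAsError<M> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.0, f)
    }
}

impl<M: Display> Display for DisplayAsError<M> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.0, f)
    }
}

impl<M: Display> StdError for DisplayAsError<M> {}

fn main() {
    let err: Box<dyn StdError> = Box::new(DisplayAsError("disk full"));
    println!("Display: {err}");
    println!("Debug:   {err:?}");
}
```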
14 vendor/anyhow/tests/common/mod.rs vendored Normal file
@@ -0,0 +1,14 @@
use anyhow::{bail, Result};
use std::io;

pub fn bail_literal() -> Result<()> {
    bail!("oh no!");
}

pub fn bail_fmt() -> Result<()> {
    bail!("{} {}!", "oh", "no");
}

pub fn bail_error() -> Result<()> {
    bail!(io::Error::new(io::ErrorKind::Other, "oh no!"));
}
7 vendor/anyhow/tests/compiletest.rs vendored Normal file
@@ -0,0 +1,7 @@
#[rustversion::attr(not(nightly), ignore = "requires nightly")]
#[cfg_attr(miri, ignore = "incompatible with miri")]
#[test]
fn ui() {
    let t = trybuild::TestCases::new();
    t.compile_fail("tests/ui/*.rs");
}
53 vendor/anyhow/tests/drop/mod.rs vendored Normal file
@@ -0,0 +1,53 @@
#![allow(clippy::module_name_repetitions)]

use std::error::Error as StdError;
use std::fmt::{self, Display};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

#[derive(Debug)]
pub struct Flag {
    atomic: Arc<AtomicBool>,
}

impl Flag {
    pub fn new() -> Self {
        Flag {
            atomic: Arc::new(AtomicBool::new(false)),
        }
    }

    pub fn get(&self) -> bool {
        self.atomic.load(Ordering::Relaxed)
    }
}

#[derive(Debug)]
pub struct DetectDrop {
    has_dropped: Flag,
}

impl DetectDrop {
    pub fn new(has_dropped: &Flag) -> Self {
        DetectDrop {
            has_dropped: Flag {
                atomic: Arc::clone(&has_dropped.atomic),
            },
        }
    }
}

impl StdError for DetectDrop {}

impl Display for DetectDrop {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "oh no!")
    }
}

impl Drop for DetectDrop {
    fn drop(&mut self) {
        let already_dropped = self.has_dropped.atomic.swap(true, Ordering::Relaxed);
        assert!(!already_dropped);
    }
}
34 vendor/anyhow/tests/test_autotrait.rs vendored Normal file
@@ -0,0 +1,34 @@
#![allow(clippy::extra_unused_type_parameters)]

use anyhow::Error;
use std::panic::{RefUnwindSafe, UnwindSafe};

#[test]
fn test_send() {
    fn assert_send<T: Send>() {}
    assert_send::<Error>();
}

#[test]
fn test_sync() {
    fn assert_sync<T: Sync>() {}
    assert_sync::<Error>();
}

#[test]
fn test_unwind_safe() {
    fn assert_unwind_safe<T: UnwindSafe>() {}
    assert_unwind_safe::<Error>();
}

#[test]
fn test_ref_unwind_safe() {
    fn assert_ref_unwind_safe<T: RefUnwindSafe>() {}
    assert_ref_unwind_safe::<Error>();
}

#[test]
fn test_unpin() {
    fn assert_unpin<T: Unpin>() {}
    assert_unpin::<Error>();
}
15 vendor/anyhow/tests/test_backtrace.rs vendored Normal file
@@ -0,0 +1,15 @@
#![allow(clippy::let_underscore_untyped)]

#[rustversion::not(nightly)]
#[ignore = "requires nightly"]
#[test]
fn test_backtrace() {}

#[rustversion::nightly]
#[test]
fn test_backtrace() {
    use anyhow::anyhow;

    let error = anyhow!("oh no!");
    let _ = error.backtrace();
}
45 vendor/anyhow/tests/test_boxed.rs vendored Normal file
@@ -0,0 +1,45 @@
#![allow(
    // Clippy bug: https://github.com/rust-lang/rust-clippy/issues/7422
    clippy::nonstandard_macro_braces,
)]

use anyhow::anyhow;
use std::error::Error as StdError;
use std::io;
use thiserror::Error;

#[derive(Error, Debug)]
#[error("outer")]
struct MyError {
    source: io::Error,
}

#[test]
fn test_boxed_str() {
    let error = Box::<dyn StdError + Send + Sync>::from("oh no!");
    let error = anyhow!(error);
    assert_eq!("oh no!", error.to_string());
    assert_eq!(
        "oh no!",
        error
            .downcast_ref::<Box<dyn StdError + Send + Sync>>()
            .unwrap()
            .to_string()
    );
}

#[test]
fn test_boxed_thiserror() {
    let error = MyError {
        source: io::Error::new(io::ErrorKind::Other, "oh no!"),
    };
    let error = anyhow!(error);
    assert_eq!("oh no!", error.source().unwrap().to_string());
}

#[test]
fn test_boxed_anyhow() {
    let error = anyhow!("oh no!").context("it failed");
    let error = anyhow!(error);
    assert_eq!("oh no!", error.source().unwrap().to_string());
}
69 vendor/anyhow/tests/test_chain.rs vendored Normal file
@@ -0,0 +1,69 @@
(vendored anyhow test suite for `Error::chain()`: iteration order, reverse iteration, exact `len`/`size_hint` bookkeeping, `Chain::default`, and `Clone`)
172 vendor/anyhow/tests/test_context.rs vendored Normal file
@@ -0,0 +1,172 @@
(vendored anyhow test suite for `Context`: type inference through `.context(...)?`, plus downcasting and drop behaviour across a low/mid/high-level error chain built with `DetectDrop`)
46 vendor/anyhow/tests/test_convert.rs vendored Normal file
@@ -0,0 +1,46 @@
#![allow(clippy::unnecessary_wraps)]

mod drop;

use self::drop::{DetectDrop, Flag};
use anyhow::{Error, Result};
use std::error::Error as StdError;

#[test]
fn test_convert() {
    let has_dropped = Flag::new();
    let error = Error::new(DetectDrop::new(&has_dropped));
    let box_dyn = Box::<dyn StdError>::from(error);
    assert_eq!("oh no!", box_dyn.to_string());
    drop(box_dyn);
    assert!(has_dropped.get());
}

#[test]
fn test_convert_send() {
    let has_dropped = Flag::new();
    let error = Error::new(DetectDrop::new(&has_dropped));
    let box_dyn = Box::<dyn StdError + Send>::from(error);
    assert_eq!("oh no!", box_dyn.to_string());
    drop(box_dyn);
    assert!(has_dropped.get());
}

#[test]
fn test_convert_send_sync() {
    let has_dropped = Flag::new();
    let error = Error::new(DetectDrop::new(&has_dropped));
    let box_dyn = Box::<dyn StdError + Send + Sync>::from(error);
    assert_eq!("oh no!", box_dyn.to_string());
    drop(box_dyn);
    assert!(has_dropped.get());
}

#[test]
fn test_question_mark() -> Result<(), Box<dyn StdError>> {
    fn f() -> Result<()> {
        Ok(())
    }
    f()?;
    Ok(())
}
123 vendor/anyhow/tests/test_downcast.rs vendored Normal file
@@ -0,0 +1,123 @@
(vendored anyhow test suite for downcasting: `downcast`, `downcast_ref`, `downcast_mut`, drop behaviour, `as_ref` conversions, large-alignment payloads, and unsuccessful downcasts)
741 vendor/anyhow/tests/test_ensure.rs vendored Normal file
@@ -0,0 +1,741 @@
(vendored anyhow test suite for the `ensure!` expression parser: operator precedence, closures, unary operators, `if`/`loop`/`match` expressions, paths with generic arguments, macros, trailers, `as` casts, and patterns; the remainder of the file follows)
|
|
||||||
|
let test = || Ok(ensure!(if let ::std::marker::PhantomData = p {} != ()));
|
||||||
|
assert_err(
|
||||||
|
test,
|
||||||
|
"Condition failed: `if let ::std::marker::PhantomData = p {} != ()` (() vs ())",
|
||||||
|
);
|
||||||
|
|
||||||
|
let test = || Ok(ensure!(if let <S as Trait>::V = 0 { 0 } else { 1 } == 1));
|
||||||
|
assert_err(
|
||||||
|
test,
|
||||||
|
"Condition failed: `if let <S as Trait>::V = 0 { 0 } else { 1 } == 1` (0 vs 1)",
|
||||||
|
);
|
||||||
|
|
||||||
|
let test = || Ok(ensure!(for _ in iter::once(()) {} != ()));
|
||||||
|
assert_err(
|
||||||
|
test,
|
||||||
|
"Condition failed: `for _ in iter::once(()) {} != ()` (() vs ())",
|
||||||
|
);
|
||||||
|
|
||||||
|
let test = || Ok(ensure!(if let stringify!(x) = "x" { 0 } else { 1 } == 1));
|
||||||
|
assert_err(
|
||||||
|
test,
|
||||||
|
"Condition failed: `if let stringify!(x) = \"x\" { 0 } else { 1 } == 1` (0 vs 1)",
|
||||||
|
);
|
||||||
|
}
|
||||||
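What the tests above pin down, in one line: a failed `ensure!` renders the stringified condition plus both evaluated operands. A minimal sketch of that observable behavior (the helper below is hypothetical, not code from this diff):

```rust
use anyhow::{ensure, Result};

fn check_block(len: usize) -> Result<()> {
    // With no custom message, a false condition produces an error whose Display
    // is the stringified expression plus the evaluated operands.
    ensure!(len % 16 == 0);
    Ok(())
}

fn main() {
    let err = check_block(5).unwrap_err();
    assert_eq!(
        err.to_string(),
        "Condition failed: `len % 16 == 0` (5 vs 0)"
    );
}
```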
vendor/anyhow/tests/test_ffi.rs (vendored, new file, +18 lines): upstream check that anyhow::Error can cross an extern "C" boundary under deny(improper_ctypes_definitions)
vendor/anyhow/tests/test_fmt.rs (vendored, new file, +93 lines): expected Display, alternate, and Debug renderings of a context chain ("g failed", "g failed: f failed: oh no!", and the "Caused by:" list)
vendor/anyhow/tests/test_macros.rs (vendored, new file, +96 lines): ensure!/anyhow! macro behavior, Send/Sync of temporaries across await points, brace escaping
vendor/anyhow/tests/test_repr.rs (vendored, new file, +30 lines): Error is one word in size, niche optimization for Result<(), Error>, auto traits, drop behavior
vendor/anyhow/tests/test_source.rs (vendored, new file, +62 lines): Error::source() for literal, formatted, and wrapped std::io errors
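test_fmt.rs and test_source.rs fix how context layers render; a short sketch of the same behavior (hedged; the function names are made up):

```rust
use anyhow::{anyhow, Context, Result};

fn read_key() -> Result<Vec<u8>> {
    Err(anyhow!("oh no!"))
}

fn load_config() -> Result<Vec<u8>> {
    // Each .context() adds an outer layer: `{}` shows only the outermost message,
    // `{:#}` joins the layers with ": ", and `{:?}` prints a "Caused by:" list.
    read_key().context("read_key failed")
}

fn main() {
    let err = load_config().unwrap_err();
    assert_eq!(err.to_string(), "read_key failed");
    assert_eq!(format!("{:#}", err), "read_key failed: oh no!");
}
```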
Compile-fail UI fixtures and their expected rustc output:
vendor/anyhow/tests/ui/chained-comparison.rs (+8) / .stderr (+10): ensure!(false == false == true) must surface rustc's "comparison operators cannot be chained" error
vendor/anyhow/tests/ui/empty-ensure.rs (+6) / .stderr (+12): ensure!() with no condition fails to expand
vendor/anyhow/tests/ui/ensure-nonbool.rs (+40) / .stderr (+91): ensure! on &str, &mut bool, a Deref-to-bool wrapper, or a Not-only wrapper fails the private Bool bound
vendor/anyhow/tests/ui/must-use.rs (+11) / .stderr (+12): a discarded anyhow!(..) value trips deny(unused_must_use)
vendor/anyhow/tests/ui/no-impl.rs (+8) / .stderr (+32): anyhow!(T) where T implements neither Display nor Into<Error> reports the anyhow_kind resolution error
vendor/anyhow/tests/ui/temporary-value.rs (+5) / .stderr (+9): anyhow!(&String::new()) borrows a temporary that is dropped while still in use (E0716)
vendor/anyhow/tests/ui/wrong-interpolation.rs (+5) / .stderr (+5): bail!("{} not found") without arguments is a format-string error
vendor/base64/.cargo-checksum.json (vendored, new file, +1 line): cargo vendor checksum manifest pinning the base64 0.22.1 package
vendor/base64/Cargo.toml (vendored, new file, +85 lines): auto-generated (normalized) manifest for base64 0.22.1; edition 2018, rust-version 1.48.0, features alloc/std (default std), benches, examples, and tests gated on those features
vendor/base64/LICENSE-APACHE (vendored, new file, +201 lines): standard Apache License 2.0 text
vendor/base64/LICENSE-MIT (vendored, new file, +21 lines): standard MIT license text, Copyright (c) 2015 Alice Maz
vendor/base64/README.md (vendored, new file, +154 lines): upstream README covering the crate's goals, the FAQ on stripping non-base64 bytes and on canonical encoding/padding, MSRV 1.48.0, no_std/alloc features, and profiling/fuzzing instructions for contributors
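The vendored crate is base64 0.22.1, where encoding and decoding go through an `Engine` rather than the pre-0.20 free functions. A minimal sketch of the 0.22 call pattern (illustrative only, not code from this diff):

```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn main() -> Result<(), base64::DecodeError> {
    let bytes = [0xCA_u8, 0xFE, 0xBA, 0xBE];
    let encoded = STANDARD.encode(bytes);     // "yv66vg==": standard alphabet, canonical padding
    let decoded = STANDARD.decode(&encoded)?; // rejects invalid symbols and invalid padding
    assert_eq!(decoded, bytes.to_vec());
    Ok(())
}
```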
271
vendor/base64/RELEASE-NOTES.md
vendored
Normal file
271
vendor/base64/RELEASE-NOTES.md
vendored
Normal file
|
|
@ -0,0 +1,271 @@
|
||||||
|
# 0.22.1
|
||||||
|
|
||||||
|
- Correct the symbols used for the predefined `alphabet::BIN_HEX`.
|
||||||
|
|
||||||
|
# 0.22.0
|
||||||
|
|
||||||
|
- `DecodeSliceError::OutputSliceTooSmall` is now conservative rather than precise. That is, the error will only occur if the decoded output _cannot_ fit, meaning that `Engine::decode_slice` can now be used with exactly-sized output slices. As part of this, `Engine::internal_decode` now returns `DecodeSliceError` instead of `DecodeError`, but that is not expected to affect any external callers.
|
||||||
|
- `DecodeError::InvalidLength` now refers specifically to the _number of valid symbols_ being invalid (i.e. `len % 4 == 1`), rather than just the number of input bytes. This avoids confusing scenarios when based on interpretation you could make a case for either `InvalidLength` or `InvalidByte` being appropriate.
|
||||||
|
- Decoding is somewhat faster (5-10%)

# 0.21.7

- Support getting an alphabet's contents as a str via `Alphabet::as_str()`

# 0.21.6

- Improved introductory documentation and example

# 0.21.5

- Add `Debug` and `Clone` impls for the general purpose Engine

# 0.21.4

- Make `encoded_len` `const`, allowing the creation of arrays sized to encode compile-time-known data lengths

# 0.21.3

- Implement `source` instead of `cause` on Error types
- Roll back MSRV to 1.48.0 so Debian can continue to live in a time warp
- Slightly faster chunked encoding for short inputs
- Decrease binary size

# 0.21.2

- Rollback MSRV to 1.57.0 -- only dev dependencies need 1.60, not the main code

# 0.21.1

- Remove the possibility of panicking during decoded length calculations
- `DecoderReader` no longer sometimes erroneously ignores padding [#226](https://github.com/marshallpierce/rust-base64/issues/226)

## Breaking changes

- `Engine.internal_decode` return type changed
- Update MSRV to 1.60.0

# 0.21.0

## Migration

### Functions

| < 0.20 function          | 0.21 equivalent                                                                      |
|--------------------------|--------------------------------------------------------------------------------------|
| `encode()`               | `engine::general_purpose::STANDARD.encode()` or `prelude::BASE64_STANDARD.encode()` |
| `encode_config()`        | `engine.encode()`                                                                    |
| `encode_config_buf()`    | `engine.encode_string()`                                                             |
| `encode_config_slice()`  | `engine.encode_slice()`                                                              |
| `decode()`               | `engine::general_purpose::STANDARD.decode()` or `prelude::BASE64_STANDARD.decode()` |
| `decode_config()`        | `engine.decode()`                                                                    |
| `decode_config_buf()`    | `engine.decode_vec()`                                                                |
| `decode_config_slice()`  | `engine.decode_slice()`                                                              |

The short-lived 0.20 functions were the 0.13 functions with `config` replaced with `engine`.
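
As a concrete sketch of the table above (the input literal is invented for the example; `STANDARD` and the `Engine` trait are the names shown in the table):

```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn main() {
    // < 0.21: base64::encode(b"hello") and base64::decode(&encoded)
    // 0.21+: the same operations are methods on an engine.
    let encoded = STANDARD.encode(b"hello");
    let decoded = STANDARD.decode(&encoded).expect("valid base64");
    assert_eq!(decoded, b"hello");
}
```
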

### Padding

If applicable, use the preset engines `engine::STANDARD`, `engine::STANDARD_NO_PAD`, `engine::URL_SAFE`, or
`engine::URL_SAFE_NO_PAD`. The `NO_PAD` ones require that padding is absent when decoding, and the others require
that canonical padding is present.

If you need the < 0.20 behavior that did not care about padding, or want to recreate < 0.20.0's predefined `Config`s
precisely, see the following table.

| 0.13.1 Config   | 0.20.0+ alphabet | `encode_padding` | `decode_padding_mode` |
|-----------------|------------------|------------------|-----------------------|
| STANDARD        | STANDARD         | true             | Indifferent           |
| STANDARD_NO_PAD | STANDARD         | false            | Indifferent           |
| URL_SAFE        | URL_SAFE         | true             | Indifferent           |
| URL_SAFE_NO_PAD | URL_SAFE         | false            | Indifferent           |
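
A sketch of how one might assemble such an engine by hand; `GeneralPurposeConfig`, its builder methods, and `DecodePaddingMode` are assumed from the crate's 0.20+ engine API rather than quoted from this excerpt. This recreates the old padding-indifferent `STANDARD` behavior:

```rust
use base64::{
    alphabet,
    engine::{general_purpose::GeneralPurpose, DecodePaddingMode, GeneralPurposeConfig},
    Engine as _,
};

fn main() {
    // Pre-0.20 STANDARD: pad on encode, accept either form on decode.
    let config = GeneralPurposeConfig::new()
        .with_encode_padding(true)
        .with_decode_padding_mode(DecodePaddingMode::Indifferent);
    let engine = GeneralPurpose::new(&alphabet::STANDARD, config);

    // Both padded and unpadded input decode with an indifferent padding mode.
    assert_eq!(
        engine.decode("aGVsbG8=").unwrap(),
        engine.decode("aGVsbG8").unwrap()
    );
}
```
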

# 0.21.0-rc.1

- Restore the ability to decode into a slice of precisely the correct length with `Engine.decode_slice_unchecked`.
- Add `Engine` as a `pub use` in `prelude`.

# 0.21.0-beta.2

## Breaking changes

- Re-exports of preconfigured engines in `engine` are removed in favor of `base64::prelude::...` that are better suited to those who wish to `use` the entire path to a name.

# 0.21.0-beta.1

## Breaking changes

- `FastPortable` was only meant to be an interim name, and shouldn't have shipped in 0.20. It is now `GeneralPurpose` to make its intended usage more clear.
- `GeneralPurpose` and its config are now `pub use`'d in the `engine` module for convenience.
- Change a few `from()` functions to be `new()`. `from()` causes confusing compiler errors because of confusion with `From::from`, and is a little misleading because some of those invocations are not very cheap as one would usually expect from a `from` call.
- `encode*` and `decode*` top level functions are now methods on `Engine`.
- `DEFAULT_ENGINE` was replaced by `engine::general_purpose::STANDARD`
- Predefined engine consts `engine::general_purpose::{STANDARD, STANDARD_NO_PAD, URL_SAFE, URL_SAFE_NO_PAD}`
- These are `pub use`d into `engine` as well
- The `*_slice` decode/encode functions now return an error instead of panicking when the output slice is too small
- As part of this, there isn't now a public way to decode into a slice _exactly_ the size needed for inputs that aren't multiples of 4 tokens. If adding up to 2 bytes to always be a multiple of 3 bytes for the decode buffer is a problem, file an issue.

## Other changes

- `decoded_len_estimate()` is provided to make it easy to size decode buffers correctly.

# 0.20.0

## Breaking changes

- Update MSRV to 1.57.0
- Decoding can now either ignore padding, require correct padding, or require no padding. The default is to require correct padding.
- The `NO_PAD` config now requires that padding be absent when decoding.

## 0.20.0-alpha.1

### Breaking changes

- Extended the `Config` concept into the `Engine` abstraction, allowing the user to pick different encoding / decoding implementations.
- What was formerly the only algorithm is now the `FastPortable` engine, so named because it's portable (works on any CPU) and relatively fast.
- This opens the door to a portable constant-time implementation ([#153](https://github.com/marshallpierce/rust-base64/pull/153), presumably `ConstantTimePortable`?) for security-sensitive applications that need side-channel resistance, and CPU-specific SIMD implementations for more speed.
- Standard base64 per the RFC is available via `DEFAULT_ENGINE`. To use different alphabets or other settings (padding, etc), create your own engine instance.
- `CharacterSet` is now `Alphabet` (per the RFC), and allows creating custom alphabets. The corresponding tables that were previously code-generated are now built dynamically.
- Since there are already multiple breaking changes, various functions are renamed to be more consistent and discoverable.
- MSRV is now 1.47.0 to allow various things to use `const fn`.
- `DecoderReader` now owns its inner reader, and can expose it via `into_inner()`. For symmetry, `EncoderWriter` can do the same with its writer.
- `encoded_len` is now public so you can size encode buffers precisely.

# 0.13.1

- More precise decode buffer sizing, avoiding unnecessary allocation in `decode_config`.

# 0.13.0

- Config methods are const
- Added `EncoderStringWriter` to allow encoding directly to a String
- `EncoderWriter` now owns its delegate writer rather than keeping a reference to it (though refs still work)
- As a consequence, it is now possible to extract the delegate writer from an `EncoderWriter` via `finish()`, which returns `Result<W>` instead of `Result<()>`. If you were calling `finish()` explicitly, you will now need to use `let _ = foo.finish()` instead of just `foo.finish()` to avoid a warning about the unused value (see the sketch after this list).
- When decoding input that has both an invalid length and an invalid symbol as the last byte, `InvalidByte` will be emitted instead of `InvalidLength` to make the problem more obvious.
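
A minimal sketch of the streaming-encode / `finish()` flow described above. It uses the current engine-based constructor that appears elsewhere in this vendored crate; the 0.13-era call took a `Config` value instead of an `&Engine`, and the input literal is made up for the example:

```rust
use std::io::Write;

use base64::{engine::general_purpose::STANDARD, write::EncoderWriter};

fn main() -> std::io::Result<()> {
    let mut output = Vec::new();
    let mut encoder = EncoderWriter::new(&mut output, &STANDARD);
    encoder.write_all(b"hello")?;
    // finish() writes any final partial chunk plus padding and hands back the delegate writer.
    let _ = encoder.finish()?;
    assert_eq!(&output[..], b"aGVsbG8=");
    Ok(())
}
```
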

# 0.12.2

- Add `BinHex` alphabet

# 0.12.1

- Add `Bcrypt` alphabet

# 0.12.0

- A `Read` implementation (`DecoderReader`) to let users transparently decode data from a b64 input source
- IMAP's modified b64 alphabet
- Relaxed type restrictions to just `AsRef<[u8]>` for main `encode*`/`decode*` functions
- A minor performance improvement in encoding

# 0.11.0

- Minimum rust version 1.34.0
- `no_std` is now supported via the two new features `alloc` and `std`.

# 0.10.1

- Minimum rust version 1.27.2
- Fix bug in streaming encoding ([#90](https://github.com/marshallpierce/rust-base64/pull/90)): if the underlying writer didn't write all the bytes given to it, the remaining bytes would not be retried later. See the docs on `EncoderWriter::write`.
- Make it configurable whether or not to return an error when decoding detects excess trailing bits.

# 0.10.0

- Remove line wrapping. Line wrapping was never a great conceptual fit in this library, and other features (streaming encoding, etc) either couldn't support it or could support only special cases of it with a great increase in complexity. Line wrapping has been pulled out into a [line-wrap](https://crates.io/crates/line-wrap) crate, so it's still available if you need it.
- `Base64Display` creation no longer uses a `Result` because it can't fail, which means its helper methods for common configs that `unwrap()` for you are no longer needed
- Add a streaming encoder `Write` impl to transparently base64 as you write.
- Remove the remaining `unsafe` code.
- Remove whitespace stripping to simplify `no_std` support. No out of the box configs use it, and it's trivial to do yourself if needed: `filter(|b| !b" \n\t\r\x0b\x0c".contains(b))`.
- Detect invalid trailing symbols when decoding and return an error rather than silently ignoring them.

# 0.9.3

- Update safemem

# 0.9.2

- Derive `Clone` for `DecodeError`.

# 0.9.1

- Add support for `crypt(3)`'s base64 variant.

# 0.9.0

- `decode_config_slice` function for no-allocation decoding, analogous to `encode_config_slice`
- Decode performance optimization

# 0.8.0

- `encode_config_slice` function for no-allocation encoding

# 0.7.0

- `STANDARD_NO_PAD` config
- `Base64Display` heap-free wrapper for use in format strings, etc

# 0.6.0

- Decode performance improvements
- Use `unsafe` in fewer places
- Added fuzzers

# 0.5.2

- Avoid usize overflow when calculating length
- Better line wrapping performance

# 0.5.1

- Temporarily disable line wrapping
- Add Apache 2.0 license

# 0.5.0

- MIME support, including configurable line endings and line wrapping
- Removed `decode_ws`
- Renamed `Base64Error` to `DecodeError`

# 0.4.1

- Allow decoding an `AsRef<[u8]>` instead of just a `&str`

# 0.4.0

- Configurable padding
- Encode performance improvements

# 0.3.0

- Added encode/decode functions that do not allocate their own storage
- Decode performance improvements
- Extraneous padding bytes are no longer ignored. Now, an error will be returned.
238 vendor/base64/benches/benchmarks.rs vendored Normal file
@ -0,0 +1,238 @@
|
||||||
|
#[macro_use]
|
||||||
|
extern crate criterion;
|
||||||
|
|
||||||
|
use base64::{
|
||||||
|
display,
|
||||||
|
engine::{general_purpose::STANDARD, Engine},
|
||||||
|
write,
|
||||||
|
};
|
||||||
|
use criterion::{black_box, Bencher, BenchmarkId, Criterion, Throughput};
|
||||||
|
use rand::{Rng, SeedableRng};
|
||||||
|
use std::io::{self, Read, Write};
|
||||||
|
|
||||||
|
fn do_decode_bench(b: &mut Bencher, &size: &usize) {
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
|
||||||
|
fill(&mut v);
|
||||||
|
let encoded = STANDARD.encode(&v);
|
||||||
|
|
||||||
|
b.iter(|| {
|
||||||
|
let orig = STANDARD.decode(&encoded);
|
||||||
|
black_box(&orig);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn do_decode_bench_reuse_buf(b: &mut Bencher, &size: &usize) {
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
|
||||||
|
fill(&mut v);
|
||||||
|
let encoded = STANDARD.encode(&v);
|
||||||
|
|
||||||
|
let mut buf = Vec::new();
|
||||||
|
b.iter(|| {
|
||||||
|
STANDARD.decode_vec(&encoded, &mut buf).unwrap();
|
||||||
|
black_box(&buf);
|
||||||
|
buf.clear();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn do_decode_bench_slice(b: &mut Bencher, &size: &usize) {
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
|
||||||
|
fill(&mut v);
|
||||||
|
let encoded = STANDARD.encode(&v);
|
||||||
|
|
||||||
|
let mut buf = vec![0; size];
|
||||||
|
b.iter(|| {
|
||||||
|
STANDARD.decode_slice(&encoded, &mut buf).unwrap();
|
||||||
|
black_box(&buf);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn do_decode_bench_stream(b: &mut Bencher, &size: &usize) {
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
|
||||||
|
fill(&mut v);
|
||||||
|
let encoded = STANDARD.encode(&v);
|
||||||
|
|
||||||
|
let mut buf = vec![0; size];
|
||||||
|
buf.truncate(0);
|
||||||
|
|
||||||
|
b.iter(|| {
|
||||||
|
let mut cursor = io::Cursor::new(&encoded[..]);
|
||||||
|
let mut decoder = base64::read::DecoderReader::new(&mut cursor, &STANDARD);
|
||||||
|
decoder.read_to_end(&mut buf).unwrap();
|
||||||
|
buf.clear();
|
||||||
|
black_box(&buf);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn do_encode_bench(b: &mut Bencher, &size: &usize) {
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(size);
|
||||||
|
fill(&mut v);
|
||||||
|
b.iter(|| {
|
||||||
|
let e = STANDARD.encode(&v);
|
||||||
|
black_box(&e);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn do_encode_bench_display(b: &mut Bencher, &size: &usize) {
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(size);
|
||||||
|
fill(&mut v);
|
||||||
|
b.iter(|| {
|
||||||
|
let e = format!("{}", display::Base64Display::new(&v, &STANDARD));
|
||||||
|
black_box(&e);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn do_encode_bench_reuse_buf(b: &mut Bencher, &size: &usize) {
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(size);
|
||||||
|
fill(&mut v);
|
||||||
|
let mut buf = String::new();
|
||||||
|
b.iter(|| {
|
||||||
|
STANDARD.encode_string(&v, &mut buf);
|
||||||
|
buf.clear();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn do_encode_bench_slice(b: &mut Bencher, &size: &usize) {
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(size);
|
||||||
|
fill(&mut v);
|
||||||
|
// conservative estimate of encoded size
|
||||||
|
let mut buf = vec![0; v.len() * 2];
|
||||||
|
b.iter(|| STANDARD.encode_slice(&v, &mut buf).unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
fn do_encode_bench_stream(b: &mut Bencher, &size: &usize) {
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(size);
|
||||||
|
fill(&mut v);
|
||||||
|
let mut buf = Vec::with_capacity(size * 2);
|
||||||
|
|
||||||
|
b.iter(|| {
|
||||||
|
buf.clear();
|
||||||
|
let mut stream_enc = write::EncoderWriter::new(&mut buf, &STANDARD);
|
||||||
|
stream_enc.write_all(&v).unwrap();
|
||||||
|
stream_enc.flush().unwrap();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn do_encode_bench_string_stream(b: &mut Bencher, &size: &usize) {
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(size);
|
||||||
|
fill(&mut v);
|
||||||
|
|
||||||
|
b.iter(|| {
|
||||||
|
let mut stream_enc = write::EncoderStringWriter::new(&STANDARD);
|
||||||
|
stream_enc.write_all(&v).unwrap();
|
||||||
|
stream_enc.flush().unwrap();
|
||||||
|
let _ = stream_enc.into_inner();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn do_encode_bench_string_reuse_buf_stream(b: &mut Bencher, &size: &usize) {
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(size);
|
||||||
|
fill(&mut v);
|
||||||
|
|
||||||
|
let mut buf = String::new();
|
||||||
|
b.iter(|| {
|
||||||
|
buf.clear();
|
||||||
|
let mut stream_enc = write::EncoderStringWriter::from_consumer(&mut buf, &STANDARD);
|
||||||
|
stream_enc.write_all(&v).unwrap();
|
||||||
|
stream_enc.flush().unwrap();
|
||||||
|
let _ = stream_enc.into_inner();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fill(v: &mut Vec<u8>) {
|
||||||
|
let cap = v.capacity();
|
||||||
|
// weak randomness is plenty; we just want to not be completely friendly to the branch predictor
|
||||||
|
let mut r = rand::rngs::SmallRng::from_entropy();
|
||||||
|
while v.len() < cap {
|
||||||
|
v.push(r.gen::<u8>());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const BYTE_SIZES: [usize; 5] = [3, 50, 100, 500, 3 * 1024];
|
||||||
|
|
||||||
|
// Benchmarks over these byte sizes take longer so we will run fewer samples to
|
||||||
|
// keep the benchmark runtime reasonable.
|
||||||
|
const LARGE_BYTE_SIZES: [usize; 3] = [3 * 1024 * 1024, 10 * 1024 * 1024, 30 * 1024 * 1024];
|
||||||
|
|
||||||
|
fn encode_benchmarks(c: &mut Criterion, label: &str, byte_sizes: &[usize]) {
|
||||||
|
let mut group = c.benchmark_group(label);
|
||||||
|
group
|
||||||
|
.warm_up_time(std::time::Duration::from_millis(500))
|
||||||
|
.measurement_time(std::time::Duration::from_secs(3));
|
||||||
|
|
||||||
|
for size in byte_sizes {
|
||||||
|
group
|
||||||
|
.throughput(Throughput::Bytes(*size as u64))
|
||||||
|
.bench_with_input(BenchmarkId::new("encode", size), size, do_encode_bench)
|
||||||
|
.bench_with_input(
|
||||||
|
BenchmarkId::new("encode_display", size),
|
||||||
|
size,
|
||||||
|
do_encode_bench_display,
|
||||||
|
)
|
||||||
|
.bench_with_input(
|
||||||
|
BenchmarkId::new("encode_reuse_buf", size),
|
||||||
|
size,
|
||||||
|
do_encode_bench_reuse_buf,
|
||||||
|
)
|
||||||
|
.bench_with_input(
|
||||||
|
BenchmarkId::new("encode_slice", size),
|
||||||
|
size,
|
||||||
|
do_encode_bench_slice,
|
||||||
|
)
|
||||||
|
.bench_with_input(
|
||||||
|
BenchmarkId::new("encode_reuse_buf_stream", size),
|
||||||
|
size,
|
||||||
|
do_encode_bench_stream,
|
||||||
|
)
|
||||||
|
.bench_with_input(
|
||||||
|
BenchmarkId::new("encode_string_stream", size),
|
||||||
|
size,
|
||||||
|
do_encode_bench_string_stream,
|
||||||
|
)
|
||||||
|
.bench_with_input(
|
||||||
|
BenchmarkId::new("encode_string_reuse_buf_stream", size),
|
||||||
|
size,
|
||||||
|
do_encode_bench_string_reuse_buf_stream,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
group.finish();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn decode_benchmarks(c: &mut Criterion, label: &str, byte_sizes: &[usize]) {
|
||||||
|
let mut group = c.benchmark_group(label);
|
||||||
|
|
||||||
|
for size in byte_sizes {
|
||||||
|
group
|
||||||
|
.warm_up_time(std::time::Duration::from_millis(500))
|
||||||
|
.measurement_time(std::time::Duration::from_secs(3))
|
||||||
|
.throughput(Throughput::Bytes(*size as u64))
|
||||||
|
.bench_with_input(BenchmarkId::new("decode", size), size, do_decode_bench)
|
||||||
|
.bench_with_input(
|
||||||
|
BenchmarkId::new("decode_reuse_buf", size),
|
||||||
|
size,
|
||||||
|
do_decode_bench_reuse_buf,
|
||||||
|
)
|
||||||
|
.bench_with_input(
|
||||||
|
BenchmarkId::new("decode_slice", size),
|
||||||
|
size,
|
||||||
|
do_decode_bench_slice,
|
||||||
|
)
|
||||||
|
.bench_with_input(
|
||||||
|
BenchmarkId::new("decode_stream", size),
|
||||||
|
size,
|
||||||
|
do_decode_bench_stream,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
group.finish();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn bench(c: &mut Criterion) {
|
||||||
|
encode_benchmarks(c, "encode_small_input", &BYTE_SIZES[..]);
|
||||||
|
encode_benchmarks(c, "encode_large_input", &LARGE_BYTE_SIZES[..]);
|
||||||
|
decode_benchmarks(c, "decode_small_input", &BYTE_SIZES[..]);
|
||||||
|
decode_benchmarks(c, "decode_large_input", &LARGE_BYTE_SIZES[..]);
|
||||||
|
}
|
||||||
|
|
||||||
|
criterion_group!(benches, bench);
|
||||||
|
criterion_main!(benches);
|
||||||
1 vendor/base64/clippy.toml vendored Normal file
@ -0,0 +1 @@
msrv = "1.48.0"
81 vendor/base64/examples/base64.rs vendored Normal file
@ -0,0 +1,81 @@
|
||||||
|
use std::fs::File;
|
||||||
|
use std::io::{self, Read};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::process;
|
||||||
|
|
||||||
|
use base64::{alphabet, engine, read, write};
|
||||||
|
use clap::Parser;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Parser, strum::EnumString, Default)]
|
||||||
|
#[strum(serialize_all = "kebab-case")]
|
||||||
|
enum Alphabet {
|
||||||
|
#[default]
|
||||||
|
Standard,
|
||||||
|
UrlSafe,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Base64 encode or decode FILE (or standard input), to standard output.
|
||||||
|
#[derive(Debug, Parser)]
|
||||||
|
struct Opt {
|
||||||
|
/// Decode the base64-encoded input (default: encode the input as base64).
|
||||||
|
#[structopt(short = 'd', long = "decode")]
|
||||||
|
decode: bool,
|
||||||
|
|
||||||
|
/// The encoding alphabet: "standard" (default) or "url-safe".
|
||||||
|
#[structopt(long = "alphabet")]
|
||||||
|
alphabet: Option<Alphabet>,
|
||||||
|
|
||||||
|
/// Omit padding characters while encoding, and reject them while decoding.
|
||||||
|
#[structopt(short = 'p', long = "no-padding")]
|
||||||
|
no_padding: bool,
|
||||||
|
|
||||||
|
/// The file to encode or decode.
|
||||||
|
#[structopt(name = "FILE", parse(from_os_str))]
|
||||||
|
file: Option<PathBuf>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
let opt = Opt::parse();
|
||||||
|
let stdin;
|
||||||
|
let mut input: Box<dyn Read> = match opt.file {
|
||||||
|
None => {
|
||||||
|
stdin = io::stdin();
|
||||||
|
Box::new(stdin.lock())
|
||||||
|
}
|
||||||
|
Some(ref f) if f.as_os_str() == "-" => {
|
||||||
|
stdin = io::stdin();
|
||||||
|
Box::new(stdin.lock())
|
||||||
|
}
|
||||||
|
Some(f) => Box::new(File::open(f).unwrap()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let alphabet = opt.alphabet.unwrap_or_default();
|
||||||
|
let engine = engine::GeneralPurpose::new(
|
||||||
|
&match alphabet {
|
||||||
|
Alphabet::Standard => alphabet::STANDARD,
|
||||||
|
Alphabet::UrlSafe => alphabet::URL_SAFE,
|
||||||
|
},
|
||||||
|
match opt.no_padding {
|
||||||
|
true => engine::general_purpose::NO_PAD,
|
||||||
|
false => engine::general_purpose::PAD,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
let stdout = io::stdout();
|
||||||
|
let mut stdout = stdout.lock();
|
||||||
|
let r = if opt.decode {
|
||||||
|
let mut decoder = read::DecoderReader::new(&mut input, &engine);
|
||||||
|
io::copy(&mut decoder, &mut stdout)
|
||||||
|
} else {
|
||||||
|
let mut encoder = write::EncoderWriter::new(&mut stdout, &engine);
|
||||||
|
io::copy(&mut input, &mut encoder)
|
||||||
|
};
|
||||||
|
if let Err(e) = r {
|
||||||
|
eprintln!(
|
||||||
|
"Base64 {} failed with {}",
|
||||||
|
if opt.decode { "decode" } else { "encode" },
|
||||||
|
e
|
||||||
|
);
|
||||||
|
process::exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
34 vendor/base64/icon_CLion.svg vendored Normal file
@ -0,0 +1,34 @@
|
||||||
|
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 128 128">
|
||||||
|
<defs>
|
||||||
|
<linearGradient id="linear-gradient" x1="40.69" y1="-676.56" x2="83.48" y2="-676.56" gradientTransform="matrix(1, 0, 0, -1, 0, -648.86)" gradientUnits="userSpaceOnUse">
|
||||||
|
<stop offset="0" stop-color="#ed358c"/>
|
||||||
|
<stop offset="0.16" stop-color="#e9388c"/>
|
||||||
|
<stop offset="0.3" stop-color="#de418c"/>
|
||||||
|
<stop offset="0.43" stop-color="#cc508c"/>
|
||||||
|
<stop offset="0.57" stop-color="#b2658d"/>
|
||||||
|
<stop offset="0.7" stop-color="#90808d"/>
|
||||||
|
<stop offset="0.83" stop-color="#67a18e"/>
|
||||||
|
<stop offset="0.95" stop-color="#37c78f"/>
|
||||||
|
<stop offset="1" stop-color="#22d88f"/>
|
||||||
|
</linearGradient>
|
||||||
|
<linearGradient id="linear-gradient-2" x1="32.58" y1="-665.27" x2="13.76" y2="-791.59" gradientTransform="matrix(1, 0, 0, -1, 0, -648.86)" gradientUnits="userSpaceOnUse">
|
||||||
|
<stop offset="0.09" stop-color="#22d88f"/>
|
||||||
|
<stop offset="0.9" stop-color="#029de0"/>
|
||||||
|
</linearGradient>
|
||||||
|
<linearGradient id="linear-gradient-3" x1="116.68" y1="-660.66" x2="-12.09" y2="-796.66" xlink:href="#linear-gradient-2"/>
|
||||||
|
<linearGradient id="linear-gradient-4" x1="73.35" y1="-739.1" x2="122.29" y2="-746.06" xlink:href="#linear-gradient-2"/>
|
||||||
|
</defs>
|
||||||
|
<title>icon_CLion</title>
|
||||||
|
<g>
|
||||||
|
<polygon points="49.2 51.8 40.6 55.4 48.4 0 77.8 16.2 49.2 51.8" fill="url(#linear-gradient)"/>
|
||||||
|
<polygon points="44.6 76.8 48.8 0 11.8 23.2 0 94 44.6 76.8" fill="url(#linear-gradient-2)"/>
|
||||||
|
<polygon points="125.4 38.4 109 4.8 77.8 16.2 55 41.4 0 94 41.6 124.4 93.6 77.2 125.4 38.4" fill="url(#linear-gradient-3)"/>
|
||||||
|
<polygon points="53.8 54.6 46.6 98.4 75.8 121 107.8 128 128 82.4 53.8 54.6" fill="url(#linear-gradient-4)"/>
|
||||||
|
</g>
|
||||||
|
<g>
|
||||||
|
<rect x="24" y="24" width="80" height="80"/>
|
||||||
|
<rect x="31.6" y="89" width="30" height="5" fill="#fff"/>
|
||||||
|
<path d="M31,51.2h0A16.83,16.83,0,0,1,48.2,34c6.2,0,10,2,13,5.2l-4.6,5.4c-2.6-2.4-5.2-3.8-8.4-3.8-5.6,0-9.6,4.6-9.6,10.4h0c0,5.6,4,10.4,9.6,10.4,3.8,0,6.2-1.6,8.8-3.8l4.6,4.6c-3.4,3.6-7.2,6-13.6,6A17,17,0,0,1,31,51.2" fill="#fff"/>
|
||||||
|
<path d="M66.6,34.4H74v27H88.4v6.2H66.6V34.4Z" fill="#fff"/>
|
||||||
|
</g>
|
||||||
|
</svg>
|
||||||
|
285 vendor/base64/src/alphabet.rs vendored Normal file
@ -0,0 +1,285 @@
|
||||||
|
//! Provides [Alphabet] and constants for alphabets commonly used in the wild.
|
||||||
|
|
||||||
|
use crate::PAD_BYTE;
|
||||||
|
use core::{convert, fmt};
|
||||||
|
#[cfg(any(feature = "std", test))]
|
||||||
|
use std::error;
|
||||||
|
|
||||||
|
const ALPHABET_SIZE: usize = 64;
|
||||||
|
|
||||||
|
/// An alphabet defines the 64 ASCII characters (symbols) used for base64.
|
||||||
|
///
|
||||||
|
/// Common alphabets are provided as constants, and custom alphabets
|
||||||
|
/// can be made via `from_str` or the `TryFrom<str>` implementation.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// Building and using a custom Alphabet:
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// let custom = base64::alphabet::Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/").unwrap();
|
||||||
|
///
|
||||||
|
/// let engine = base64::engine::GeneralPurpose::new(
|
||||||
|
/// &custom,
|
||||||
|
/// base64::engine::general_purpose::PAD);
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// Building a const:
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use base64::alphabet::Alphabet;
|
||||||
|
///
|
||||||
|
/// static CUSTOM: Alphabet = {
|
||||||
|
/// // Result::unwrap() isn't const yet, but panic!() is OK
|
||||||
|
/// match Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/") {
|
||||||
|
/// Ok(x) => x,
|
||||||
|
/// Err(_) => panic!("creation of alphabet failed"),
|
||||||
|
/// }
|
||||||
|
/// };
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// Building lazily:
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use base64::{
|
||||||
|
/// alphabet::Alphabet,
|
||||||
|
/// engine::{general_purpose::GeneralPurpose, GeneralPurposeConfig},
|
||||||
|
/// };
|
||||||
|
/// use once_cell::sync::Lazy;
|
||||||
|
///
|
||||||
|
/// static CUSTOM: Lazy<Alphabet> = Lazy::new(||
|
||||||
|
/// Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/").unwrap()
|
||||||
|
/// );
|
||||||
|
/// ```
|
||||||
|
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||||
|
pub struct Alphabet {
|
||||||
|
pub(crate) symbols: [u8; ALPHABET_SIZE],
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Alphabet {
|
||||||
|
/// Performs no checks so that it can be const.
|
||||||
|
/// Used only for known-valid strings.
|
||||||
|
const fn from_str_unchecked(alphabet: &str) -> Self {
|
||||||
|
let mut symbols = [0_u8; ALPHABET_SIZE];
|
||||||
|
let source_bytes = alphabet.as_bytes();
|
||||||
|
|
||||||
|
// a way to copy that's allowed in const fn
|
||||||
|
let mut index = 0;
|
||||||
|
while index < ALPHABET_SIZE {
|
||||||
|
symbols[index] = source_bytes[index];
|
||||||
|
index += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
Self { symbols }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create an `Alphabet` from a string of 64 unique printable ASCII bytes.
|
||||||
|
///
|
||||||
|
/// The `=` byte is not allowed as it is used for padding.
|
||||||
|
pub const fn new(alphabet: &str) -> Result<Self, ParseAlphabetError> {
|
||||||
|
let bytes = alphabet.as_bytes();
|
||||||
|
if bytes.len() != ALPHABET_SIZE {
|
||||||
|
return Err(ParseAlphabetError::InvalidLength);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
let mut index = 0;
|
||||||
|
while index < ALPHABET_SIZE {
|
||||||
|
let byte = bytes[index];
|
||||||
|
|
||||||
|
// must be ascii printable. 127 (DEL) is commonly considered printable
|
||||||
|
// for some reason but clearly unsuitable for base64.
|
||||||
|
if !(byte >= 32_u8 && byte <= 126_u8) {
|
||||||
|
return Err(ParseAlphabetError::UnprintableByte(byte));
|
||||||
|
}
|
||||||
|
// = is assumed to be padding, so cannot be used as a symbol
|
||||||
|
if byte == PAD_BYTE {
|
||||||
|
return Err(ParseAlphabetError::ReservedByte(byte));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for duplicates while staying within what const allows.
|
||||||
|
// It's n^2, but only over 64 hot bytes, and only once, so it's likely in the single digit
|
||||||
|
// microsecond range.
|
||||||
|
|
||||||
|
let mut probe_index = 0;
|
||||||
|
while probe_index < ALPHABET_SIZE {
|
||||||
|
if probe_index == index {
|
||||||
|
probe_index += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let probe_byte = bytes[probe_index];
|
||||||
|
|
||||||
|
if byte == probe_byte {
|
||||||
|
return Err(ParseAlphabetError::DuplicatedByte(byte));
|
||||||
|
}
|
||||||
|
|
||||||
|
probe_index += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
index += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Self::from_str_unchecked(alphabet))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a `&str` from the symbols in the `Alphabet`
|
||||||
|
pub fn as_str(&self) -> &str {
|
||||||
|
core::str::from_utf8(&self.symbols).unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl convert::TryFrom<&str> for Alphabet {
|
||||||
|
type Error = ParseAlphabetError;
|
||||||
|
|
||||||
|
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
||||||
|
Self::new(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Possible errors when constructing an [Alphabet] from a `str`.
|
||||||
|
#[derive(Debug, Eq, PartialEq)]
|
||||||
|
pub enum ParseAlphabetError {
|
||||||
|
/// Alphabets must be 64 ASCII bytes
|
||||||
|
InvalidLength,
|
||||||
|
/// All bytes must be unique
|
||||||
|
DuplicatedByte(u8),
|
||||||
|
/// All bytes must be printable (in the range `[32, 126]`).
|
||||||
|
UnprintableByte(u8),
|
||||||
|
/// `=` cannot be used
|
||||||
|
ReservedByte(u8),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for ParseAlphabetError {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
match self {
|
||||||
|
Self::InvalidLength => write!(f, "Invalid length - must be 64 bytes"),
|
||||||
|
Self::DuplicatedByte(b) => write!(f, "Duplicated byte: {:#04x}", b),
|
||||||
|
Self::UnprintableByte(b) => write!(f, "Unprintable byte: {:#04x}", b),
|
||||||
|
Self::ReservedByte(b) => write!(f, "Reserved byte: {:#04x}", b),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(any(feature = "std", test))]
|
||||||
|
impl error::Error for ParseAlphabetError {}
|
||||||
|
|
||||||
|
/// The standard alphabet (with `+` and `/`) specified in [RFC 4648][].
|
||||||
|
///
|
||||||
|
/// [RFC 4648]: https://datatracker.ietf.org/doc/html/rfc4648#section-4
|
||||||
|
pub const STANDARD: Alphabet = Alphabet::from_str_unchecked(
|
||||||
|
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",
|
||||||
|
);
|
||||||
|
|
||||||
|
/// The URL-safe alphabet (with `-` and `_`) specified in [RFC 4648][].
|
||||||
|
///
|
||||||
|
/// [RFC 4648]: https://datatracker.ietf.org/doc/html/rfc4648#section-5
|
||||||
|
pub const URL_SAFE: Alphabet = Alphabet::from_str_unchecked(
|
||||||
|
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_",
|
||||||
|
);
|
||||||
|
|
||||||
|
/// The `crypt(3)` alphabet (with `.` and `/` as the _first_ two characters).
|
||||||
|
///
|
||||||
|
/// Not standardized, but folk wisdom on the net asserts that this alphabet is what crypt uses.
|
||||||
|
pub const CRYPT: Alphabet = Alphabet::from_str_unchecked(
|
||||||
|
"./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
|
||||||
|
);
|
||||||
|
|
||||||
|
/// The bcrypt alphabet.
|
||||||
|
pub const BCRYPT: Alphabet = Alphabet::from_str_unchecked(
|
||||||
|
"./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
|
||||||
|
);
|
||||||
|
|
||||||
|
/// The alphabet used in IMAP-modified UTF-7 (with `+` and `,`).
|
||||||
|
///
|
||||||
|
/// See [RFC 3501](https://tools.ietf.org/html/rfc3501#section-5.1.3)
|
||||||
|
pub const IMAP_MUTF7: Alphabet = Alphabet::from_str_unchecked(
|
||||||
|
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+,",
|
||||||
|
);
|
||||||
|
|
||||||
|
/// The alphabet used in BinHex 4.0 files.
|
||||||
|
///
|
||||||
|
/// See [BinHex 4.0 Definition](http://files.stairways.com/other/binhex-40-specs-info.txt)
|
||||||
|
pub const BIN_HEX: Alphabet = Alphabet::from_str_unchecked(
|
||||||
|
"!\"#$%&'()*+,-012345689@ABCDEFGHIJKLMNPQRSTUVXYZ[`abcdefhijklmpqr",
|
||||||
|
);
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use crate::alphabet::*;
|
||||||
|
use core::convert::TryFrom as _;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn detects_duplicate_start() {
|
||||||
|
assert_eq!(
|
||||||
|
ParseAlphabetError::DuplicatedByte(b'A'),
|
||||||
|
Alphabet::new("AACDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/")
|
||||||
|
.unwrap_err()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn detects_duplicate_end() {
|
||||||
|
assert_eq!(
|
||||||
|
ParseAlphabetError::DuplicatedByte(b'/'),
|
||||||
|
Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789//")
|
||||||
|
.unwrap_err()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn detects_duplicate_middle() {
|
||||||
|
assert_eq!(
|
||||||
|
ParseAlphabetError::DuplicatedByte(b'Z'),
|
||||||
|
Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZZbcdefghijklmnopqrstuvwxyz0123456789+/")
|
||||||
|
.unwrap_err()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn detects_length() {
|
||||||
|
assert_eq!(
|
||||||
|
ParseAlphabetError::InvalidLength,
|
||||||
|
Alphabet::new(
|
||||||
|
"xxxxxxxxxABCDEFGHIJKLMNOPQRSTUVWXYZZbcdefghijklmnopqrstuvwxyz0123456789+/",
|
||||||
|
)
|
||||||
|
.unwrap_err()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn detects_padding() {
|
||||||
|
assert_eq!(
|
||||||
|
ParseAlphabetError::ReservedByte(b'='),
|
||||||
|
Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+=")
|
||||||
|
.unwrap_err()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn detects_unprintable() {
|
||||||
|
// form feed
|
||||||
|
assert_eq!(
|
||||||
|
ParseAlphabetError::UnprintableByte(0xc),
|
||||||
|
Alphabet::new("\x0cBCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/")
|
||||||
|
.unwrap_err()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn same_as_unchecked() {
|
||||||
|
assert_eq!(
|
||||||
|
STANDARD,
|
||||||
|
Alphabet::try_from("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/")
|
||||||
|
.unwrap()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn str_same_as_input() {
|
||||||
|
let alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
||||||
|
let a = Alphabet::try_from(alphabet).unwrap();
|
||||||
|
assert_eq!(alphabet, a.as_str())
|
||||||
|
}
|
||||||
|
}
|
||||||
172 vendor/base64/src/chunked_encoder.rs vendored Normal file
@ -0,0 +1,172 @@
|
||||||
|
use crate::{
|
||||||
|
encode::add_padding,
|
||||||
|
engine::{Config, Engine},
|
||||||
|
};
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
use alloc::string::String;
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
use core::str;
|
||||||
|
|
||||||
|
/// The output mechanism for ChunkedEncoder's encoded bytes.
|
||||||
|
pub trait Sink {
|
||||||
|
type Error;
|
||||||
|
|
||||||
|
/// Handle a chunk of encoded base64 data (as UTF-8 bytes)
|
||||||
|
fn write_encoded_bytes(&mut self, encoded: &[u8]) -> Result<(), Self::Error>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A base64 encoder that emits encoded bytes in chunks without heap allocation.
|
||||||
|
pub struct ChunkedEncoder<'e, E: Engine + ?Sized> {
|
||||||
|
engine: &'e E,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'e, E: Engine + ?Sized> ChunkedEncoder<'e, E> {
|
||||||
|
pub fn new(engine: &'e E) -> ChunkedEncoder<'e, E> {
|
||||||
|
ChunkedEncoder { engine }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn encode<S: Sink>(&self, bytes: &[u8], sink: &mut S) -> Result<(), S::Error> {
|
||||||
|
const BUF_SIZE: usize = 1024;
|
||||||
|
const CHUNK_SIZE: usize = BUF_SIZE / 4 * 3;
|
||||||
|
|
||||||
|
let mut buf = [0; BUF_SIZE];
|
||||||
|
for chunk in bytes.chunks(CHUNK_SIZE) {
|
||||||
|
let mut len = self.engine.internal_encode(chunk, &mut buf);
|
||||||
|
if chunk.len() != CHUNK_SIZE && self.engine.config().encode_padding() {
|
||||||
|
// Final, potentially partial, chunk.
|
||||||
|
// Only need to consider if padding is needed on a partial chunk since full chunk
|
||||||
|
// is a multiple of 3, which therefore won't be padded.
|
||||||
|
// Pad output to multiple of four bytes if required by config.
|
||||||
|
len += add_padding(len, &mut buf[len..]);
|
||||||
|
}
|
||||||
|
sink.write_encoded_bytes(&buf[..len])?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// A really simple sink that just appends to a string
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
pub(crate) struct StringSink<'a> {
|
||||||
|
string: &'a mut String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
impl<'a> StringSink<'a> {
|
||||||
|
pub(crate) fn new(s: &mut String) -> StringSink {
|
||||||
|
StringSink { string: s }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
impl<'a> Sink for StringSink<'a> {
|
||||||
|
type Error = ();
|
||||||
|
|
||||||
|
fn write_encoded_bytes(&mut self, s: &[u8]) -> Result<(), Self::Error> {
|
||||||
|
self.string.push_str(str::from_utf8(s).unwrap());
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
pub mod tests {
|
||||||
|
use rand::{
|
||||||
|
distributions::{Distribution, Uniform},
|
||||||
|
Rng, SeedableRng,
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
alphabet::STANDARD,
|
||||||
|
engine::general_purpose::{GeneralPurpose, GeneralPurposeConfig, PAD},
|
||||||
|
tests::random_engine,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn chunked_encode_empty() {
|
||||||
|
assert_eq!("", chunked_encode_str(&[], PAD));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn chunked_encode_intermediate_fast_loop() {
|
||||||
|
// > 8 bytes input, will enter the pretty fast loop
|
||||||
|
assert_eq!("Zm9vYmFyYmF6cXV4", chunked_encode_str(b"foobarbazqux", PAD));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn chunked_encode_fast_loop() {
|
||||||
|
// > 32 bytes input, will enter the uber fast loop
|
||||||
|
assert_eq!(
|
||||||
|
"Zm9vYmFyYmF6cXV4cXV1eGNvcmdlZ3JhdWx0Z2FycGx5eg==",
|
||||||
|
chunked_encode_str(b"foobarbazquxquuxcorgegraultgarplyz", PAD)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn chunked_encode_slow_loop_only() {
|
||||||
|
// < 8 bytes input, slow loop only
|
||||||
|
assert_eq!("Zm9vYmFy", chunked_encode_str(b"foobar", PAD));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn chunked_encode_matches_normal_encode_random_string_sink() {
|
||||||
|
let helper = StringSinkTestHelper;
|
||||||
|
chunked_encode_matches_normal_encode_random(&helper);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn chunked_encode_matches_normal_encode_random<S: SinkTestHelper>(sink_test_helper: &S) {
|
||||||
|
let mut input_buf: Vec<u8> = Vec::new();
|
||||||
|
let mut output_buf = String::new();
|
||||||
|
let mut rng = rand::rngs::SmallRng::from_entropy();
|
||||||
|
let input_len_range = Uniform::new(1, 10_000);
|
||||||
|
|
||||||
|
for _ in 0..20_000 {
|
||||||
|
input_buf.clear();
|
||||||
|
output_buf.clear();
|
||||||
|
|
||||||
|
let buf_len = input_len_range.sample(&mut rng);
|
||||||
|
for _ in 0..buf_len {
|
||||||
|
input_buf.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
let engine = random_engine(&mut rng);
|
||||||
|
|
||||||
|
let chunk_encoded_string = sink_test_helper.encode_to_string(&engine, &input_buf);
|
||||||
|
engine.encode_string(&input_buf, &mut output_buf);
|
||||||
|
|
||||||
|
assert_eq!(output_buf, chunk_encoded_string, "input len={}", buf_len);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn chunked_encode_str(bytes: &[u8], config: GeneralPurposeConfig) -> String {
|
||||||
|
let mut s = String::new();
|
||||||
|
|
||||||
|
let mut sink = StringSink::new(&mut s);
|
||||||
|
let engine = GeneralPurpose::new(&STANDARD, config);
|
||||||
|
let encoder = ChunkedEncoder::new(&engine);
|
||||||
|
encoder.encode(bytes, &mut sink).unwrap();
|
||||||
|
|
||||||
|
s
|
||||||
|
}
|
||||||
|
|
||||||
|
// An abstraction around sinks so that we can have tests that easily apply to any sink implementation
|
||||||
|
pub trait SinkTestHelper {
|
||||||
|
fn encode_to_string<E: Engine>(&self, engine: &E, bytes: &[u8]) -> String;
|
||||||
|
}
|
||||||
|
|
||||||
|
struct StringSinkTestHelper;
|
||||||
|
|
||||||
|
impl SinkTestHelper for StringSinkTestHelper {
|
||||||
|
fn encode_to_string<E: Engine>(&self, engine: &E, bytes: &[u8]) -> String {
|
||||||
|
let encoder = ChunkedEncoder::new(engine);
|
||||||
|
let mut s = String::new();
|
||||||
|
let mut sink = StringSink::new(&mut s);
|
||||||
|
encoder.encode(bytes, &mut sink).unwrap();
|
||||||
|
|
||||||
|
s
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
386 vendor/base64/src/decode.rs vendored Normal file
@ -0,0 +1,386 @@
|
||||||
|
use crate::engine::{general_purpose::STANDARD, DecodeEstimate, Engine};
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
use alloc::vec::Vec;
|
||||||
|
use core::fmt;
|
||||||
|
#[cfg(any(feature = "std", test))]
|
||||||
|
use std::error;
|
||||||
|
|
||||||
|
/// Errors that can occur while decoding.
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||||
|
pub enum DecodeError {
|
||||||
|
/// An invalid byte was found in the input. The offset and offending byte are provided.
|
||||||
|
///
|
||||||
|
/// Padding characters (`=`) interspersed in the encoded form are invalid, as they may only
|
||||||
|
/// be present as the last 0-2 bytes of input.
|
||||||
|
///
|
||||||
|
/// This error may also indicate that extraneous trailing input bytes are present, causing
|
||||||
|
/// otherwise valid padding to no longer be the last bytes of input.
|
||||||
|
InvalidByte(usize, u8),
|
||||||
|
/// The length of the input, as measured in valid base64 symbols, is invalid.
|
||||||
|
/// There must be 2-4 symbols in the last input quad.
|
||||||
|
InvalidLength(usize),
|
||||||
|
/// The last non-padding input symbol's encoded 6 bits have nonzero bits that will be discarded.
|
||||||
|
/// This is indicative of corrupted or truncated Base64.
|
||||||
|
/// Unlike [DecodeError::InvalidByte], which reports symbols that aren't in the alphabet,
|
||||||
|
/// this error is for symbols that are in the alphabet but represent nonsensical encodings.
|
||||||
|
InvalidLastSymbol(usize, u8),
|
||||||
|
/// The nature of the padding was not as configured: absent or incorrect when it must be
|
||||||
|
/// canonical, or present when it must be absent, etc.
|
||||||
|
InvalidPadding,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for DecodeError {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
match *self {
|
||||||
|
Self::InvalidByte(index, byte) => {
|
||||||
|
write!(f, "Invalid symbol {}, offset {}.", byte, index)
|
||||||
|
}
|
||||||
|
Self::InvalidLength(len) => write!(f, "Invalid input length: {}", len),
|
||||||
|
Self::InvalidLastSymbol(index, byte) => {
|
||||||
|
write!(f, "Invalid last symbol {}, offset {}.", byte, index)
|
||||||
|
}
|
||||||
|
Self::InvalidPadding => write!(f, "Invalid padding"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(any(feature = "std", test))]
|
||||||
|
impl error::Error for DecodeError {}
|
||||||
|
|
||||||
|
/// Errors that can occur while decoding into a slice.
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||||
|
pub enum DecodeSliceError {
|
||||||
|
/// A [DecodeError] occurred
|
||||||
|
DecodeError(DecodeError),
|
||||||
|
/// The provided slice is too small.
|
||||||
|
OutputSliceTooSmall,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for DecodeSliceError {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
match self {
|
||||||
|
Self::DecodeError(e) => write!(f, "DecodeError: {}", e),
|
||||||
|
Self::OutputSliceTooSmall => write!(f, "Output slice too small"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(any(feature = "std", test))]
|
||||||
|
impl error::Error for DecodeSliceError {
|
||||||
|
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
|
||||||
|
match self {
|
||||||
|
DecodeSliceError::DecodeError(e) => Some(e),
|
||||||
|
DecodeSliceError::OutputSliceTooSmall => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<DecodeError> for DecodeSliceError {
|
||||||
|
fn from(e: DecodeError) -> Self {
|
||||||
|
DecodeSliceError::DecodeError(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decode base64 using the [`STANDARD` engine](STANDARD).
|
||||||
|
///
|
||||||
|
/// See [Engine::decode].
|
||||||
|
#[deprecated(since = "0.21.0", note = "Use Engine::decode")]
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
pub fn decode<T: AsRef<[u8]>>(input: T) -> Result<Vec<u8>, DecodeError> {
|
||||||
|
STANDARD.decode(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decode from string reference as octets using the specified [Engine].
|
||||||
|
///
|
||||||
|
/// See [Engine::decode].
|
||||||
|
/// Returns a `Result` containing a `Vec<u8>`.
|
||||||
|
#[deprecated(since = "0.21.0", note = "Use Engine::decode")]
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
pub fn decode_engine<E: Engine, T: AsRef<[u8]>>(
|
||||||
|
input: T,
|
||||||
|
engine: &E,
|
||||||
|
) -> Result<Vec<u8>, DecodeError> {
|
||||||
|
engine.decode(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decode from string reference as octets.
|
||||||
|
///
|
||||||
|
/// See [Engine::decode_vec].
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
#[deprecated(since = "0.21.0", note = "Use Engine::decode_vec")]
|
||||||
|
pub fn decode_engine_vec<E: Engine, T: AsRef<[u8]>>(
|
||||||
|
input: T,
|
||||||
|
buffer: &mut Vec<u8>,
|
||||||
|
engine: &E,
|
||||||
|
) -> Result<(), DecodeError> {
|
||||||
|
engine.decode_vec(input, buffer)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decode the input into the provided output slice.
|
||||||
|
///
|
||||||
|
/// See [Engine::decode_slice].
|
||||||
|
#[deprecated(since = "0.21.0", note = "Use Engine::decode_slice")]
|
||||||
|
pub fn decode_engine_slice<E: Engine, T: AsRef<[u8]>>(
|
||||||
|
input: T,
|
||||||
|
output: &mut [u8],
|
||||||
|
engine: &E,
|
||||||
|
) -> Result<usize, DecodeSliceError> {
|
||||||
|
engine.decode_slice(input, output)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a conservative estimate of the decoded size of `encoded_len` base64 symbols (rounded up
|
||||||
|
/// to the next group of 3 decoded bytes).
|
||||||
|
///
|
||||||
|
/// The resulting length will be a safe choice for the size of a decode buffer, but may have up to
|
||||||
|
/// 2 trailing bytes that won't end up being needed.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use base64::decoded_len_estimate;
|
||||||
|
///
|
||||||
|
/// assert_eq!(3, decoded_len_estimate(1));
|
||||||
|
/// assert_eq!(3, decoded_len_estimate(2));
|
||||||
|
/// assert_eq!(3, decoded_len_estimate(3));
|
||||||
|
/// assert_eq!(3, decoded_len_estimate(4));
|
||||||
|
/// // start of the next quad of encoded symbols
|
||||||
|
/// assert_eq!(6, decoded_len_estimate(5));
|
||||||
|
/// ```
|
||||||
|
pub fn decoded_len_estimate(encoded_len: usize) -> usize {
|
||||||
|
STANDARD
|
||||||
|
.internal_decoded_len_estimate(encoded_len)
|
||||||
|
.decoded_len_estimate()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use crate::{
|
||||||
|
alphabet,
|
||||||
|
engine::{general_purpose, Config, GeneralPurpose},
|
||||||
|
tests::{assert_encode_sanity, random_engine},
|
||||||
|
};
|
||||||
|
use rand::{
|
||||||
|
distributions::{Distribution, Uniform},
|
||||||
|
Rng, SeedableRng,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn decode_into_nonempty_vec_doesnt_clobber_existing_prefix() {
|
||||||
|
let mut orig_data = Vec::new();
|
||||||
|
let mut encoded_data = String::new();
|
||||||
|
let mut decoded_with_prefix = Vec::new();
|
||||||
|
let mut decoded_without_prefix = Vec::new();
|
||||||
|
let mut prefix = Vec::new();
|
||||||
|
|
||||||
|
let prefix_len_range = Uniform::new(0, 1000);
|
||||||
|
let input_len_range = Uniform::new(0, 1000);
|
||||||
|
|
||||||
|
let mut rng = rand::rngs::SmallRng::from_entropy();
|
||||||
|
|
||||||
|
for _ in 0..10_000 {
|
||||||
|
orig_data.clear();
|
||||||
|
encoded_data.clear();
|
||||||
|
decoded_with_prefix.clear();
|
||||||
|
decoded_without_prefix.clear();
|
||||||
|
prefix.clear();
|
||||||
|
|
||||||
|
let input_len = input_len_range.sample(&mut rng);
|
||||||
|
|
||||||
|
for _ in 0..input_len {
|
||||||
|
orig_data.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
let engine = random_engine(&mut rng);
|
||||||
|
engine.encode_string(&orig_data, &mut encoded_data);
|
||||||
|
assert_encode_sanity(&encoded_data, engine.config().encode_padding(), input_len);
|
||||||
|
|
||||||
|
let prefix_len = prefix_len_range.sample(&mut rng);
|
||||||
|
|
||||||
|
// fill the buf with a prefix
|
||||||
|
for _ in 0..prefix_len {
|
||||||
|
prefix.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
decoded_with_prefix.resize(prefix_len, 0);
|
||||||
|
decoded_with_prefix.copy_from_slice(&prefix);
|
||||||
|
|
||||||
|
// decode into the non-empty buf
|
||||||
|
engine
|
||||||
|
.decode_vec(&encoded_data, &mut decoded_with_prefix)
|
||||||
|
.unwrap();
|
||||||
|
// also decode into the empty buf
|
||||||
|
engine
|
||||||
|
.decode_vec(&encoded_data, &mut decoded_without_prefix)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
prefix_len + decoded_without_prefix.len(),
|
||||||
|
decoded_with_prefix.len()
|
||||||
|
);
|
||||||
|
assert_eq!(orig_data, decoded_without_prefix);
|
||||||
|
|
||||||
|
// append plain decode onto prefix
|
||||||
|
prefix.append(&mut decoded_without_prefix);
|
||||||
|
|
||||||
|
assert_eq!(prefix, decoded_with_prefix);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn decode_slice_doesnt_clobber_existing_prefix_or_suffix() {
|
||||||
|
do_decode_slice_doesnt_clobber_existing_prefix_or_suffix(|e, input, output| {
|
||||||
|
e.decode_slice(input, output).unwrap()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn decode_slice_unchecked_doesnt_clobber_existing_prefix_or_suffix() {
|
||||||
|
do_decode_slice_doesnt_clobber_existing_prefix_or_suffix(|e, input, output| {
|
||||||
|
            e.decode_slice_unchecked(input, output).unwrap()
        })
    }

    #[test]
    fn decode_engine_estimation_works_for_various_lengths() {
        let engine = GeneralPurpose::new(&alphabet::STANDARD, general_purpose::NO_PAD);
        for num_prefix_quads in 0..100 {
            for suffix in &["AA", "AAA", "AAAA"] {
                let mut prefix = "AAAA".repeat(num_prefix_quads);
                prefix.push_str(suffix);
                // make sure no overflow (and thus a panic) occurs
                let res = engine.decode(prefix);
                assert!(res.is_ok());
            }
        }
    }

    #[test]
    fn decode_slice_output_length_errors() {
        for num_quads in 1..100 {
            let input = "AAAA".repeat(num_quads);
            let mut vec = vec![0; (num_quads - 1) * 3];
            assert_eq!(
                DecodeSliceError::OutputSliceTooSmall,
                STANDARD.decode_slice(&input, &mut vec).unwrap_err()
            );
            vec.push(0);
            assert_eq!(
                DecodeSliceError::OutputSliceTooSmall,
                STANDARD.decode_slice(&input, &mut vec).unwrap_err()
            );
            vec.push(0);
            assert_eq!(
                DecodeSliceError::OutputSliceTooSmall,
                STANDARD.decode_slice(&input, &mut vec).unwrap_err()
            );
            vec.push(0);
            // now it works
            assert_eq!(
                num_quads * 3,
                STANDARD.decode_slice(&input, &mut vec).unwrap()
            );
        }
    }

    fn do_decode_slice_doesnt_clobber_existing_prefix_or_suffix<
        F: Fn(&GeneralPurpose, &[u8], &mut [u8]) -> usize,
    >(
        call_decode: F,
    ) {
        let mut orig_data = Vec::new();
        let mut encoded_data = String::new();
        let mut decode_buf = Vec::new();
        let mut decode_buf_copy: Vec<u8> = Vec::new();

        let input_len_range = Uniform::new(0, 1000);

        let mut rng = rand::rngs::SmallRng::from_entropy();

        for _ in 0..10_000 {
            orig_data.clear();
            encoded_data.clear();
            decode_buf.clear();
            decode_buf_copy.clear();

            let input_len = input_len_range.sample(&mut rng);

            for _ in 0..input_len {
                orig_data.push(rng.gen());
            }

            let engine = random_engine(&mut rng);
            engine.encode_string(&orig_data, &mut encoded_data);
            assert_encode_sanity(&encoded_data, engine.config().encode_padding(), input_len);

            // fill the buffer with random garbage, long enough to have some room before and after
            for _ in 0..5000 {
                decode_buf.push(rng.gen());
            }

            // keep a copy for later comparison
            decode_buf_copy.extend(decode_buf.iter());

            let offset = 1000;

            // decode into the non-empty buf
            let decode_bytes_written =
                call_decode(&engine, encoded_data.as_bytes(), &mut decode_buf[offset..]);

            assert_eq!(orig_data.len(), decode_bytes_written);
            assert_eq!(
                orig_data,
                &decode_buf[offset..(offset + decode_bytes_written)]
            );
            assert_eq!(&decode_buf_copy[0..offset], &decode_buf[0..offset]);
            assert_eq!(
                &decode_buf_copy[offset + decode_bytes_written..],
                &decode_buf[offset + decode_bytes_written..]
            );
        }
    }
}

#[allow(deprecated)]
#[cfg(test)]
mod coverage_gaming {
    use super::*;
    use std::error::Error;

    #[test]
    fn decode_error() {
        let _ = format!("{:?}", DecodeError::InvalidPadding.clone());
        let _ = format!(
            "{} {} {} {}",
            DecodeError::InvalidByte(0, 0),
            DecodeError::InvalidLength(0),
            DecodeError::InvalidLastSymbol(0, 0),
            DecodeError::InvalidPadding,
        );
    }

    #[test]
    fn decode_slice_error() {
        let _ = format!("{:?}", DecodeSliceError::OutputSliceTooSmall.clone());
        let _ = format!(
            "{} {}",
            DecodeSliceError::OutputSliceTooSmall,
            DecodeSliceError::DecodeError(DecodeError::InvalidPadding)
        );
        let _ = DecodeSliceError::OutputSliceTooSmall.source();
        let _ = DecodeSliceError::DecodeError(DecodeError::InvalidPadding).source();
    }

    #[test]
    fn deprecated_fns() {
        let _ = decode("");
        let _ = decode_engine("", &crate::prelude::BASE64_STANDARD);
        let _ = decode_engine_vec("", &mut Vec::new(), &crate::prelude::BASE64_STANDARD);
        let _ = decode_engine_slice("", &mut [], &crate::prelude::BASE64_STANDARD);
    }

    #[test]
    fn decoded_len_est() {
        assert_eq!(3, decoded_len_estimate(4));
    }
}
88
vendor/base64/src/display.rs
vendored
Normal file
@ -0,0 +1,88 @@
//! Enables base64'd output anywhere you might use a `Display` implementation, like a format string.
//!
//! ```
//! use base64::{display::Base64Display, engine::general_purpose::STANDARD};
//!
//! let data = vec![0x0, 0x1, 0x2, 0x3];
//! let wrapper = Base64Display::new(&data, &STANDARD);
//!
//! assert_eq!("base64: AAECAw==", format!("base64: {}", wrapper));
//! ```

use super::chunked_encoder::ChunkedEncoder;
use crate::engine::Engine;
use core::fmt::{Display, Formatter};
use core::{fmt, str};

/// A convenience wrapper for base64'ing bytes into a format string without heap allocation.
pub struct Base64Display<'a, 'e, E: Engine> {
    bytes: &'a [u8],
    chunked_encoder: ChunkedEncoder<'e, E>,
}

impl<'a, 'e, E: Engine> Base64Display<'a, 'e, E> {
    /// Create a `Base64Display` with the provided engine.
    pub fn new(bytes: &'a [u8], engine: &'e E) -> Base64Display<'a, 'e, E> {
        Base64Display {
            bytes,
            chunked_encoder: ChunkedEncoder::new(engine),
        }
    }
}

impl<'a, 'e, E: Engine> Display for Base64Display<'a, 'e, E> {
    fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
        let mut sink = FormatterSink { f: formatter };
        self.chunked_encoder.encode(self.bytes, &mut sink)
    }
}

struct FormatterSink<'a, 'b: 'a> {
    f: &'a mut Formatter<'b>,
}

impl<'a, 'b: 'a> super::chunked_encoder::Sink for FormatterSink<'a, 'b> {
    type Error = fmt::Error;

    fn write_encoded_bytes(&mut self, encoded: &[u8]) -> Result<(), Self::Error> {
        // Avoid unsafe. If max performance is needed, write your own display wrapper that uses
        // unsafe here to gain about 10-15%.
        self.f
            .write_str(str::from_utf8(encoded).expect("base64 data was not utf8"))
    }
}

#[cfg(test)]
mod tests {
    use super::super::chunked_encoder::tests::{
        chunked_encode_matches_normal_encode_random, SinkTestHelper,
    };
    use super::*;
    use crate::engine::general_purpose::STANDARD;

    #[test]
    fn basic_display() {
        assert_eq!(
            "~$Zm9vYmFy#*",
            format!("~${}#*", Base64Display::new(b"foobar", &STANDARD))
        );
        assert_eq!(
            "~$Zm9vYmFyZg==#*",
            format!("~${}#*", Base64Display::new(b"foobarf", &STANDARD))
        );
    }

    #[test]
    fn display_encode_matches_normal_encode() {
        let helper = DisplaySinkTestHelper;
        chunked_encode_matches_normal_encode_random(&helper);
    }

    struct DisplaySinkTestHelper;

    impl SinkTestHelper for DisplaySinkTestHelper {
        fn encode_to_string<E: Engine>(&self, engine: &E, bytes: &[u8]) -> String {
            format!("{}", Base64Display::new(bytes, engine))
        }
    }
}
492
vendor/base64/src/encode.rs
vendored
Normal file
|
|
@ -0,0 +1,492 @@
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
use alloc::string::String;
|
||||||
|
use core::fmt;
|
||||||
|
#[cfg(any(feature = "std", test))]
|
||||||
|
use std::error;
|
||||||
|
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
use crate::engine::general_purpose::STANDARD;
|
||||||
|
use crate::engine::{Config, Engine};
|
||||||
|
use crate::PAD_BYTE;
|
||||||
|
|
||||||
|
/// Encode arbitrary octets as base64 using the [`STANDARD` engine](STANDARD).
|
||||||
|
///
|
||||||
|
/// See [Engine::encode].
|
||||||
|
#[allow(unused)]
|
||||||
|
#[deprecated(since = "0.21.0", note = "Use Engine::encode")]
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
pub fn encode<T: AsRef<[u8]>>(input: T) -> String {
|
||||||
|
STANDARD.encode(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
///Encode arbitrary octets as base64 using the provided `Engine` into a new `String`.
|
||||||
|
///
|
||||||
|
/// See [Engine::encode].
|
||||||
|
#[allow(unused)]
|
||||||
|
#[deprecated(since = "0.21.0", note = "Use Engine::encode")]
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
pub fn encode_engine<E: Engine, T: AsRef<[u8]>>(input: T, engine: &E) -> String {
|
||||||
|
engine.encode(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
///Encode arbitrary octets as base64 into a supplied `String`.
|
||||||
|
///
|
||||||
|
/// See [Engine::encode_string].
|
||||||
|
#[allow(unused)]
|
||||||
|
#[deprecated(since = "0.21.0", note = "Use Engine::encode_string")]
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
pub fn encode_engine_string<E: Engine, T: AsRef<[u8]>>(
|
||||||
|
input: T,
|
||||||
|
output_buf: &mut String,
|
||||||
|
engine: &E,
|
||||||
|
) {
|
||||||
|
engine.encode_string(input, output_buf)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Encode arbitrary octets as base64 into a supplied slice.
|
||||||
|
///
|
||||||
|
/// See [Engine::encode_slice].
|
||||||
|
#[allow(unused)]
|
||||||
|
#[deprecated(since = "0.21.0", note = "Use Engine::encode_slice")]
|
||||||
|
pub fn encode_engine_slice<E: Engine, T: AsRef<[u8]>>(
|
||||||
|
input: T,
|
||||||
|
output_buf: &mut [u8],
|
||||||
|
engine: &E,
|
||||||
|
) -> Result<usize, EncodeSliceError> {
|
||||||
|
engine.encode_slice(input, output_buf)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// B64-encode and pad (if configured).
|
||||||
|
///
|
||||||
|
/// This helper exists to avoid recalculating encoded_size, which is relatively expensive on short
|
||||||
|
/// inputs.
|
||||||
|
///
|
||||||
|
/// `encoded_size` is the encoded size calculated for `input`.
|
||||||
|
///
|
||||||
|
/// `output` must be of size `encoded_size`.
|
||||||
|
///
|
||||||
|
/// All bytes in `output` will be written to since it is exactly the size of the output.
|
||||||
|
pub(crate) fn encode_with_padding<E: Engine + ?Sized>(
|
||||||
|
input: &[u8],
|
||||||
|
output: &mut [u8],
|
||||||
|
engine: &E,
|
||||||
|
expected_encoded_size: usize,
|
||||||
|
) {
|
||||||
|
debug_assert_eq!(expected_encoded_size, output.len());
|
||||||
|
|
||||||
|
let b64_bytes_written = engine.internal_encode(input, output);
|
||||||
|
|
||||||
|
let padding_bytes = if engine.config().encode_padding() {
|
||||||
|
add_padding(b64_bytes_written, &mut output[b64_bytes_written..])
|
||||||
|
} else {
|
||||||
|
0
|
||||||
|
};
|
||||||
|
|
||||||
|
let encoded_bytes = b64_bytes_written
|
||||||
|
.checked_add(padding_bytes)
|
||||||
|
.expect("usize overflow when calculating b64 length");
|
||||||
|
|
||||||
|
debug_assert_eq!(expected_encoded_size, encoded_bytes);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Calculate the base64 encoded length for a given input length, optionally including any
|
||||||
|
/// appropriate padding bytes.
|
||||||
|
///
|
||||||
|
/// Returns `None` if the encoded length can't be represented in `usize`. This will happen for
|
||||||
|
/// input lengths in approximately the top quarter of the range of `usize`.
|
||||||
|
pub const fn encoded_len(bytes_len: usize, padding: bool) -> Option<usize> {
|
||||||
|
let rem = bytes_len % 3;
|
||||||
|
|
||||||
|
let complete_input_chunks = bytes_len / 3;
|
||||||
|
// `?` is disallowed in const, and `let Some(_) = _ else` requires 1.65.0, whereas this
|
||||||
|
// messier syntax works on 1.48
|
||||||
|
let complete_chunk_output =
|
||||||
|
if let Some(complete_chunk_output) = complete_input_chunks.checked_mul(4) {
|
||||||
|
complete_chunk_output
|
||||||
|
} else {
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
|
||||||
|
if rem > 0 {
|
||||||
|
if padding {
|
||||||
|
complete_chunk_output.checked_add(4)
|
||||||
|
} else {
|
||||||
|
let encoded_rem = match rem {
|
||||||
|
1 => 2,
|
||||||
|
// only other possible remainder is 2
|
||||||
|
// can't use a separate _ => unreachable!() in const fns in ancient rust versions
|
||||||
|
_ => 3,
|
||||||
|
};
|
||||||
|
complete_chunk_output.checked_add(encoded_rem)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Some(complete_chunk_output)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write padding characters.
|
||||||
|
/// `unpadded_output_len` is the size of the unpadded but base64 encoded data.
|
||||||
|
/// `output` is the slice where padding should be written, of length at least 2.
|
||||||
|
///
|
||||||
|
/// Returns the number of padding bytes written.
|
||||||
|
pub(crate) fn add_padding(unpadded_output_len: usize, output: &mut [u8]) -> usize {
|
||||||
|
let pad_bytes = (4 - (unpadded_output_len % 4)) % 4;
|
||||||
|
// for just a couple bytes, this has better performance than using
|
||||||
|
// .fill(), or iterating over mutable refs, which call memset()
|
||||||
|
#[allow(clippy::needless_range_loop)]
|
||||||
|
for i in 0..pad_bytes {
|
||||||
|
output[i] = PAD_BYTE;
|
||||||
|
}
|
||||||
|
|
||||||
|
pad_bytes
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Errors that can occur while encoding into a slice.
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||||
|
pub enum EncodeSliceError {
|
||||||
|
/// The provided slice is too small.
|
||||||
|
OutputSliceTooSmall,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for EncodeSliceError {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
match self {
|
||||||
|
Self::OutputSliceTooSmall => write!(f, "Output slice too small"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(any(feature = "std", test))]
|
||||||
|
impl error::Error for EncodeSliceError {}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
alphabet,
|
||||||
|
engine::general_purpose::{GeneralPurpose, NO_PAD, STANDARD},
|
||||||
|
tests::{assert_encode_sanity, random_config, random_engine},
|
||||||
|
};
|
||||||
|
use rand::{
|
||||||
|
distributions::{Distribution, Uniform},
|
||||||
|
Rng, SeedableRng,
|
||||||
|
};
|
||||||
|
use std::str;
|
||||||
|
|
||||||
|
const URL_SAFE_NO_PAD_ENGINE: GeneralPurpose = GeneralPurpose::new(&alphabet::URL_SAFE, NO_PAD);
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn encoded_size_correct_standard() {
|
||||||
|
assert_encoded_length(0, 0, &STANDARD, true);
|
||||||
|
|
||||||
|
assert_encoded_length(1, 4, &STANDARD, true);
|
||||||
|
assert_encoded_length(2, 4, &STANDARD, true);
|
||||||
|
assert_encoded_length(3, 4, &STANDARD, true);
|
||||||
|
|
||||||
|
assert_encoded_length(4, 8, &STANDARD, true);
|
||||||
|
assert_encoded_length(5, 8, &STANDARD, true);
|
||||||
|
assert_encoded_length(6, 8, &STANDARD, true);
|
||||||
|
|
||||||
|
assert_encoded_length(7, 12, &STANDARD, true);
|
||||||
|
assert_encoded_length(8, 12, &STANDARD, true);
|
||||||
|
assert_encoded_length(9, 12, &STANDARD, true);
|
||||||
|
|
||||||
|
assert_encoded_length(54, 72, &STANDARD, true);
|
||||||
|
|
||||||
|
assert_encoded_length(55, 76, &STANDARD, true);
|
||||||
|
assert_encoded_length(56, 76, &STANDARD, true);
|
||||||
|
assert_encoded_length(57, 76, &STANDARD, true);
|
||||||
|
|
||||||
|
assert_encoded_length(58, 80, &STANDARD, true);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn encoded_size_correct_no_pad() {
|
||||||
|
assert_encoded_length(0, 0, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
|
||||||
|
assert_encoded_length(1, 2, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
assert_encoded_length(2, 3, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
assert_encoded_length(3, 4, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
|
||||||
|
assert_encoded_length(4, 6, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
assert_encoded_length(5, 7, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
assert_encoded_length(6, 8, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
|
||||||
|
assert_encoded_length(7, 10, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
assert_encoded_length(8, 11, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
assert_encoded_length(9, 12, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
|
||||||
|
assert_encoded_length(54, 72, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
|
||||||
|
assert_encoded_length(55, 74, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
assert_encoded_length(56, 75, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
assert_encoded_length(57, 76, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
|
||||||
|
assert_encoded_length(58, 78, &URL_SAFE_NO_PAD_ENGINE, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn encoded_size_overflow() {
|
||||||
|
assert_eq!(None, encoded_len(usize::MAX, true));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn encode_engine_string_into_nonempty_buffer_doesnt_clobber_prefix() {
|
||||||
|
let mut orig_data = Vec::new();
|
||||||
|
let mut prefix = String::new();
|
||||||
|
let mut encoded_data_no_prefix = String::new();
|
||||||
|
let mut encoded_data_with_prefix = String::new();
|
||||||
|
let mut decoded = Vec::new();
|
||||||
|
|
||||||
|
let prefix_len_range = Uniform::new(0, 1000);
|
||||||
|
let input_len_range = Uniform::new(0, 1000);
|
||||||
|
|
||||||
|
let mut rng = rand::rngs::SmallRng::from_entropy();
|
||||||
|
|
||||||
|
for _ in 0..10_000 {
|
||||||
|
orig_data.clear();
|
||||||
|
prefix.clear();
|
||||||
|
encoded_data_no_prefix.clear();
|
||||||
|
encoded_data_with_prefix.clear();
|
||||||
|
decoded.clear();
|
||||||
|
|
||||||
|
let input_len = input_len_range.sample(&mut rng);
|
||||||
|
|
||||||
|
for _ in 0..input_len {
|
||||||
|
orig_data.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
let prefix_len = prefix_len_range.sample(&mut rng);
|
||||||
|
for _ in 0..prefix_len {
|
||||||
|
// getting convenient random single-byte printable chars that aren't base64 is
|
||||||
|
// annoying
|
||||||
|
prefix.push('#');
|
||||||
|
}
|
||||||
|
encoded_data_with_prefix.push_str(&prefix);
|
||||||
|
|
||||||
|
let engine = random_engine(&mut rng);
|
||||||
|
engine.encode_string(&orig_data, &mut encoded_data_no_prefix);
|
||||||
|
engine.encode_string(&orig_data, &mut encoded_data_with_prefix);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
encoded_data_no_prefix.len() + prefix_len,
|
||||||
|
encoded_data_with_prefix.len()
|
||||||
|
);
|
||||||
|
assert_encode_sanity(
|
||||||
|
&encoded_data_no_prefix,
|
||||||
|
engine.config().encode_padding(),
|
||||||
|
input_len,
|
||||||
|
);
|
||||||
|
assert_encode_sanity(
|
||||||
|
&encoded_data_with_prefix[prefix_len..],
|
||||||
|
engine.config().encode_padding(),
|
||||||
|
input_len,
|
||||||
|
);
|
||||||
|
|
||||||
|
// append plain encode onto prefix
|
||||||
|
prefix.push_str(&encoded_data_no_prefix);
|
||||||
|
|
||||||
|
assert_eq!(prefix, encoded_data_with_prefix);
|
||||||
|
|
||||||
|
engine
|
||||||
|
.decode_vec(&encoded_data_no_prefix, &mut decoded)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(orig_data, decoded);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn encode_engine_slice_into_nonempty_buffer_doesnt_clobber_suffix() {
|
||||||
|
let mut orig_data = Vec::new();
|
||||||
|
let mut encoded_data = Vec::new();
|
||||||
|
let mut encoded_data_original_state = Vec::new();
|
||||||
|
let mut decoded = Vec::new();
|
||||||
|
|
||||||
|
let input_len_range = Uniform::new(0, 1000);
|
||||||
|
|
||||||
|
let mut rng = rand::rngs::SmallRng::from_entropy();
|
||||||
|
|
||||||
|
for _ in 0..10_000 {
|
||||||
|
orig_data.clear();
|
||||||
|
encoded_data.clear();
|
||||||
|
encoded_data_original_state.clear();
|
||||||
|
decoded.clear();
|
||||||
|
|
||||||
|
let input_len = input_len_range.sample(&mut rng);
|
||||||
|
|
||||||
|
for _ in 0..input_len {
|
||||||
|
orig_data.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
// plenty of existing garbage in the encoded buffer
|
||||||
|
for _ in 0..10 * input_len {
|
||||||
|
encoded_data.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
encoded_data_original_state.extend_from_slice(&encoded_data);
|
||||||
|
|
||||||
|
let engine = random_engine(&mut rng);
|
||||||
|
|
||||||
|
let encoded_size = encoded_len(input_len, engine.config().encode_padding()).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
encoded_size,
|
||||||
|
engine.encode_slice(&orig_data, &mut encoded_data).unwrap()
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_encode_sanity(
|
||||||
|
str::from_utf8(&encoded_data[0..encoded_size]).unwrap(),
|
||||||
|
engine.config().encode_padding(),
|
||||||
|
input_len,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
&encoded_data[encoded_size..],
|
||||||
|
&encoded_data_original_state[encoded_size..]
|
||||||
|
);
|
||||||
|
|
||||||
|
engine
|
||||||
|
.decode_vec(&encoded_data[0..encoded_size], &mut decoded)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(orig_data, decoded);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn encode_to_slice_random_valid_utf8() {
|
||||||
|
let mut input = Vec::new();
|
||||||
|
let mut output = Vec::new();
|
||||||
|
|
||||||
|
let input_len_range = Uniform::new(0, 1000);
|
||||||
|
|
||||||
|
let mut rng = rand::rngs::SmallRng::from_entropy();
|
||||||
|
|
||||||
|
for _ in 0..10_000 {
|
||||||
|
input.clear();
|
||||||
|
output.clear();
|
||||||
|
|
||||||
|
let input_len = input_len_range.sample(&mut rng);
|
||||||
|
|
||||||
|
for _ in 0..input_len {
|
||||||
|
input.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
let config = random_config(&mut rng);
|
||||||
|
let engine = random_engine(&mut rng);
|
||||||
|
|
||||||
|
// fill up the output buffer with garbage
|
||||||
|
let encoded_size = encoded_len(input_len, config.encode_padding()).unwrap();
|
||||||
|
for _ in 0..encoded_size {
|
||||||
|
output.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
let orig_output_buf = output.clone();
|
||||||
|
|
||||||
|
let bytes_written = engine.internal_encode(&input, &mut output);
|
||||||
|
|
||||||
|
// make sure the part beyond bytes_written is the same garbage it was before
|
||||||
|
assert_eq!(orig_output_buf[bytes_written..], output[bytes_written..]);
|
||||||
|
|
||||||
|
// make sure the encoded bytes are UTF-8
|
||||||
|
let _ = str::from_utf8(&output[0..bytes_written]).unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn encode_with_padding_random_valid_utf8() {
|
||||||
|
let mut input = Vec::new();
|
||||||
|
let mut output = Vec::new();
|
||||||
|
|
||||||
|
let input_len_range = Uniform::new(0, 1000);
|
||||||
|
|
||||||
|
let mut rng = rand::rngs::SmallRng::from_entropy();
|
||||||
|
|
||||||
|
for _ in 0..10_000 {
|
||||||
|
input.clear();
|
||||||
|
output.clear();
|
||||||
|
|
||||||
|
let input_len = input_len_range.sample(&mut rng);
|
||||||
|
|
||||||
|
for _ in 0..input_len {
|
||||||
|
input.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
let engine = random_engine(&mut rng);
|
||||||
|
|
||||||
|
// fill up the output buffer with garbage
|
||||||
|
let encoded_size = encoded_len(input_len, engine.config().encode_padding()).unwrap();
|
||||||
|
for _ in 0..encoded_size + 1000 {
|
||||||
|
output.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
let orig_output_buf = output.clone();
|
||||||
|
|
||||||
|
encode_with_padding(&input, &mut output[0..encoded_size], &engine, encoded_size);
|
||||||
|
|
||||||
|
// make sure the part beyond b64 is the same garbage it was before
|
||||||
|
assert_eq!(orig_output_buf[encoded_size..], output[encoded_size..]);
|
||||||
|
|
||||||
|
// make sure the encoded bytes are UTF-8
|
||||||
|
let _ = str::from_utf8(&output[0..encoded_size]).unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn add_padding_random_valid_utf8() {
|
||||||
|
let mut output = Vec::new();
|
||||||
|
|
||||||
|
let mut rng = rand::rngs::SmallRng::from_entropy();
|
||||||
|
|
||||||
|
// cover our bases for length % 4
|
||||||
|
for unpadded_output_len in 0..20 {
|
||||||
|
output.clear();
|
||||||
|
|
||||||
|
// fill output with random
|
||||||
|
for _ in 0..100 {
|
||||||
|
output.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
let orig_output_buf = output.clone();
|
||||||
|
|
||||||
|
let bytes_written = add_padding(unpadded_output_len, &mut output);
|
||||||
|
|
||||||
|
// make sure the part beyond bytes_written is the same garbage it was before
|
||||||
|
assert_eq!(orig_output_buf[bytes_written..], output[bytes_written..]);
|
||||||
|
|
||||||
|
// make sure the encoded bytes are UTF-8
|
||||||
|
let _ = str::from_utf8(&output[0..bytes_written]).unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn assert_encoded_length<E: Engine>(
|
||||||
|
input_len: usize,
|
||||||
|
enc_len: usize,
|
||||||
|
engine: &E,
|
||||||
|
padded: bool,
|
||||||
|
) {
|
||||||
|
assert_eq!(enc_len, encoded_len(input_len, padded).unwrap());
|
||||||
|
|
||||||
|
let mut bytes: Vec<u8> = Vec::new();
|
||||||
|
let mut rng = rand::rngs::SmallRng::from_entropy();
|
||||||
|
|
||||||
|
for _ in 0..input_len {
|
||||||
|
bytes.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
let encoded = engine.encode(&bytes);
|
||||||
|
assert_encode_sanity(&encoded, padded, input_len);
|
||||||
|
|
||||||
|
assert_eq!(enc_len, encoded.len());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn encode_imap() {
|
||||||
|
assert_eq!(
|
||||||
|
&GeneralPurpose::new(&alphabet::IMAP_MUTF7, NO_PAD).encode(b"\xFB\xFF"),
|
||||||
|
&GeneralPurpose::new(&alphabet::STANDARD, NO_PAD)
|
||||||
|
.encode(b"\xFB\xFF")
|
||||||
|
.replace('/', ",")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
357
vendor/base64/src/engine/general_purpose/decode.rs
vendored
Normal file
|
|
@ -0,0 +1,357 @@
|
||||||
|
use crate::{
|
||||||
|
engine::{general_purpose::INVALID_VALUE, DecodeEstimate, DecodeMetadata, DecodePaddingMode},
|
||||||
|
DecodeError, DecodeSliceError, PAD_BYTE,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[doc(hidden)]
|
||||||
|
pub struct GeneralPurposeEstimate {
|
||||||
|
/// input len % 4
|
||||||
|
rem: usize,
|
||||||
|
conservative_decoded_len: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GeneralPurposeEstimate {
|
||||||
|
pub(crate) fn new(encoded_len: usize) -> Self {
|
||||||
|
let rem = encoded_len % 4;
|
||||||
|
Self {
|
||||||
|
rem,
|
||||||
|
conservative_decoded_len: (encoded_len / 4 + (rem > 0) as usize) * 3,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DecodeEstimate for GeneralPurposeEstimate {
|
||||||
|
fn decoded_len_estimate(&self) -> usize {
|
||||||
|
self.conservative_decoded_len
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Helper to avoid duplicating num_chunks calculation, which is costly on short inputs.
|
||||||
|
/// Returns the decode metadata, or an error.
|
||||||
|
// We're on the fragile edge of compiler heuristics here. If this is not inlined, slow. If this is
|
||||||
|
// inlined(always), a different slow. plain ol' inline makes the benchmarks happiest at the moment,
|
||||||
|
// but this is fragile and the best setting changes with only minor code modifications.
|
||||||
|
#[inline]
|
||||||
|
pub(crate) fn decode_helper(
|
||||||
|
input: &[u8],
|
||||||
|
estimate: GeneralPurposeEstimate,
|
||||||
|
output: &mut [u8],
|
||||||
|
decode_table: &[u8; 256],
|
||||||
|
decode_allow_trailing_bits: bool,
|
||||||
|
padding_mode: DecodePaddingMode,
|
||||||
|
) -> Result<DecodeMetadata, DecodeSliceError> {
|
||||||
|
let input_complete_nonterminal_quads_len =
|
||||||
|
complete_quads_len(input, estimate.rem, output.len(), decode_table)?;
|
||||||
|
|
||||||
|
const UNROLLED_INPUT_CHUNK_SIZE: usize = 32;
|
||||||
|
const UNROLLED_OUTPUT_CHUNK_SIZE: usize = UNROLLED_INPUT_CHUNK_SIZE / 4 * 3;
|
||||||
|
|
||||||
|
let input_complete_quads_after_unrolled_chunks_len =
|
||||||
|
input_complete_nonterminal_quads_len % UNROLLED_INPUT_CHUNK_SIZE;
|
||||||
|
|
||||||
|
let input_unrolled_loop_len =
|
||||||
|
input_complete_nonterminal_quads_len - input_complete_quads_after_unrolled_chunks_len;
|
||||||
|
|
||||||
|
// chunks of 32 bytes
|
||||||
|
for (chunk_index, chunk) in input[..input_unrolled_loop_len]
|
||||||
|
.chunks_exact(UNROLLED_INPUT_CHUNK_SIZE)
|
||||||
|
.enumerate()
|
||||||
|
{
|
||||||
|
let input_index = chunk_index * UNROLLED_INPUT_CHUNK_SIZE;
|
||||||
|
let chunk_output = &mut output[chunk_index * UNROLLED_OUTPUT_CHUNK_SIZE
|
||||||
|
..(chunk_index + 1) * UNROLLED_OUTPUT_CHUNK_SIZE];
|
||||||
|
|
||||||
|
decode_chunk_8(
|
||||||
|
&chunk[0..8],
|
||||||
|
input_index,
|
||||||
|
decode_table,
|
||||||
|
&mut chunk_output[0..6],
|
||||||
|
)?;
|
||||||
|
decode_chunk_8(
|
||||||
|
&chunk[8..16],
|
||||||
|
input_index + 8,
|
||||||
|
decode_table,
|
||||||
|
&mut chunk_output[6..12],
|
||||||
|
)?;
|
||||||
|
decode_chunk_8(
|
||||||
|
&chunk[16..24],
|
||||||
|
input_index + 16,
|
||||||
|
decode_table,
|
||||||
|
&mut chunk_output[12..18],
|
||||||
|
)?;
|
||||||
|
decode_chunk_8(
|
||||||
|
&chunk[24..32],
|
||||||
|
input_index + 24,
|
||||||
|
decode_table,
|
||||||
|
&mut chunk_output[18..24],
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
// remaining quads, except for the last possibly partial one, as it may have padding
|
||||||
|
let output_unrolled_loop_len = input_unrolled_loop_len / 4 * 3;
|
||||||
|
let output_complete_quad_len = input_complete_nonterminal_quads_len / 4 * 3;
|
||||||
|
{
|
||||||
|
let output_after_unroll = &mut output[output_unrolled_loop_len..output_complete_quad_len];
|
||||||
|
|
||||||
|
for (chunk_index, chunk) in input
|
||||||
|
[input_unrolled_loop_len..input_complete_nonterminal_quads_len]
|
||||||
|
.chunks_exact(4)
|
||||||
|
.enumerate()
|
||||||
|
{
|
||||||
|
let chunk_output = &mut output_after_unroll[chunk_index * 3..chunk_index * 3 + 3];
|
||||||
|
|
||||||
|
decode_chunk_4(
|
||||||
|
chunk,
|
||||||
|
input_unrolled_loop_len + chunk_index * 4,
|
||||||
|
decode_table,
|
||||||
|
chunk_output,
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
super::decode_suffix::decode_suffix(
|
||||||
|
input,
|
||||||
|
input_complete_nonterminal_quads_len,
|
||||||
|
output,
|
||||||
|
output_complete_quad_len,
|
||||||
|
decode_table,
|
||||||
|
decode_allow_trailing_bits,
|
||||||
|
padding_mode,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the length of complete quads, except for the last one, even if it is complete.
|
||||||
|
///
|
||||||
|
/// Returns an error if the output len is not big enough for decoding those complete quads, or if
|
||||||
|
/// the input % 4 == 1, and that last byte is an invalid value other than a pad byte.
|
||||||
|
///
|
||||||
|
/// - `input` is the base64 input
|
||||||
|
/// - `input_len_rem` is input len % 4
|
||||||
|
/// - `output_len` is the length of the output slice
|
||||||
|
pub(crate) fn complete_quads_len(
|
||||||
|
input: &[u8],
|
||||||
|
input_len_rem: usize,
|
||||||
|
output_len: usize,
|
||||||
|
decode_table: &[u8; 256],
|
||||||
|
) -> Result<usize, DecodeSliceError> {
|
||||||
|
debug_assert!(input.len() % 4 == input_len_rem);
|
||||||
|
|
||||||
|
// detect a trailing invalid byte, like a newline, as a user convenience
|
||||||
|
if input_len_rem == 1 {
|
||||||
|
let last_byte = input[input.len() - 1];
|
||||||
|
// exclude pad bytes; might be part of padding that extends from earlier in the input
|
||||||
|
if last_byte != PAD_BYTE && decode_table[usize::from(last_byte)] == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(input.len() - 1, last_byte).into());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// skip last quad, even if it's complete, as it may have padding
|
||||||
|
let input_complete_nonterminal_quads_len = input
|
||||||
|
.len()
|
||||||
|
.saturating_sub(input_len_rem)
|
||||||
|
// if rem was 0, subtract 4 to avoid padding
|
||||||
|
.saturating_sub((input_len_rem == 0) as usize * 4);
|
||||||
|
debug_assert!(
|
||||||
|
input.is_empty() || (1..=4).contains(&(input.len() - input_complete_nonterminal_quads_len))
|
||||||
|
);
|
||||||
|
|
||||||
|
// check that everything except the last quad handled by decode_suffix will fit
|
||||||
|
if output_len < input_complete_nonterminal_quads_len / 4 * 3 {
|
||||||
|
return Err(DecodeSliceError::OutputSliceTooSmall);
|
||||||
|
};
|
||||||
|
Ok(input_complete_nonterminal_quads_len)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decode 8 bytes of input into 6 bytes of output.
|
||||||
|
///
|
||||||
|
/// `input` is the 8 bytes to decode.
|
||||||
|
/// `index_at_start_of_input` is the offset in the overall input (used for reporting errors
|
||||||
|
/// accurately)
|
||||||
|
/// `decode_table` is the lookup table for the particular base64 alphabet.
|
||||||
|
/// `output` will have its first 6 bytes overwritten
|
||||||
|
// yes, really inline (worth 30-50% speedup)
|
||||||
|
#[inline(always)]
|
||||||
|
fn decode_chunk_8(
|
||||||
|
input: &[u8],
|
||||||
|
index_at_start_of_input: usize,
|
||||||
|
decode_table: &[u8; 256],
|
||||||
|
output: &mut [u8],
|
||||||
|
) -> Result<(), DecodeError> {
|
||||||
|
let morsel = decode_table[usize::from(input[0])];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(index_at_start_of_input, input[0]));
|
||||||
|
}
|
||||||
|
let mut accum = u64::from(morsel) << 58;
|
||||||
|
|
||||||
|
let morsel = decode_table[usize::from(input[1])];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(
|
||||||
|
index_at_start_of_input + 1,
|
||||||
|
input[1],
|
||||||
|
));
|
||||||
|
}
|
||||||
|
accum |= u64::from(morsel) << 52;
|
||||||
|
|
||||||
|
let morsel = decode_table[usize::from(input[2])];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(
|
||||||
|
index_at_start_of_input + 2,
|
||||||
|
input[2],
|
||||||
|
));
|
||||||
|
}
|
||||||
|
accum |= u64::from(morsel) << 46;
|
||||||
|
|
||||||
|
let morsel = decode_table[usize::from(input[3])];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(
|
||||||
|
index_at_start_of_input + 3,
|
||||||
|
input[3],
|
||||||
|
));
|
||||||
|
}
|
||||||
|
accum |= u64::from(morsel) << 40;
|
||||||
|
|
||||||
|
let morsel = decode_table[usize::from(input[4])];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(
|
||||||
|
index_at_start_of_input + 4,
|
||||||
|
input[4],
|
||||||
|
));
|
||||||
|
}
|
||||||
|
accum |= u64::from(morsel) << 34;
|
||||||
|
|
||||||
|
let morsel = decode_table[usize::from(input[5])];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(
|
||||||
|
index_at_start_of_input + 5,
|
||||||
|
input[5],
|
||||||
|
));
|
||||||
|
}
|
||||||
|
accum |= u64::from(morsel) << 28;
|
||||||
|
|
||||||
|
let morsel = decode_table[usize::from(input[6])];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(
|
||||||
|
index_at_start_of_input + 6,
|
||||||
|
input[6],
|
||||||
|
));
|
||||||
|
}
|
||||||
|
accum |= u64::from(morsel) << 22;
|
||||||
|
|
||||||
|
let morsel = decode_table[usize::from(input[7])];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(
|
||||||
|
index_at_start_of_input + 7,
|
||||||
|
input[7],
|
||||||
|
));
|
||||||
|
}
|
||||||
|
accum |= u64::from(morsel) << 16;
|
||||||
|
|
||||||
|
output[..6].copy_from_slice(&accum.to_be_bytes()[..6]);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Like [decode_chunk_8] but for 4 bytes of input and 3 bytes of output.
|
||||||
|
#[inline(always)]
|
||||||
|
fn decode_chunk_4(
|
||||||
|
input: &[u8],
|
||||||
|
index_at_start_of_input: usize,
|
||||||
|
decode_table: &[u8; 256],
|
||||||
|
output: &mut [u8],
|
||||||
|
) -> Result<(), DecodeError> {
|
||||||
|
let morsel = decode_table[usize::from(input[0])];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(index_at_start_of_input, input[0]));
|
||||||
|
}
|
||||||
|
let mut accum = u32::from(morsel) << 26;
|
||||||
|
|
||||||
|
let morsel = decode_table[usize::from(input[1])];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(
|
||||||
|
index_at_start_of_input + 1,
|
||||||
|
input[1],
|
||||||
|
));
|
||||||
|
}
|
||||||
|
accum |= u32::from(morsel) << 20;
|
||||||
|
|
||||||
|
let morsel = decode_table[usize::from(input[2])];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(
|
||||||
|
index_at_start_of_input + 2,
|
||||||
|
input[2],
|
||||||
|
));
|
||||||
|
}
|
||||||
|
accum |= u32::from(morsel) << 14;
|
||||||
|
|
||||||
|
let morsel = decode_table[usize::from(input[3])];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(
|
||||||
|
index_at_start_of_input + 3,
|
||||||
|
input[3],
|
||||||
|
));
|
||||||
|
}
|
||||||
|
accum |= u32::from(morsel) << 8;
|
||||||
|
|
||||||
|
output[..3].copy_from_slice(&accum.to_be_bytes()[..3]);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
use crate::engine::general_purpose::STANDARD;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn decode_chunk_8_writes_only_6_bytes() {
|
||||||
|
let input = b"Zm9vYmFy"; // "foobar"
|
||||||
|
let mut output = [0_u8, 1, 2, 3, 4, 5, 6, 7];
|
||||||
|
|
||||||
|
decode_chunk_8(&input[..], 0, &STANDARD.decode_table, &mut output).unwrap();
|
||||||
|
assert_eq!(&vec![b'f', b'o', b'o', b'b', b'a', b'r', 6, 7], &output);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn decode_chunk_4_writes_only_3_bytes() {
|
||||||
|
let input = b"Zm9v"; // "foobar"
|
||||||
|
let mut output = [0_u8, 1, 2, 3];
|
||||||
|
|
||||||
|
decode_chunk_4(&input[..], 0, &STANDARD.decode_table, &mut output).unwrap();
|
||||||
|
assert_eq!(&vec![b'f', b'o', b'o', 3], &output);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn estimate_short_lengths() {
|
||||||
|
for (range, decoded_len_estimate) in [
|
||||||
|
(0..=0, 0),
|
||||||
|
(1..=4, 3),
|
||||||
|
(5..=8, 6),
|
||||||
|
(9..=12, 9),
|
||||||
|
(13..=16, 12),
|
||||||
|
(17..=20, 15),
|
||||||
|
] {
|
||||||
|
for encoded_len in range {
|
||||||
|
let estimate = GeneralPurposeEstimate::new(encoded_len);
|
||||||
|
assert_eq!(decoded_len_estimate, estimate.decoded_len_estimate());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn estimate_via_u128_inflation() {
|
||||||
|
// cover both ends of usize
|
||||||
|
(0..1000)
|
||||||
|
.chain(usize::MAX - 1000..=usize::MAX)
|
||||||
|
.for_each(|encoded_len| {
|
||||||
|
// inflate to 128 bit type to be able to safely use the easy formulas
|
||||||
|
let len_128 = encoded_len as u128;
|
||||||
|
|
||||||
|
let estimate = GeneralPurposeEstimate::new(encoded_len);
|
||||||
|
assert_eq!(
|
||||||
|
(len_128 + 3) / 4 * 3,
|
||||||
|
estimate.conservative_decoded_len as u128
|
||||||
|
);
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
162
vendor/base64/src/engine/general_purpose/decode_suffix.rs
vendored
Normal file
|
|
@ -0,0 +1,162 @@
|
||||||
|
use crate::{
|
||||||
|
engine::{general_purpose::INVALID_VALUE, DecodeMetadata, DecodePaddingMode},
|
||||||
|
DecodeError, DecodeSliceError, PAD_BYTE,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Decode the last 0-4 bytes, checking for trailing set bits and padding per the provided
|
||||||
|
/// parameters.
|
||||||
|
///
|
||||||
|
/// Returns the decode metadata representing the total number of bytes decoded, including the ones
|
||||||
|
/// indicated as already written by `output_index`.
|
||||||
|
pub(crate) fn decode_suffix(
|
||||||
|
input: &[u8],
|
||||||
|
input_index: usize,
|
||||||
|
output: &mut [u8],
|
||||||
|
mut output_index: usize,
|
||||||
|
decode_table: &[u8; 256],
|
||||||
|
decode_allow_trailing_bits: bool,
|
||||||
|
padding_mode: DecodePaddingMode,
|
||||||
|
) -> Result<DecodeMetadata, DecodeSliceError> {
|
||||||
|
debug_assert!((input.len() - input_index) <= 4);
|
||||||
|
|
||||||
|
// Decode any leftovers that might not be a complete input chunk of 4 bytes.
|
||||||
|
// Use a u32 as a stack-resident 4 byte buffer.
|
||||||
|
let mut morsels_in_leftover = 0;
|
||||||
|
let mut padding_bytes_count = 0;
|
||||||
|
// offset from input_index
|
||||||
|
let mut first_padding_offset: usize = 0;
|
||||||
|
let mut last_symbol = 0_u8;
|
||||||
|
let mut morsels = [0_u8; 4];
|
||||||
|
|
||||||
|
for (leftover_index, &b) in input[input_index..].iter().enumerate() {
|
||||||
|
// '=' padding
|
||||||
|
if b == PAD_BYTE {
|
||||||
|
// There can be bad padding bytes in a few ways:
|
||||||
|
// 1 - Padding with non-padding characters after it
|
||||||
|
// 2 - Padding after zero or one characters in the current quad (should only
|
||||||
|
// be after 2 or 3 chars)
|
||||||
|
// 3 - More than two characters of padding. If 3 or 4 padding chars
|
||||||
|
// are in the same quad, that implies it will be caught by #2.
|
||||||
|
// If it spreads from one quad to another, it will be an invalid byte
|
||||||
|
// in the first quad.
|
||||||
|
// 4 - Non-canonical padding -- 1 byte when it should be 2, etc.
|
||||||
|
// Per config, non-canonical but still functional non- or partially-padded base64
|
||||||
|
// may be treated as an error condition.
|
||||||
|
|
||||||
|
if leftover_index < 2 {
|
||||||
|
// Check for error #2.
|
||||||
|
// Either the previous byte was padding, in which case we would have already hit
|
||||||
|
// this case, or it wasn't, in which case this is the first such error.
|
||||||
|
debug_assert!(
|
||||||
|
leftover_index == 0 || (leftover_index == 1 && padding_bytes_count == 0)
|
||||||
|
);
|
||||||
|
let bad_padding_index = input_index + leftover_index;
|
||||||
|
return Err(DecodeError::InvalidByte(bad_padding_index, b).into());
|
||||||
|
}
|
||||||
|
|
||||||
|
if padding_bytes_count == 0 {
|
||||||
|
first_padding_offset = leftover_index;
|
||||||
|
}
|
||||||
|
|
||||||
|
padding_bytes_count += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for case #1.
|
||||||
|
// To make '=' handling consistent with the main loop, don't allow
|
||||||
|
// non-suffix '=' in trailing chunk either. Report error as first
|
||||||
|
// erroneous padding.
|
||||||
|
if padding_bytes_count > 0 {
|
||||||
|
return Err(
|
||||||
|
DecodeError::InvalidByte(input_index + first_padding_offset, PAD_BYTE).into(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
last_symbol = b;
|
||||||
|
|
||||||
|
// can use up to 8 * 6 = 48 bits of the u64, if last chunk has no padding.
|
||||||
|
// Pack the leftovers from left to right.
|
||||||
|
let morsel = decode_table[b as usize];
|
||||||
|
if morsel == INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(input_index + leftover_index, b).into());
|
||||||
|
}
|
||||||
|
|
||||||
|
morsels[morsels_in_leftover] = morsel;
|
||||||
|
morsels_in_leftover += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If there was 1 trailing byte, and it was valid, and we got to this point without hitting
|
||||||
|
// an invalid byte, now we can report invalid length
|
||||||
|
if !input.is_empty() && morsels_in_leftover < 2 {
|
||||||
|
return Err(DecodeError::InvalidLength(input_index + morsels_in_leftover).into());
|
||||||
|
}
|
||||||
|
|
||||||
|
match padding_mode {
|
||||||
|
DecodePaddingMode::Indifferent => { /* everything we care about was already checked */ }
|
||||||
|
DecodePaddingMode::RequireCanonical => {
|
||||||
|
// allow empty input
|
||||||
|
if (padding_bytes_count + morsels_in_leftover) % 4 != 0 {
|
||||||
|
return Err(DecodeError::InvalidPadding.into());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
DecodePaddingMode::RequireNone => {
|
||||||
|
if padding_bytes_count > 0 {
|
||||||
|
// check at the end to make sure we let the cases of padding that should be InvalidByte
|
||||||
|
// get hit
|
||||||
|
return Err(DecodeError::InvalidPadding.into());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// When encoding 1 trailing byte (e.g. 0xFF), 2 base64 bytes ("/w") are needed.
|
||||||
|
// / is the symbol for 63 (0x3F, bottom 6 bits all set) and w is 48 (0x30, top 2 bits
|
||||||
|
// of bottom 6 bits set).
|
||||||
|
// When decoding two symbols back to one trailing byte, any final symbol higher than
|
||||||
|
// w would still decode to the original byte because we only care about the top two
|
||||||
|
// bits in the bottom 6, but would be a non-canonical encoding. So, we calculate a
|
||||||
|
// mask based on how many bits are used for just the canonical encoding, and optionally
|
||||||
|
// error if any other bits are set. In the example of one encoded byte -> 2 symbols,
|
||||||
|
// 2 symbols can technically encode 12 bits, but the last 4 are non-canonical, and
|
||||||
|
// useless since there are no more symbols to provide the necessary 4 additional bits
|
||||||
|
// to finish the second original byte.
|
||||||
|
|
||||||
|
let leftover_bytes_to_append = morsels_in_leftover * 6 / 8;
|
||||||
|
// Put the up to 6 complete bytes as the high bytes.
|
||||||
|
// Gain a couple percent speedup from nudging these ORs to use more ILP with a two-way split.
|
||||||
|
let mut leftover_num = (u32::from(morsels[0]) << 26)
|
||||||
|
| (u32::from(morsels[1]) << 20)
|
||||||
|
| (u32::from(morsels[2]) << 14)
|
||||||
|
| (u32::from(morsels[3]) << 8);
|
||||||
|
|
||||||
|
// if there are bits set outside the bits we care about, last symbol encodes trailing bits that
|
||||||
|
// will not be included in the output
|
||||||
|
let mask = !0_u32 >> (leftover_bytes_to_append * 8);
|
||||||
|
if !decode_allow_trailing_bits && (leftover_num & mask) != 0 {
|
||||||
|
// last morsel is at `morsels_in_leftover` - 1
|
||||||
|
return Err(DecodeError::InvalidLastSymbol(
|
||||||
|
input_index + morsels_in_leftover - 1,
|
||||||
|
last_symbol,
|
||||||
|
)
|
||||||
|
.into());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Strangely, this approach benchmarks better than writing bytes one at a time,
|
||||||
|
// or copy_from_slice into output.
|
||||||
|
for _ in 0..leftover_bytes_to_append {
|
||||||
|
let hi_byte = (leftover_num >> 24) as u8;
|
||||||
|
leftover_num <<= 8;
|
||||||
|
*output
|
||||||
|
.get_mut(output_index)
|
||||||
|
.ok_or(DecodeSliceError::OutputSliceTooSmall)? = hi_byte;
|
||||||
|
output_index += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(DecodeMetadata::new(
|
||||||
|
output_index,
|
||||||
|
if padding_bytes_count > 0 {
|
||||||
|
Some(input_index + first_padding_offset)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
},
|
||||||
|
))
|
||||||
|
}
|
||||||
352
vendor/base64/src/engine/general_purpose/mod.rs
vendored
Normal file
|
|
@ -0,0 +1,352 @@
|
||||||
|
//! Provides the [GeneralPurpose] engine and associated config types.
|
||||||
|
use crate::{
|
||||||
|
alphabet,
|
||||||
|
alphabet::Alphabet,
|
||||||
|
engine::{Config, DecodeMetadata, DecodePaddingMode},
|
||||||
|
DecodeSliceError,
|
||||||
|
};
|
||||||
|
use core::convert::TryInto;
|
||||||
|
|
||||||
|
pub(crate) mod decode;
|
||||||
|
pub(crate) mod decode_suffix;
|
||||||
|
|
||||||
|
pub use decode::GeneralPurposeEstimate;
|
||||||
|
|
||||||
|
pub(crate) const INVALID_VALUE: u8 = 255;
|
||||||
|
|
||||||
|
/// A general-purpose base64 engine.
|
||||||
|
///
|
||||||
|
/// - It uses no vector CPU instructions, so it will work on any system.
|
||||||
|
/// - It is reasonably fast (~2-3GiB/s).
|
||||||
|
/// - It is not constant-time, though, so it is vulnerable to timing side-channel attacks. For loading cryptographic keys, etc, it is suggested to use the forthcoming constant-time implementation.
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct GeneralPurpose {
|
||||||
|
encode_table: [u8; 64],
|
||||||
|
decode_table: [u8; 256],
|
||||||
|
config: GeneralPurposeConfig,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GeneralPurpose {
|
||||||
|
/// Create a `GeneralPurpose` engine from an [Alphabet].
|
||||||
|
///
|
||||||
|
/// While not very expensive to initialize, ideally these should be cached
|
||||||
|
/// if the engine will be used repeatedly.
|
||||||
|
pub const fn new(alphabet: &Alphabet, config: GeneralPurposeConfig) -> Self {
|
||||||
|
Self {
|
||||||
|
encode_table: encode_table(alphabet),
|
||||||
|
decode_table: decode_table(alphabet),
|
||||||
|
config,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl super::Engine for GeneralPurpose {
|
||||||
|
type Config = GeneralPurposeConfig;
|
||||||
|
type DecodeEstimate = GeneralPurposeEstimate;
|
||||||
|
|
||||||
|
fn internal_encode(&self, input: &[u8], output: &mut [u8]) -> usize {
|
||||||
|
let mut input_index: usize = 0;
|
||||||
|
|
||||||
|
const BLOCKS_PER_FAST_LOOP: usize = 4;
|
||||||
|
const LOW_SIX_BITS: u64 = 0x3F;
|
||||||
|
|
||||||
|
// we read 8 bytes at a time (u64) but only actually consume 6 of those bytes. Thus, we need
|
||||||
|
// 2 trailing bytes to be available to read..
|
||||||
|
let last_fast_index = input.len().saturating_sub(BLOCKS_PER_FAST_LOOP * 6 + 2);
|
||||||
|
let mut output_index = 0;
|
||||||
|
|
||||||
|
if last_fast_index > 0 {
|
||||||
|
while input_index <= last_fast_index {
|
||||||
|
// Major performance wins from letting the optimizer do the bounds check once, mostly
|
||||||
|
// on the output side
|
||||||
|
let input_chunk =
|
||||||
|
&input[input_index..(input_index + (BLOCKS_PER_FAST_LOOP * 6 + 2))];
|
||||||
|
let output_chunk =
|
||||||
|
&mut output[output_index..(output_index + BLOCKS_PER_FAST_LOOP * 8)];
|
||||||
|
|
||||||
|
// Hand-unrolling for 32 vs 16 or 8 bytes yields performance about equivalent
|
||||||
|
// to unsafe pointer code on a Xeon E5-1650v3. 64 byte unrolling was slightly better for
|
||||||
|
// large inputs but significantly worse for 50-byte input, unsurprisingly. I suspect
|
||||||
|
// that it's a not uncommon use case to encode smallish chunks of data (e.g. a 64-byte
|
||||||
|
// SHA-512 digest), so it would be nice if that fit in the unrolled loop at least once.
|
||||||
|
// Plus, single-digit percentage performance differences might well be quite different
|
||||||
|
// on different hardware.
|
||||||
|
|
||||||
|
let input_u64 = read_u64(&input_chunk[0..]);
|
||||||
|
|
||||||
|
output_chunk[0] = self.encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[1] = self.encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[2] = self.encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[3] = self.encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[4] = self.encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[5] = self.encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[6] = self.encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[7] = self.encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];
|
||||||
|
|
||||||
|
let input_u64 = read_u64(&input_chunk[6..]);
|
||||||
|
|
||||||
|
output_chunk[8] = self.encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[9] = self.encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[10] = self.encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[11] = self.encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[12] = self.encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[13] = self.encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[14] = self.encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[15] = self.encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];
|
||||||
|
|
||||||
|
let input_u64 = read_u64(&input_chunk[12..]);
|
||||||
|
|
||||||
|
output_chunk[16] = self.encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[17] = self.encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[18] = self.encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[19] = self.encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[20] = self.encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[21] = self.encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[22] = self.encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[23] = self.encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];
|
||||||
|
|
||||||
|
let input_u64 = read_u64(&input_chunk[18..]);
|
||||||
|
|
||||||
|
output_chunk[24] = self.encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[25] = self.encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[26] = self.encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[27] = self.encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[28] = self.encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[29] = self.encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[30] = self.encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
|
||||||
|
output_chunk[31] = self.encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];
|
||||||
|
|
||||||
|
output_index += BLOCKS_PER_FAST_LOOP * 8;
|
||||||
|
input_index += BLOCKS_PER_FAST_LOOP * 6;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Encode what's left after the fast loop.
|
||||||
|
|
||||||
|
const LOW_SIX_BITS_U8: u8 = 0x3F;
|
||||||
|
|
||||||
|
let rem = input.len() % 3;
|
||||||
|
let start_of_rem = input.len() - rem;
|
||||||
|
|
||||||
|
// start at the first index not handled by fast loop, which may be 0.
|
||||||
|
|
||||||
|
while input_index < start_of_rem {
|
||||||
|
let input_chunk = &input[input_index..(input_index + 3)];
|
||||||
|
let output_chunk = &mut output[output_index..(output_index + 4)];
|
||||||
|
|
||||||
|
output_chunk[0] = self.encode_table[(input_chunk[0] >> 2) as usize];
|
||||||
|
output_chunk[1] = self.encode_table
|
||||||
|
[((input_chunk[0] << 4 | input_chunk[1] >> 4) & LOW_SIX_BITS_U8) as usize];
|
||||||
|
output_chunk[2] = self.encode_table
|
||||||
|
[((input_chunk[1] << 2 | input_chunk[2] >> 6) & LOW_SIX_BITS_U8) as usize];
|
||||||
|
output_chunk[3] = self.encode_table[(input_chunk[2] & LOW_SIX_BITS_U8) as usize];
|
||||||
|
|
||||||
|
input_index += 3;
|
||||||
|
output_index += 4;
|
||||||
|
}
|
||||||
|
|
||||||
|
if rem == 2 {
|
||||||
|
output[output_index] = self.encode_table[(input[start_of_rem] >> 2) as usize];
|
||||||
|
output[output_index + 1] =
|
||||||
|
self.encode_table[((input[start_of_rem] << 4 | input[start_of_rem + 1] >> 4)
|
||||||
|
& LOW_SIX_BITS_U8) as usize];
|
||||||
|
output[output_index + 2] =
|
||||||
|
self.encode_table[((input[start_of_rem + 1] << 2) & LOW_SIX_BITS_U8) as usize];
|
||||||
|
output_index += 3;
|
||||||
|
} else if rem == 1 {
|
||||||
|
            output[output_index] = self.encode_table[(input[start_of_rem] >> 2) as usize];
            output[output_index + 1] =
                self.encode_table[((input[start_of_rem] << 4) & LOW_SIX_BITS_U8) as usize];
            output_index += 2;
        }

        output_index
    }

    fn internal_decoded_len_estimate(&self, input_len: usize) -> Self::DecodeEstimate {
        GeneralPurposeEstimate::new(input_len)
    }

    fn internal_decode(
        &self,
        input: &[u8],
        output: &mut [u8],
        estimate: Self::DecodeEstimate,
    ) -> Result<DecodeMetadata, DecodeSliceError> {
        decode::decode_helper(
            input,
            estimate,
            output,
            &self.decode_table,
            self.config.decode_allow_trailing_bits,
            self.config.decode_padding_mode,
        )
    }

    fn config(&self) -> &Self::Config {
        &self.config
    }
}

/// Returns a table mapping a 6-bit index to the ASCII byte encoding of the index
pub(crate) const fn encode_table(alphabet: &Alphabet) -> [u8; 64] {
    // the encode table is just the alphabet:
    // 6-bit index lookup -> printable byte
    let mut encode_table = [0_u8; 64];
    {
        let mut index = 0;
        while index < 64 {
            encode_table[index] = alphabet.symbols[index];
            index += 1;
        }
    }

    encode_table
}

/// Returns a table mapping base64 bytes as the lookup index to either:
/// - [INVALID_VALUE] for bytes that aren't members of the alphabet
/// - a byte whose lower 6 bits are the value that was encoded into the index byte
pub(crate) const fn decode_table(alphabet: &Alphabet) -> [u8; 256] {
    let mut decode_table = [INVALID_VALUE; 256];

    // Since the table is full of `INVALID_VALUE` already, we only need to overwrite
    // the parts that are valid.
    let mut index = 0;
    while index < 64 {
        // The index in the alphabet is the 6-bit value we care about.
        // Since the index is in 0-63, it is safe to cast to u8.
        decode_table[alphabet.symbols[index] as usize] = index as u8;
        index += 1;
    }

    decode_table
}

#[inline]
fn read_u64(s: &[u8]) -> u64 {
    u64::from_be_bytes(s[..8].try_into().unwrap())
}

/// Contains configuration parameters for base64 encoding and decoding.
///
/// ```
/// # use base64::engine::GeneralPurposeConfig;
/// let config = GeneralPurposeConfig::new()
///     .with_encode_padding(false);
///     // further customize using `.with_*` methods as needed
/// ```
///
/// The constants [PAD] and [NO_PAD] cover most use cases.
///
/// To specify the characters used, see [Alphabet].
#[derive(Clone, Copy, Debug)]
pub struct GeneralPurposeConfig {
    encode_padding: bool,
    decode_allow_trailing_bits: bool,
    decode_padding_mode: DecodePaddingMode,
}

impl GeneralPurposeConfig {
    /// Create a new config with `padding` = `true`, `decode_allow_trailing_bits` = `false`, and
    /// `decode_padding_mode = DecodePaddingMode::RequireCanonicalPadding`.
    ///
    /// This probably matches most people's expectations, but consider disabling padding to save
    /// a few bytes unless you specifically need it for compatibility with some legacy system.
    pub const fn new() -> Self {
        Self {
            // RFC states that padding must be applied by default
            encode_padding: true,
            decode_allow_trailing_bits: false,
            decode_padding_mode: DecodePaddingMode::RequireCanonical,
        }
    }

    /// Create a new config based on `self` with an updated `padding` setting.
    ///
    /// If `padding` is `true`, encoding will append either 1 or 2 `=` padding characters as needed
    /// to produce an output whose length is a multiple of 4.
    ///
    /// Padding is not needed for correct decoding and only serves to waste bytes, but it's in the
    /// [spec](https://datatracker.ietf.org/doc/html/rfc4648#section-3.2).
    ///
    /// For new applications, consider not using padding if the decoders you're using don't require
    /// padding to be present.
    pub const fn with_encode_padding(self, padding: bool) -> Self {
        Self {
            encode_padding: padding,
            ..self
        }
    }

    /// Create a new config based on `self` with an updated `decode_allow_trailing_bits` setting.
    ///
    /// Most users will not need to configure this. It's useful if you need to decode base64
    /// produced by a buggy encoder that has bits set in the unused space on the last base64
    /// character as per [forgiving-base64 decode](https://infra.spec.whatwg.org/#forgiving-base64-decode).
    /// If invalid trailing bits are present and this is `true`, those bits will
    /// be silently ignored, else `DecodeError::InvalidLastSymbol` will be emitted.
    pub const fn with_decode_allow_trailing_bits(self, allow: bool) -> Self {
        Self {
            decode_allow_trailing_bits: allow,
            ..self
        }
    }

    /// Create a new config based on `self` with an updated `decode_padding_mode` setting.
    ///
    /// Padding is not useful in terms of representing encoded data -- it makes no difference to
    /// the decoder if padding is present or not, so if you have some un-padded input to decode, it
    /// is perfectly fine to use `DecodePaddingMode::Indifferent` to prevent errors from being
    /// emitted.
    ///
    /// However, since in practice
    /// [people who learned nothing from BER vs DER seem to expect base64 to have one canonical encoding](https://eprint.iacr.org/2022/361),
    /// the default setting is the stricter `DecodePaddingMode::RequireCanonicalPadding`.
    ///
    /// Or, if "canonical" in your circumstance means _no_ padding rather than padding to the
    /// next multiple of four, there's `DecodePaddingMode::RequireNoPadding`.
    pub const fn with_decode_padding_mode(self, mode: DecodePaddingMode) -> Self {
        Self {
            decode_padding_mode: mode,
            ..self
        }
    }
}

impl Default for GeneralPurposeConfig {
    /// Delegates to [GeneralPurposeConfig::new].
    fn default() -> Self {
        Self::new()
    }
}

impl Config for GeneralPurposeConfig {
    fn encode_padding(&self) -> bool {
        self.encode_padding
    }
}

/// A [GeneralPurpose] engine using the [alphabet::STANDARD] base64 alphabet and [PAD] config.
pub const STANDARD: GeneralPurpose = GeneralPurpose::new(&alphabet::STANDARD, PAD);

/// A [GeneralPurpose] engine using the [alphabet::STANDARD] base64 alphabet and [NO_PAD] config.
pub const STANDARD_NO_PAD: GeneralPurpose = GeneralPurpose::new(&alphabet::STANDARD, NO_PAD);

/// A [GeneralPurpose] engine using the [alphabet::URL_SAFE] base64 alphabet and [PAD] config.
pub const URL_SAFE: GeneralPurpose = GeneralPurpose::new(&alphabet::URL_SAFE, PAD);

/// A [GeneralPurpose] engine using the [alphabet::URL_SAFE] base64 alphabet and [NO_PAD] config.
pub const URL_SAFE_NO_PAD: GeneralPurpose = GeneralPurpose::new(&alphabet::URL_SAFE, NO_PAD);

/// Include padding bytes when encoding, and require that they be present when decoding.
///
/// This is the standard per the base64 RFC, but consider using [NO_PAD] instead as padding serves
/// little purpose in practice.
pub const PAD: GeneralPurposeConfig = GeneralPurposeConfig::new();

/// Don't add padding when encoding, and require no padding when decoding.
pub const NO_PAD: GeneralPurposeConfig = GeneralPurposeConfig::new()
    .with_encode_padding(false)
    .with_decode_padding_mode(DecodePaddingMode::RequireNone);
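
As a quick illustrative sketch (not part of the vendored file; the input bytes and names are arbitrary), the config builder and engine constants above combine into a custom engine like this:

```rust
use base64::{
    alphabet,
    engine::{DecodePaddingMode, GeneralPurpose, GeneralPurposeConfig},
    Engine as _,
};

fn main() {
    // Encode with padding, but accept padded or unpadded input when decoding.
    let lenient = GeneralPurpose::new(
        &alphabet::STANDARD,
        GeneralPurposeConfig::new()
            .with_encode_padding(true)
            .with_decode_padding_mode(DecodePaddingMode::Indifferent),
    );

    assert_eq!(lenient.encode(b"hi"), "aGk=");
    assert_eq!(lenient.decode("aGk").unwrap(), b"hi"); // unpadded input accepted
    assert_eq!(lenient.decode("aGk=").unwrap(), b"hi"); // padded input also accepted
}
```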

478  vendor/base64/src/engine/mod.rs  vendored  Normal file
@ -0,0 +1,478 @@
//! Provides the [Engine] abstraction and out of the box implementations.
#[cfg(any(feature = "alloc", test))]
use crate::chunked_encoder;
use crate::{
    encode::{encode_with_padding, EncodeSliceError},
    encoded_len, DecodeError, DecodeSliceError,
};
#[cfg(any(feature = "alloc", test))]
use alloc::vec::Vec;

#[cfg(any(feature = "alloc", test))]
use alloc::{string::String, vec};

pub mod general_purpose;

#[cfg(test)]
mod naive;

#[cfg(test)]
mod tests;

pub use general_purpose::{GeneralPurpose, GeneralPurposeConfig};
|
||||||
|
|
||||||
|
/// An `Engine` provides low-level encoding and decoding operations that all other higher-level parts of the API use. Users of the library will generally not need to implement this.
|
||||||
|
///
|
||||||
|
/// Different implementations offer different characteristics. The library currently ships with
|
||||||
|
/// [GeneralPurpose] that offers good speed and works on any CPU, with more choices
|
||||||
|
/// coming later, like a constant-time one when side channel resistance is called for, and vendor-specific vectorized ones for more speed.
|
||||||
|
///
|
||||||
|
/// See [general_purpose::STANDARD_NO_PAD] if you just want standard base64. Otherwise, when possible, it's
|
||||||
|
/// recommended to store the engine in a `const` so that references to it won't pose any lifetime
|
||||||
|
/// issues, and to avoid repeating the cost of engine setup.
|
||||||
|
///
|
||||||
|
/// Since almost nobody will need to implement `Engine`, docs for internal methods are hidden.
|
||||||
|
// When adding an implementation of Engine, include it in the engine test suite:
|
||||||
|
// - add an implementation of [engine::tests::EngineWrapper]
|
||||||
|
// - add the implementation to the `all_engines` macro
|
||||||
|
// All tests run on all engines listed in the macro.
|
||||||
|
pub trait Engine: Send + Sync {
|
||||||
|
/// The config type used by this engine
|
||||||
|
type Config: Config;
|
||||||
|
/// The decode estimate used by this engine
|
||||||
|
type DecodeEstimate: DecodeEstimate;
|
||||||
|
|
||||||
|
/// This is not meant to be called directly; it is only for `Engine` implementors.
|
||||||
|
/// See the other `encode*` functions on this trait.
|
||||||
|
///
|
||||||
|
/// Encode the `input` bytes into the `output` buffer based on the mapping in `encode_table`.
|
||||||
|
///
|
||||||
|
/// `output` will be long enough to hold the encoded data.
|
||||||
|
///
|
||||||
|
/// Returns the number of bytes written.
|
||||||
|
///
|
||||||
|
/// No padding should be written; that is handled separately.
|
||||||
|
///
|
||||||
|
/// Must not write any bytes into the output slice other than the encoded data.
|
||||||
|
#[doc(hidden)]
|
||||||
|
fn internal_encode(&self, input: &[u8], output: &mut [u8]) -> usize;
|
||||||
|
|
||||||
|
/// This is not meant to be called directly; it is only for `Engine` implementors.
|
||||||
|
///
|
||||||
|
/// As an optimization to prevent the decoded length from being calculated twice, it is
|
||||||
|
/// sometimes helpful to have a conservative estimate of the decoded size before doing the
|
||||||
|
/// decoding, so this calculation is done separately and passed to [Engine::decode()] as needed.
|
||||||
|
#[doc(hidden)]
|
||||||
|
fn internal_decoded_len_estimate(&self, input_len: usize) -> Self::DecodeEstimate;
|
||||||
|
|
||||||
|
/// This is not meant to be called directly; it is only for `Engine` implementors.
|
||||||
|
/// See the other `decode*` functions on this trait.
|
||||||
|
///
|
||||||
|
/// Decode `input` base64 bytes into the `output` buffer.
|
||||||
|
///
|
||||||
|
/// `decode_estimate` is the result of [Engine::internal_decoded_len_estimate()], which is passed in to avoid
|
||||||
|
/// calculating it again (expensive on short inputs).
|
||||||
|
///
|
||||||
|
/// Each complete 4-byte chunk of encoded data decodes to 3 bytes of decoded data, but this
|
||||||
|
/// function must also handle the final possibly partial chunk.
|
||||||
|
/// If the input length is not a multiple of 4, or uses padding bytes to reach a multiple of 4,
|
||||||
|
/// the trailing 2 or 3 bytes must decode to 1 or 2 bytes, respectively, as per the
|
||||||
|
/// [RFC](https://tools.ietf.org/html/rfc4648#section-3.5).
|
||||||
|
///
|
||||||
|
/// Decoding must not write any bytes into the output slice other than the decoded data.
|
||||||
|
///
|
||||||
|
/// Non-canonical trailing bits in the final tokens or non-canonical padding must be reported as
|
||||||
|
/// errors unless the engine is configured otherwise.
|
||||||
|
#[doc(hidden)]
|
||||||
|
fn internal_decode(
|
||||||
|
&self,
|
||||||
|
input: &[u8],
|
||||||
|
output: &mut [u8],
|
||||||
|
decode_estimate: Self::DecodeEstimate,
|
||||||
|
) -> Result<DecodeMetadata, DecodeSliceError>;
|
||||||
|
|
||||||
|
/// Returns the config for this engine.
|
||||||
|
fn config(&self) -> &Self::Config;
|
||||||
|
|
||||||
|
/// Encode arbitrary octets as base64 using the provided `Engine`.
|
||||||
|
/// Returns a `String`.
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```rust
|
||||||
|
/// use base64::{Engine as _, engine::{self, general_purpose}, alphabet};
|
||||||
|
///
|
||||||
|
/// let b64 = general_purpose::STANDARD.encode(b"hello world~");
|
||||||
|
/// println!("{}", b64);
|
||||||
|
///
|
||||||
|
/// const CUSTOM_ENGINE: engine::GeneralPurpose =
|
||||||
|
/// engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::NO_PAD);
|
||||||
|
///
|
||||||
|
/// let b64_url = CUSTOM_ENGINE.encode(b"hello internet~");
|
||||||
|
/// ```
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
#[inline]
|
||||||
|
fn encode<T: AsRef<[u8]>>(&self, input: T) -> String {
|
||||||
|
fn inner<E>(engine: &E, input_bytes: &[u8]) -> String
|
||||||
|
where
|
||||||
|
E: Engine + ?Sized,
|
||||||
|
{
|
||||||
|
let encoded_size = encoded_len(input_bytes.len(), engine.config().encode_padding())
|
||||||
|
.expect("integer overflow when calculating buffer size");
|
||||||
|
|
||||||
|
let mut buf = vec![0; encoded_size];
|
||||||
|
|
||||||
|
encode_with_padding(input_bytes, &mut buf[..], engine, encoded_size);
|
||||||
|
|
||||||
|
String::from_utf8(buf).expect("Invalid UTF8")
|
||||||
|
}
|
||||||
|
|
||||||
|
inner(self, input.as_ref())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Encode arbitrary octets as base64 into a supplied `String`.
|
||||||
|
/// Writes into the supplied `String`, which may allocate if its internal buffer isn't big enough.
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```rust
|
||||||
|
/// use base64::{Engine as _, engine::{self, general_purpose}, alphabet};
|
||||||
|
/// const CUSTOM_ENGINE: engine::GeneralPurpose =
|
||||||
|
/// engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::NO_PAD);
|
||||||
|
///
|
||||||
|
/// fn main() {
|
||||||
|
/// let mut buf = String::new();
|
||||||
|
/// general_purpose::STANDARD.encode_string(b"hello world~", &mut buf);
|
||||||
|
/// println!("{}", buf);
|
||||||
|
///
|
||||||
|
/// buf.clear();
|
||||||
|
/// CUSTOM_ENGINE.encode_string(b"hello internet~", &mut buf);
|
||||||
|
/// println!("{}", buf);
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
#[inline]
|
||||||
|
fn encode_string<T: AsRef<[u8]>>(&self, input: T, output_buf: &mut String) {
|
||||||
|
fn inner<E>(engine: &E, input_bytes: &[u8], output_buf: &mut String)
|
||||||
|
where
|
||||||
|
E: Engine + ?Sized,
|
||||||
|
{
|
||||||
|
let mut sink = chunked_encoder::StringSink::new(output_buf);
|
||||||
|
|
||||||
|
chunked_encoder::ChunkedEncoder::new(engine)
|
||||||
|
.encode(input_bytes, &mut sink)
|
||||||
|
.expect("Writing to a String shouldn't fail");
|
||||||
|
}
|
||||||
|
|
||||||
|
inner(self, input.as_ref(), output_buf)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Encode arbitrary octets as base64 into a supplied slice.
|
||||||
|
/// Writes into the supplied output buffer.
|
||||||
|
///
|
||||||
|
/// This is useful if you wish to avoid allocation entirely (e.g. encoding into a stack-resident
|
||||||
|
/// or statically-allocated buffer).
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
#[cfg_attr(feature = "alloc", doc = "```")]
|
||||||
|
#[cfg_attr(not(feature = "alloc"), doc = "```ignore")]
|
||||||
|
/// use base64::{Engine as _, engine::general_purpose};
|
||||||
|
/// let s = b"hello internet!";
|
||||||
|
/// let mut buf = Vec::new();
|
||||||
|
/// // make sure we'll have a slice big enough for base64 + padding
|
||||||
|
/// buf.resize(s.len() * 4 / 3 + 4, 0);
|
||||||
|
///
|
||||||
|
/// let bytes_written = general_purpose::STANDARD.encode_slice(s, &mut buf).unwrap();
|
||||||
|
///
|
||||||
|
/// // shorten our vec down to just what was written
|
||||||
|
/// buf.truncate(bytes_written);
|
||||||
|
///
|
||||||
|
/// assert_eq!(s, general_purpose::STANDARD.decode(&buf).unwrap().as_slice());
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
fn encode_slice<T: AsRef<[u8]>>(
|
||||||
|
&self,
|
||||||
|
input: T,
|
||||||
|
output_buf: &mut [u8],
|
||||||
|
) -> Result<usize, EncodeSliceError> {
|
||||||
|
fn inner<E>(
|
||||||
|
engine: &E,
|
||||||
|
input_bytes: &[u8],
|
||||||
|
output_buf: &mut [u8],
|
||||||
|
) -> Result<usize, EncodeSliceError>
|
||||||
|
where
|
||||||
|
E: Engine + ?Sized,
|
||||||
|
{
|
||||||
|
let encoded_size = encoded_len(input_bytes.len(), engine.config().encode_padding())
|
||||||
|
.expect("usize overflow when calculating buffer size");
|
||||||
|
|
||||||
|
if output_buf.len() < encoded_size {
|
||||||
|
return Err(EncodeSliceError::OutputSliceTooSmall);
|
||||||
|
}
|
||||||
|
|
||||||
|
let b64_output = &mut output_buf[0..encoded_size];
|
||||||
|
|
||||||
|
encode_with_padding(input_bytes, b64_output, engine, encoded_size);
|
||||||
|
|
||||||
|
Ok(encoded_size)
|
||||||
|
}
|
||||||
|
|
||||||
|
inner(self, input.as_ref(), output_buf)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decode the input into a new `Vec`.
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```rust
|
||||||
|
/// use base64::{Engine as _, alphabet, engine::{self, general_purpose}};
|
||||||
|
///
|
||||||
|
/// let bytes = general_purpose::STANDARD
|
||||||
|
/// .decode("aGVsbG8gd29ybGR+Cg==").unwrap();
|
||||||
|
/// println!("{:?}", bytes);
|
||||||
|
///
|
||||||
|
/// // custom engine setup
|
||||||
|
/// let bytes_url = engine::GeneralPurpose::new(
|
||||||
|
/// &alphabet::URL_SAFE,
|
||||||
|
/// general_purpose::NO_PAD)
|
||||||
|
/// .decode("aGVsbG8gaW50ZXJuZXR-Cg").unwrap();
|
||||||
|
/// println!("{:?}", bytes_url);
|
||||||
|
/// ```
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
#[inline]
|
||||||
|
fn decode<T: AsRef<[u8]>>(&self, input: T) -> Result<Vec<u8>, DecodeError> {
|
||||||
|
fn inner<E>(engine: &E, input_bytes: &[u8]) -> Result<Vec<u8>, DecodeError>
|
||||||
|
where
|
||||||
|
E: Engine + ?Sized,
|
||||||
|
{
|
||||||
|
let estimate = engine.internal_decoded_len_estimate(input_bytes.len());
|
||||||
|
let mut buffer = vec![0; estimate.decoded_len_estimate()];
|
||||||
|
|
||||||
|
let bytes_written = engine
|
||||||
|
.internal_decode(input_bytes, &mut buffer, estimate)
|
||||||
|
.map_err(|e| match e {
|
||||||
|
DecodeSliceError::DecodeError(e) => e,
|
||||||
|
DecodeSliceError::OutputSliceTooSmall => {
|
||||||
|
unreachable!("Vec is sized conservatively")
|
||||||
|
}
|
||||||
|
})?
|
||||||
|
.decoded_len;
|
||||||
|
|
||||||
|
buffer.truncate(bytes_written);
|
||||||
|
|
||||||
|
Ok(buffer)
|
||||||
|
}
|
||||||
|
|
||||||
|
inner(self, input.as_ref())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decode the `input` into the supplied `buffer`.
|
||||||
|
///
|
||||||
|
/// Writes into the supplied `Vec`, which may allocate if its internal buffer isn't big enough.
|
||||||
|
/// Returns a `Result` containing an empty tuple, aka `()`.
|
||||||
|
///
|
||||||
|
/// # Example
|
||||||
|
///
|
||||||
|
/// ```rust
|
||||||
|
/// use base64::{Engine as _, alphabet, engine::{self, general_purpose}};
|
||||||
|
/// const CUSTOM_ENGINE: engine::GeneralPurpose =
|
||||||
|
/// engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::PAD);
|
||||||
|
///
|
||||||
|
/// fn main() {
|
||||||
|
/// use base64::Engine;
|
||||||
|
/// let mut buffer = Vec::<u8>::new();
|
||||||
|
/// // with the default engine
|
||||||
|
/// general_purpose::STANDARD
|
||||||
|
/// .decode_vec("aGVsbG8gd29ybGR+Cg==", &mut buffer,).unwrap();
|
||||||
|
/// println!("{:?}", buffer);
|
||||||
|
///
|
||||||
|
/// buffer.clear();
|
||||||
|
///
|
||||||
|
/// // with a custom engine
|
||||||
|
/// CUSTOM_ENGINE.decode_vec(
|
||||||
|
/// "aGVsbG8gaW50ZXJuZXR-Cg==",
|
||||||
|
/// &mut buffer,
|
||||||
|
/// ).unwrap();
|
||||||
|
/// println!("{:?}", buffer);
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
#[inline]
|
||||||
|
fn decode_vec<T: AsRef<[u8]>>(
|
||||||
|
&self,
|
||||||
|
input: T,
|
||||||
|
buffer: &mut Vec<u8>,
|
||||||
|
) -> Result<(), DecodeError> {
|
||||||
|
fn inner<E>(engine: &E, input_bytes: &[u8], buffer: &mut Vec<u8>) -> Result<(), DecodeError>
|
||||||
|
where
|
||||||
|
E: Engine + ?Sized,
|
||||||
|
{
|
||||||
|
let starting_output_len = buffer.len();
|
||||||
|
let estimate = engine.internal_decoded_len_estimate(input_bytes.len());
|
||||||
|
|
||||||
|
let total_len_estimate = estimate
|
||||||
|
.decoded_len_estimate()
|
||||||
|
.checked_add(starting_output_len)
|
||||||
|
.expect("Overflow when calculating output buffer length");
|
||||||
|
|
||||||
|
buffer.resize(total_len_estimate, 0);
|
||||||
|
|
||||||
|
let buffer_slice = &mut buffer.as_mut_slice()[starting_output_len..];
|
||||||
|
|
||||||
|
let bytes_written = engine
|
||||||
|
.internal_decode(input_bytes, buffer_slice, estimate)
|
||||||
|
.map_err(|e| match e {
|
||||||
|
DecodeSliceError::DecodeError(e) => e,
|
||||||
|
DecodeSliceError::OutputSliceTooSmall => {
|
||||||
|
unreachable!("Vec is sized conservatively")
|
||||||
|
}
|
||||||
|
})?
|
||||||
|
.decoded_len;
|
||||||
|
|
||||||
|
buffer.truncate(starting_output_len + bytes_written);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
inner(self, input.as_ref(), buffer)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decode the input into the provided output slice.
|
||||||
|
///
|
||||||
|
/// Returns the number of bytes written to the slice, or an error if `output` is smaller than
|
||||||
|
/// the estimated decoded length.
|
||||||
|
///
|
||||||
|
/// This will not write any bytes past exactly what is decoded (no stray garbage bytes at the end).
|
||||||
|
///
|
||||||
|
/// See [crate::decoded_len_estimate] for calculating buffer sizes.
|
||||||
|
///
|
||||||
|
/// See [Engine::decode_slice_unchecked] for a version that panics instead of returning an error
|
||||||
|
/// if the output buffer is too small.
|
||||||
|
#[inline]
|
||||||
|
fn decode_slice<T: AsRef<[u8]>>(
|
||||||
|
&self,
|
||||||
|
input: T,
|
||||||
|
output: &mut [u8],
|
||||||
|
) -> Result<usize, DecodeSliceError> {
|
||||||
|
fn inner<E>(
|
||||||
|
engine: &E,
|
||||||
|
input_bytes: &[u8],
|
||||||
|
output: &mut [u8],
|
||||||
|
) -> Result<usize, DecodeSliceError>
|
||||||
|
where
|
||||||
|
E: Engine + ?Sized,
|
||||||
|
{
|
||||||
|
engine
|
||||||
|
.internal_decode(
|
||||||
|
input_bytes,
|
||||||
|
output,
|
||||||
|
engine.internal_decoded_len_estimate(input_bytes.len()),
|
||||||
|
)
|
||||||
|
.map(|dm| dm.decoded_len)
|
||||||
|
}
|
||||||
|
|
||||||
|
inner(self, input.as_ref(), output)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decode the input into the provided output slice.
|
||||||
|
///
|
||||||
|
/// Returns the number of bytes written to the slice.
|
||||||
|
///
|
||||||
|
/// This will not write any bytes past exactly what is decoded (no stray garbage bytes at the end).
|
||||||
|
///
|
||||||
|
/// See [crate::decoded_len_estimate] for calculating buffer sizes.
|
||||||
|
///
|
||||||
|
/// See [Engine::decode_slice] for a version that returns an error instead of panicking if the output
|
||||||
|
/// buffer is too small.
|
||||||
|
///
|
||||||
|
/// # Panics
|
||||||
|
///
|
||||||
|
/// Panics if the provided output buffer is too small for the decoded data.
|
||||||
|
#[inline]
|
||||||
|
fn decode_slice_unchecked<T: AsRef<[u8]>>(
|
||||||
|
&self,
|
||||||
|
input: T,
|
||||||
|
output: &mut [u8],
|
||||||
|
) -> Result<usize, DecodeError> {
|
||||||
|
fn inner<E>(engine: &E, input_bytes: &[u8], output: &mut [u8]) -> Result<usize, DecodeError>
|
||||||
|
where
|
||||||
|
E: Engine + ?Sized,
|
||||||
|
{
|
||||||
|
engine
|
||||||
|
.internal_decode(
|
||||||
|
input_bytes,
|
||||||
|
output,
|
||||||
|
engine.internal_decoded_len_estimate(input_bytes.len()),
|
||||||
|
)
|
||||||
|
.map(|dm| dm.decoded_len)
|
||||||
|
.map_err(|e| match e {
|
||||||
|
DecodeSliceError::DecodeError(e) => e,
|
||||||
|
DecodeSliceError::OutputSliceTooSmall => {
|
||||||
|
panic!("Output slice is too small")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
inner(self, input.as_ref(), output)
|
||||||
|
}
|
||||||
|
}

/// The minimal level of configuration that engines must support.
pub trait Config {
    /// Returns `true` if padding should be added after the encoded output.
    ///
    /// Padding is added outside the engine's encode() since the engine may be used
    /// to encode only a chunk of the overall output, so it can't always know when
    /// the output is "done" and would therefore need padding (if configured).
    // It could be provided as a separate parameter when encoding, but that feels like
    // leaking an implementation detail to the user, and it's hopefully more convenient
    // to have to only pass one thing (the engine) to any part of the API.
    fn encode_padding(&self) -> bool;
}

/// The decode estimate used by an engine implementation. Users do not need to interact with this;
/// it is only for engine implementors.
///
/// Implementors may store relevant data here when constructing this to avoid having to calculate
/// them again during actual decoding.
pub trait DecodeEstimate {
    /// Returns a conservative (err on the side of too big) estimate of the decoded length to use
    /// for pre-allocating buffers, etc.
    ///
    /// The estimate must be no larger than the next largest complete triple of decoded bytes.
    /// That is, the final quad of tokens to decode may be assumed to be complete with no padding.
    fn decoded_len_estimate(&self) -> usize;
}

/// Controls how pad bytes are handled when decoding.
///
/// Each [Engine] must support at least the behavior indicated by
/// [DecodePaddingMode::RequireCanonical], and may support other modes.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DecodePaddingMode {
    /// Canonical padding is allowed, but any fewer padding bytes than that is also allowed.
    Indifferent,
    /// Padding must be canonical (0, 1, or 2 `=` as needed to produce a 4 byte suffix).
    RequireCanonical,
    /// Padding must be absent -- for when you want predictable padding, without any wasted bytes.
    RequireNone,
}

/// Metadata about the result of a decode operation
#[derive(PartialEq, Eq, Debug)]
pub struct DecodeMetadata {
    /// Number of decoded bytes output
    pub(crate) decoded_len: usize,
    /// Offset of the first padding byte in the input, if any
    pub(crate) padding_offset: Option<usize>,
}

impl DecodeMetadata {
    pub(crate) fn new(decoded_bytes: usize, padding_index: Option<usize>) -> Self {
        Self {
            decoded_len: decoded_bytes,
            padding_offset: padding_index,
        }
    }
}
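
For orientation only (not part of the diff; the input string is an arbitrary example), this is how the `DecodeEstimate`-driven slice decoding above looks from the caller's side, using the crate's `decoded_len_estimate` helper to size the output buffer:

```rust
use base64::{decoded_len_estimate, engine::general_purpose::STANDARD, Engine as _};

fn main() {
    let b64 = "aGVsbG8gd29ybGQ=";

    // Size the output from the conservative estimate, then decode into it in place.
    let mut buf = vec![0u8; decoded_len_estimate(b64.len())];
    let written = STANDARD.decode_slice(b64, &mut buf).unwrap();
    buf.truncate(written);

    assert_eq!(buf, b"hello world");
}
```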

195  vendor/base64/src/engine/naive.rs  vendored  Normal file
@ -0,0 +1,195 @@
use crate::{
|
||||||
|
alphabet::Alphabet,
|
||||||
|
engine::{
|
||||||
|
general_purpose::{self, decode_table, encode_table},
|
||||||
|
Config, DecodeEstimate, DecodeMetadata, DecodePaddingMode, Engine,
|
||||||
|
},
|
||||||
|
DecodeError, DecodeSliceError,
|
||||||
|
};
|
||||||
|
use std::ops::{BitAnd, BitOr, Shl, Shr};
|
||||||
|
|
||||||
|
/// Comparatively simple implementation that can be used as something to compare against in tests
|
||||||
|
pub struct Naive {
|
||||||
|
encode_table: [u8; 64],
|
||||||
|
decode_table: [u8; 256],
|
||||||
|
config: NaiveConfig,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Naive {
|
||||||
|
const ENCODE_INPUT_CHUNK_SIZE: usize = 3;
|
||||||
|
const DECODE_INPUT_CHUNK_SIZE: usize = 4;
|
||||||
|
|
||||||
|
pub const fn new(alphabet: &Alphabet, config: NaiveConfig) -> Self {
|
||||||
|
Self {
|
||||||
|
encode_table: encode_table(alphabet),
|
||||||
|
decode_table: decode_table(alphabet),
|
||||||
|
config,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn decode_byte_into_u32(&self, offset: usize, byte: u8) -> Result<u32, DecodeError> {
|
||||||
|
let decoded = self.decode_table[byte as usize];
|
||||||
|
|
||||||
|
if decoded == general_purpose::INVALID_VALUE {
|
||||||
|
return Err(DecodeError::InvalidByte(offset, byte));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(decoded as u32)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Engine for Naive {
|
||||||
|
type Config = NaiveConfig;
|
||||||
|
type DecodeEstimate = NaiveEstimate;
|
||||||
|
|
||||||
|
fn internal_encode(&self, input: &[u8], output: &mut [u8]) -> usize {
|
||||||
|
// complete chunks first
|
||||||
|
|
||||||
|
const LOW_SIX_BITS: u32 = 0x3F;
|
||||||
|
|
||||||
|
let rem = input.len() % Self::ENCODE_INPUT_CHUNK_SIZE;
|
||||||
|
// will never underflow
|
||||||
|
let complete_chunk_len = input.len() - rem;
|
||||||
|
|
||||||
|
let mut input_index = 0_usize;
|
||||||
|
let mut output_index = 0_usize;
|
||||||
|
if let Some(last_complete_chunk_index) =
|
||||||
|
complete_chunk_len.checked_sub(Self::ENCODE_INPUT_CHUNK_SIZE)
|
||||||
|
{
|
||||||
|
while input_index <= last_complete_chunk_index {
|
||||||
|
let chunk = &input[input_index..input_index + Self::ENCODE_INPUT_CHUNK_SIZE];
|
||||||
|
|
||||||
|
// populate low 24 bits from 3 bytes
|
||||||
|
let chunk_int: u32 =
|
||||||
|
(chunk[0] as u32).shl(16) | (chunk[1] as u32).shl(8) | (chunk[2] as u32);
|
||||||
|
// encode 4x 6-bit output bytes
|
||||||
|
output[output_index] = self.encode_table[chunk_int.shr(18) as usize];
|
||||||
|
output[output_index + 1] =
|
||||||
|
self.encode_table[chunk_int.shr(12_u8).bitand(LOW_SIX_BITS) as usize];
|
||||||
|
output[output_index + 2] =
|
||||||
|
self.encode_table[chunk_int.shr(6_u8).bitand(LOW_SIX_BITS) as usize];
|
||||||
|
output[output_index + 3] =
|
||||||
|
self.encode_table[chunk_int.bitand(LOW_SIX_BITS) as usize];
|
||||||
|
|
||||||
|
input_index += Self::ENCODE_INPUT_CHUNK_SIZE;
|
||||||
|
output_index += 4;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// then leftovers
|
||||||
|
if rem == 2 {
|
||||||
|
let chunk = &input[input_index..input_index + 2];
|
||||||
|
|
||||||
|
// high six bits of chunk[0]
|
||||||
|
output[output_index] = self.encode_table[chunk[0].shr(2) as usize];
|
||||||
|
// bottom 2 bits of [0], high 4 bits of [1]
|
||||||
|
output[output_index + 1] =
|
||||||
|
self.encode_table[(chunk[0].shl(4_u8).bitor(chunk[1].shr(4_u8)) as u32)
|
||||||
|
.bitand(LOW_SIX_BITS) as usize];
|
||||||
|
// bottom 4 bits of [1], with the 2 bottom bits as zero
|
||||||
|
output[output_index + 2] =
|
||||||
|
self.encode_table[(chunk[1].shl(2_u8) as u32).bitand(LOW_SIX_BITS) as usize];
|
||||||
|
|
||||||
|
output_index += 3;
|
||||||
|
} else if rem == 1 {
|
||||||
|
let byte = input[input_index];
|
||||||
|
output[output_index] = self.encode_table[byte.shr(2) as usize];
|
||||||
|
output[output_index + 1] =
|
||||||
|
self.encode_table[(byte.shl(4_u8) as u32).bitand(LOW_SIX_BITS) as usize];
|
||||||
|
output_index += 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
output_index
|
||||||
|
}
|
||||||
|
|
||||||
|
fn internal_decoded_len_estimate(&self, input_len: usize) -> Self::DecodeEstimate {
|
||||||
|
NaiveEstimate::new(input_len)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn internal_decode(
|
||||||
|
&self,
|
||||||
|
input: &[u8],
|
||||||
|
output: &mut [u8],
|
||||||
|
estimate: Self::DecodeEstimate,
|
||||||
|
) -> Result<DecodeMetadata, DecodeSliceError> {
|
||||||
|
let complete_nonterminal_quads_len = general_purpose::decode::complete_quads_len(
|
||||||
|
input,
|
||||||
|
estimate.rem,
|
||||||
|
output.len(),
|
||||||
|
&self.decode_table,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
const BOTTOM_BYTE: u32 = 0xFF;
|
||||||
|
|
||||||
|
for (chunk_index, chunk) in input[..complete_nonterminal_quads_len]
|
||||||
|
.chunks_exact(4)
|
||||||
|
.enumerate()
|
||||||
|
{
|
||||||
|
let input_index = chunk_index * 4;
|
||||||
|
let output_index = chunk_index * 3;
|
||||||
|
|
||||||
|
let decoded_int: u32 = self.decode_byte_into_u32(input_index, chunk[0])?.shl(18)
|
||||||
|
| self
|
||||||
|
.decode_byte_into_u32(input_index + 1, chunk[1])?
|
||||||
|
.shl(12)
|
||||||
|
| self.decode_byte_into_u32(input_index + 2, chunk[2])?.shl(6)
|
||||||
|
| self.decode_byte_into_u32(input_index + 3, chunk[3])?;
|
||||||
|
|
||||||
|
output[output_index] = decoded_int.shr(16_u8).bitand(BOTTOM_BYTE) as u8;
|
||||||
|
output[output_index + 1] = decoded_int.shr(8_u8).bitand(BOTTOM_BYTE) as u8;
|
||||||
|
output[output_index + 2] = decoded_int.bitand(BOTTOM_BYTE) as u8;
|
||||||
|
}
|
||||||
|
|
||||||
|
general_purpose::decode_suffix::decode_suffix(
|
||||||
|
input,
|
||||||
|
complete_nonterminal_quads_len,
|
||||||
|
output,
|
||||||
|
complete_nonterminal_quads_len / 4 * 3,
|
||||||
|
&self.decode_table,
|
||||||
|
self.config.decode_allow_trailing_bits,
|
||||||
|
self.config.decode_padding_mode,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn config(&self) -> &Self::Config {
|
||||||
|
&self.config
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct NaiveEstimate {
|
||||||
|
/// remainder from dividing input by `Naive::DECODE_CHUNK_SIZE`
|
||||||
|
rem: usize,
|
||||||
|
/// Length of input that is in complete `Naive::DECODE_CHUNK_SIZE`-length chunks
|
||||||
|
complete_chunk_len: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl NaiveEstimate {
|
||||||
|
fn new(input_len: usize) -> Self {
|
||||||
|
let rem = input_len % Naive::DECODE_INPUT_CHUNK_SIZE;
|
||||||
|
let complete_chunk_len = input_len - rem;
|
||||||
|
|
||||||
|
Self {
|
||||||
|
rem,
|
||||||
|
complete_chunk_len,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DecodeEstimate for NaiveEstimate {
|
||||||
|
fn decoded_len_estimate(&self) -> usize {
|
||||||
|
((self.complete_chunk_len / 4) + ((self.rem > 0) as usize)) * 3
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug)]
|
||||||
|
pub struct NaiveConfig {
|
||||||
|
pub encode_padding: bool,
|
||||||
|
pub decode_allow_trailing_bits: bool,
|
||||||
|
pub decode_padding_mode: DecodePaddingMode,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Config for NaiveConfig {
|
||||||
|
fn encode_padding(&self) -> bool {
|
||||||
|
self.encode_padding
|
||||||
|
}
|
||||||
|
}

1579  vendor/base64/src/engine/tests.rs  vendored  Normal file
File diff suppressed because it is too large.

277  vendor/base64/src/lib.rs  vendored  Normal file
@ -0,0 +1,277 @@
//! Correct, fast, and configurable [base64][] decoding and encoding. Base64
|
||||||
|
//! transports binary data efficiently in contexts where only plain text is
|
||||||
|
//! allowed.
|
||||||
|
//!
|
||||||
|
//! [base64]: https://developer.mozilla.org/en-US/docs/Glossary/Base64
|
||||||
|
//!
|
||||||
|
//! # Usage
|
||||||
|
//!
|
||||||
|
//! Use an [`Engine`] to decode or encode base64, configured with the base64
|
||||||
|
//! alphabet and padding behavior best suited to your application.
|
||||||
|
//!
|
||||||
|
//! ## Engine setup
|
||||||
|
//!
|
||||||
|
//! There is more than one way to encode a stream of bytes as “base64”.
|
||||||
|
//! Different applications use different encoding
|
||||||
|
//! [alphabets][alphabet::Alphabet] and
|
||||||
|
//! [padding behaviors][engine::general_purpose::GeneralPurposeConfig].
|
||||||
|
//!
|
||||||
|
//! ### Encoding alphabet
|
||||||
|
//!
|
||||||
|
//! Almost all base64 [alphabets][alphabet::Alphabet] use `A-Z`, `a-z`, and
|
||||||
|
//! `0-9`, which gives nearly 64 characters (26 + 26 + 10 = 62), but they differ
|
||||||
|
//! in their choice of their final 2.
|
||||||
|
//!
|
||||||
|
//! Most applications use the [standard][alphabet::STANDARD] alphabet specified
|
||||||
|
//! in [RFC 4648][rfc-alphabet]. If that’s all you need, you can get started
|
||||||
|
//! quickly by using the pre-configured
|
||||||
|
//! [`STANDARD`][engine::general_purpose::STANDARD] engine, which is also available
|
||||||
|
//! in the [`prelude`] module as shown here, if you prefer a minimal `use`
|
||||||
|
//! footprint.
|
||||||
|
//!
|
||||||
|
#![cfg_attr(feature = "alloc", doc = "```")]
|
||||||
|
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
|
||||||
|
//! use base64::prelude::*;
|
||||||
|
//!
|
||||||
|
//! # fn main() -> Result<(), base64::DecodeError> {
|
||||||
|
//! assert_eq!(BASE64_STANDARD.decode(b"+uwgVQA=")?, b"\xFA\xEC\x20\x55\0");
|
||||||
|
//! assert_eq!(BASE64_STANDARD.encode(b"\xFF\xEC\x20\x55\0"), "/+wgVQA=");
|
||||||
|
//! # Ok(())
|
||||||
|
//! # }
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! [rfc-alphabet]: https://datatracker.ietf.org/doc/html/rfc4648#section-4
|
||||||
|
//!
|
||||||
|
//! Other common alphabets are available in the [`alphabet`] module.
|
||||||
|
//!
|
||||||
|
//! #### URL-safe alphabet
|
||||||
|
//!
|
||||||
|
//! The standard alphabet uses `+` and `/` as its two non-alphanumeric tokens,
|
||||||
|
//! which cannot be safely used in URL’s without encoding them as `%2B` and
|
||||||
|
//! `%2F`.
|
||||||
|
//!
|
||||||
|
//! To avoid that, some applications use a [“URL-safe” alphabet][alphabet::URL_SAFE],
|
||||||
|
//! which uses `-` and `_` instead. To use that alternative alphabet, use the
|
||||||
|
//! [`URL_SAFE`][engine::general_purpose::URL_SAFE] engine. This example doesn't
|
||||||
|
//! use [`prelude`] to show what a more explicit `use` would look like.
|
||||||
|
//!
|
||||||
|
#![cfg_attr(feature = "alloc", doc = "```")]
|
||||||
|
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
|
||||||
|
//! use base64::{engine::general_purpose::URL_SAFE, Engine as _};
|
||||||
|
//!
|
||||||
|
//! # fn main() -> Result<(), base64::DecodeError> {
|
||||||
|
//! assert_eq!(URL_SAFE.decode(b"-uwgVQA=")?, b"\xFA\xEC\x20\x55\0");
|
||||||
|
//! assert_eq!(URL_SAFE.encode(b"\xFF\xEC\x20\x55\0"), "_-wgVQA=");
|
||||||
|
//! # Ok(())
|
||||||
|
//! # }
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! ### Padding characters
|
||||||
|
//!
|
||||||
|
//! Each base64 character represents 6 bits (2⁶ = 64) of the original binary
|
||||||
|
//! data, and every 3 bytes of input binary data will encode to 4 base64
|
||||||
|
//! characters (8 bits × 3 = 6 bits × 4 = 24 bits).
|
||||||
|
//!
|
||||||
|
//! When the input is not an even multiple of 3 bytes in length, [canonical][]
|
||||||
|
//! base64 encoders insert padding characters at the end, so that the output
|
||||||
|
//! length is always a multiple of 4:
|
||||||
|
//!
|
||||||
|
//! [canonical]: https://datatracker.ietf.org/doc/html/rfc4648#section-3.5
|
||||||
|
//!
|
||||||
|
#![cfg_attr(feature = "alloc", doc = "```")]
|
||||||
|
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
|
||||||
|
//! use base64::{engine::general_purpose::STANDARD, Engine as _};
|
||||||
|
//!
|
||||||
|
//! assert_eq!(STANDARD.encode(b""), "");
|
||||||
|
//! assert_eq!(STANDARD.encode(b"f"), "Zg==");
|
||||||
|
//! assert_eq!(STANDARD.encode(b"fo"), "Zm8=");
|
||||||
|
//! assert_eq!(STANDARD.encode(b"foo"), "Zm9v");
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! Canonical encoding ensures that base64 encodings will be exactly the same,
|
||||||
|
//! byte-for-byte, regardless of input length. But the `=` padding characters
|
||||||
|
//! aren’t necessary for decoding, and they may be omitted by using a
|
||||||
|
//! [`NO_PAD`][engine::general_purpose::NO_PAD] configuration:
|
||||||
|
//!
|
||||||
|
#![cfg_attr(feature = "alloc", doc = "```")]
|
||||||
|
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
|
||||||
|
//! use base64::{engine::general_purpose::STANDARD_NO_PAD, Engine as _};
|
||||||
|
//!
|
||||||
|
//! assert_eq!(STANDARD_NO_PAD.encode(b""), "");
|
||||||
|
//! assert_eq!(STANDARD_NO_PAD.encode(b"f"), "Zg");
|
||||||
|
//! assert_eq!(STANDARD_NO_PAD.encode(b"fo"), "Zm8");
|
||||||
|
//! assert_eq!(STANDARD_NO_PAD.encode(b"foo"), "Zm9v");
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! The pre-configured `NO_PAD` engines will reject inputs containing padding
|
||||||
|
//! `=` characters. To encode without padding and still accept padding while
|
||||||
|
//! decoding, create an [engine][engine::general_purpose::GeneralPurpose] with
|
||||||
|
//! that [padding mode][engine::DecodePaddingMode].
|
||||||
|
//!
|
||||||
|
#![cfg_attr(feature = "alloc", doc = "```")]
|
||||||
|
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
|
||||||
|
//! # use base64::{engine::general_purpose::STANDARD_NO_PAD, Engine as _};
|
||||||
|
//! assert_eq!(STANDARD_NO_PAD.decode(b"Zm8="), Err(base64::DecodeError::InvalidPadding));
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! ### Further customization
|
||||||
|
//!
|
||||||
|
//! Decoding and encoding behavior can be customized by creating an
|
||||||
|
//! [engine][engine::GeneralPurpose] with an [alphabet][alphabet::Alphabet] and
|
||||||
|
//! [padding configuration][engine::GeneralPurposeConfig]:
|
||||||
|
//!
|
||||||
|
#![cfg_attr(feature = "alloc", doc = "```")]
|
||||||
|
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
|
||||||
|
//! use base64::{engine, alphabet, Engine as _};
|
||||||
|
//!
|
||||||
|
//! // bizarro-world base64: +/ as the first symbols instead of the last
|
||||||
|
//! let alphabet =
|
||||||
|
//! alphabet::Alphabet::new("+/ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")
|
||||||
|
//! .unwrap();
|
||||||
|
//!
|
||||||
|
//! // a very weird config that encodes with padding but requires no padding when decoding...?
|
||||||
|
//! let crazy_config = engine::GeneralPurposeConfig::new()
|
||||||
|
//! .with_decode_allow_trailing_bits(true)
|
||||||
|
//! .with_encode_padding(true)
|
||||||
|
//! .with_decode_padding_mode(engine::DecodePaddingMode::RequireNone);
|
||||||
|
//!
|
||||||
|
//! let crazy_engine = engine::GeneralPurpose::new(&alphabet, crazy_config);
|
||||||
|
//!
|
||||||
|
//! let encoded = crazy_engine.encode(b"abc 123");
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! ## Memory allocation
|
||||||
|
//!
|
||||||
|
//! The [decode][Engine::decode()] and [encode][Engine::encode()] engine methods
|
||||||
|
//! allocate memory for their results – `decode` returns a `Vec<u8>` and
|
||||||
|
//! `encode` returns a `String`. To instead decode or encode into a buffer that
|
||||||
|
//! you allocated, use one of the alternative methods:
|
||||||
|
//!
|
||||||
|
//! #### Decoding
|
||||||
|
//!
|
||||||
|
//! | Method | Output | Allocates memory |
|
||||||
|
//! | -------------------------- | ----------------------------- | ----------------------------- |
|
||||||
|
//! | [`Engine::decode`] | returns a new `Vec<u8>` | always |
|
||||||
|
//! | [`Engine::decode_vec`] | appends to provided `Vec<u8>` | if `Vec` lacks capacity |
|
||||||
|
//! | [`Engine::decode_slice`] | writes to provided `&[u8]` | never
|
||||||
|
//!
|
||||||
|
//! #### Encoding
|
||||||
|
//!
|
||||||
|
//! | Method | Output | Allocates memory |
|
||||||
|
//! | -------------------------- | ---------------------------- | ------------------------------ |
|
||||||
|
//! | [`Engine::encode`] | returns a new `String` | always |
|
||||||
|
//! | [`Engine::encode_string`] | appends to provided `String` | if `String` lacks capacity |
|
||||||
|
//! | [`Engine::encode_slice`] | writes to provided `&[u8]` | never |
|
||||||
|
//!
|
||||||
|
//! ## Input and output
|
||||||
|
//!
|
||||||
|
//! The `base64` crate can [decode][Engine::decode()] and
|
||||||
|
//! [encode][Engine::encode()] values in memory, or
|
||||||
|
//! [`DecoderReader`][read::DecoderReader] and
|
||||||
|
//! [`EncoderWriter`][write::EncoderWriter] provide streaming decoding and
|
||||||
|
//! encoding for any [readable][std::io::Read] or [writable][std::io::Write]
|
||||||
|
//! byte stream.
|
||||||
|
//!
|
||||||
|
//! #### Decoding
|
||||||
|
//!
|
||||||
|
#![cfg_attr(feature = "std", doc = "```")]
|
||||||
|
#![cfg_attr(not(feature = "std"), doc = "```ignore")]
|
||||||
|
//! # use std::io;
|
||||||
|
//! use base64::{engine::general_purpose::STANDARD, read::DecoderReader};
|
||||||
|
//!
|
||||||
|
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
//! let mut input = io::stdin();
|
||||||
|
//! let mut decoder = DecoderReader::new(&mut input, &STANDARD);
|
||||||
|
//! io::copy(&mut decoder, &mut io::stdout())?;
|
||||||
|
//! # Ok(())
|
||||||
|
//! # }
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! #### Encoding
|
||||||
|
//!
|
||||||
|
#![cfg_attr(feature = "std", doc = "```")]
|
||||||
|
#![cfg_attr(not(feature = "std"), doc = "```ignore")]
|
||||||
|
//! # use std::io;
|
||||||
|
//! use base64::{engine::general_purpose::STANDARD, write::EncoderWriter};
|
||||||
|
//!
|
||||||
|
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
//! let mut output = io::stdout();
|
||||||
|
//! let mut encoder = EncoderWriter::new(&mut output, &STANDARD);
|
||||||
|
//! io::copy(&mut io::stdin(), &mut encoder)?;
|
||||||
|
//! # Ok(())
|
||||||
|
//! # }
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! #### Display
|
||||||
|
//!
|
||||||
|
//! If you only need a base64 representation for implementing the
|
||||||
|
//! [`Display`][std::fmt::Display] trait, use
|
||||||
|
//! [`Base64Display`][display::Base64Display]:
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//! use base64::{display::Base64Display, engine::general_purpose::STANDARD};
|
||||||
|
//!
|
||||||
|
//! let value = Base64Display::new(b"\0\x01\x02\x03", &STANDARD);
|
||||||
|
//! assert_eq!("base64: AAECAw==", format!("base64: {}", value));
|
||||||
|
//! ```
|
||||||
|
//!
|
||||||
|
//! # Panics
|
||||||
|
//!
|
||||||
|
//! If length calculations result in overflowing `usize`, a panic will result.
|
||||||
|
|
||||||
|
#![cfg_attr(feature = "cargo-clippy", allow(clippy::cast_lossless))]
|
||||||
|
#![deny(
|
||||||
|
missing_docs,
|
||||||
|
trivial_casts,
|
||||||
|
trivial_numeric_casts,
|
||||||
|
unused_extern_crates,
|
||||||
|
unused_import_braces,
|
||||||
|
unused_results,
|
||||||
|
variant_size_differences
|
||||||
|
)]
|
||||||
|
#![forbid(unsafe_code)]
|
||||||
|
// Allow globally until https://github.com/rust-lang/rust-clippy/issues/8768 is resolved.
|
||||||
|
// The desired state is to allow it only for the rstest_reuse import.
|
||||||
|
#![allow(clippy::single_component_path_imports)]
|
||||||
|
#![cfg_attr(not(any(feature = "std", test)), no_std)]
|
||||||
|
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
extern crate alloc;
|
||||||
|
|
||||||
|
// has to be included at top level because of the way rstest_reuse defines its macros
|
||||||
|
#[cfg(test)]
|
||||||
|
use rstest_reuse;
|
||||||
|
|
||||||
|
mod chunked_encoder;
|
||||||
|
pub mod display;
|
||||||
|
#[cfg(any(feature = "std", test))]
|
||||||
|
pub mod read;
|
||||||
|
#[cfg(any(feature = "std", test))]
|
||||||
|
pub mod write;
|
||||||
|
|
||||||
|
pub mod engine;
|
||||||
|
pub use engine::Engine;
|
||||||
|
|
||||||
|
pub mod alphabet;
|
||||||
|
|
||||||
|
mod encode;
|
||||||
|
#[allow(deprecated)]
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
pub use crate::encode::{encode, encode_engine, encode_engine_string};
|
||||||
|
#[allow(deprecated)]
|
||||||
|
pub use crate::encode::{encode_engine_slice, encoded_len, EncodeSliceError};
|
||||||
|
|
||||||
|
mod decode;
|
||||||
|
#[allow(deprecated)]
|
||||||
|
#[cfg(any(feature = "alloc", test))]
|
||||||
|
pub use crate::decode::{decode, decode_engine, decode_engine_vec};
|
||||||
|
#[allow(deprecated)]
|
||||||
|
pub use crate::decode::{decode_engine_slice, decoded_len_estimate, DecodeError, DecodeSliceError};
|
||||||
|
|
||||||
|
pub mod prelude;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests;
|
||||||
|
|
||||||
|
const PAD_BYTE: u8 = b'=';
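
To complement the allocation table in the crate docs above, here is a minimal sketch of the non-allocating `encode_slice` path (illustrative only; the input bytes are arbitrary):

```rust
use base64::{encoded_len, engine::general_purpose::STANDARD, Engine as _};

fn main() {
    let input = b"hello";

    // encoded_len(5, true) == Some(8), so an 8-byte stack buffer is exactly enough.
    assert_eq!(encoded_len(input.len(), true), Some(8));

    let mut out = [0u8; 8];
    let n = STANDARD.encode_slice(input, &mut out).unwrap();
    assert_eq!(&out[..n], b"aGVsbG8=");
}
```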

20  vendor/base64/src/prelude.rs  vendored  Normal file
@ -0,0 +1,20 @@
//! Preconfigured engines for common use cases.
//!
//! These are re-exports of `const` engines in [crate::engine::general_purpose], renamed with a `BASE64_`
//! prefix for those who prefer to `use` the entire path to a name.
//!
//! # Examples
//!
#![cfg_attr(feature = "alloc", doc = "```")]
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
//! use base64::prelude::{Engine as _, BASE64_STANDARD_NO_PAD};
//!
//! assert_eq!("c29tZSBieXRlcw", &BASE64_STANDARD_NO_PAD.encode(b"some bytes"));
//! ```

pub use crate::engine::Engine;

pub use crate::engine::general_purpose::STANDARD as BASE64_STANDARD;
pub use crate::engine::general_purpose::STANDARD_NO_PAD as BASE64_STANDARD_NO_PAD;
pub use crate::engine::general_purpose::URL_SAFE as BASE64_URL_SAFE;
pub use crate::engine::general_purpose::URL_SAFE_NO_PAD as BASE64_URL_SAFE_NO_PAD;
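
A small round-trip sketch with the URL-safe prelude export (illustrative only; the bytes mirror the example in the crate docs above):

```rust
use base64::prelude::{Engine as _, BASE64_URL_SAFE_NO_PAD};

fn main() {
    // The URL-safe, unpadded form is convenient for tokens that end up in URLs.
    let token = BASE64_URL_SAFE_NO_PAD.encode(b"\xFF\xEC\x20\x55");
    assert_eq!(token, "_-wgVQ");
    assert_eq!(BASE64_URL_SAFE_NO_PAD.decode(&token).unwrap(), b"\xFF\xEC\x20\x55");
}
```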

335  vendor/base64/src/read/decoder.rs  vendored  Normal file
@ -0,0 +1,335 @@
use crate::{engine::Engine, DecodeError, DecodeSliceError, PAD_BYTE};
|
||||||
|
use std::{cmp, fmt, io};
|
||||||
|
|
||||||
|
// This should be large, but it has to fit on the stack.
|
||||||
|
pub(crate) const BUF_SIZE: usize = 1024;
|
||||||
|
|
||||||
|
// 4 bytes of base64 data encode 3 bytes of raw data (modulo padding).
|
||||||
|
const BASE64_CHUNK_SIZE: usize = 4;
|
||||||
|
const DECODED_CHUNK_SIZE: usize = 3;
|
||||||
|
|
||||||
|
/// A `Read` implementation that decodes base64 data read from an underlying reader.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use std::io::Read;
|
||||||
|
/// use std::io::Cursor;
|
||||||
|
/// use base64::engine::general_purpose;
|
||||||
|
///
|
||||||
|
/// // use a cursor as the simplest possible `Read` -- in real code this is probably a file, etc.
|
||||||
|
/// let mut wrapped_reader = Cursor::new(b"YXNkZg==");
|
||||||
|
/// let mut decoder = base64::read::DecoderReader::new(
|
||||||
|
/// &mut wrapped_reader,
|
||||||
|
/// &general_purpose::STANDARD);
|
||||||
|
///
|
||||||
|
/// // handle errors as you normally would
|
||||||
|
/// let mut result = Vec::new();
|
||||||
|
/// decoder.read_to_end(&mut result).unwrap();
|
||||||
|
///
|
||||||
|
/// assert_eq!(b"asdf", &result[..]);
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
pub struct DecoderReader<'e, E: Engine, R: io::Read> {
|
||||||
|
engine: &'e E,
|
||||||
|
/// Where b64 data is read from
|
||||||
|
inner: R,
|
||||||
|
|
||||||
|
/// Holds b64 data read from the delegate reader.
|
||||||
|
b64_buffer: [u8; BUF_SIZE],
|
||||||
|
/// The start of the pending buffered data in `b64_buffer`.
|
||||||
|
b64_offset: usize,
|
||||||
|
/// The amount of buffered b64 data after `b64_offset` in `b64_len`.
|
||||||
|
b64_len: usize,
|
||||||
|
/// Since the caller may provide us with a buffer of size 1 or 2 that's too small to copy a
|
||||||
|
/// decoded chunk in to, we have to be able to hang on to a few decoded bytes.
|
||||||
|
/// Technically we only need to hold 2 bytes, but then we'd need a separate temporary buffer to
|
||||||
|
/// decode 3 bytes into and then juggle copying one byte into the provided read buf and the rest
|
||||||
|
/// into here, which seems like a lot of complexity for 1 extra byte of storage.
|
||||||
|
decoded_chunk_buffer: [u8; DECODED_CHUNK_SIZE],
|
||||||
|
/// Index of start of decoded data in `decoded_chunk_buffer`
|
||||||
|
decoded_offset: usize,
|
||||||
|
/// Length of decoded data after `decoded_offset` in `decoded_chunk_buffer`
|
||||||
|
decoded_len: usize,
|
||||||
|
/// Input length consumed so far.
|
||||||
|
/// Used to provide accurate offsets in errors
|
||||||
|
input_consumed_len: usize,
|
||||||
|
/// offset of previously seen padding, if any
|
||||||
|
padding_offset: Option<usize>,
|
||||||
|
}
|
||||||
|
|
||||||
|
// exclude b64_buffer as it's uselessly large
|
||||||
|
impl<'e, E: Engine, R: io::Read> fmt::Debug for DecoderReader<'e, E, R> {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
f.debug_struct("DecoderReader")
|
||||||
|
.field("b64_offset", &self.b64_offset)
|
||||||
|
.field("b64_len", &self.b64_len)
|
||||||
|
.field("decoded_chunk_buffer", &self.decoded_chunk_buffer)
|
||||||
|
.field("decoded_offset", &self.decoded_offset)
|
||||||
|
.field("decoded_len", &self.decoded_len)
|
||||||
|
.field("input_consumed_len", &self.input_consumed_len)
|
||||||
|
.field("padding_offset", &self.padding_offset)
|
||||||
|
.finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'e, E: Engine, R: io::Read> DecoderReader<'e, E, R> {
|
||||||
|
/// Create a new decoder that will read from the provided reader `r`.
|
||||||
|
pub fn new(reader: R, engine: &'e E) -> Self {
|
||||||
|
DecoderReader {
|
||||||
|
engine,
|
||||||
|
inner: reader,
|
||||||
|
b64_buffer: [0; BUF_SIZE],
|
||||||
|
b64_offset: 0,
|
||||||
|
b64_len: 0,
|
||||||
|
decoded_chunk_buffer: [0; DECODED_CHUNK_SIZE],
|
||||||
|
decoded_offset: 0,
|
||||||
|
decoded_len: 0,
|
||||||
|
input_consumed_len: 0,
|
||||||
|
padding_offset: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write as much as possible of the decoded buffer into the target buffer.
|
||||||
|
/// Must only be called when there is something to write and space to write into.
|
||||||
|
/// Returns a Result with the number of (decoded) bytes copied.
|
||||||
|
fn flush_decoded_buf(&mut self, buf: &mut [u8]) -> io::Result<usize> {
|
||||||
|
debug_assert!(self.decoded_len > 0);
|
||||||
|
debug_assert!(!buf.is_empty());
|
||||||
|
|
||||||
|
let copy_len = cmp::min(self.decoded_len, buf.len());
|
||||||
|
debug_assert!(copy_len > 0);
|
||||||
|
debug_assert!(copy_len <= self.decoded_len);
|
||||||
|
|
||||||
|
buf[..copy_len].copy_from_slice(
|
||||||
|
&self.decoded_chunk_buffer[self.decoded_offset..self.decoded_offset + copy_len],
|
||||||
|
);
|
||||||
|
|
||||||
|
self.decoded_offset += copy_len;
|
||||||
|
self.decoded_len -= copy_len;
|
||||||
|
|
||||||
|
debug_assert!(self.decoded_len < DECODED_CHUNK_SIZE);
|
||||||
|
|
        Ok(copy_len)
    }

    /// Read into the remaining space in the buffer after the current contents.
    /// Must only be called when there is space to read into in the buffer.
    /// Returns the number of bytes read.
    fn read_from_delegate(&mut self) -> io::Result<usize> {
        debug_assert!(self.b64_offset + self.b64_len < BUF_SIZE);

        let read = self
            .inner
            .read(&mut self.b64_buffer[self.b64_offset + self.b64_len..])?;
        self.b64_len += read;

        debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE);

        Ok(read)
    }

    /// Decode the requested number of bytes from the b64 buffer into the provided buffer. It's the
    /// caller's responsibility to choose the number of b64 bytes to decode correctly.
    ///
    /// Returns a Result with the number of decoded bytes written to `buf`.
    ///
    /// # Panics
    ///
    /// panics if `buf` is too small
    fn decode_to_buf(&mut self, b64_len_to_decode: usize, buf: &mut [u8]) -> io::Result<usize> {
        debug_assert!(self.b64_len >= b64_len_to_decode);
        debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE);
        debug_assert!(!buf.is_empty());

        let b64_to_decode = &self.b64_buffer[self.b64_offset..self.b64_offset + b64_len_to_decode];
        let decode_metadata = self
            .engine
            .internal_decode(
                b64_to_decode,
                buf,
                self.engine.internal_decoded_len_estimate(b64_len_to_decode),
            )
            .map_err(|dse| match dse {
                DecodeSliceError::DecodeError(de) => {
                    match de {
                        DecodeError::InvalidByte(offset, byte) => {
                            match (byte, self.padding_offset) {
                                // if there was padding in a previous block of decoding that happened to
                                // be correct, and we now find more padding that happens to be incorrect,
                                // to be consistent with non-reader decodes, record the error at the first
                                // padding
                                (PAD_BYTE, Some(first_pad_offset)) => {
                                    DecodeError::InvalidByte(first_pad_offset, PAD_BYTE)
                                }
                                _ => {
                                    DecodeError::InvalidByte(self.input_consumed_len + offset, byte)
                                }
                            }
                        }
                        DecodeError::InvalidLength(len) => {
                            DecodeError::InvalidLength(self.input_consumed_len + len)
                        }
                        DecodeError::InvalidLastSymbol(offset, byte) => {
                            DecodeError::InvalidLastSymbol(self.input_consumed_len + offset, byte)
                        }
                        DecodeError::InvalidPadding => DecodeError::InvalidPadding,
                    }
                }
                DecodeSliceError::OutputSliceTooSmall => {
                    unreachable!("buf is sized correctly in calling code")
                }
            })
            .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;

        if let Some(offset) = self.padding_offset {
            // we've already seen padding
            if decode_metadata.decoded_len > 0 {
                // we read more after already finding padding; report error at first padding byte
                return Err(io::Error::new(
                    io::ErrorKind::InvalidData,
                    DecodeError::InvalidByte(offset, PAD_BYTE),
                ));
            }
        }

        self.padding_offset = self.padding_offset.or(decode_metadata
            .padding_offset
            .map(|offset| self.input_consumed_len + offset));
        self.input_consumed_len += b64_len_to_decode;
        self.b64_offset += b64_len_to_decode;
        self.b64_len -= b64_len_to_decode;

        debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE);

        Ok(decode_metadata.decoded_len)
    }

    /// Unwraps this `DecoderReader`, returning the base reader which it reads base64 encoded
    /// input from.
    ///
    /// Because `DecoderReader` performs internal buffering, the state of the inner reader is
    /// unspecified. This function is mainly provided because the inner reader type may provide
    /// additional functionality beyond the `Read` implementation which may still be useful.
    pub fn into_inner(self) -> R {
        self.inner
    }
}

impl<'e, E: Engine, R: io::Read> io::Read for DecoderReader<'e, E, R> {
    /// Decode input from the wrapped reader.
    ///
    /// Under non-error circumstances, this returns `Ok` with the value being the number of bytes
    /// written in `buf`.
    ///
    /// Where possible, this function buffers base64 to minimize the number of read() calls to the
    /// delegate reader.
    ///
    /// # Errors
    ///
    /// Any errors emitted by the delegate reader are returned. Decoding errors due to invalid
    /// base64 are also possible, and will have `io::ErrorKind::InvalidData`.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        if buf.is_empty() {
            return Ok(0);
        }

        // offset == BUF_SIZE when we copied it all last time
        debug_assert!(self.b64_offset <= BUF_SIZE);
        debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE);
        debug_assert!(if self.b64_offset == BUF_SIZE {
            self.b64_len == 0
        } else {
            self.b64_len <= BUF_SIZE
        });

        debug_assert!(if self.decoded_len == 0 {
            // can be = when we were able to copy the complete chunk
            self.decoded_offset <= DECODED_CHUNK_SIZE
        } else {
            self.decoded_offset < DECODED_CHUNK_SIZE
        });

        // We shouldn't ever decode into decoded_buffer when we can't immediately write at least one
        // byte into the provided buf, so the effective length should only be 3 momentarily between
        // when we decode and when we copy into the target buffer.
        debug_assert!(self.decoded_len < DECODED_CHUNK_SIZE);
        debug_assert!(self.decoded_len + self.decoded_offset <= DECODED_CHUNK_SIZE);

        if self.decoded_len > 0 {
            // we have a few leftover decoded bytes; flush that rather than pull in more b64
            self.flush_decoded_buf(buf)
        } else {
            let mut at_eof = false;
            while self.b64_len < BASE64_CHUNK_SIZE {
                // Copy any bytes we have to the start of the buffer.
                self.b64_buffer
                    .copy_within(self.b64_offset..self.b64_offset + self.b64_len, 0);
                self.b64_offset = 0;

                // then fill in more data
                let read = self.read_from_delegate()?;
                if read == 0 {
                    // we never read into an empty buf, so 0 => we've hit EOF
                    at_eof = true;
                    break;
                }
            }

            if self.b64_len == 0 {
                debug_assert!(at_eof);
                // we must be at EOF, and we have no data left to decode
                return Ok(0);
            };

            debug_assert!(if at_eof {
                // if we are at eof, we may not have a complete chunk
                self.b64_len > 0
            } else {
                // otherwise, we must have at least one chunk
                self.b64_len >= BASE64_CHUNK_SIZE
            });

            debug_assert_eq!(0, self.decoded_len);

            if buf.len() < DECODED_CHUNK_SIZE {
                // caller requested an annoyingly short read
                // have to write to a tmp buf first to avoid double mutable borrow
                let mut decoded_chunk = [0_u8; DECODED_CHUNK_SIZE];
                // if we are at eof, could have less than BASE64_CHUNK_SIZE, in which case we have
                // to assume that these last few tokens are, in fact, valid (i.e. must be 2-4 b64
                // tokens, not 1, since 1 token can't decode to 1 byte).
                let to_decode = cmp::min(self.b64_len, BASE64_CHUNK_SIZE);

                let decoded = self.decode_to_buf(to_decode, &mut decoded_chunk[..])?;
                self.decoded_chunk_buffer[..decoded].copy_from_slice(&decoded_chunk[..decoded]);

                self.decoded_offset = 0;
                self.decoded_len = decoded;

                // can be less than 3 on last block due to padding
                debug_assert!(decoded <= 3);

                self.flush_decoded_buf(buf)
            } else {
                let b64_bytes_that_can_decode_into_buf = (buf.len() / DECODED_CHUNK_SIZE)
                    .checked_mul(BASE64_CHUNK_SIZE)
                    .expect("too many chunks");
                debug_assert!(b64_bytes_that_can_decode_into_buf >= BASE64_CHUNK_SIZE);

                let b64_bytes_available_to_decode = if at_eof {
                    self.b64_len
                } else {
                    // only use complete chunks
                    self.b64_len - self.b64_len % 4
                };

                let actual_decode_len = cmp::min(
                    b64_bytes_that_can_decode_into_buf,
                    b64_bytes_available_to_decode,
                );
                self.decode_to_buf(actual_decode_len, buf)
            }
        }
    }
}
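A minimal usage sketch of the `DecoderReader` implemented above (the input literal and the wrapping `main` are illustrative, not part of the vendored sources); invalid base64 would surface as `io::ErrorKind::InvalidData`, as documented in the `Read` impl:

```rust
use std::io::Read;

use base64::{engine::general_purpose::STANDARD, read::DecoderReader};

fn main() -> std::io::Result<()> {
    // Any io::Read that yields base64 text can be wrapped; a byte slice works for the sketch.
    let b64: &[u8] = b"aGVsbG8gd29ybGQ=";
    let mut decoder = DecoderReader::new(b64, &STANDARD);

    // Reads pull base64 from the inner reader in buffered chunks and decode on the fly.
    let mut decoded = Vec::new();
    decoder.read_to_end(&mut decoded)?;
    assert_eq!(&b"hello world"[..], decoded.as_slice());
    Ok(())
}
```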
vendor/base64/src/read/decoder_tests.rs (vendored, new file, 487 lines)
@@ -0,0 +1,487 @@
|
||||||
|
use std::{
|
||||||
|
cmp,
|
||||||
|
io::{self, Read as _},
|
||||||
|
iter,
|
||||||
|
};
|
||||||
|
|
||||||
|
use rand::{Rng as _, RngCore as _};
|
||||||
|
|
||||||
|
use super::decoder::{DecoderReader, BUF_SIZE};
|
||||||
|
use crate::{
|
||||||
|
alphabet,
|
||||||
|
engine::{general_purpose::STANDARD, Engine, GeneralPurpose},
|
||||||
|
tests::{random_alphabet, random_config, random_engine},
|
||||||
|
DecodeError, PAD_BYTE,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn simple() {
|
||||||
|
let tests: &[(&[u8], &[u8])] = &[
|
||||||
|
(&b"0"[..], &b"MA=="[..]),
|
||||||
|
(b"01", b"MDE="),
|
||||||
|
(b"012", b"MDEy"),
|
||||||
|
(b"0123", b"MDEyMw=="),
|
||||||
|
(b"01234", b"MDEyMzQ="),
|
||||||
|
(b"012345", b"MDEyMzQ1"),
|
||||||
|
(b"0123456", b"MDEyMzQ1Ng=="),
|
||||||
|
(b"01234567", b"MDEyMzQ1Njc="),
|
||||||
|
(b"012345678", b"MDEyMzQ1Njc4"),
|
||||||
|
(b"0123456789", b"MDEyMzQ1Njc4OQ=="),
|
||||||
|
][..];
|
||||||
|
|
||||||
|
for (text_expected, base64data) in tests.iter() {
|
||||||
|
// Read n bytes at a time.
|
||||||
|
for n in 1..base64data.len() + 1 {
|
||||||
|
let mut wrapped_reader = io::Cursor::new(base64data);
|
||||||
|
let mut decoder = DecoderReader::new(&mut wrapped_reader, &STANDARD);
|
||||||
|
|
||||||
|
// handle errors as you normally would
|
||||||
|
let mut text_got = Vec::new();
|
||||||
|
let mut buffer = vec![0u8; n];
|
||||||
|
while let Ok(read) = decoder.read(&mut buffer[..]) {
|
||||||
|
if read == 0 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
text_got.extend_from_slice(&buffer[..read]);
|
||||||
|
}
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
text_got,
|
||||||
|
*text_expected,
|
||||||
|
"\nGot: {}\nExpected: {}",
|
||||||
|
String::from_utf8_lossy(&text_got[..]),
|
||||||
|
String::from_utf8_lossy(text_expected)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Make sure we error out on trailing junk.
|
||||||
|
#[test]
|
||||||
|
fn trailing_junk() {
|
||||||
|
let tests: &[&[u8]] = &[&b"MDEyMzQ1Njc4*!@#$%^&"[..], b"MDEyMzQ1Njc4OQ== "][..];
|
||||||
|
|
||||||
|
for base64data in tests.iter() {
|
||||||
|
// Read n bytes at a time.
|
||||||
|
for n in 1..base64data.len() + 1 {
|
||||||
|
let mut wrapped_reader = io::Cursor::new(base64data);
|
||||||
|
let mut decoder = DecoderReader::new(&mut wrapped_reader, &STANDARD);
|
||||||
|
|
||||||
|
// handle errors as you normally would
|
||||||
|
let mut buffer = vec![0u8; n];
|
||||||
|
let mut saw_error = false;
|
||||||
|
loop {
|
||||||
|
match decoder.read(&mut buffer[..]) {
|
||||||
|
Err(_) => {
|
||||||
|
saw_error = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
Ok(0) => break,
|
||||||
|
Ok(_len) => (),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
assert!(saw_error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn handles_short_read_from_delegate() {
|
||||||
|
let mut rng = rand::thread_rng();
|
||||||
|
let mut bytes = Vec::new();
|
||||||
|
let mut b64 = String::new();
|
||||||
|
let mut decoded = Vec::new();
|
||||||
|
|
||||||
|
for _ in 0..10_000 {
|
||||||
|
bytes.clear();
|
||||||
|
b64.clear();
|
||||||
|
decoded.clear();
|
||||||
|
|
||||||
|
let size = rng.gen_range(0..(10 * BUF_SIZE));
|
||||||
|
bytes.extend(iter::repeat(0).take(size));
|
||||||
|
bytes.truncate(size);
|
||||||
|
rng.fill_bytes(&mut bytes[..size]);
|
||||||
|
assert_eq!(size, bytes.len());
|
||||||
|
|
||||||
|
let engine = random_engine(&mut rng);
|
||||||
|
engine.encode_string(&bytes[..], &mut b64);
|
||||||
|
|
||||||
|
let mut wrapped_reader = io::Cursor::new(b64.as_bytes());
|
||||||
|
let mut short_reader = RandomShortRead {
|
||||||
|
delegate: &mut wrapped_reader,
|
||||||
|
rng: &mut rng,
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut decoder = DecoderReader::new(&mut short_reader, &engine);
|
||||||
|
|
||||||
|
let decoded_len = decoder.read_to_end(&mut decoded).unwrap();
|
||||||
|
assert_eq!(size, decoded_len);
|
||||||
|
assert_eq!(&bytes[..], &decoded[..]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn read_in_short_increments() {
|
||||||
|
let mut rng = rand::thread_rng();
|
||||||
|
let mut bytes = Vec::new();
|
||||||
|
let mut b64 = String::new();
|
||||||
|
let mut decoded = Vec::new();
|
||||||
|
|
||||||
|
for _ in 0..10_000 {
|
||||||
|
bytes.clear();
|
||||||
|
b64.clear();
|
||||||
|
decoded.clear();
|
||||||
|
|
||||||
|
let size = rng.gen_range(0..(10 * BUF_SIZE));
|
||||||
|
bytes.extend(iter::repeat(0).take(size));
|
||||||
|
// leave room to play around with larger buffers
|
||||||
|
decoded.extend(iter::repeat(0).take(size * 3));
|
||||||
|
|
||||||
|
rng.fill_bytes(&mut bytes[..]);
|
||||||
|
assert_eq!(size, bytes.len());
|
||||||
|
|
||||||
|
let engine = random_engine(&mut rng);
|
||||||
|
|
||||||
|
engine.encode_string(&bytes[..], &mut b64);
|
||||||
|
|
||||||
|
let mut wrapped_reader = io::Cursor::new(&b64[..]);
|
||||||
|
let mut decoder = DecoderReader::new(&mut wrapped_reader, &engine);
|
||||||
|
|
||||||
|
consume_with_short_reads_and_validate(&mut rng, &bytes[..], &mut decoded, &mut decoder);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn read_in_short_increments_with_short_delegate_reads() {
|
||||||
|
let mut rng = rand::thread_rng();
|
||||||
|
let mut bytes = Vec::new();
|
||||||
|
let mut b64 = String::new();
|
||||||
|
let mut decoded = Vec::new();
|
||||||
|
|
||||||
|
for _ in 0..10_000 {
|
||||||
|
bytes.clear();
|
||||||
|
b64.clear();
|
||||||
|
decoded.clear();
|
||||||
|
|
||||||
|
let size = rng.gen_range(0..(10 * BUF_SIZE));
|
||||||
|
bytes.extend(iter::repeat(0).take(size));
|
||||||
|
// leave room to play around with larger buffers
|
||||||
|
decoded.extend(iter::repeat(0).take(size * 3));
|
||||||
|
|
||||||
|
rng.fill_bytes(&mut bytes[..]);
|
||||||
|
assert_eq!(size, bytes.len());
|
||||||
|
|
||||||
|
let engine = random_engine(&mut rng);
|
||||||
|
|
||||||
|
engine.encode_string(&bytes[..], &mut b64);
|
||||||
|
|
||||||
|
let mut base_reader = io::Cursor::new(&b64[..]);
|
||||||
|
let mut decoder = DecoderReader::new(&mut base_reader, &engine);
|
||||||
|
let mut short_reader = RandomShortRead {
|
||||||
|
delegate: &mut decoder,
|
||||||
|
rng: &mut rand::thread_rng(),
|
||||||
|
};
|
||||||
|
|
||||||
|
consume_with_short_reads_and_validate(
|
||||||
|
&mut rng,
|
||||||
|
&bytes[..],
|
||||||
|
&mut decoded,
|
||||||
|
&mut short_reader,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn reports_invalid_last_symbol_correctly() {
|
||||||
|
let mut rng = rand::thread_rng();
|
||||||
|
let mut bytes = Vec::new();
|
||||||
|
let mut b64 = String::new();
|
||||||
|
let mut b64_bytes = Vec::new();
|
||||||
|
let mut decoded = Vec::new();
|
||||||
|
let mut bulk_decoded = Vec::new();
|
||||||
|
|
||||||
|
for _ in 0..1_000 {
|
||||||
|
bytes.clear();
|
||||||
|
b64.clear();
|
||||||
|
b64_bytes.clear();
|
||||||
|
|
||||||
|
let size = rng.gen_range(1..(10 * BUF_SIZE));
|
||||||
|
bytes.extend(iter::repeat(0).take(size));
|
||||||
|
decoded.extend(iter::repeat(0).take(size));
|
||||||
|
rng.fill_bytes(&mut bytes[..]);
|
||||||
|
assert_eq!(size, bytes.len());
|
||||||
|
|
||||||
|
let config = random_config(&mut rng);
|
||||||
|
let alphabet = random_alphabet(&mut rng);
|
||||||
|
// changing padding will cause invalid padding errors when we twiddle the last byte
|
||||||
|
let engine = GeneralPurpose::new(alphabet, config.with_encode_padding(false));
|
||||||
|
engine.encode_string(&bytes[..], &mut b64);
|
||||||
|
b64_bytes.extend(b64.bytes());
|
||||||
|
assert_eq!(b64_bytes.len(), b64.len());
|
||||||
|
|
||||||
|
// change the last character to every possible symbol. Should behave the same as bulk
|
||||||
|
// decoding whether invalid or valid.
|
||||||
|
for &s1 in alphabet.symbols.iter() {
|
||||||
|
decoded.clear();
|
||||||
|
bulk_decoded.clear();
|
||||||
|
|
||||||
|
// replace the last
|
||||||
|
*b64_bytes.last_mut().unwrap() = s1;
|
||||||
|
let bulk_res = engine.decode_vec(&b64_bytes[..], &mut bulk_decoded);
|
||||||
|
|
||||||
|
let mut wrapped_reader = io::Cursor::new(&b64_bytes[..]);
|
||||||
|
let mut decoder = DecoderReader::new(&mut wrapped_reader, &engine);
|
||||||
|
|
||||||
|
let stream_res = decoder.read_to_end(&mut decoded).map(|_| ()).map_err(|e| {
|
||||||
|
e.into_inner()
|
||||||
|
.and_then(|e| e.downcast::<DecodeError>().ok())
|
||||||
|
});
|
||||||
|
|
||||||
|
assert_eq!(bulk_res.map_err(|e| Some(Box::new(e))), stream_res);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn reports_invalid_byte_correctly() {
|
||||||
|
let mut rng = rand::thread_rng();
|
||||||
|
let mut bytes = Vec::new();
|
||||||
|
let mut b64 = String::new();
|
||||||
|
let mut stream_decoded = Vec::new();
|
||||||
|
let mut bulk_decoded = Vec::new();
|
||||||
|
|
||||||
|
for _ in 0..10_000 {
|
||||||
|
bytes.clear();
|
||||||
|
b64.clear();
|
||||||
|
stream_decoded.clear();
|
||||||
|
bulk_decoded.clear();
|
||||||
|
|
||||||
|
let size = rng.gen_range(1..(10 * BUF_SIZE));
|
||||||
|
bytes.extend(iter::repeat(0).take(size));
|
||||||
|
rng.fill_bytes(&mut bytes[..size]);
|
||||||
|
assert_eq!(size, bytes.len());
|
||||||
|
|
||||||
|
let engine = GeneralPurpose::new(&alphabet::STANDARD, random_config(&mut rng));
|
||||||
|
|
||||||
|
engine.encode_string(&bytes[..], &mut b64);
|
||||||
|
// replace one byte, somewhere, with '*', which is invalid
|
||||||
|
let bad_byte_pos = rng.gen_range(0..b64.len());
|
||||||
|
let mut b64_bytes = b64.bytes().collect::<Vec<u8>>();
|
||||||
|
b64_bytes[bad_byte_pos] = b'*';
|
||||||
|
|
||||||
|
let mut wrapped_reader = io::Cursor::new(b64_bytes.clone());
|
||||||
|
let mut decoder = DecoderReader::new(&mut wrapped_reader, &engine);
|
||||||
|
|
||||||
|
let read_decode_err = decoder
|
||||||
|
.read_to_end(&mut stream_decoded)
|
||||||
|
.map_err(|e| {
|
||||||
|
let kind = e.kind();
|
||||||
|
let inner = e
|
||||||
|
.into_inner()
|
||||||
|
.and_then(|e| e.downcast::<DecodeError>().ok());
|
||||||
|
inner.map(|i| (*i, kind))
|
||||||
|
})
|
||||||
|
.err()
|
||||||
|
.and_then(|o| o);
|
||||||
|
|
||||||
|
let bulk_decode_err = engine.decode_vec(&b64_bytes[..], &mut bulk_decoded).err();
|
||||||
|
|
||||||
|
// it's tricky to predict where the invalid data's offset will be since if it's in the last
|
||||||
|
// chunk it will be reported at the first padding location because it's treated as invalid
|
||||||
|
// padding. So, we just check that it's the same as it is for decoding all at once.
|
||||||
|
assert_eq!(
|
||||||
|
bulk_decode_err.map(|e| (e, io::ErrorKind::InvalidData)),
|
||||||
|
read_decode_err
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn internal_padding_error_with_short_read_concatenated_texts_invalid_byte_error() {
|
||||||
|
let mut rng = rand::thread_rng();
|
||||||
|
let mut bytes = Vec::new();
|
||||||
|
let mut b64 = String::new();
|
||||||
|
let mut reader_decoded = Vec::new();
|
||||||
|
let mut bulk_decoded = Vec::new();
|
||||||
|
|
||||||
|
// encodes with padding, requires that padding be present so we don't get InvalidPadding
|
||||||
|
// just because padding is there at all
|
||||||
|
let engine = STANDARD;
|
||||||
|
|
||||||
|
for _ in 0..10_000 {
|
||||||
|
bytes.clear();
|
||||||
|
b64.clear();
|
||||||
|
reader_decoded.clear();
|
||||||
|
bulk_decoded.clear();
|
||||||
|
|
||||||
|
// at least 2 bytes so there can be a split point between bytes
|
||||||
|
let size = rng.gen_range(2..(10 * BUF_SIZE));
|
||||||
|
bytes.resize(size, 0);
|
||||||
|
rng.fill_bytes(&mut bytes[..size]);
|
||||||
|
|
||||||
|
// Concatenate two valid b64s, yielding padding in the middle.
|
||||||
|
// This avoids scenarios that are challenging to assert on, like random padding location
|
||||||
|
// that might be InvalidLastSymbol when decoded at certain buffer sizes but InvalidByte
|
||||||
|
// when done all at once.
|
||||||
|
let split = loop {
|
||||||
|
// find a split point that will produce padding on the first part
|
||||||
|
let s = rng.gen_range(1..size);
|
||||||
|
if s % 3 != 0 {
|
||||||
|
// short enough to need padding
|
||||||
|
break s;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
engine.encode_string(&bytes[..split], &mut b64);
|
||||||
|
assert!(b64.contains('='), "split: {}, b64: {}", split, b64);
|
||||||
|
let bad_byte_pos = b64.find('=').unwrap();
|
||||||
|
engine.encode_string(&bytes[split..], &mut b64);
|
||||||
|
let b64_bytes = b64.as_bytes();
|
||||||
|
|
||||||
|
// short read to make it plausible for padding to happen on a read boundary
|
||||||
|
let read_len = rng.gen_range(1..10);
|
||||||
|
let mut wrapped_reader = ShortRead {
|
||||||
|
max_read_len: read_len,
|
||||||
|
delegate: io::Cursor::new(&b64_bytes),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut decoder = DecoderReader::new(&mut wrapped_reader, &engine);
|
||||||
|
|
||||||
|
let read_decode_err = decoder
|
||||||
|
.read_to_end(&mut reader_decoded)
|
||||||
|
.map_err(|e| {
|
||||||
|
*e.into_inner()
|
||||||
|
.and_then(|e| e.downcast::<DecodeError>().ok())
|
||||||
|
.unwrap()
|
||||||
|
})
|
||||||
|
.unwrap_err();
|
||||||
|
|
||||||
|
let bulk_decode_err = engine.decode_vec(b64_bytes, &mut bulk_decoded).unwrap_err();
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
bulk_decode_err,
|
||||||
|
read_decode_err,
|
||||||
|
"read len: {}, bad byte pos: {}, b64: {}",
|
||||||
|
read_len,
|
||||||
|
bad_byte_pos,
|
||||||
|
std::str::from_utf8(b64_bytes).unwrap()
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
DecodeError::InvalidByte(
|
||||||
|
split / 3 * 4
|
||||||
|
+ match split % 3 {
|
||||||
|
1 => 2,
|
||||||
|
2 => 3,
|
||||||
|
_ => unreachable!(),
|
||||||
|
},
|
||||||
|
PAD_BYTE
|
||||||
|
),
|
||||||
|
read_decode_err
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn internal_padding_anywhere_error() {
|
||||||
|
let mut rng = rand::thread_rng();
|
||||||
|
let mut bytes = Vec::new();
|
||||||
|
let mut b64 = String::new();
|
||||||
|
let mut reader_decoded = Vec::new();
|
||||||
|
|
||||||
|
// encodes with padding, requires that padding be present so we don't get InvalidPadding
|
||||||
|
// just because padding is there at all
|
||||||
|
let engine = STANDARD;
|
||||||
|
|
||||||
|
for _ in 0..10_000 {
|
||||||
|
bytes.clear();
|
||||||
|
b64.clear();
|
||||||
|
reader_decoded.clear();
|
||||||
|
|
||||||
|
bytes.resize(10 * BUF_SIZE, 0);
|
||||||
|
rng.fill_bytes(&mut bytes[..]);
|
||||||
|
|
||||||
|
// Just shove a padding byte in there somewhere.
|
||||||
|
// The specific error to expect is challenging to predict precisely because it
|
||||||
|
// will vary based on the position of the padding in the quad and the read buffer
|
||||||
|
// length, but SOMETHING should go wrong.
|
||||||
|
|
||||||
|
engine.encode_string(&bytes[..], &mut b64);
|
||||||
|
let mut b64_bytes = b64.as_bytes().to_vec();
|
||||||
|
// put padding somewhere other than the last quad
|
||||||
|
b64_bytes[rng.gen_range(0..bytes.len() - 4)] = PAD_BYTE;
|
||||||
|
|
||||||
|
// short read to make it plausible for padding to happen on a read boundary
|
||||||
|
let read_len = rng.gen_range(1..10);
|
||||||
|
let mut wrapped_reader = ShortRead {
|
||||||
|
max_read_len: read_len,
|
||||||
|
delegate: io::Cursor::new(&b64_bytes),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut decoder = DecoderReader::new(&mut wrapped_reader, &engine);
|
||||||
|
|
||||||
|
let result = decoder.read_to_end(&mut reader_decoded);
|
||||||
|
assert!(result.is_err());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn consume_with_short_reads_and_validate<R: io::Read>(
|
||||||
|
rng: &mut rand::rngs::ThreadRng,
|
||||||
|
expected_bytes: &[u8],
|
||||||
|
decoded: &mut [u8],
|
||||||
|
short_reader: &mut R,
|
||||||
|
) {
|
||||||
|
let mut total_read = 0_usize;
|
||||||
|
loop {
|
||||||
|
assert!(
|
||||||
|
total_read <= expected_bytes.len(),
|
||||||
|
"tr {} size {}",
|
||||||
|
total_read,
|
||||||
|
expected_bytes.len()
|
||||||
|
);
|
||||||
|
if total_read == expected_bytes.len() {
|
||||||
|
assert_eq!(expected_bytes, &decoded[..total_read]);
|
||||||
|
// should be done
|
||||||
|
assert_eq!(0, short_reader.read(&mut *decoded).unwrap());
|
||||||
|
// didn't write anything
|
||||||
|
assert_eq!(expected_bytes, &decoded[..total_read]);
|
||||||
|
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
let decode_len = rng.gen_range(1..cmp::max(2, expected_bytes.len() * 2));
|
||||||
|
|
||||||
|
let read = short_reader
|
||||||
|
.read(&mut decoded[total_read..total_read + decode_len])
|
||||||
|
.unwrap();
|
||||||
|
total_read += read;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Limits how many bytes a reader will provide in each read call.
|
||||||
|
/// Useful for shaking out code that may work fine only with typical input sources that always fill
|
||||||
|
/// the buffer.
|
||||||
|
struct RandomShortRead<'a, 'b, R: io::Read, N: rand::Rng> {
|
||||||
|
delegate: &'b mut R,
|
||||||
|
rng: &'a mut N,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, 'b, R: io::Read, N: rand::Rng> io::Read for RandomShortRead<'a, 'b, R, N> {
|
||||||
|
fn read(&mut self, buf: &mut [u8]) -> Result<usize, io::Error> {
|
||||||
|
// avoid 0 since it means EOF for non-empty buffers
|
||||||
|
let effective_len = cmp::min(self.rng.gen_range(1..20), buf.len());
|
||||||
|
|
||||||
|
self.delegate.read(&mut buf[..effective_len])
|
||||||
|
}
|
||||||
|
}
|
||||||
|

/// Limits each `read()` to at most `max_read_len` bytes so that read boundaries land in
/// awkward places (e.g. inside a padded quad).
struct ShortRead<R: io::Read> {
    delegate: R,
    max_read_len: usize,
}

impl<R: io::Read> io::Read for ShortRead<R> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        // cap the read at max_read_len (min, not max) so the reads are actually short
        let len = self.max_read_len.min(buf.len());
        self.delegate.read(&mut buf[..len])
    }
}
|
||||||
vendor/base64/src/read/mod.rs (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
//! Implementations of `io::Read` to transparently decode base64.
mod decoder;
pub use self::decoder::DecoderReader;

#[cfg(test)]
mod decoder_tests;
vendor/base64/src/tests.rs (vendored, new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||||
|
use std::str;
|
||||||
|
|
||||||
|
use rand::{
|
||||||
|
distributions,
|
||||||
|
distributions::{Distribution as _, Uniform},
|
||||||
|
seq::SliceRandom,
|
||||||
|
Rng, SeedableRng,
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
alphabet,
|
||||||
|
encode::encoded_len,
|
||||||
|
engine::{
|
||||||
|
general_purpose::{GeneralPurpose, GeneralPurposeConfig},
|
||||||
|
Config, DecodePaddingMode, Engine,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn roundtrip_random_config_short() {
|
||||||
|
// exercise the slower encode/decode routines that operate on shorter buffers more vigorously
|
||||||
|
roundtrip_random_config(Uniform::new(0, 50), 10_000);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn roundtrip_random_config_long() {
|
||||||
|
roundtrip_random_config(Uniform::new(0, 1000), 10_000);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn assert_encode_sanity(encoded: &str, padded: bool, input_len: usize) {
|
||||||
|
let input_rem = input_len % 3;
|
||||||
|
let expected_padding_len = if input_rem > 0 {
|
||||||
|
if padded {
|
||||||
|
3 - input_rem
|
||||||
|
} else {
|
||||||
|
0
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
0
|
||||||
|
};
|
||||||
|
|
||||||
|
let expected_encoded_len = encoded_len(input_len, padded).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(expected_encoded_len, encoded.len());
|
||||||
|
|
||||||
|
let padding_len = encoded.chars().filter(|&c| c == '=').count();
|
||||||
|
|
||||||
|
assert_eq!(expected_padding_len, padding_len);
|
||||||
|
|
||||||
|
let _ = str::from_utf8(encoded.as_bytes()).expect("Base64 should be valid utf8");
|
||||||
|
}
|
||||||
|
|
||||||
|
fn roundtrip_random_config(input_len_range: Uniform<usize>, iterations: u32) {
|
||||||
|
let mut input_buf: Vec<u8> = Vec::new();
|
||||||
|
let mut encoded_buf = String::new();
|
||||||
|
let mut rng = rand::rngs::SmallRng::from_entropy();
|
||||||
|
|
||||||
|
for _ in 0..iterations {
|
||||||
|
input_buf.clear();
|
||||||
|
encoded_buf.clear();
|
||||||
|
|
||||||
|
let input_len = input_len_range.sample(&mut rng);
|
||||||
|
|
||||||
|
let engine = random_engine(&mut rng);
|
||||||
|
|
||||||
|
for _ in 0..input_len {
|
||||||
|
input_buf.push(rng.gen());
|
||||||
|
}
|
||||||
|
|
||||||
|
engine.encode_string(&input_buf, &mut encoded_buf);
|
||||||
|
|
||||||
|
assert_encode_sanity(&encoded_buf, engine.config().encode_padding(), input_len);
|
||||||
|
|
||||||
|
assert_eq!(input_buf, engine.decode(&encoded_buf).unwrap());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn random_config<R: Rng>(rng: &mut R) -> GeneralPurposeConfig {
|
||||||
|
let mode = rng.gen();
|
||||||
|
GeneralPurposeConfig::new()
|
||||||
|
.with_encode_padding(match mode {
|
||||||
|
DecodePaddingMode::Indifferent => rng.gen(),
|
||||||
|
DecodePaddingMode::RequireCanonical => true,
|
||||||
|
DecodePaddingMode::RequireNone => false,
|
||||||
|
})
|
||||||
|
.with_decode_padding_mode(mode)
|
||||||
|
.with_decode_allow_trailing_bits(rng.gen())
|
||||||
|
}
|
||||||
|
|
||||||
|
impl distributions::Distribution<DecodePaddingMode> for distributions::Standard {
|
||||||
|
fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> DecodePaddingMode {
|
||||||
|
match rng.gen_range(0..=2) {
|
||||||
|
0 => DecodePaddingMode::Indifferent,
|
||||||
|
1 => DecodePaddingMode::RequireCanonical,
|
||||||
|
_ => DecodePaddingMode::RequireNone,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn random_alphabet<R: Rng>(rng: &mut R) -> &'static alphabet::Alphabet {
|
||||||
|
ALPHABETS.choose(rng).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn random_engine<R: Rng>(rng: &mut R) -> GeneralPurpose {
|
||||||
|
let alphabet = random_alphabet(rng);
|
||||||
|
let config = random_config(rng);
|
||||||
|
GeneralPurpose::new(alphabet, config)
|
||||||
|
}
|
||||||
|
|
||||||
|
const ALPHABETS: &[alphabet::Alphabet] = &[
|
||||||
|
alphabet::URL_SAFE,
|
||||||
|
alphabet::STANDARD,
|
||||||
|
alphabet::CRYPT,
|
||||||
|
alphabet::BCRYPT,
|
||||||
|
alphabet::IMAP_MUTF7,
|
||||||
|
alphabet::BIN_HEX,
|
||||||
|
];
|
||||||
vendor/base64/src/write/encoder.rs (vendored, new file, 407 lines)
@@ -0,0 +1,407 @@
|
||||||
|
use crate::engine::Engine;
|
||||||
|
use std::{
|
||||||
|
cmp, fmt, io,
|
||||||
|
io::{ErrorKind, Result},
|
||||||
|
};
|
||||||
|
|
||||||
|
pub(crate) const BUF_SIZE: usize = 1024;
|
||||||
|
/// The most bytes whose encoding will fit in `BUF_SIZE`
|
||||||
|
const MAX_INPUT_LEN: usize = BUF_SIZE / 4 * 3;
|
||||||
|
// 3 bytes of input = 4 bytes of base64, always (because we don't allow line wrapping)
|
||||||
|
const MIN_ENCODE_CHUNK_SIZE: usize = 3;
|
||||||
|
|
||||||
|
/// A `Write` implementation that base64 encodes data before delegating to the wrapped writer.
|
||||||
|
///
|
||||||
|
/// Because base64 has special handling for the end of the input data (padding, etc), there's a
|
||||||
|
/// `finish()` method on this type that encodes any leftover input bytes and adds padding if
|
||||||
|
/// appropriate. It's called automatically when deallocated (see the `Drop` implementation), but
|
||||||
|
/// any error that occurs when invoking the underlying writer will be suppressed. If you want to
|
||||||
|
/// handle such errors, call `finish()` yourself.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use std::io::Write;
|
||||||
|
/// use base64::engine::general_purpose;
|
||||||
|
///
|
||||||
|
/// // use a vec as the simplest possible `Write` -- in real code this is probably a file, etc.
|
||||||
|
/// let mut enc = base64::write::EncoderWriter::new(Vec::new(), &general_purpose::STANDARD);
|
||||||
|
///
|
||||||
|
/// // handle errors as you normally would
|
||||||
|
/// enc.write_all(b"asdf").unwrap();
|
||||||
|
///
|
||||||
|
/// // could leave this out to be called by Drop, if you don't care
|
||||||
|
/// // about handling errors or getting the delegate writer back
|
||||||
|
/// let delegate = enc.finish().unwrap();
|
||||||
|
///
|
||||||
|
/// // base64 was written to the writer
|
||||||
|
/// assert_eq!(b"YXNkZg==", &delegate[..]);
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// # Panics
|
||||||
|
///
|
||||||
|
/// Calling `write()` (or related methods) or `finish()` after `finish()` has completed without
|
||||||
|
/// error is invalid and will panic.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
///
|
||||||
|
/// Base64 encoding itself does not generate errors, but errors from the wrapped writer will be
|
||||||
|
/// returned as per the contract of `Write`.
|
||||||
|
///
|
||||||
|
/// # Performance
|
||||||
|
///
|
||||||
|
/// It has some minor performance loss compared to encoding slices (a couple percent).
|
||||||
|
/// It does not do any heap allocation.
|
||||||
|
///
|
||||||
|
/// # Limitations
|
||||||
|
///
|
||||||
|
/// Owing to the specification of the `write` and `flush` methods on the `Write` trait and their
|
||||||
|
/// implications for a buffering implementation, these methods may not behave as expected. In
|
||||||
|
/// particular, calling `write_all` on this interface may fail with `io::ErrorKind::WriteZero`.
|
||||||
|
/// See the documentation of the `Write` trait implementation for further details.
|
||||||
|
pub struct EncoderWriter<'e, E: Engine, W: io::Write> {
|
||||||
|
engine: &'e E,
|
||||||
|
/// Where encoded data is written to. It's an Option as it's None immediately before Drop is
|
||||||
|
/// called so that finish() can return the underlying writer. None implies that finish() has
|
||||||
|
/// been called successfully.
|
||||||
|
delegate: Option<W>,
|
||||||
|
/// Holds a partial chunk, if any, after the last `write()`, so that we may then fill the chunk
|
||||||
|
/// with the next `write()`, encode it, then proceed with the rest of the input normally.
|
||||||
|
extra_input: [u8; MIN_ENCODE_CHUNK_SIZE],
|
||||||
|
/// How much of `extra` is occupied, in `[0, MIN_ENCODE_CHUNK_SIZE]`.
|
||||||
|
extra_input_occupied_len: usize,
|
||||||
|
/// Buffer to encode into. May hold leftover encoded bytes from a previous write call that the underlying writer
|
||||||
|
/// did not write last time.
|
||||||
|
output: [u8; BUF_SIZE],
|
||||||
|
/// How much of `output` is occupied with encoded data that couldn't be written last time
|
||||||
|
output_occupied_len: usize,
|
||||||
|
/// panic safety: don't write again in destructor if writer panicked while we were writing to it
|
||||||
|
panicked: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'e, E: Engine, W: io::Write> fmt::Debug for EncoderWriter<'e, E, W> {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"extra_input: {:?} extra_input_occupied_len:{:?} output[..5]: {:?} output_occupied_len: {:?}",
|
||||||
|
self.extra_input,
|
||||||
|
self.extra_input_occupied_len,
|
||||||
|
&self.output[0..5],
|
||||||
|
self.output_occupied_len
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'e, E: Engine, W: io::Write> EncoderWriter<'e, E, W> {
|
||||||
|
/// Create a new encoder that will write to the provided delegate writer.
|
||||||
|
pub fn new(delegate: W, engine: &'e E) -> EncoderWriter<'e, E, W> {
|
||||||
|
EncoderWriter {
|
||||||
|
engine,
|
||||||
|
delegate: Some(delegate),
|
||||||
|
extra_input: [0u8; MIN_ENCODE_CHUNK_SIZE],
|
||||||
|
extra_input_occupied_len: 0,
|
||||||
|
output: [0u8; BUF_SIZE],
|
||||||
|
output_occupied_len: 0,
|
||||||
|
panicked: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Encode all remaining buffered data and write it, including any trailing incomplete input
|
||||||
|
/// triples and associated padding.
|
||||||
|
///
|
||||||
|
/// Once this succeeds, no further writes or calls to this method are allowed.
|
||||||
|
///
|
||||||
|
/// This may write to the delegate writer multiple times if the delegate writer does not accept
|
||||||
|
/// all input provided to its `write` each invocation.
|
||||||
|
///
|
||||||
|
/// If you don't care about error handling, it is not necessary to call this function, as the
|
||||||
|
/// equivalent finalization is done by the Drop impl.
|
||||||
|
///
|
||||||
|
/// Returns the writer that this was constructed around.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
///
|
||||||
|
/// The first error that is not of `ErrorKind::Interrupted` will be returned.
|
||||||
|
pub fn finish(&mut self) -> Result<W> {
|
||||||
|
// If we could consume self in finish(), we wouldn't have to worry about this case, but
|
||||||
|
// finish() is retryable in the face of I/O errors, so we can't consume here.
|
||||||
|
if self.delegate.is_none() {
|
||||||
|
panic!("Encoder has already had finish() called");
|
||||||
|
};
|
||||||
|
|
||||||
|
self.write_final_leftovers()?;
|
||||||
|
|
||||||
|
let writer = self.delegate.take().expect("Writer must be present");
|
||||||
|
|
||||||
|
Ok(writer)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write any remaining buffered data to the delegate writer.
|
||||||
|
fn write_final_leftovers(&mut self) -> Result<()> {
|
||||||
|
if self.delegate.is_none() {
|
||||||
|
// finish() has already successfully called this, and we are now in drop() with a None
|
||||||
|
// writer, so just no-op
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
self.write_all_encoded_output()?;
|
||||||
|
|
||||||
|
if self.extra_input_occupied_len > 0 {
|
||||||
|
let encoded_len = self
|
||||||
|
.engine
|
||||||
|
.encode_slice(
|
||||||
|
&self.extra_input[..self.extra_input_occupied_len],
|
||||||
|
&mut self.output[..],
|
||||||
|
)
|
||||||
|
.expect("buffer is large enough");
|
||||||
|
|
||||||
|
self.output_occupied_len = encoded_len;
|
||||||
|
|
||||||
|
self.write_all_encoded_output()?;
|
||||||
|
|
||||||
|
// write succeeded, do not write the encoding of extra again if finish() is retried
|
||||||
|
self.extra_input_occupied_len = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write as much of the encoded output to the delegate writer as it will accept, and store the
|
||||||
|
/// leftovers to be attempted at the next write() call. Updates `self.output_occupied_len`.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
///
|
||||||
|
/// Errors from the delegate writer are returned. In the case of an error,
|
||||||
|
/// `self.output_occupied_len` will not be updated, as errors from `write` are specified to mean
|
||||||
|
/// that no write took place.
|
||||||
|
fn write_to_delegate(&mut self, current_output_len: usize) -> Result<()> {
|
||||||
|
self.panicked = true;
|
||||||
|
let res = self
|
||||||
|
.delegate
|
||||||
|
.as_mut()
|
||||||
|
.expect("Writer must be present")
|
||||||
|
.write(&self.output[..current_output_len]);
|
||||||
|
self.panicked = false;
|
||||||
|
|
||||||
|
res.map(|consumed| {
|
||||||
|
debug_assert!(consumed <= current_output_len);
|
||||||
|
|
||||||
|
if consumed < current_output_len {
|
||||||
|
self.output_occupied_len = current_output_len.checked_sub(consumed).unwrap();
|
||||||
|
// If we're blocking on I/O, the minor inefficiency of copying bytes to the
|
||||||
|
// start of the buffer is the least of our concerns...
|
||||||
|
// TODO Rotate moves more than we need to; copy_within now stable.
|
||||||
|
self.output.rotate_left(consumed);
|
||||||
|
} else {
|
||||||
|
self.output_occupied_len = 0;
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write all buffered encoded output. If this returns `Ok`, `self.output_occupied_len` is `0`.
|
||||||
|
///
|
||||||
|
/// This is basically write_all for the remaining buffered data but without the undesirable
|
||||||
|
/// abort-on-`Ok(0)` behavior.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
///
|
||||||
|
/// Any error emitted by the delegate writer abort the write loop and is returned, unless it's
|
||||||
|
/// `Interrupted`, in which case the error is ignored and writes will continue.
|
||||||
|
fn write_all_encoded_output(&mut self) -> Result<()> {
|
||||||
|
while self.output_occupied_len > 0 {
|
||||||
|
let remaining_len = self.output_occupied_len;
|
||||||
|
match self.write_to_delegate(remaining_len) {
|
||||||
|
// try again on interrupts ala write_all
|
||||||
|
Err(ref e) if e.kind() == ErrorKind::Interrupted => {}
|
||||||
|
// other errors return
|
||||||
|
Err(e) => return Err(e),
|
||||||
|
// success no-ops because remaining length is already updated
|
||||||
|
Ok(_) => {}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
debug_assert_eq!(0, self.output_occupied_len);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Unwraps this `EncoderWriter`, returning the base writer it writes base64 encoded output
|
||||||
|
/// to.
|
||||||
|
///
|
||||||
|
/// Normally this method should not be needed, since `finish()` returns the inner writer if
|
||||||
|
/// it completes successfully. That will also ensure all data has been flushed, which the
|
||||||
|
/// `into_inner()` function does *not* do.
|
||||||
|
///
|
||||||
|
/// Calling this method after `finish()` has completed successfully will panic, since the
|
||||||
|
/// writer has already been returned.
|
||||||
|
///
|
||||||
|
/// This method may be useful if the writer implements additional APIs beyond the `Write`
|
||||||
|
/// trait. Note that the inner writer might be in an error state or have an incomplete
|
||||||
|
/// base64 string written to it.
|
||||||
|
pub fn into_inner(mut self) -> W {
|
||||||
|
self.delegate
|
||||||
|
.take()
|
||||||
|
.expect("Encoder has already had finish() called")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'e, E: Engine, W: io::Write> io::Write for EncoderWriter<'e, E, W> {
|
||||||
|
/// Encode input and then write to the delegate writer.
|
||||||
|
///
|
||||||
|
/// Under non-error circumstances, this returns `Ok` with the value being the number of bytes
|
||||||
|
/// of `input` consumed. The value may be `0`, which interacts poorly with `write_all`, which
|
||||||
|
/// interprets `Ok(0)` as an error, despite it being allowed by the contract of `write`. See
|
||||||
|
/// <https://github.com/rust-lang/rust/issues/56889> for more on that.
|
||||||
|
///
|
||||||
|
/// If the previous call to `write` provided more (encoded) data than the delegate writer could
|
||||||
|
/// accept in a single call to its `write`, the remaining data is buffered. As long as buffered
|
||||||
|
/// data is present, subsequent calls to `write` will try to write the remaining buffered data
|
||||||
|
/// to the delegate and return either `Ok(0)` -- and therefore not consume any of `input` -- or
|
||||||
|
/// an error.
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
///
|
||||||
|
/// Any errors emitted by the delegate writer are returned.
|
||||||
|
fn write(&mut self, input: &[u8]) -> Result<usize> {
|
||||||
|
if self.delegate.is_none() {
|
||||||
|
panic!("Cannot write more after calling finish()");
|
||||||
|
}
|
||||||
|
|
||||||
|
if input.is_empty() {
|
||||||
|
return Ok(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
// The contract of `Write::write` places some constraints on this implementation:
|
||||||
|
// - a call to `write()` represents at most one call to a wrapped `Write`, so we can't
|
||||||
|
// iterate over the input and encode multiple chunks.
|
||||||
|
// - Errors mean that "no bytes were written to this writer", so we need to reset the
|
||||||
|
// internal state to what it was before the error occurred
|
||||||
|
|
||||||
|
// before reading any input, write any leftover encoded output from last time
|
||||||
|
if self.output_occupied_len > 0 {
|
||||||
|
let current_len = self.output_occupied_len;
|
||||||
|
return self
|
||||||
|
.write_to_delegate(current_len)
|
||||||
|
// did not read any input
|
||||||
|
.map(|_| 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
debug_assert_eq!(0, self.output_occupied_len);
|
||||||
|
|
||||||
|
// how many bytes, if any, were read into `extra` to create a triple to encode
|
||||||
|
let mut extra_input_read_len = 0;
|
||||||
|
let mut input = input;
|
||||||
|
|
||||||
|
let orig_extra_len = self.extra_input_occupied_len;
|
||||||
|
|
||||||
|
let mut encoded_size = 0;
|
||||||
|
// always a multiple of MIN_ENCODE_CHUNK_SIZE
|
||||||
|
let mut max_input_len = MAX_INPUT_LEN;
|
||||||
|
|
||||||
|
// process leftover un-encoded input from last write
|
||||||
|
if self.extra_input_occupied_len > 0 {
|
||||||
|
debug_assert!(self.extra_input_occupied_len < 3);
|
||||||
|
if input.len() + self.extra_input_occupied_len >= MIN_ENCODE_CHUNK_SIZE {
|
||||||
|
// Fill up `extra`, encode that into `output`, and consume as much of the rest of
|
||||||
|
// `input` as possible.
|
||||||
|
// We could write just the encoding of `extra` by itself but then we'd have to
|
||||||
|
// return after writing only 4 bytes, which is inefficient if the underlying writer
|
||||||
|
// would make a syscall.
|
||||||
|
extra_input_read_len = MIN_ENCODE_CHUNK_SIZE - self.extra_input_occupied_len;
|
||||||
|
debug_assert!(extra_input_read_len > 0);
|
||||||
|
// overwrite only bytes that weren't already used. If we need to rollback extra_len
|
||||||
|
// (when the subsequent write errors), the old leading bytes will still be there.
|
||||||
|
self.extra_input[self.extra_input_occupied_len..MIN_ENCODE_CHUNK_SIZE]
|
||||||
|
.copy_from_slice(&input[0..extra_input_read_len]);
|
||||||
|
|
||||||
|
let len = self.engine.internal_encode(
|
||||||
|
&self.extra_input[0..MIN_ENCODE_CHUNK_SIZE],
|
||||||
|
&mut self.output[..],
|
||||||
|
);
|
||||||
|
debug_assert_eq!(4, len);
|
||||||
|
|
||||||
|
input = &input[extra_input_read_len..];
|
||||||
|
|
||||||
|
// consider extra to be used up, since we encoded it
|
||||||
|
self.extra_input_occupied_len = 0;
|
||||||
|
// don't clobber where we just encoded to
|
||||||
|
encoded_size = 4;
|
||||||
|
// and don't read more than can be encoded
|
||||||
|
max_input_len = MAX_INPUT_LEN - MIN_ENCODE_CHUNK_SIZE;
|
||||||
|
|
||||||
|
// fall through to normal encoding
|
||||||
|
} else {
|
||||||
|
// `extra` and `input` are non empty, but `|extra| + |input| < 3`, so there must be
|
||||||
|
// 1 byte in each.
|
||||||
|
debug_assert_eq!(1, input.len());
|
||||||
|
debug_assert_eq!(1, self.extra_input_occupied_len);
|
||||||
|
|
||||||
|
self.extra_input[self.extra_input_occupied_len] = input[0];
|
||||||
|
self.extra_input_occupied_len += 1;
|
||||||
|
return Ok(1);
|
||||||
|
};
|
||||||
|
} else if input.len() < MIN_ENCODE_CHUNK_SIZE {
|
||||||
|
// `extra` is empty, and `input` fits inside it
|
||||||
|
self.extra_input[0..input.len()].copy_from_slice(input);
|
||||||
|
self.extra_input_occupied_len = input.len();
|
||||||
|
return Ok(input.len());
|
||||||
|
};
|
||||||
|
|
||||||
|
// either 0 or 1 complete chunks encoded from extra
|
||||||
|
debug_assert!(encoded_size == 0 || encoded_size == 4);
|
||||||
|
debug_assert!(
|
||||||
|
// didn't encode extra input
|
||||||
|
MAX_INPUT_LEN == max_input_len
|
||||||
|
// encoded one triple
|
||||||
|
|| MAX_INPUT_LEN == max_input_len + MIN_ENCODE_CHUNK_SIZE
|
||||||
|
);
|
||||||
|
|
||||||
|
// encode complete triples only
|
||||||
|
let input_complete_chunks_len = input.len() - (input.len() % MIN_ENCODE_CHUNK_SIZE);
|
||||||
|
let input_chunks_to_encode_len = cmp::min(input_complete_chunks_len, max_input_len);
|
||||||
|
debug_assert_eq!(0, max_input_len % MIN_ENCODE_CHUNK_SIZE);
|
||||||
|
debug_assert_eq!(0, input_chunks_to_encode_len % MIN_ENCODE_CHUNK_SIZE);
|
||||||
|
|
||||||
|
encoded_size += self.engine.internal_encode(
|
||||||
|
&input[..(input_chunks_to_encode_len)],
|
||||||
|
&mut self.output[encoded_size..],
|
||||||
|
);
|
||||||
|
|
||||||
|
// not updating `self.output_occupied_len` here because if the below write fails, it should
|
||||||
|
// "never take place" -- the buffer contents we encoded are ignored and perhaps retried
|
||||||
|
// later, if the consumer chooses.
|
||||||
|
|
||||||
|
self.write_to_delegate(encoded_size)
|
||||||
|
// no matter whether we wrote the full encoded buffer or not, we consumed the same
|
||||||
|
// input
|
||||||
|
.map(|_| extra_input_read_len + input_chunks_to_encode_len)
|
||||||
|
.map_err(|e| {
|
||||||
|
// in case we filled and encoded `extra`, reset extra_len
|
||||||
|
self.extra_input_occupied_len = orig_extra_len;
|
||||||
|
|
||||||
|
e
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Because this is usually treated as OK to call multiple times, it will *not* flush any
|
||||||
|
/// incomplete chunks of input or write padding.
|
||||||
|
/// # Errors
|
||||||
|
///
|
||||||
|
/// The first error that is not of [`ErrorKind::Interrupted`] will be returned.
|
||||||
|
fn flush(&mut self) -> Result<()> {
|
||||||
|
self.write_all_encoded_output()?;
|
||||||
|
self.delegate
|
||||||
|
.as_mut()
|
||||||
|
.expect("Writer must be present")
|
||||||
|
.flush()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'e, E: Engine, W: io::Write> Drop for EncoderWriter<'e, E, W> {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
if !self.panicked {
|
||||||
|
// like `BufWriter`, ignore errors during drop
|
||||||
|
let _ = self.write_final_leftovers();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
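Given the `finish()`/`Drop` behavior documented above, callers that need to observe write errors should call `finish()` explicitly rather than relying on `Drop`. A minimal sketch (illustrative literal and `main`, not part of the vendored sources):

```rust
use std::io::Write;

use base64::{engine::general_purpose::STANDARD, write::EncoderWriter};

fn main() -> std::io::Result<()> {
    // A Vec is the simplest Write target; a file or socket would work the same way.
    let mut enc = EncoderWriter::new(Vec::new(), &STANDARD);
    enc.write_all(b"hello world")?;

    // finish() flushes the trailing partial triple plus padding and hands back the
    // wrapped writer; errors here would be silently dropped if left to the Drop impl.
    let encoded = enc.finish()?;
    assert_eq!(&b"aGVsbG8gd29ybGQ="[..], encoded.as_slice());
    Ok(())
}
```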
vendor/base64/src/write/encoder_string_writer.rs (vendored, new file, 207 lines)
@@ -0,0 +1,207 @@
|
||||||
|
use super::encoder::EncoderWriter;
|
||||||
|
use crate::engine::Engine;
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
/// A `Write` implementation that base64-encodes data using the provided config and accumulates the
|
||||||
|
/// resulting base64 utf8 `&str` in a [StrConsumer] implementation (typically `String`), which is
|
||||||
|
/// then exposed via `into_inner()`.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// Buffer base64 in a new String:
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use std::io::Write;
|
||||||
|
/// use base64::engine::general_purpose;
|
||||||
|
///
|
||||||
|
/// let mut enc = base64::write::EncoderStringWriter::new(&general_purpose::STANDARD);
|
||||||
|
///
|
||||||
|
/// enc.write_all(b"asdf").unwrap();
|
||||||
|
///
|
||||||
|
/// // get the resulting String
|
||||||
|
/// let b64_string = enc.into_inner();
|
||||||
|
///
|
||||||
|
/// assert_eq!("YXNkZg==", &b64_string);
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// Or, append to an existing `String`, which implements `StrConsumer`:
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use std::io::Write;
|
||||||
|
/// use base64::engine::general_purpose;
|
||||||
|
///
|
||||||
|
/// let mut buf = String::from("base64: ");
|
||||||
|
///
|
||||||
|
/// let mut enc = base64::write::EncoderStringWriter::from_consumer(
|
||||||
|
/// &mut buf,
|
||||||
|
/// &general_purpose::STANDARD);
|
||||||
|
///
|
||||||
|
/// enc.write_all(b"asdf").unwrap();
|
||||||
|
///
|
||||||
|
/// // release the &mut reference on buf
|
||||||
|
/// let _ = enc.into_inner();
|
||||||
|
///
|
||||||
|
/// assert_eq!("base64: YXNkZg==", &buf);
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// # Performance
|
||||||
|
///
|
||||||
|
/// Because it has to validate that the base64 is UTF-8, it is about 80% as fast as writing plain
|
||||||
|
/// bytes to a `io::Write`.
|
||||||
|
pub struct EncoderStringWriter<'e, E: Engine, S: StrConsumer> {
|
||||||
|
encoder: EncoderWriter<'e, E, Utf8SingleCodeUnitWriter<S>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'e, E: Engine, S: StrConsumer> EncoderStringWriter<'e, E, S> {
|
||||||
|
/// Create a EncoderStringWriter that will append to the provided `StrConsumer`.
|
||||||
|
pub fn from_consumer(str_consumer: S, engine: &'e E) -> Self {
|
||||||
|
EncoderStringWriter {
|
||||||
|
encoder: EncoderWriter::new(Utf8SingleCodeUnitWriter { str_consumer }, engine),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Encode all remaining buffered data, including any trailing incomplete input triples and
|
||||||
|
/// associated padding.
|
||||||
|
///
|
||||||
|
/// Returns the base64-encoded form of the accumulated written data.
|
||||||
|
pub fn into_inner(mut self) -> S {
|
||||||
|
self.encoder
|
||||||
|
.finish()
|
||||||
|
.expect("Writing to a consumer should never fail")
|
||||||
|
.str_consumer
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'e, E: Engine> EncoderStringWriter<'e, E, String> {
|
||||||
|
/// Create a EncoderStringWriter that will encode into a new `String` with the provided config.
|
||||||
|
pub fn new(engine: &'e E) -> Self {
|
||||||
|
EncoderStringWriter::from_consumer(String::new(), engine)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'e, E: Engine, S: StrConsumer> io::Write for EncoderStringWriter<'e, E, S> {
|
||||||
|
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
|
||||||
|
self.encoder.write(buf)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn flush(&mut self) -> io::Result<()> {
|
||||||
|
self.encoder.flush()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An abstraction around consuming `str`s produced by base64 encoding.
|
||||||
|
pub trait StrConsumer {
|
||||||
|
/// Consume the base64 encoded data in `buf`
|
||||||
|
fn consume(&mut self, buf: &str);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// As for io::Write, `StrConsumer` is implemented automatically for `&mut S`.
|
||||||
|
impl<S: StrConsumer + ?Sized> StrConsumer for &mut S {
|
||||||
|
fn consume(&mut self, buf: &str) {
|
||||||
|
(**self).consume(buf);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Pushes the str onto the end of the String
|
||||||
|
impl StrConsumer for String {
|
||||||
|
fn consume(&mut self, buf: &str) {
|
||||||
|
self.push_str(buf);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A `Write` that only can handle bytes that are valid single-byte UTF-8 code units.
|
||||||
|
///
|
||||||
|
/// This is safe because we only use it when writing base64, which is always valid UTF-8.
|
||||||
|
struct Utf8SingleCodeUnitWriter<S: StrConsumer> {
|
||||||
|
str_consumer: S,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<S: StrConsumer> io::Write for Utf8SingleCodeUnitWriter<S> {
|
||||||
|
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
|
||||||
|
// Because we expect all input to be valid utf-8 individual bytes, we can encode any buffer
|
||||||
|
// length
|
||||||
|
let s = std::str::from_utf8(buf).expect("Input must be valid UTF-8");
|
||||||
|
|
||||||
|
self.str_consumer.consume(s);
|
||||||
|
|
||||||
|
Ok(buf.len())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn flush(&mut self) -> io::Result<()> {
|
||||||
|
// no op
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use crate::{
|
||||||
|
engine::Engine, tests::random_engine, write::encoder_string_writer::EncoderStringWriter,
|
||||||
|
};
|
||||||
|
use rand::Rng;
|
||||||
|
use std::cmp;
|
||||||
|
use std::io::Write;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn every_possible_split_of_input() {
|
||||||
|
let mut rng = rand::thread_rng();
|
||||||
|
let mut orig_data = Vec::<u8>::new();
|
||||||
|
let mut normal_encoded = String::new();
|
||||||
|
|
||||||
|
let size = 5_000;
|
||||||
|
|
||||||
|
for i in 0..size {
|
||||||
|
orig_data.clear();
|
||||||
|
normal_encoded.clear();
|
||||||
|
|
||||||
|
orig_data.resize(size, 0);
|
||||||
|
rng.fill(&mut orig_data[..]);
|
||||||
|
|
||||||
|
let engine = random_engine(&mut rng);
|
||||||
|
engine.encode_string(&orig_data, &mut normal_encoded);
|
||||||
|
|
||||||
|
let mut stream_encoder = EncoderStringWriter::new(&engine);
|
||||||
|
// Write the first i bytes, then the rest
|
||||||
|
stream_encoder.write_all(&orig_data[0..i]).unwrap();
|
||||||
|
stream_encoder.write_all(&orig_data[i..]).unwrap();
|
||||||
|
|
||||||
|
let stream_encoded = stream_encoder.into_inner();
|
||||||
|
|
||||||
|
assert_eq!(normal_encoded, stream_encoded);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[test]
|
||||||
|
fn incremental_writes() {
|
||||||
|
let mut rng = rand::thread_rng();
|
||||||
|
let mut orig_data = Vec::<u8>::new();
|
||||||
|
let mut normal_encoded = String::new();
|
||||||
|
|
||||||
|
let size = 5_000;
|
||||||
|
|
||||||
|
for _ in 0..size {
|
||||||
|
orig_data.clear();
|
||||||
|
normal_encoded.clear();
|
||||||
|
|
||||||
|
orig_data.resize(size, 0);
|
||||||
|
rng.fill(&mut orig_data[..]);
|
||||||
|
|
||||||
|
let engine = random_engine(&mut rng);
|
||||||
|
engine.encode_string(&orig_data, &mut normal_encoded);
|
||||||
|
|
||||||
|
let mut stream_encoder = EncoderStringWriter::new(&engine);
|
||||||
|
// write small nibbles of data
|
||||||
|
let mut offset = 0;
|
||||||
|
while offset < size {
|
||||||
|
let nibble_size = cmp::min(rng.gen_range(0..=64), size - offset);
|
||||||
|
let len = stream_encoder
|
||||||
|
.write(&orig_data[offset..offset + nibble_size])
|
||||||
|
.unwrap();
|
||||||
|
offset += len;
|
||||||
|
}
|
||||||
|
|
||||||
|
let stream_encoded = stream_encoder.into_inner();
|
||||||
|
|
||||||
|
assert_eq!(normal_encoded, stream_encoded);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
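Beyond the doc examples above, the string-accumulating encoder composes with `std::io::copy` when the plaintext comes from a reader. A minimal sketch (illustrative literal and `main`, not part of the vendored sources):

```rust
use std::io;

use base64::{engine::general_purpose::STANDARD, write::EncoderStringWriter};

fn main() -> io::Result<()> {
    // Stream any io::Read source into the encoder without materializing the raw bytes first.
    let mut plaintext: &[u8] = b"hello world";
    let mut enc = EncoderStringWriter::new(&STANDARD);
    io::copy(&mut plaintext, &mut enc)?;

    // into_inner() finishes the stream (trailing triple + padding) and returns the String.
    let b64 = enc.into_inner();
    assert_eq!("aGVsbG8gd29ybGQ=", b64);
    Ok(())
}
```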
vendor/base64/src/write/encoder_tests.rs (vendored, new file, 554 lines)
@@ -0,0 +1,554 @@
[554 added lines: the upstream base64 crate's EncoderWriter tests (chunked and partial writes, padding, finish-on-drop, interrupted-write retry helpers such as InterruptingWriter and PartialInterruptingWriter), vendored verbatim]
11  vendor/base64/src/write/mod.rs  vendored  Normal file
@@ -0,0 +1,11 @@
//! Implementations of `io::Write` to transparently handle base64.
mod encoder;
mod encoder_string_writer;

pub use self::{
    encoder::EncoderWriter,
    encoder_string_writer::{EncoderStringWriter, StrConsumer},
};

#[cfg(test)]
mod encoder_tests;
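This module is the vendored crate's public surface for streaming encoding. A minimal sketch of driving `EncoderWriter` against an arbitrary `io::Write` sink; the `Vec<u8>` sink and sample input are illustrative:

```rust
use std::io::Write;

use base64::engine::general_purpose::STANDARD;
use base64::write::EncoderWriter;

fn main() -> std::io::Result<()> {
    // Any io::Write sink works; an in-memory Vec keeps the example self-contained.
    let mut sink = Vec::new();
    {
        let mut enc = EncoderWriter::new(&mut sink, &STANDARD);
        enc.write_all(b"hello ")?;
        enc.write_all(b"world")?;
        // finish() writes the final partial chunk and padding; Drop would do the
        // same, but calling it explicitly surfaces any I/O error.
        enc.finish()?;
    }
    assert_eq!(String::from_utf8(sink).unwrap(), "aGVsbG8gd29ybGQ=");
    Ok(())
}
```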
77  vendor/base64/tests/encode.rs  vendored  Normal file
@@ -0,0 +1,77 @@
[77 added lines: upstream one-shot encoding integration tests (all-ASCII and all-byte vectors, URL-safe alphabet, encoded_len padded/unpadded and overflow cases), vendored verbatim]
161  vendor/base64/tests/tests.rs  vendored  Normal file
@@ -0,0 +1,161 @@
[161 added lines: upstream round-trip integration tests (random encode/decode round trips with and without padding, trailing-bytes decode edge case, Base64Display, encode_slice buffer-too-small errors), vendored verbatim]
1  vendor/bitflags/.cargo-checksum.json  vendored  Normal file
@@ -0,0 +1 @@
[1 added line: the cargo vendor checksum manifest mapping each bitflags source file to its SHA-256 digest, plus the package checksum]
553  vendor/bitflags/CHANGELOG.md  vendored  Normal file
@@ -0,0 +1,553 @@
|
||||||
|
# 2.6.0
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Sync CHANGELOG.md with github release notes by @dextero in https://github.com/bitflags/bitflags/pull/402
|
||||||
|
* Update error messages and zerocopy by @KodrAus in https://github.com/bitflags/bitflags/pull/403
|
||||||
|
* Bump minimum declared versions of dependencies by @dextero in https://github.com/bitflags/bitflags/pull/404
|
||||||
|
* chore(deps): bump serde_derive and bytemuck versions by @joshka in https://github.com/bitflags/bitflags/pull/405
|
||||||
|
* add OSFF Scorecard workflow by @KodrAus in https://github.com/bitflags/bitflags/pull/396
|
||||||
|
* Update stderr messages by @KodrAus in https://github.com/bitflags/bitflags/pull/408
|
||||||
|
* Fix typo by @waywardmonkeys in https://github.com/bitflags/bitflags/pull/410
|
||||||
|
* Allow specifying outer attributes in impl mode by @KodrAus in https://github.com/bitflags/bitflags/pull/411
|
||||||
|
|
||||||
|
## New Contributors
|
||||||
|
* @dextero made their first contribution in https://github.com/bitflags/bitflags/pull/402
|
||||||
|
* @joshka made their first contribution in https://github.com/bitflags/bitflags/pull/405
|
||||||
|
* @waywardmonkeys made their first contribution in https://github.com/bitflags/bitflags/pull/410
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.5.0...2.6.0
|
||||||
|
|
||||||
|
# 2.5.0
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Derive `Debug` for `Flag<B>` by @tgross35 in https://github.com/bitflags/bitflags/pull/398
|
||||||
|
* Support truncating or strict-named variants of parsing and formatting by @KodrAus in https://github.com/bitflags/bitflags/pull/400
|
||||||
|
|
||||||
|
## New Contributors
|
||||||
|
* @tgross35 made their first contribution in https://github.com/bitflags/bitflags/pull/398
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.4.2...2.5.0
|
||||||
|
|
||||||
|
# 2.4.2
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Cargo.toml: Anchor excludes to root of the package by @jamessan in https://github.com/bitflags/bitflags/pull/387
|
||||||
|
* Update error messages by @KodrAus in https://github.com/bitflags/bitflags/pull/390
|
||||||
|
* Add support for impl mode structs to be repr(packed) by @GnomedDev in https://github.com/bitflags/bitflags/pull/388
|
||||||
|
* Remove old `unused_tuple_struct_fields` lint by @dtolnay in https://github.com/bitflags/bitflags/pull/393
|
||||||
|
* Delete use of `local_inner_macros` by @dtolnay in https://github.com/bitflags/bitflags/pull/392
|
||||||
|
|
||||||
|
## New Contributors
|
||||||
|
* @jamessan made their first contribution in https://github.com/bitflags/bitflags/pull/387
|
||||||
|
* @GnomedDev made their first contribution in https://github.com/bitflags/bitflags/pull/388
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.4.1...2.4.2
|
||||||
|
|
||||||
|
# 2.4.1
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Allow some new pedantic clippy lints by @KodrAus in https://github.com/bitflags/bitflags/pull/380
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.4.0...2.4.1
|
||||||
|
|
||||||
|
# 2.4.0
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Remove html_root_url by @eldruin in https://github.com/bitflags/bitflags/pull/368
|
||||||
|
* Support unnamed flags by @KodrAus in https://github.com/bitflags/bitflags/pull/371
|
||||||
|
* Update smoke test to verify all Clippy and rustc lints by @MitMaro in https://github.com/bitflags/bitflags/pull/374
|
||||||
|
* Specify the behavior of bitflags by @KodrAus in https://github.com/bitflags/bitflags/pull/369
|
||||||
|
|
||||||
|
## New Contributors
|
||||||
|
* @eldruin made their first contribution in https://github.com/bitflags/bitflags/pull/368
|
||||||
|
* @MitMaro made their first contribution in https://github.com/bitflags/bitflags/pull/374
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.3.3...2.4.0
|
||||||
|
|
||||||
|
# 2.3.3
|
||||||
|
|
||||||
|
## Changes to `-=`
|
||||||
|
|
||||||
|
The `-=` operator was incorrectly changed to truncate bits that didn't correspond to valid flags in `2.3.0`. This has
|
||||||
|
been fixed up so it once again behaves the same as `-` and `difference`.
|
||||||
|
|
||||||
|
## Changes to `!`
|
||||||
|
|
||||||
|
The `!` operator previously called `Self::from_bits_truncate`, which would truncate any bits that only partially
|
||||||
|
overlapped with a valid flag. It will now use `bits & Self::all().bits()`, so any bits that overlap any bits
|
||||||
|
specified by any flag will be respected. This is unlikely to have any practical implications, but enables defining
|
||||||
|
a flag like `const ALL = !0` as a way to signal that any bit pattern is a known set of flags.
|
||||||
|
|
||||||
|
## Changes to formatting
|
||||||
|
|
||||||
|
Zero-valued flags will never be printed. You'll either get `0x0` for empty flags using debug formatting, or the
|
||||||
|
set of flags with zero-valued flags omitted for others.
|
||||||
|
|
||||||
|
Composite flags will no longer be redundantly printed if there are extra bits to print at the end that don't correspond
|
||||||
|
to a valid flag.
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Fix up incorrect sub assign behavior and other cleanups by @KodrAus in https://github.com/bitflags/bitflags/pull/366
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.3.2...2.3.3
|
||||||
|
|
||||||
|
# 2.3.2
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* [doc] [src/lib.rs] delete redundant path prefix by @OccupyMars2025 in https://github.com/bitflags/bitflags/pull/361
|
||||||
|
|
||||||
|
## New Contributors
|
||||||
|
* @OccupyMars2025 made their first contribution in https://github.com/bitflags/bitflags/pull/361
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.3.1...2.3.2
|
||||||
|
|
||||||
|
# 2.3.1
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Fix Self in flags value expressions by @KodrAus in https://github.com/bitflags/bitflags/pull/355
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.3.0...2.3.1
|
||||||
|
|
||||||
|
# 2.3.0
|
||||||
|
|
||||||
|
## Major changes
|
||||||
|
|
||||||
|
### `BitFlags` trait deprecated in favor of `Flags` trait
|
||||||
|
|
||||||
|
This release introduces the `Flags` trait and deprecates the `BitFlags` trait. These two traits are semver compatible so if you have public API code depending on `BitFlags` you can move to `Flags` without breaking end-users. This is possible because the `BitFlags` trait was never publicly implementable, so it now carries `Flags` as a supertrait. All implementations of `Flags` additionally implement `BitFlags`.
|
||||||
|
|
||||||
|
The `Flags` trait is a publicly implementable version of the old `BitFlags` trait. The original `BitFlags` trait carried some macro baggage that made it difficult to implement, so a new `Flags` trait has been introduced as the _One True Trait_ for interacting with flags types generically. See the the `macro_free` and `custom_derive` examples for more details.
|
||||||
|
|
||||||
|
### `Bits` trait publicly exposed
|
||||||
|
|
||||||
|
The `Bits` trait for the underlying storage of flags values is also now publicly implementable. This lets you define your own exotic backing storage for flags. See the `custom_bits_type` example for more details.
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Use explicit hashes for actions steps by @KodrAus in https://github.com/bitflags/bitflags/pull/350
|
||||||
|
* Support ejecting flags types from the bitflags macro by @KodrAus in https://github.com/bitflags/bitflags/pull/351
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.2.1...2.3.0
|
||||||
|
|
||||||
|
# 2.2.1
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Refactor attribute filtering to apply per-flag by @KodrAus in https://github.com/bitflags/bitflags/pull/345
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.2.0...2.2.1
|
||||||
|
|
||||||
|
# 2.2.0
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Create SECURITY.md by @KodrAus in https://github.com/bitflags/bitflags/pull/338
|
||||||
|
* add docs to describe the behavior of multi-bit flags by @nicholasbishop in https://github.com/bitflags/bitflags/pull/340
|
||||||
|
* Add support for bytemuck by @KodrAus in https://github.com/bitflags/bitflags/pull/336
|
||||||
|
* Add a top-level macro for filtering attributes by @KodrAus in https://github.com/bitflags/bitflags/pull/341
|
||||||
|
|
||||||
|
## New Contributors
|
||||||
|
* @nicholasbishop made their first contribution in https://github.com/bitflags/bitflags/pull/340
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.1.0...2.2.0
|
||||||
|
|
||||||
|
# 2.1.0
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Add docs for the internal Field0 and examples of formatting/parsing by @KodrAus in https://github.com/bitflags/bitflags/pull/328
|
||||||
|
* Add support for arbitrary by @KodrAus in https://github.com/bitflags/bitflags/pull/324
|
||||||
|
* Fix up missing docs for consts within consts by @KodrAus in https://github.com/bitflags/bitflags/pull/330
|
||||||
|
* Ignore clippy lint in generated code by @Jake-Shadle in https://github.com/bitflags/bitflags/pull/331
|
||||||
|
|
||||||
|
## New Contributors
|
||||||
|
* @Jake-Shadle made their first contribution in https://github.com/bitflags/bitflags/pull/331
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.0.2...2.1.0
|
||||||
|
|
||||||
|
# 2.0.2
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Fix up missing isize and usize Bits impls by @KodrAus in https://github.com/bitflags/bitflags/pull/321
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.0.1...2.0.2
|
||||||
|
|
||||||
|
# 2.0.1
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Fix up some docs issues by @KodrAus in https://github.com/bitflags/bitflags/pull/309
|
||||||
|
* Make empty_flag() const. by @tormeh in https://github.com/bitflags/bitflags/pull/313
|
||||||
|
* Fix formatting of multi-bit flags with partial overlap by @KodrAus in https://github.com/bitflags/bitflags/pull/316
|
||||||
|
|
||||||
|
## New Contributors
|
||||||
|
* @tormeh made their first contribution in https://github.com/bitflags/bitflags/pull/313
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.0.0...2.0.1
|
||||||
|
|
||||||
|
# 2.0.0
|
||||||
|
|
||||||
|
## Major changes
|
||||||
|
|
||||||
|
This release includes some major changes over `1.x`. If you use `bitflags!` types in your public API then upgrading this library may cause breakage in your downstream users.
|
||||||
|
|
||||||
|
### ⚠️ Serialization
|
||||||
|
|
||||||
|
You'll need to add the `serde` Cargo feature in order to `#[derive(Serialize, Deserialize)]` on your generated flags types:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
bitflags! {
|
||||||
|
#[derive(Serialize, Deserialize)]
|
||||||
|
#[serde(transparent)]
|
||||||
|
pub struct Flags: T {
|
||||||
|
..
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
where `T` is the underlying bits type you're using, such as `u32`.
|
||||||
|
|
||||||
|
The default serialization format with `serde` **has changed** if you `#[derive(Serialize, Deserialize)]` on your generated flags types. It will now use a formatted string for human-readable formats and the underlying bits type for compact formats.
|
||||||
|
|
||||||
|
To keep the old format, see the https://github.com/KodrAus/bitflags-serde-legacy library.
|
||||||
|
|
||||||
|
### ⚠️ Traits
|
||||||
|
|
||||||
|
Generated flags types now derive fewer traits. If you need to maintain backwards compatibility, you can derive the following yourself:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Clone, Copy)]
|
||||||
|
```
|
||||||
|
|
||||||
|
### ⚠️ Methods
|
||||||
|
|
||||||
|
The unsafe `from_bits_unchecked` method is now a safe `from_bits_retain` method.
|
||||||
|
|
||||||
|
You can add the following method to your generated types to keep them compatible:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[deprecated = "use the safe `from_bits_retain` method instead"]
|
||||||
|
pub unsafe fn from_bits_unchecked(bits: T) -> Self {
|
||||||
|
Self::from_bits_retain(bits)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
where `T` is the underlying bits type you're using, such as `u32`.
|
||||||
|
|
||||||
|
### ⚠️ `.bits` field
|
||||||
|
|
||||||
|
You can now use the `.bits()` method instead of the old `.bits`.
|
||||||
|
|
||||||
|
The representation of generated flags types has changed from a struct with the single field `bits` to a newtype.
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Fix a typo and call out MSRV bump by @KodrAus in https://github.com/bitflags/bitflags/pull/259
|
||||||
|
* BitFlags trait by @arturoc in https://github.com/bitflags/bitflags/pull/220
|
||||||
|
* Add a hidden trait to discourage manual impls of BitFlags by @KodrAus in https://github.com/bitflags/bitflags/pull/261
|
||||||
|
* Sanitize `Ok` by @konsumlamm in https://github.com/bitflags/bitflags/pull/266
|
||||||
|
* Fix bug in `Debug` implementation by @konsumlamm in https://github.com/bitflags/bitflags/pull/268
|
||||||
|
* Fix a typo in the generated documentation by @wackbyte in https://github.com/bitflags/bitflags/pull/271
|
||||||
|
* Use SPDX license format by @atouchet in https://github.com/bitflags/bitflags/pull/272
|
||||||
|
* serde tests fail in CI by @arturoc in https://github.com/bitflags/bitflags/pull/277
|
||||||
|
* Fix beta test output by @KodrAus in https://github.com/bitflags/bitflags/pull/279
|
||||||
|
* Add example to the README.md file by @tiaanl in https://github.com/bitflags/bitflags/pull/270
|
||||||
|
* Iterator over all the enabled options by @arturoc in https://github.com/bitflags/bitflags/pull/278
|
||||||
|
* from_bits_(truncate) fail with composite flags by @arturoc in https://github.com/bitflags/bitflags/pull/276
|
||||||
|
* Add more platform coverage to CI by @KodrAus in https://github.com/bitflags/bitflags/pull/280
|
||||||
|
* rework the way cfgs are handled by @KodrAus in https://github.com/bitflags/bitflags/pull/281
|
||||||
|
* Split generated code into two types by @KodrAus in https://github.com/bitflags/bitflags/pull/282
|
||||||
|
* expose bitflags iters using nameable types by @KodrAus in https://github.com/bitflags/bitflags/pull/286
|
||||||
|
* Support creating flags from their names by @KodrAus in https://github.com/bitflags/bitflags/pull/287
|
||||||
|
* Update README.md by @KodrAus in https://github.com/bitflags/bitflags/pull/288
|
||||||
|
* Prepare for 2.0.0-rc.1 release by @KodrAus in https://github.com/bitflags/bitflags/pull/289
|
||||||
|
* Add missing "if" to contains doc-comment in traits.rs by @rusty-snake in https://github.com/bitflags/bitflags/pull/291
|
||||||
|
* Forbid unsafe_code by @fintelia in https://github.com/bitflags/bitflags/pull/294
|
||||||
|
* serde: enable no-std support by @nim65s in https://github.com/bitflags/bitflags/pull/296
|
||||||
|
* Add a parser for flags formatted as bar-separated-values by @KodrAus in https://github.com/bitflags/bitflags/pull/297
|
||||||
|
* Prepare for 2.0.0-rc.2 release by @KodrAus in https://github.com/bitflags/bitflags/pull/299
|
||||||
|
* Use strip_prefix instead of starts_with + slice by @QuinnPainter in https://github.com/bitflags/bitflags/pull/301
|
||||||
|
* Fix up some clippy lints by @KodrAus in https://github.com/bitflags/bitflags/pull/302
|
||||||
|
* Prepare for 2.0.0-rc.3 release by @KodrAus in https://github.com/bitflags/bitflags/pull/303
|
||||||
|
* feat: Add minimum permissions to rust.yml workflow by @gabibguti in https://github.com/bitflags/bitflags/pull/305
|
||||||
|
|
||||||
|
## New Contributors
|
||||||
|
* @wackbyte made their first contribution in https://github.com/bitflags/bitflags/pull/271
|
||||||
|
* @atouchet made their first contribution in https://github.com/bitflags/bitflags/pull/272
|
||||||
|
* @tiaanl made their first contribution in https://github.com/bitflags/bitflags/pull/270
|
||||||
|
* @rusty-snake made their first contribution in https://github.com/bitflags/bitflags/pull/291
|
||||||
|
* @fintelia made their first contribution in https://github.com/bitflags/bitflags/pull/294
|
||||||
|
* @nim65s made their first contribution in https://github.com/bitflags/bitflags/pull/296
|
||||||
|
* @QuinnPainter made their first contribution in https://github.com/bitflags/bitflags/pull/301
|
||||||
|
* @gabibguti made their first contribution in https://github.com/bitflags/bitflags/pull/305
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/1.3.2...2.0.0
|
||||||
|
|
||||||
|
# 2.0.0-rc.3
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Use strip_prefix instead of starts_with + slice by @QuinnPainter in https://github.com/bitflags/bitflags/pull/301
|
||||||
|
* Fix up some clippy lints by @KodrAus in https://github.com/bitflags/bitflags/pull/302
|
||||||
|
|
||||||
|
## New Contributors
|
||||||
|
* @QuinnPainter made their first contribution in https://github.com/bitflags/bitflags/pull/301
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.0.0-rc.2...2.0.0-rc.3
|
||||||
|
|
||||||
|
# 2.0.0-rc.2
|
||||||
|
|
||||||
|
## Changes to `serde` serialization
|
||||||
|
|
||||||
|
**⚠️ NOTE ⚠️** This release changes the default serialization you'll get if you `#[derive(Serialize, Deserialize)]`
|
||||||
|
on your generated flags types. It will now use a formatted string for human-readable formats and the underlying bits
|
||||||
|
type for compact formats.
|
||||||
|
|
||||||
|
To keep the old behavior, see the [`bitflags-serde-legacy`](https://github.com/KodrAus/bitflags-serde-legacy) library.
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
|
||||||
|
* Add missing "if" to contains doc-comment in traits.rs by @rusty-snake in https://github.com/bitflags/bitflags/pull/291
|
||||||
|
* Forbid unsafe_code by @fintelia in https://github.com/bitflags/bitflags/pull/294
|
||||||
|
* serde: enable no-std support by @nim65s in https://github.com/bitflags/bitflags/pull/296
|
||||||
|
* Add a parser for flags formatted as bar-separated-values by @KodrAus in https://github.com/bitflags/bitflags/pull/297
|
||||||
|
|
||||||
|
## New Contributors
|
||||||
|
* @rusty-snake made their first contribution in https://github.com/bitflags/bitflags/pull/291
|
||||||
|
* @fintelia made their first contribution in https://github.com/bitflags/bitflags/pull/294
|
||||||
|
* @nim65s made their first contribution in https://github.com/bitflags/bitflags/pull/296
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/2.0.0-rc.1...2.0.0-rc.2
|
||||||
|
|
||||||
|
# 2.0.0-rc.1
|
||||||
|
|
||||||
|
This is a big release including a few years worth of work on a new `BitFlags` trait, iteration, and better macro organization for future extensibility.
|
||||||
|
|
||||||
|
## What's Changed
|
||||||
|
* Fix a typo and call out MSRV bump by @KodrAus in https://github.com/bitflags/bitflags/pull/259
|
||||||
|
* BitFlags trait by @arturoc in https://github.com/bitflags/bitflags/pull/220
|
||||||
|
* Add a hidden trait to discourage manual impls of BitFlags by @KodrAus in https://github.com/bitflags/bitflags/pull/261
|
||||||
|
* Sanitize `Ok` by @konsumlamm in https://github.com/bitflags/bitflags/pull/266
|
||||||
|
* Fix bug in `Debug` implementation by @konsumlamm in https://github.com/bitflags/bitflags/pull/268
|
||||||
|
* Fix a typo in the generated documentation by @wackbyte in https://github.com/bitflags/bitflags/pull/271
|
||||||
|
* Use SPDX license format by @atouchet in https://github.com/bitflags/bitflags/pull/272
|
||||||
|
* serde tests fail in CI by @arturoc in https://github.com/bitflags/bitflags/pull/277
|
||||||
|
* Fix beta test output by @KodrAus in https://github.com/bitflags/bitflags/pull/279
|
||||||
|
* Add example to the README.md file by @tiaanl in https://github.com/bitflags/bitflags/pull/270
|
||||||
|
* Iterator over all the enabled options by @arturoc in https://github.com/bitflags/bitflags/pull/278
|
||||||
|
* from_bits_(truncate) fail with composite flags by @arturoc in https://github.com/bitflags/bitflags/pull/276
|
||||||
|
* Add more platform coverage to CI by @KodrAus in https://github.com/bitflags/bitflags/pull/280
|
||||||
|
* rework the way cfgs are handled by @KodrAus in https://github.com/bitflags/bitflags/pull/281
|
||||||
|
* Split generated code into two types by @KodrAus in https://github.com/bitflags/bitflags/pull/282
|
||||||
|
* expose bitflags iters using nameable types by @KodrAus in https://github.com/bitflags/bitflags/pull/286
|
||||||
|
* Support creating flags from their names by @KodrAus in https://github.com/bitflags/bitflags/pull/287
|
||||||
|
* Update README.md by @KodrAus in https://github.com/bitflags/bitflags/pull/288
|
||||||
|
|
||||||
|
## New Contributors
|
||||||
|
* @wackbyte made their first contribution in https://github.com/bitflags/bitflags/pull/271
|
||||||
|
* @atouchet made their first contribution in https://github.com/bitflags/bitflags/pull/272
|
||||||
|
* @tiaanl made their first contribution in https://github.com/bitflags/bitflags/pull/270
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/bitflags/bitflags/compare/1.3.2...2.0.0-rc.1
|
||||||
|
|
||||||
|
# 1.3.2

- Allow `non_snake_case` in generated flags types ([#256])

[#256]: https://github.com/bitflags/bitflags/pull/256

# 1.3.1

- Revert unconditional `#[repr(transparent)]` ([#252])

[#252]: https://github.com/bitflags/bitflags/pull/252

# 1.3.0 (yanked)

**This release bumps the Minimum Supported Rust Version to `1.46.0`**

- Add `#[repr(transparent)]` ([#187])
- End `empty` doc comment with full stop ([#202])
- Fix typo in crate root docs ([#206])
- Document from_bits_unchecked unsafety ([#207])
- Let `is_all` ignore extra bits ([#211])
- Allows empty flag definition ([#225])
- Making crate accessible from std ([#227])
- Make `from_bits` a const fn ([#229])
- Allow multiple bitflags structs in one macro invocation ([#235])
- Add named functions to perform set operations ([#244]) (see the sketch below)
- Fix typos in method docs ([#245])
- Modernization of the `bitflags` macro to take advantage of newer features and 2018 idioms ([#246])
- Fix regression (in an unreleased feature) and simplify tests ([#247])
- Use `Self` and fix bug when overriding `stringify!` ([#249])

[#187]: https://github.com/bitflags/bitflags/pull/187
[#202]: https://github.com/bitflags/bitflags/pull/202
[#206]: https://github.com/bitflags/bitflags/pull/206
[#207]: https://github.com/bitflags/bitflags/pull/207
[#211]: https://github.com/bitflags/bitflags/pull/211
[#225]: https://github.com/bitflags/bitflags/pull/225
[#227]: https://github.com/bitflags/bitflags/pull/227
[#229]: https://github.com/bitflags/bitflags/pull/229
[#235]: https://github.com/bitflags/bitflags/pull/235
[#244]: https://github.com/bitflags/bitflags/pull/244
[#245]: https://github.com/bitflags/bitflags/pull/245
[#246]: https://github.com/bitflags/bitflags/pull/246
[#247]: https://github.com/bitflags/bitflags/pull/247
[#249]: https://github.com/bitflags/bitflags/pull/249

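A rough illustration of the named set operations from [#244] and the `const` `from_bits` from [#229]; the `Perms` type and its constants are invented for this sketch and are not part of the changelog:

```rust
use bitflags::bitflags;

bitflags! {
    struct Perms: u8 {
        const READ  = 0b001;
        const WRITE = 0b010;
        const EXEC  = 0b100;
    }
}

// `from_bits` can be evaluated in a const context as of 1.3.0 ([#229]).
const MAYBE_RW: Option<Perms> = Perms::from_bits(0b011);

fn main() {
    let rw = Perms::READ.union(Perms::WRITE); // named form of READ | WRITE
    let ro = rw.difference(Perms::WRITE);      // named form of rw & !WRITE
    assert_eq!(rw.intersection(Perms::WRITE), Perms::WRITE);
    assert_eq!(ro, Perms::READ);
    assert_eq!(MAYBE_RW, Some(rw));
}
```
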
# 1.2.1

- Remove extraneous `#[inline]` attributes ([#194])

[#194]: https://github.com/bitflags/bitflags/pull/194

# 1.2.0

- Fix typo: {Lower, Upper}Exp - {Lower, Upper}Hex ([#183])
- Add support for "unknown" bits ([#188])

[#183]: https://github.com/rust-lang-nursery/bitflags/pull/183
[#188]: https://github.com/rust-lang-nursery/bitflags/pull/188

# 1.1.0

This is a re-release of `1.0.5`, which was yanked due to a bug in the RLS.

# 1.0.5

- Use compiletest_rs flags supported by stable toolchain ([#171])
- Put the user provided attributes first ([#173])
- Make bitflags methods `const` on newer compilers ([#175])

[#171]: https://github.com/rust-lang-nursery/bitflags/pull/171
[#173]: https://github.com/rust-lang-nursery/bitflags/pull/173
[#175]: https://github.com/rust-lang-nursery/bitflags/pull/175

# 1.0.4

- Support Rust 2018 style macro imports ([#165])

```rust
use bitflags::bitflags;
```

[#165]: https://github.com/rust-lang-nursery/bitflags/pull/165

# 1.0.3

- Improve zero value flag handling and documentation ([#157])

[#157]: https://github.com/rust-lang-nursery/bitflags/pull/157

# 1.0.2

- 30% improvement in compile time of bitflags crate ([#156])
- Documentation improvements ([#153])
- Implementation cleanup ([#149])

[#156]: https://github.com/rust-lang-nursery/bitflags/pull/156
[#153]: https://github.com/rust-lang-nursery/bitflags/pull/153
[#149]: https://github.com/rust-lang-nursery/bitflags/pull/149

# 1.0.1

- Add support for `pub(restricted)` specifier on the bitflags struct ([#135])
- Optimize performance of `all()` when called from a separate crate ([#136])

[#135]: https://github.com/rust-lang-nursery/bitflags/pull/135
[#136]: https://github.com/rust-lang-nursery/bitflags/pull/136

# 1.0.0

- **[breaking change]** Macro now generates [associated constants](https://doc.rust-lang.org/reference/items.html#associated-constants) ([#24])
- **[breaking change]** Minimum supported version is Rust **1.20**, due to usage of associated constants
- After being broken in 0.9, the `#[deprecated]` attribute is now supported again ([#112])
- Other improvements to unit tests and documentation ([#106] and [#115])

[#24]: https://github.com/rust-lang-nursery/bitflags/pull/24
[#106]: https://github.com/rust-lang-nursery/bitflags/pull/106
[#112]: https://github.com/rust-lang-nursery/bitflags/pull/112
[#115]: https://github.com/rust-lang-nursery/bitflags/pull/115

## How to update your code to use associated constants

Assuming the following structure definition:

```rust
bitflags! {
    struct Something: u8 {
        const FOO = 0b01,
        const BAR = 0b10
    }
}
```

In 0.9 and older you could do:

```rust
let x = FOO.bits | BAR.bits;
```

Now you must use:

```rust
let x = Something::FOO.bits | Something::BAR.bits;
```

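For completeness: the generated types also implement the bitwise operators and `contains`, so most code can avoid touching `.bits` directly. A minimal self-contained sketch in the 1.0-era style, reusing the `Something` example from above (the `main` wrapper is only for illustration):

```rust
#[macro_use]
extern crate bitflags; // pre-2018 import style; `use bitflags::bitflags;` works from 1.0.4

bitflags! {
    struct Something: u8 {
        const FOO = 0b01;
        const BAR = 0b10;
    }
}

fn main() {
    let x = Something::FOO | Something::BAR; // `BitOr` is implemented on the generated type
    assert!(x.contains(Something::FOO));
    assert_eq!(x.bits, 0b11);
}
```
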
# 0.9.1

- Fix the implementation of `Formatting` traits when other formatting traits were present in scope ([#105])

[#105]: https://github.com/rust-lang-nursery/bitflags/pull/105

# 0.9.0

- **[breaking change]** Use struct keyword instead of flags to define bitflag types ([#84])
- **[breaking change]** Terminate const items with semicolons instead of commas ([#87])
- Implement the `Hex`, `Octal`, and `Binary` formatting traits ([#86])
- Printing an empty flag value with the `Debug` trait now prints "(empty)" instead of nothing ([#85])
- The `bitflags!` macro can now be used inside of a fn body, to define a type local to that function ([#74])

[#74]: https://github.com/rust-lang-nursery/bitflags/pull/74
[#84]: https://github.com/rust-lang-nursery/bitflags/pull/84
[#85]: https://github.com/rust-lang-nursery/bitflags/pull/85
[#86]: https://github.com/rust-lang-nursery/bitflags/pull/86
[#87]: https://github.com/rust-lang-nursery/bitflags/pull/87

# 0.8.2

- Update feature flag used when building bitflags as a dependency of the Rust toolchain

# 0.8.1

- Allow bitflags to be used as a dependency of the Rust toolchain

# 0.8.0

- Add support for the experimental `i128` and `u128` integer types ([#57])
- Add set method: `flags.set(SOME_FLAG, true)` or `flags.set(SOME_FLAG, false)` ([#55])
  This may break code that defines its own set method

[#55]: https://github.com/rust-lang-nursery/bitflags/pull/55
[#57]: https://github.com/rust-lang-nursery/bitflags/pull/57

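For clarity, a small sketch of how the `set` method from [#55] behaves, written with the current struct-based `bitflags!` syntax rather than the 0.8-era `flags` syntax; the `Flags` type is invented for illustration:

```rust
use bitflags::bitflags;

bitflags! {
    struct Flags: u32 {
        const A = 0b01;
        const B = 0b10;
    }
}

fn main() {
    let mut f = Flags::A;
    f.set(Flags::B, true);  // insert `B`
    assert_eq!(f, Flags::A | Flags::B);
    f.set(Flags::A, false); // remove `A`
    assert_eq!(f, Flags::B);
}
```
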
# 0.7.1

*(yanked)*

# 0.7.0

- Implement the Extend trait ([#49])
- Allow definitions inside the `bitflags!` macro to refer to items imported from other modules ([#51])

[#49]: https://github.com/rust-lang-nursery/bitflags/pull/49
[#51]: https://github.com/rust-lang-nursery/bitflags/pull/51

# 0.6.0

- The `no_std` feature was removed as it is now the default
- The `assignment_operators` feature was removed as it is now enabled by default
- Some clippy suggestions have been applied

73 vendor/bitflags/CODE_OF_CONDUCT.md vendored Normal file

@ -0,0 +1,73 @@

# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
education, socio-economic status, nationality, personal appearance, race,
religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment
include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or
  advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
  address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at coc@senaite.org. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html

[homepage]: https://www.contributor-covenant.org

9 vendor/bitflags/CONTRIBUTING.md vendored Normal file

@ -0,0 +1,9 @@

# Updating compile-fail test outputs

`bitflags` uses the `trybuild` crate to integration test its macros. Since Rust error messages change frequently enough that `nightly` builds produce spurious failures, we only check the compiler output in `beta` builds. If you run:

```
TRYBUILD=overwrite cargo +beta test --all
```

it will run the tests and update the `trybuild` output files.

96 vendor/bitflags/Cargo.toml vendored Normal file

@ -0,0 +1,96 @@

# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2021"
rust-version = "1.56.0"
name = "bitflags"
version = "2.6.0"
authors = ["The Rust Project Developers"]
exclude = [
    "/tests",
    "/.github",
]
description = """
A macro to generate structures which behave like bitflags.
"""
homepage = "https://github.com/bitflags/bitflags"
documentation = "https://docs.rs/bitflags"
readme = "README.md"
keywords = [
    "bit",
    "bitmask",
    "bitflags",
    "flags",
]
categories = ["no-std"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/bitflags/bitflags"

[package.metadata.docs.rs]
features = ["example_generated"]

[dependencies.arbitrary]
version = "1.0"
optional = true

[dependencies.bytemuck]
version = "1.12"
optional = true

[dependencies.compiler_builtins]
version = "0.1.2"
optional = true

[dependencies.core]
version = "1.0.0"
optional = true
package = "rustc-std-workspace-core"

[dependencies.serde]
version = "1.0.103"
optional = true
default-features = false

[dev-dependencies.arbitrary]
version = "1.0"
features = ["derive"]

[dev-dependencies.bytemuck]
version = "1.12.2"
features = ["derive"]

[dev-dependencies.rustversion]
version = "1.0"

[dev-dependencies.serde_derive]
version = "1.0.103"

[dev-dependencies.serde_json]
version = "1.0"

[dev-dependencies.serde_test]
version = "1.0.19"

[dev-dependencies.trybuild]
version = "1.0.18"

[dev-dependencies.zerocopy]
version = "0.7"
features = ["derive"]

[features]
example_generated = []
rustc-dep-of-std = [
    "core",
    "compiler_builtins",
]
std = []

201 vendor/bitflags/LICENSE-APACHE vendored Normal file

@ -0,0 +1,201 @@

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Some files were not shown because too many files have changed in this diff.