Poly2Block; Block2Poly; SEA128 tasks working #2

Merged
0xalivecow merged 26 commits from dev into main 2024-10-23 15:21:06 +00:00
1905 changed files with 407545 additions and 3 deletions

5
.cargo/config.toml Normal file
View file

@ -0,0 +1,5 @@
[source.crates-io]
replace-with = "vendored-sources"
[source.vendored-sources]
directory = "vendor"

22
.github/workflows/dev_testing.yml vendored Normal file
View file

@ -0,0 +1,22 @@
name: Rust

on:
  push:
    branches: [ "dev" ]
  pull_request:
    branches: [ "dev" ]

env:
  CARGO_TERM_COLOR: always

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v4
    - name: Build
      run: cargo build --verbose
    - name: Run tests
      run: cargo test --verbose

69
.github/workflows/kauma.yaml vendored Normal file
View file

@ -0,0 +1,69 @@
name: Kauma Pipeline (labwork-docker)

on:
  push:
    branches:
      - master
      - devel
      - main
      - '**'

jobs:
  check_requirements:
    name: Check requirements
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: build script may be present for compiling code
        id: check-build
        run: |
          if [ -f ./build ]; then
            echo "OK"
          else
            echo "WARNING: No \`build\` found (not required)" >> $GITHUB_STEP_SUMMARY
          fi
      - name: build script must be executable
        id: check-build-exec
        run: |
          if [ -f ./build ]; then
            if [ -x ./build ]; then
              echo "OK"
            else
              echo "ERROR: \`build\` is not executable" >> $GITHUB_STEP_SUMMARY
              echo "*HINT: The git command \`git add --chmod=+x build\` can be used*" >> $GITHUB_STEP_SUMMARY
              exit 1
            fi
          fi
      - name: kauma script must be present for running code
        id: check-run
        run: |
          if [ -f ./kauma ]; then
            echo "OK"
          else
            echo "ERROR: No \`kauma\` found (required)" >> $GITHUB_STEP_SUMMARY
            exit 1
          fi
      - name: kauma script must be executable
        id: check-run-exec
        run: |
          if [ -x ./kauma ]; then
            echo "OK"
          else
            echo "ERROR: \`kauma\` is not executable" >> $GITHUB_STEP_SUMMARY
            echo "*HINT: The git command \`git add --chmod=+x kauma\` can be used*" >> $GITHUB_STEP_SUMMARY
            exit 1
          fi

  labwork_docker_run:
    name: Test project inside labwork-docker container
    needs: check_requirements
    runs-on: ubuntu-latest
    steps:
      - name: get repo
        uses: actions/checkout@v4
      - name: Pull labwork-docker image
        run: |
          docker pull ghcr.io/johndoe31415/labwork-docker:master
          docker tag ghcr.io/johndoe31415/labwork-docker:master labwork
      - name: Run labwork container
        run: |
          docker run -v $PWD:/dut/ labwork /bin/bash -c '/dut/build && /dut/kauma ./example_challenges/block2poly.json'

4
.gitignore vendored
View file

@ -1,11 +1,11 @@
# Generated by Cargo
# will have compiled files and executables
-debug/
+# debug/
target/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
-Cargo.lock
+# Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk

16
.vscode/launch.json vendored Normal file
View file

@ -0,0 +1,16 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "Debug",
"program": "${workspaceFolder}/<executable file>",
"args": [],
"cwd": "${workspaceFolder}"
}
]
}

215
Cargo.lock generated Normal file
View file

@ -0,0 +1,215 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "anyhow"
version = "1.0.90"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37bf3594c4c988a53154954629820791dde498571819ae4ca50ca811e060cc95"
[[package]]
name = "base64"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
[[package]]
name = "bitflags"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
[[package]]
name = "cc"
version = "1.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f"
dependencies = [
"shlex",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "foreign-types"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
dependencies = [
"foreign-types-shared",
]
[[package]]
name = "foreign-types-shared"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
[[package]]
name = "itoa"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
[[package]]
name = "kauma"
version = "0.1.0"
dependencies = [
"anyhow",
"base64",
"openssl",
"serde",
"serde_json",
]
[[package]]
name = "libc"
version = "0.2.161"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1"
[[package]]
name = "memchr"
version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "once_cell"
version = "1.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
[[package]]
name = "openssl"
version = "0.10.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5"
dependencies = [
"bitflags",
"cfg-if",
"foreign-types",
"libc",
"once_cell",
"openssl-macros",
"openssl-sys",
]
[[package]]
name = "openssl-macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "openssl-sys"
version = "0.9.104"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741"
dependencies = [
"cc",
"libc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "pkg-config"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"
[[package]]
name = "proc-macro2"
version = "1.0.88"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c3a7fc5db1e57d5a779a352c8cdb57b29aa4c40cc69c3a68a7fedc815fbf2f9"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
dependencies = [
"proc-macro2",
]
[[package]]
name = "ryu"
version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
[[package]]
name = "serde"
version = "1.0.210"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.210"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.129"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbcf9b78a125ee667ae19388837dd12294b858d101fdd393cb9d5501ef09eb2"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
]
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "syn"
version = "2.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
[[package]]
name = "vcpkg"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"

18
Cargo.toml Normal file
View file

@ -0,0 +1,18 @@
[package]
name = "kauma"
version = "0.1.0"
edition = "2021"
rust-version = "1.75"
[dependencies]
anyhow = "1.0.90"
base64 = "0.22.1"
openssl = "0.10.68"
serde = { version = "1.0.210", features = ["derive"] }
serde_json = "1.0"
[source.crates-io]
replace-with = "vendored-sources"
[source.vendored-sources]
directory = "vendor"

7
build Executable file
View file

@ -0,0 +1,7 @@
#!/bin/bash
export OPENSSL_LIB_DIR=/usr/lib/x86_64-linux-gnu
export OPENSSL_INCLUDE_DIR=/usr/include/openssl
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
cd $SCRIPT_DIR
cargo build --release --locked --offline

7
kauma Executable file
View file

@ -0,0 +1,7 @@
#!/bin/bash
export OPENSSL_LIB_DIR=/usr/lib/x86_64-linux-gnu
export OPENSSL_INCLUDE_DIR=/usr/include/openssl
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
cd "$SCRIPT_DIR"
cargo run --release --locked --offline -- "$@"

2
src/lib.rs Normal file
View file

@ -0,0 +1,2 @@
pub mod tasks;
pub mod utils;

19
src/main.rs Normal file
View file

@ -0,0 +1,19 @@
use std::{env, fs};
use anyhow::Result;
fn main() -> Result<()> {
let args: Vec<String> = env::args().collect();
let path_to_workload = &args[1];
let json = fs::read_to_string(path_to_workload)?;
let workload = kauma::utils::parse::parse_json(json)?;
let response = kauma::tasks::task_distrubute(&workload);
println!("{}", serde_json::to_string(&response)?);
Ok(())
}
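
A note on src/main.rs: it indexes `args[1]` directly, so running `kauma` without a path panics with an index error. A minimal sketch of a friendlier lookup, assuming the same anyhow dependency; `workload_path` is a hypothetical helper and not part of this diff:

```rust
use anyhow::{Context, Result};

// Hypothetical helper (not in this PR): turn a missing CLI argument into a
// usage message instead of an index panic.
fn workload_path() -> Result<String> {
    std::env::args()
        .nth(1)
        .context("usage: kauma <path-to-testcases.json>")
}
```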

112
src/tasks/mod.rs Normal file
View file

@ -0,0 +1,112 @@
use std::collections::HashMap;
use crate::utils::parse::{Responses, Testcase, Testcases};
use tasks01::{
block2poly::block2poly,
poly2block::poly2block,
sea128::sea128,
};
use anyhow::{anyhow, Result};
use serde_json::{json, Value};
mod tasks01;
pub fn task_deploy(testcase: &Testcase) -> Result<Value> {
/*
* Dispatch a single testcase to the matching task implementation
* TODO: Add functionality to also pass semantics
* */
let args = &testcase.arguments;
match testcase.action.as_str() {
"poly2block" => {
let result = poly2block(args);
let json = json!({"block" : result});
Ok(json)
}
"block2poly" => {
let result: Vec<u8> = block2poly(args)?;
let json = json!({"coefficients" : result});
Ok(json)
}
"sea128" => {
let result = sea128(args)?;
let json = json!({"output" : result});
Ok(json)
}
_ => Err(anyhow!("Fatal. No compatible action found")),
}
}
pub fn task_distrubute(testcases: &Testcases) -> Responses {
let mut responses: HashMap<String, Value> = HashMap::new();
for (id, testcase) in &testcases.testcases {
responses.insert(id.to_owned(), task_deploy(testcase).unwrap());
}
Responses { responses }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::utils::parse::parse_json;
use std::fs;
#[test]
fn test_task_deploy() {
let json = fs::read_to_string("src/test_json/poly2block_example.json").unwrap();
let parsed = parse_json(json).unwrap();
let testcase = parsed
.testcases
.get("b856d760-023d-4b00-bad2-15d2b6da22fe")
.unwrap();
assert!(
task_deploy(&testcase).is_ok(),
"Error: Function result was: {:?}",
task_deploy(&testcase)
);
}
#[test]
fn test_task_distribution() {
let json = fs::read_to_string("src/test_json/poly2block_example.json").unwrap();
let parsed = parse_json(json).unwrap();
let expected = json!({ "responses": { "b856d760-023d-4b00-bad2-15d2b6da22fe": {"block": "ARIAAAAAAAAAAAAAAAAAgA=="}}});
assert_eq!(
serde_json::to_value(task_distrubute(&parsed)).unwrap(),
serde_json::to_value(expected).unwrap()
);
}
#[test]
fn test_task_sea128_task_full() {
let json = fs::read_to_string("src/test_json/sea128.json").unwrap();
let parsed = parse_json(json).unwrap();
let expected = json!({
"responses": {
"b856d760-023d-4b00-bad2-15d2b6da22fe": {
"output": "D5FDo3iVBoBN9gVi9/MSKQ=="
},
"254eaee7-05fd-4e0d-8292-9b658a852245": {
"output": "yv66vvrO263eyviIiDNEVQ=="
}
}
});
assert_eq!(
serde_json::to_value(task_distrubute(&parsed)).unwrap(),
serde_json::to_value(expected).unwrap()
);
}
}
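
In task_distrubute the per-testcase result is unwrapped, so a single unknown action aborts the whole run. A hedged alternative, sketched against the module's existing imports (task_deploy, Testcases, Responses), would record the error in that testcase's response instead; the name `task_distrubute_lossy` is made up for illustration and is not part of this diff:

```rust
use std::collections::HashMap;
use serde_json::{json, Value};

// Sketch only: keep processing the remaining testcases and report a
// per-testcase error object instead of panicking on the first failure.
pub fn task_distrubute_lossy(testcases: &Testcases) -> Responses {
    let mut responses: HashMap<String, Value> = HashMap::new();
    for (id, testcase) in &testcases.testcases {
        let value = task_deploy(testcase)
            .unwrap_or_else(|e| json!({ "error": e.to_string() }));
        responses.insert(id.to_owned(), value);
    }
    Responses { responses }
}
```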

38
src/tasks/tasks01/block2poly.rs Normal file
View file

@ -0,0 +1,38 @@
use crate::utils::poly::{b64_2_num, get_coefficients};
use anyhow::Result;
use serde_json::Value;
pub fn block2poly(val: &Value) -> Result<Vec<u8>> {
// Decode the base64 block into a u128 and collect the indices of its set bits as coefficients
// TODO: Transfer decoding into own function?
let string: String = serde_json::from_value(val["block"].clone())?;
let number = b64_2_num(&string)?;
let coefficients: Vec<u8> = get_coefficients(number);
Ok(coefficients)
}
#[cfg(test)]
mod tests {
use serde_json::json;
use std::str::FromStr;
// Note this useful idiom: importing names from outer (for mod tests) scope.
use super::*;
#[test]
fn block2poly_task01() -> Result<()> {
let block: Value = json!({"block" : "ARIAAAAAAAAAAAAAAAAAgA=="});
let coefficients: Vec<u8> = vec![0, 9, 12, 127];
assert_eq!(
block2poly(&block)?,
coefficients,
"Coefficients were: {:?}",
block2poly(&block)?
);
Ok(())
}
}
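
For reference, the test vector above follows from the byte layout: "ARIAAAAAAAAAAAAAAAAAgA==" decodes to 01 12 00 … 00 80, and with the native-endian read (little-endian on the CI target) the set bits sit at indices 0, 9, 12 and 127. A small sketch, not part of this diff:

```rust
use anyhow::Result;
use kauma::utils::poly::{b64_2_num, get_coefficients};

fn main() -> Result<()> {
    // Bytes of the block: 0x01 in byte 0 (bit 0), 0x12 = 0b0001_0010 in byte 1
    // (bits 9 and 12), and 0x80 in byte 15 (bit 127).
    let number = b64_2_num(&"ARIAAAAAAAAAAAAAAAAAgA==".to_string())?;
    let expected: Vec<u8> = vec![0, 9, 12, 127];
    assert_eq!(get_coefficients(number), expected);
    Ok(())
}
```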

3
src/tasks/tasks01/mod.rs Normal file
View file

@ -0,0 +1,3 @@
pub mod block2poly;
pub mod poly2block;
pub mod sea128;

14
src/tasks/tasks01/poly2block.rs Normal file
View file

@ -0,0 +1,14 @@
use crate::utils::poly::{self};
use base64::prelude::*;
use serde_json::Value;
pub fn poly2block(args: &Value) -> String {
let coefficients: Vec<u8> = args["coefficients"]
.as_array()
.unwrap()
.into_iter()
.map(|x| x.as_u64().unwrap() as u8)
.collect();
BASE64_STANDARD.encode(poly::coefficient_to_binary(coefficients).to_ne_bytes())
}

61
src/tasks/tasks01/sea128.rs Normal file
View file

@ -0,0 +1,61 @@
use anyhow::{anyhow, Result};
use base64::prelude::*;
use serde_json::Value;
use crate::utils::ciphers::{sea_128_decrypt, sea_128_encrypt};
pub fn sea128(args: &Value) -> Result<String> {
// Key and input arrive base64-encoded in the testcase arguments.
let key_string: String = serde_json::from_value(args["key"].clone())?;
let key = BASE64_STANDARD.decode(key_string)?;
let input_string: String = serde_json::from_value(args["input"].clone())?;
let input = BASE64_STANDARD.decode(input_string)?;
let mode: String = serde_json::from_value(args["mode"].clone())?;
match mode.as_str() {
"encrypt" => {
let output = BASE64_STANDARD.encode(sea_128_encrypt(&key, &input)?);
Ok(output)
}
"decrypt" => {
let output = BASE64_STANDARD.encode(sea_128_decrypt(&key, &input)?);
Ok(output)
}
_ => Err(anyhow!("Failure: no valid mode detected")),
}
}
#[cfg(test)]
mod tests {
use std::fs;
use anyhow::Result;
use serde_json::json;
// Note this useful idiom: importing names from outer (for mod tests) scope.
use super::*;
#[test]
fn test_sea128_encrypt() -> Result<()> {
let args = json!({"mode" : "encrypt", "key" : "istDASeincoolerKEYrofg==", "input" : "yv66vvrO263eyviIiDNEVQ=="});
assert_eq!(sea128(&args)?, "D5FDo3iVBoBN9gVi9/MSKQ==");
Ok(())
}
#[test]
fn test_sea128_decrypt() -> Result<()> {
let args = json!({"mode" : "decrypt", "key" : "istDASeincoolerKEYrofg==", "input" : "D5FDo3iVBoBN9gVi9/MSKQ=="});
assert_eq!(sea128(&args)?, "yv66vvrO263eyviIiDNEVQ==");
Ok(())
}
}

View file

@ -0,0 +1,23 @@
{
"testcases": {
"b856d760-023d-4b00-bad2-15d2b6da22fe": {
"action": "block2poly",
"arguments": {
"semantic": "xex",
"block": "ARIAAAAAAAAAAAAAAAAAgA=="
}
},
"254eaee7-05fd-4e0d-8292-9b658a852245": {
"action": "poly2block",
"arguments": {
"semantic": "xex",
"coefficients": [
12,
127,
9,
0
]
}
}
}
}

View file

@ -0,0 +1,11 @@
{
"testcases": {
"b856d760-023d-4b00-bad2-15d2b6da22fe": {
"action": "block2poly",
"arguments": {
"semantic": "xex",
"block": "ARIAAAAAAAAAAAAAAAAAgA=="
}
}
}
}

25
src/test_json/parse_example.json Normal file
View file

@ -0,0 +1,25 @@
{
"testcases": {
"b856d760-023d-4b00-bad2-15d2b6da22fe": {
"action": "add_numbers",
"arguments": {
"number1": 123,
"number2": 234
}
},
"254eaee7-05fd-4e0d-8292-9b658a852245": {
"action": "add_numbers",
"arguments": {
"number1": 333,
"number2": 444
}
},
"affbf4fc-4d2a-41e3-afe0-a79e1d174781": {
"action": "subtract_numbers",
"arguments": {
"number1": 999,
"number2": 121212
}
}
}
}

16
src/test_json/poly2block_example.json Normal file
View file

@ -0,0 +1,16 @@
{
"testcases": {
"b856d760-023d-4b00-bad2-15d2b6da22fe": {
"action": "poly2block",
"arguments": {
"semantic": "xex",
"coefficients": [
12,
127,
9,
0
]
}
}
}
}

20
src/test_json/sea128.json Normal file
View file

@ -0,0 +1,20 @@
{
"testcases": {
"b856d760-023d-4b00-bad2-15d2b6da22fe": {
"action": "sea128",
"arguments": {
"mode": "encrypt",
"key": "istDASeincoolerKEYrofg==",
"input": "yv66vvrO263eyviIiDNEVQ=="
}
},
"254eaee7-05fd-4e0d-8292-9b658a852245": {
"action": "sea128",
"arguments": {
"mode": "decrypt",
"key": "istDASeincoolerKEYrofg==",
"input": "D5FDo3iVBoBN9gVi9/MSKQ=="
}
}
}
}

59
src/utils/ciphers.rs Normal file
View file

@ -0,0 +1,59 @@
use anyhow::Result;
use openssl::symm::{Cipher, Crypter, Mode};
use super::math::xor_bytes;
pub fn aes_128_encrypt(key: &Vec<u8>, input: &Vec<u8>) -> Result<Vec<u8>> {
let mut encrypter = Crypter::new(Cipher::aes_128_ecb(), Mode::Encrypt, &key, None)?;
encrypter.pad(false);
let mut ciphertext = [0; 32].to_vec();
let mut count = encrypter.update(input, &mut ciphertext)?;
count += encrypter.finalize(&mut ciphertext)?;
ciphertext.truncate(count);
//eprintln!("{:?}", &ciphertext[..]);
Ok(ciphertext)
}
pub fn aes_128_decrypt(key: &Vec<u8>, input: &Vec<u8>) -> Result<Vec<u8>> {
let mut decrypter = Crypter::new(Cipher::aes_128_ecb(), Mode::Decrypt, key, None)?;
decrypter.pad(false);
// Buffer must hold the input length plus one block for openssl's Crypter API.
let mut plaintext = [0; 32].to_vec();
let mut count = decrypter.update(input, &mut plaintext)?;
count += decrypter.finalize(&mut plaintext)?;
plaintext.truncate(count);
Ok(plaintext)
}
pub fn sea_128_encrypt(key: &Vec<u8>, input: &Vec<u8>) -> Result<Vec<u8>> {
let xor_val: u128 = 0xc0ffeec0ffeec0ffeec0ffeec0ffee11;
let sea128_out = xor_bytes(
&aes_128_encrypt(key, input)?,
xor_val.to_be_bytes().to_vec(),
)?;
Ok(sea128_out)
}
pub fn sea_128_decrypt(key: &Vec<u8>, input: &Vec<u8>) -> Result<Vec<u8>> {
let xor_val: u128 = 0xc0ffeec0ffeec0ffeec0ffeec0ffee11;
let intermediate = xor_bytes(input, xor_val.to_be_bytes().to_vec())?;
Ok(aes_128_decrypt(&key, &intermediate)?)
}
/*
* let mut bytes: [u8; 16] = [0u8; 16];
bytes.copy_from_slice(&ciphertext);
let number: u128 = <u128>::from_be_bytes(bytes);
* */
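
SEA-128 as implemented here is AES-128-ECB without padding followed by an XOR with the constant 0xc0ffeec0ffeec0ffeec0ffeec0ffee11, and decryption simply undoes the two steps in reverse. A minimal round-trip sketch, assuming a 16-byte key and a single 16-byte block (the key bytes below are illustrative, not taken from the tests):

```rust
use anyhow::Result;
use kauma::utils::ciphers::{sea_128_decrypt, sea_128_encrypt};

fn main() -> Result<()> {
    let key = b"istDASeincoolerK".to_vec(); // 16 illustrative key bytes
    let block = vec![0xAB_u8; 16];          // one unpadded AES block
    let ciphertext = sea_128_encrypt(&key, &block)?;
    // Decryption removes the XOR mask first, then AES-128-ECB, restoring the block.
    assert_eq!(sea_128_decrypt(&key, &ciphertext)?, block);
    Ok(())
}
```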

9
src/utils/math.rs Normal file
View file

@ -0,0 +1,9 @@
use anyhow::Result;
// XOR vec2 in place with vec1, element-wise; bytes beyond the shorter input are left untouched.
pub fn xor_bytes(vec1: &Vec<u8>, mut vec2: Vec<u8>) -> Result<Vec<u8>> {
for (byte1, byte2) in vec1.iter().zip(vec2.iter_mut()) {
*byte2 ^= byte1;
}
Ok(vec2)
}
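
xor_bytes pairs bytes only up to the shorter of the two inputs, which is why the SEA-128 helpers always hand it 16-byte values on both sides. A quick usage sketch, not part of this diff:

```rust
use anyhow::Result;
use kauma::utils::math::xor_bytes;

fn main() -> Result<()> {
    let mask = 0xc0ffeec0ffeec0ffeec0ffeec0ffee11u128.to_be_bytes().to_vec();
    let block = vec![0u8; 16];
    // XOR with an all-zero block returns the mask itself.
    assert_eq!(xor_bytes(&block, mask.clone())?, mask);
    Ok(())
}
```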

4
src/utils/mod.rs Normal file
View file

@ -0,0 +1,4 @@
pub mod ciphers;
pub mod math;
pub mod parse;
pub mod poly;

143
src/utils/parse.rs Normal file
View file

@ -0,0 +1,143 @@
use std::{collections::HashMap, io::Result};
use serde::{Deserialize, Serialize};
use serde_json::Value;
#[derive(Debug, Serialize, Deserialize)]
pub struct Testcases {
pub testcases: HashMap<String, Testcase>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Testcase {
pub action: String,
pub arguments: Value,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Responses {
pub responses: HashMap<String, Value>,
}
pub fn parse_json(json: String) -> Result<Testcases> {
let deserialised: Testcases = serde_json::from_str(&json)?;
Ok(deserialised)
}
#[cfg(test)]
mod tests {
use std::fs;
use serde_json::json;
// Note this useful idiom: importing names from outer (for mod tests) scope.
use super::*;
#[test]
fn test_json_parsing() {
let json = fs::read_to_string("src/test_json/parse_example.json").unwrap();
let parsed = parse_json(json).unwrap();
/*
* Test if struct is deserialised at all
* */
assert!(
!parsed.testcases.is_empty(),
"Testcases struct was: {:?}",
parsed.testcases
);
/*
* Test if the keys are set correctly in the hashmap
* */
assert!(
!parsed
.testcases
.contains_key("\"b856d760-023d-4b00-bad2-15d2b6da22fe\""),
"Testcases first elemient was: {:?}",
parsed.testcases
);
assert!(
!parsed
.testcases
.contains_key("\"254eaee7-05fd-4e0d-8292-9b658a852245\""),
"Testcases first element was: {:?}",
parsed.testcases
);
assert!(
!parsed
.testcases
.contains_key("\"affbf4fc-4d2a-41e3-afe0-a79e1d174781\""),
"Testcases first element was: {:?}",
parsed.testcases
);
/*
* Test if the actions are parsed correctly
* */
let testcase_1 = &parsed
.testcases
.get("b856d760-023d-4b00-bad2-15d2b6da22fe")
.unwrap();
assert_eq!(
testcase_1.action, "add_numbers",
"Test case was: {:?}",
testcase_1.action
);
}
// TODO: Reactivate tests
/*
#[test]
fn test_response_payload_generation() {
let testcase_id = "b856d760-023d-4b00-bad2-15d2b6da22fe";
let value: Value = json!({"sum" : serde_json::Number::from(666)});
let payload = serde_json::to_string(
&generate_response_payload(testcase_id.to_owned(), value).unwrap(),
)
.unwrap();
let expected = r#"{"b856d760-023d-4b00-bad2-15d2b6da22fe":{"sum":666}}"#;
assert_eq!(payload, expected);
}
#[test]
fn test_response_generation() {
let testcase1_id = "b856d760-023d-4b00-bad2-15d2b6da22fe";
let value1: Value = json!({"sum" : serde_json::Number::from(666)});
let payload1 = generate_response_payload(testcase1_id.to_owned(), value1).unwrap();
let testcase2_id = "b856d760-023d-4b00-bad2-15d2b6da22fe";
let value2: Value = json!({"sum" : serde_json::Number::from(666)});
let payload2 = generate_response_payload(testcase2_id.to_owned(), value2).unwrap();
let testcase3_id = "b856d760-023d-4b00-bad2-15d2b6da22fe";
let value3: Value = json!({"sum" : serde_json::Number::from(666)});
let payload3 = generate_response_payload(testcase3_id.to_owned(), value3).unwrap();
let mut responses_vec: HashMap<String, Value> = vec![];
responses_vec.insert(payload1);
responses_vec.push(payload2);
responses_vec.push(payload3);
let response = generate_response(responses_vec).unwrap();
let expected = json!(
{
"responses": {
"b856d760-023d-4b00-bad2-15d2b6da22fe": {
"sum": 357
},
"254eaee7-05fd-4e0d-8292-9b658a852245": {
"sum": 777
},
"affbf4fc-4d2a-41e3-afe0-a79e1d174781": {
"difference": -120213
}
}
});
eprintln!("{}", serde_json::to_string(&response).unwrap());
eprintln!("{}", serde_json::to_string(&expected).unwrap());
assert_eq!(response, expected);
}*/
}
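
The Responses struct serializes to the {"responses": {...}} envelope that the tests in src/tasks/mod.rs expect. A minimal sketch of building one by hand, not part of this diff:

```rust
use std::collections::HashMap;

use kauma::utils::parse::Responses;
use serde_json::{json, Value};

fn main() -> Result<(), serde_json::Error> {
    let mut responses: HashMap<String, Value> = HashMap::new();
    responses.insert(
        "b856d760-023d-4b00-bad2-15d2b6da22fe".to_string(),
        json!({ "block": "ARIAAAAAAAAAAAAAAAAAgA==" }),
    );
    // Prints {"responses":{"b856d760-023d-4b00-bad2-15d2b6da22fe":{"block":"ARIAAAAAAAAAAAAAAAAAgA=="}}}
    println!("{}", serde_json::to_string(&Responses { responses })?);
    Ok(())
}
```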

117
src/utils/poly.rs Normal file
View file

@ -0,0 +1,117 @@
use anyhow::Result;
use base64::prelude::*;
use std::str::FromStr;
pub fn get_alpha_rep(num: u128) -> String {
let powers: Vec<u8> = get_coefficients(num);
//println!("{:?}", powers);
let mut alpha_rep = String::new();
if powers.len() == 1 {
return String::from_str("1").unwrap();
}
for power in powers {
alpha_rep.push_str(&format!("a^{power}"));
}
alpha_rep
}
pub fn b64_2_num(string: &String) -> Result<u128> {
let decoded: Vec<u8> = BASE64_STANDARD.decode(string)?;
let mut bytes: [u8; 16] = [0u8; 16];
bytes.copy_from_slice(&decoded);
let number: u128 = <u128>::from_ne_bytes(bytes);
Ok(number)
}
pub fn get_coefficients(num: u128) -> Vec<u8> {
let mut powers: Vec<u8> = vec![];
for shift in 0..128 {
if ((num >> shift) & 1) == 1 {
powers.push(shift);
}
}
powers
}
pub fn get_bit_indices_from_byte(byte: u8) -> Vec<u8> {
let mut coefficients: Vec<u8> = vec![];
for shift in 0..8 {
if ((byte >> shift) & 1) == 1 {
coefficients.push(shift);
}
}
coefficients
}
pub fn coefficient_to_binary(coefficients: Vec<u8>) -> u128 {
let mut binary_number: u128 = 0;
for coeff in coefficients {
binary_number = binary_number | (1 << coeff);
}
binary_number
}
#[cfg(test)]
mod tests {
use crate::utils::poly::b64_2_num;
use anyhow::Result;
// Note this useful idiom: importing names from outer (for mod tests) scope.
use super::*;
#[test]
fn byte_indices_0x01() {
let byte: u8 = 0x01;
let bit_indices: Vec<u8> = vec![0];
assert_eq!(get_bit_indices_from_byte(byte), bit_indices)
}
#[test]
fn byte_indices_0x23() {
let byte: u8 = 0x23;
let bit_indices: Vec<u8> = vec![0, 1, 5];
assert_eq!(get_bit_indices_from_byte(byte), bit_indices)
}
#[test]
fn byte_indices_0x56() {
let byte: u8 = 0x56;
let bit_indices: Vec<u8> = vec![1, 2, 4, 6];
assert_eq!(get_bit_indices_from_byte(byte), bit_indices)
}
#[test]
fn coeff_to_binary() {
let coefficients: Vec<u8> = vec![12, 127, 9, 0];
let b64: &str = "ARIAAAAAAAAAAAAAAAAAgA==";
let calculated_num: u128 = coefficient_to_binary(coefficients);
assert_eq!(BASE64_STANDARD.encode(calculated_num.to_ne_bytes()), b64);
}
#[test]
fn test_b64_2_num() -> Result<()> {
let b64_payload: String = String::from_str("juMqbhnlBwAAAAAAAAAAAA==")?;
assert_eq!(
b64_2_num(&b64_payload)?,
2222222222222222,
"Error: Value was: {}",
b64_2_num(&b64_payload)?
);
Ok(())
}
}
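
coefficient_to_binary and get_coefficients are inverses of each other, and b64_2_num uses the same native byte order as the to_ne_bytes encoding in poly2block, so on the little-endian CI target the XEX-semantic block and its coefficient list convert cleanly in both directions. A round-trip sketch, not part of this diff:

```rust
use anyhow::Result;
use base64::prelude::*;
use kauma::utils::poly::{b64_2_num, coefficient_to_binary, get_coefficients};

fn main() -> Result<()> {
    // poly2block direction: set the listed bit indices, emit native-order bytes.
    let number = coefficient_to_binary(vec![12, 127, 9, 0]);
    let block = BASE64_STANDARD.encode(number.to_ne_bytes());
    assert_eq!(block, "ARIAAAAAAAAAAAAAAAAAgA==");
    // block2poly direction: decode back and recover the sorted bit indices.
    let expected: Vec<u8> = vec![0, 9, 12, 127];
    assert_eq!(get_coefficients(b64_2_num(&block)?), expected);
    Ok(())
}
```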

1
vendor/anyhow/.cargo-checksum.json vendored Normal file
View file

@ -0,0 +1 @@
{"files":{"Cargo.toml":"9da67208798e50ce1f391b56f0983e96fa8a84e9d9231d9e50775446cbf80d84","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"136d2b44cc4060192516f18e43ec3eafb65693c1819a80a867e7a00c60a45ee6","build.rs":"1de78cc91e63321318aa336cb550e3acdcda9b39f0648436a884d783247cfcd2","build/probe.rs":"ee0a4518493c0b3cca121ed2e937b1779eb7e8313a5c4d5fc5aea28ff015366b","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/backtrace.rs":"bbaa0e0e228475c9c9532786e305cf04f53729f386c48adb1d93bb8ce07f37ad","src/chain.rs":"85af447405f075633fab186b7f1c94d7f33a36474f239c50a961b2d6197d5426","src/context.rs":"1be432c32752778041e8acf0e7d98d4f6291ce53fd7df5bbb0167824bbea57f7","src/ensure.rs":"9763f418b5397764549866c111ec6db3a7bdc4c30ad95c3bbfc56c5434ea8c09","src/error.rs":"274c175ec92f4aa8bf479d39cf3023d1ead9865a242a0a63ad3998aea57219a6","src/fmt.rs":"adf4be906b29900153bfb4b767a6049d58697dc3bcce7dfbb85ca773f5de5b33","src/kind.rs":"d8cc91e73653049ca0b5593f36aee8632fcc85847b36005b90ecd9a6f0de13cb","src/lib.rs":"1a1aeac072921cc13bb71ad06a9b8bd4cf4db5d90048d51446c3b81859722f24","src/macros.rs":"875797636fde708dcb9c82e0cb3107cf38334086274aaada267fb5bfd60547a9","src/ptr.rs":"4cb31d2f815b178daf951bfb94a1930383e056c0ca68d494603f45d8eea35d50","src/wrapper.rs":"d4e45caee3c2d861d4609a8141310d5c901af59a57d5f0a0de30251347dbd23c","tests/common/mod.rs":"f9088c2d7afafa64ff730b629272045b776bfafc2f5957508242da630635f2e1","tests/compiletest.rs":"4e381aa8ca3eabb7ac14d1e0c3700b3223e47640547a6988cfa13ad68255f60f","tests/drop/mod.rs":"08c3e553c1cc0d2dbd936fc45f4b5b1105057186affd6865e8d261e05f0f0646","tests/test_autotrait.rs":"ba9bc18416115cb48fd08675a3e7fc89584de7926dad6b2be6645dc13d5931df","tests/test_backtrace.rs":"60afdd7ee5850dc22625ff486fe41c47fd322db874a93c4871ddfed2bf603930","tests/test_boxed.rs":"6b26db0e2eb72afe9af7352ea820837aab90f8d486294616dd5dc34c1b94038c","tests/test_chain.rs":"3a8a8d7569913bd98c0e27c69d0bda35101e7fde7c056ed57cdd8ed018e4cbcb","tests/test_context.rs":"8409c53b328562c11e822bd6c3cd17e0d4d50b9bbb8fc3617333fd77303a6a33","tests/test_convert.rs":"7e7a8b4772a427a911014ac4d1083f9519000e786177f898808980dd9bdfde61","tests/test_downcast.rs":"797e69a72d125758c4c4897e5dc776d549d52cc9a6a633e0a33193f588a62b88","tests/test_ensure.rs":"4014ead6596793f5eecd55cbaafa49286b75cee7b7092a8b9b8286fcd813a6da","tests/test_ffi.rs":"d0cb4c1d6d9154090982dee72ae3ebe05a5981f976058c3250f1c9da5a45edef","tests/test_fmt.rs":"0e49b48f08e4faaf03e2f202e1efc5250018876c4e1b01b8379d7a38ae8df870","tests/test_macros.rs":"68673942662a43bceee62aaed69c25d7ddbc55e25d62d528e13033c3e2e756cd","tests/test_repr.rs":"034dee888abd08741e11ac2e95ef4fcb2ab3943d0a76e8e976db404658e1a252","tests/test_source.rs":"b80723cf635a4f8c4df21891b34bfab9ed2b2aa407e7a2f826d24e334cd5f88e","tests/ui/chained-comparison.rs":"6504b03d95b5acc232a7f4defc9f343b2be6733bf475fa0992e8e6545b912bd4","tests/ui/chained-comparison.stderr":"7f1d0a8c251b0ede2d30b3087ec157fc660945c97a642c4a5acf5a14ec58de34","tests/ui/empty-ensure.rs":"ab5bf37c846a0d689f26ce9257a27228411ed64154f9c950f1602d88a355d94b","tests/ui/empty-ensure.stderr":"315782f5f4246290fe190e3767b22c3dcaffaabc19c5ace0373537d53e765278","tests/ui/ensure-nonbool.rs":"7e57cb93fbcd82959b36586ed6bd2ad978b051fe5facd5274651fde6b1600905","tests/ui/ensure-nonbool.stderr":"0b4d1611e3bb65081bf38c1e49b1f12e5096738f276608661016e68f1fe13f7c","tests/ui/must-use.rs":"fb59860b43f67
3bf4a430a6036ba463e95028844d8dd4243cfe5ebc7f2be582f","tests/ui/must-use.stderr":"c2848c5f254b4c061eea6714d9baf709924aba06619eaf2a8b3aee1266b75f9e","tests/ui/no-impl.rs":"fab6cbf2f6ea510b86f567dfb3b7c31250a9fd71ae5d110dbb9188be569ec593","tests/ui/no-impl.stderr":"0d8ed712d25de898eae18cfdffc575a47f4d5596346058cf6cd50d016c4f8ce8","tests/ui/temporary-value.rs":"4dcc96271b2403e6372cf4cfc813445e5ce4365fc6e156b6bc38274098499a70","tests/ui/temporary-value.stderr":"171f6c1c962503855480696e5d39e68946ec2a027b61a6f36ca1ad1b40265c5d","tests/ui/wrong-interpolation.rs":"9c44d4674c2dccd27b9dedd03341346ec02d993b41793ee89b5755202e7e367e","tests/ui/wrong-interpolation.stderr":"301e60e2eb9401782c7dc0b3580613a4cb2aafd4cc8065734a630a62e1161aa5"},"package":"37bf3594c4c988a53154954629820791dde498571819ae4ca50ca811e060cc95"}

127
vendor/anyhow/Cargo.toml vendored Normal file
View file

@ -0,0 +1,127 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
rust-version = "1.39"
name = "anyhow"
version = "1.0.90"
authors = ["David Tolnay <dtolnay@gmail.com>"]
build = "build.rs"
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Flexible concrete Error type built on std::error::Error"
documentation = "https://docs.rs/anyhow"
readme = "README.md"
keywords = [
"error",
"error-handling",
]
categories = [
"rust-patterns",
"no-std",
]
license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/anyhow"
[package.metadata.docs.rs]
rustdoc-args = ["--generate-link-to-definition"]
targets = ["x86_64-unknown-linux-gnu"]
[lib]
name = "anyhow"
path = "src/lib.rs"
doc-scrape-examples = false
[[test]]
name = "compiletest"
path = "tests/compiletest.rs"
[[test]]
name = "test_autotrait"
path = "tests/test_autotrait.rs"
[[test]]
name = "test_backtrace"
path = "tests/test_backtrace.rs"
[[test]]
name = "test_boxed"
path = "tests/test_boxed.rs"
[[test]]
name = "test_chain"
path = "tests/test_chain.rs"
[[test]]
name = "test_context"
path = "tests/test_context.rs"
[[test]]
name = "test_convert"
path = "tests/test_convert.rs"
[[test]]
name = "test_downcast"
path = "tests/test_downcast.rs"
[[test]]
name = "test_ensure"
path = "tests/test_ensure.rs"
[[test]]
name = "test_ffi"
path = "tests/test_ffi.rs"
[[test]]
name = "test_fmt"
path = "tests/test_fmt.rs"
[[test]]
name = "test_macros"
path = "tests/test_macros.rs"
[[test]]
name = "test_repr"
path = "tests/test_repr.rs"
[[test]]
name = "test_source"
path = "tests/test_source.rs"
[dependencies.backtrace]
version = "0.3.51"
optional = true
[dev-dependencies.futures]
version = "0.3"
default-features = false
[dev-dependencies.rustversion]
version = "1.0.6"
[dev-dependencies.syn]
version = "2.0"
features = ["full"]
[dev-dependencies.thiserror]
version = "1.0.45"
[dev-dependencies.trybuild]
version = "1.0.66"
features = ["diff"]
[features]
default = ["std"]
std = []

176
vendor/anyhow/LICENSE-APACHE vendored Normal file
View file

@ -0,0 +1,176 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS

23
vendor/anyhow/LICENSE-MIT vendored Normal file
View file

@ -0,0 +1,23 @@
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

181
vendor/anyhow/README.md vendored Normal file
View file

@ -0,0 +1,181 @@
Anyhow&ensp;¯\\\_(°ペ)\_/¯
==========================
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/anyhow-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/anyhow)
[<img alt="crates.io" src="https://img.shields.io/crates/v/anyhow.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/anyhow)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-anyhow-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/anyhow)
[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/anyhow/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/anyhow/actions?query=branch%3Amaster)
This library provides [`anyhow::Error`][Error], a trait object based error type
for easy idiomatic error handling in Rust applications.
[Error]: https://docs.rs/anyhow/1.0/anyhow/struct.Error.html
```toml
[dependencies]
anyhow = "1.0"
```
*Compiler support: requires rustc 1.39+*
<br>
## Details
- Use `Result<T, anyhow::Error>`, or equivalently `anyhow::Result<T>`, as the
return type of any fallible function.
Within the function, use `?` to easily propagate any error that implements the
[`std::error::Error`] trait.
```rust
use anyhow::Result;
fn get_cluster_info() -> Result<ClusterMap> {
let config = std::fs::read_to_string("cluster.json")?;
let map: ClusterMap = serde_json::from_str(&config)?;
Ok(map)
}
```
[`std::error::Error`]: https://doc.rust-lang.org/std/error/trait.Error.html
- Attach context to help the person troubleshooting the error understand where
things went wrong. A low-level error like "No such file or directory" can be
annoying to debug without more context about what higher level step the
application was in the middle of.
```rust
use anyhow::{Context, Result};
fn main() -> Result<()> {
...
it.detach().context("Failed to detach the important thing")?;
let content = std::fs::read(path)
.with_context(|| format!("Failed to read instrs from {}", path))?;
...
}
```
```console
Error: Failed to read instrs from ./path/to/instrs.json
Caused by:
No such file or directory (os error 2)
```
- Downcasting is supported and can be by value, by shared reference, or by
mutable reference as needed.
```rust
// If the error was caused by redaction, then return a
// tombstone instead of the content.
match root_cause.downcast_ref::<DataStoreError>() {
Some(DataStoreError::Censored(_)) => Ok(Poll::Ready(REDACTED_CONTENT)),
None => Err(error),
}
```
- If using Rust &ge; 1.65, a backtrace is captured and printed with the error if
the underlying error type does not already provide its own. In order to see
backtraces, they must be enabled through the environment variables described
in [`std::backtrace`]:
- If you want panics and errors to both have backtraces, set
`RUST_BACKTRACE=1`;
- If you want only errors to have backtraces, set `RUST_LIB_BACKTRACE=1`;
- If you want only panics to have backtraces, set `RUST_BACKTRACE=1` and
`RUST_LIB_BACKTRACE=0`.
[`std::backtrace`]: https://doc.rust-lang.org/std/backtrace/index.html#environment-variables
- Anyhow works with any error type that has an impl of `std::error::Error`,
including ones defined in your crate. We do not bundle a `derive(Error)` macro
but you can write the impls yourself or use a standalone macro like
[thiserror].
```rust
use thiserror::Error;
#[derive(Error, Debug)]
pub enum FormatError {
#[error("Invalid header (expected {expected:?}, got {found:?})")]
InvalidHeader {
expected: String,
found: String,
},
#[error("Missing attribute: {0}")]
MissingAttribute(String),
}
```
- One-off error messages can be constructed using the `anyhow!` macro, which
supports string interpolation and produces an `anyhow::Error`.
```rust
return Err(anyhow!("Missing attribute: {}", missing));
```
A `bail!` macro is provided as a shorthand for the same early return.
```rust
bail!("Missing attribute: {}", missing);
```
<br>
## No-std support
In no_std mode, almost all of the same API is available and works the same way.
To depend on Anyhow in no_std mode, disable our default enabled "std" feature in
Cargo.toml. A global allocator is required.
```toml
[dependencies]
anyhow = { version = "1.0", default-features = false }
```
With versions of Rust older than 1.81, no_std mode may require an additional
`.map_err(Error::msg)` when working with a non-Anyhow error type inside a
function that returns Anyhow's error type, as the trait that `?`-based error
conversions are defined by is only available in std in those old versions.
<br>
## Comparison to failure
The `anyhow::Error` type works something like `failure::Error`, but unlike
failure ours is built around the standard library's `std::error::Error` trait
rather than a separate trait `failure::Fail`. The standard library has adopted
the necessary improvements for this to be possible as part of [RFC 2504].
[RFC 2504]: https://github.com/rust-lang/rfcs/blob/master/text/2504-fix-error.md
<br>
## Comparison to thiserror
Use Anyhow if you don't care what error type your functions return, you just
want it to be easy. This is common in application code. Use [thiserror] if you
are a library that wants to design your own dedicated error type(s) so that on
failures the caller gets exactly the information that you choose.
[thiserror]: https://github.com/dtolnay/thiserror
<br>
#### License
<sup>
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
</sup>
<br>
<sub>
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.
</sub>

192
vendor/anyhow/build.rs vendored Normal file
View file

@ -0,0 +1,192 @@
use std::env;
use std::ffi::OsString;
use std::iter;
use std::path::Path;
use std::process::{self, Command, Stdio};
use std::str;
#[cfg(all(feature = "backtrace", not(feature = "std")))]
compile_error! {
"`backtrace` feature without `std` feature is not supported"
}
fn main() {
let mut error_generic_member_access = false;
if cfg!(feature = "std") {
println!("cargo:rerun-if-changed=build/probe.rs");
let consider_rustc_bootstrap;
if compile_probe(false) {
// This is a nightly or dev compiler, so it supports unstable
// features regardless of RUSTC_BOOTSTRAP. No need to rerun build
// script if RUSTC_BOOTSTRAP is changed.
error_generic_member_access = true;
consider_rustc_bootstrap = false;
} else if let Some(rustc_bootstrap) = env::var_os("RUSTC_BOOTSTRAP") {
if compile_probe(true) {
// This is a stable or beta compiler for which the user has set
// RUSTC_BOOTSTRAP to turn on unstable features. Rerun build
// script if they change it.
error_generic_member_access = true;
consider_rustc_bootstrap = true;
} else if rustc_bootstrap == "1" {
// This compiler does not support the generic member access API
// in the form that anyhow expects. No need to pay attention to
// RUSTC_BOOTSTRAP.
error_generic_member_access = false;
consider_rustc_bootstrap = false;
} else {
// This is a stable or beta compiler for which RUSTC_BOOTSTRAP
// is set to restrict the use of unstable features by this
// crate.
error_generic_member_access = false;
consider_rustc_bootstrap = true;
}
} else {
// Without RUSTC_BOOTSTRAP, this compiler does not support the
// generic member access API in the form that anyhow expects, but
// try again if the user turns on unstable features.
error_generic_member_access = false;
consider_rustc_bootstrap = true;
}
if error_generic_member_access {
println!("cargo:rustc-cfg=std_backtrace");
println!("cargo:rustc-cfg=error_generic_member_access");
}
if consider_rustc_bootstrap {
println!("cargo:rerun-if-env-changed=RUSTC_BOOTSTRAP");
}
}
let rustc = match rustc_minor_version() {
Some(rustc) => rustc,
None => return,
};
if rustc >= 80 {
println!("cargo:rustc-check-cfg=cfg(anyhow_nightly_testing)");
println!("cargo:rustc-check-cfg=cfg(anyhow_no_core_error)");
println!("cargo:rustc-check-cfg=cfg(anyhow_no_core_unwind_safe)");
println!("cargo:rustc-check-cfg=cfg(anyhow_no_fmt_arguments_as_str)");
println!("cargo:rustc-check-cfg=cfg(anyhow_no_ptr_addr_of)");
println!("cargo:rustc-check-cfg=cfg(anyhow_no_unsafe_op_in_unsafe_fn_lint)");
println!("cargo:rustc-check-cfg=cfg(error_generic_member_access)");
println!("cargo:rustc-check-cfg=cfg(std_backtrace)");
}
if rustc < 51 {
// core::ptr::addr_of
// https://blog.rust-lang.org/2021/03/25/Rust-1.51.0.html#stabilized-apis
println!("cargo:rustc-cfg=anyhow_no_ptr_addr_of");
}
if rustc < 52 {
// core::fmt::Arguments::as_str
// https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html#stabilized-apis
println!("cargo:rustc-cfg=anyhow_no_fmt_arguments_as_str");
// #![deny(unsafe_op_in_unsafe_fn)]
// https://github.com/rust-lang/rust/issues/71668
println!("cargo:rustc-cfg=anyhow_no_unsafe_op_in_unsafe_fn_lint");
}
if rustc < 56 {
// core::panic::{UnwindSafe, RefUnwindSafe}
// https://blog.rust-lang.org/2021/10/21/Rust-1.56.0.html#stabilized-apis
println!("cargo:rustc-cfg=anyhow_no_core_unwind_safe");
}
if !error_generic_member_access && cfg!(feature = "std") && rustc >= 65 {
// std::backtrace::Backtrace
// https://blog.rust-lang.org/2022/11/03/Rust-1.65.0.html#stabilized-apis
println!("cargo:rustc-cfg=std_backtrace");
}
if rustc < 81 {
// core::error::Error
// https://blog.rust-lang.org/2024/09/05/Rust-1.81.0.html#coreerrorerror
println!("cargo:rustc-cfg=anyhow_no_core_error");
}
}
fn compile_probe(rustc_bootstrap: bool) -> bool {
if env::var_os("RUSTC_STAGE").is_some() {
// We are running inside rustc bootstrap. This is a highly non-standard
// environment with issues such as:
//
// https://github.com/rust-lang/cargo/issues/11138
// https://github.com/rust-lang/rust/issues/114839
//
// Let's just not use nightly features here.
return false;
}
let rustc = cargo_env_var("RUSTC");
let out_dir = cargo_env_var("OUT_DIR");
let probefile = Path::new("build").join("probe.rs");
let rustc_wrapper = env::var_os("RUSTC_WRAPPER").filter(|wrapper| !wrapper.is_empty());
let rustc_workspace_wrapper =
env::var_os("RUSTC_WORKSPACE_WRAPPER").filter(|wrapper| !wrapper.is_empty());
let mut rustc = rustc_wrapper
.into_iter()
.chain(rustc_workspace_wrapper)
.chain(iter::once(rustc));
let mut cmd = Command::new(rustc.next().unwrap());
cmd.args(rustc);
if !rustc_bootstrap {
cmd.env_remove("RUSTC_BOOTSTRAP");
}
cmd.stderr(Stdio::null())
.arg("--edition=2018")
.arg("--crate-name=anyhow")
.arg("--crate-type=lib")
.arg("--emit=dep-info,metadata")
.arg("--cap-lints=allow")
.arg("--out-dir")
.arg(out_dir)
.arg(probefile);
if let Some(target) = env::var_os("TARGET") {
cmd.arg("--target").arg(target);
}
// If Cargo wants to set RUSTFLAGS, use that.
if let Ok(rustflags) = env::var("CARGO_ENCODED_RUSTFLAGS") {
if !rustflags.is_empty() {
for arg in rustflags.split('\x1f') {
cmd.arg(arg);
}
}
}
match cmd.status() {
Ok(status) => status.success(),
Err(_) => false,
}
}
fn rustc_minor_version() -> Option<u32> {
let rustc = cargo_env_var("RUSTC");
let output = Command::new(rustc).arg("--version").output().ok()?;
let version = str::from_utf8(&output.stdout).ok()?;
let mut pieces = version.split('.');
if pieces.next() != Some("rustc 1") {
return None;
}
pieces.next()?.parse().ok()
}
fn cargo_env_var(key: &str) -> OsString {
env::var_os(key).unwrap_or_else(|| {
eprintln!(
"Environment variable ${} is not set during execution of build script",
key,
);
process::exit(1);
})
}

35
vendor/anyhow/build/probe.rs vendored Normal file
View file

@ -0,0 +1,35 @@
// This code exercises the surface area that we expect of the Error generic
// member access API. If the current toolchain is able to compile it, then
// anyhow is able to provide backtrace support.
#![feature(error_generic_member_access)]
use core::error::{self, Error, Request};
use core::fmt::{self, Debug, Display};
use std::backtrace::Backtrace;
struct MyError(Thing);
struct Thing;
impl Debug for MyError {
fn fmt(&self, _formatter: &mut fmt::Formatter) -> fmt::Result {
unimplemented!()
}
}
impl Display for MyError {
fn fmt(&self, _formatter: &mut fmt::Formatter) -> fmt::Result {
unimplemented!()
}
}
impl Error for MyError {
fn provide<'a>(&'a self, request: &mut Request<'a>) {
request.provide_ref(&self.0);
}
}
const _: fn(&dyn Error) -> Option<&Backtrace> = |err| error::request_ref::<Backtrace>(err);
// Include in sccache cache key.
const _: Option<&str> = option_env!("RUSTC_BOOTSTRAP");

2
vendor/anyhow/rust-toolchain.toml vendored Normal file
View file

@ -0,0 +1,2 @@
[toolchain]
components = ["rust-src"]

411
vendor/anyhow/src/backtrace.rs vendored Normal file
View file

@ -0,0 +1,411 @@
#[cfg(std_backtrace)]
pub(crate) use std::backtrace::{Backtrace, BacktraceStatus};
#[cfg(all(not(std_backtrace), feature = "backtrace"))]
pub(crate) use self::capture::{Backtrace, BacktraceStatus};
#[cfg(not(any(std_backtrace, feature = "backtrace")))]
pub(crate) enum Backtrace {}
#[cfg(std_backtrace)]
macro_rules! impl_backtrace {
() => {
std::backtrace::Backtrace
};
}
#[cfg(all(not(std_backtrace), feature = "backtrace"))]
macro_rules! impl_backtrace {
() => {
impl core::fmt::Debug + core::fmt::Display
};
}
#[cfg(any(std_backtrace, feature = "backtrace"))]
macro_rules! backtrace {
() => {
Some(crate::backtrace::Backtrace::capture())
};
}
#[cfg(not(any(std_backtrace, feature = "backtrace")))]
macro_rules! backtrace {
() => {
None
};
}
#[cfg(error_generic_member_access)]
macro_rules! backtrace_if_absent {
($err:expr) => {
match core::error::request_ref::<std::backtrace::Backtrace>($err as &dyn core::error::Error)
{
Some(_) => None,
None => backtrace!(),
}
};
}
#[cfg(all(
any(feature = "std", not(anyhow_no_core_error)),
not(error_generic_member_access),
any(std_backtrace, feature = "backtrace")
))]
macro_rules! backtrace_if_absent {
($err:expr) => {
backtrace!()
};
}
#[cfg(all(
any(feature = "std", not(anyhow_no_core_error)),
not(std_backtrace),
not(feature = "backtrace"),
))]
macro_rules! backtrace_if_absent {
($err:expr) => {
None
};
}
#[cfg(all(not(std_backtrace), feature = "backtrace"))]
mod capture {
use alloc::borrow::{Cow, ToOwned as _};
use alloc::vec::Vec;
use backtrace::{BacktraceFmt, BytesOrWideString, Frame, PrintFmt, SymbolName};
use core::cell::UnsafeCell;
use core::fmt::{self, Debug, Display};
use core::sync::atomic::{AtomicUsize, Ordering};
use std::env;
use std::path::{self, Path, PathBuf};
use std::sync::Once;
pub(crate) struct Backtrace {
inner: Inner,
}
pub(crate) enum BacktraceStatus {
Unsupported,
Disabled,
Captured,
}
enum Inner {
Unsupported,
Disabled,
Captured(LazilyResolvedCapture),
}
struct Capture {
actual_start: usize,
resolved: bool,
frames: Vec<BacktraceFrame>,
}
struct BacktraceFrame {
frame: Frame,
symbols: Vec<BacktraceSymbol>,
}
struct BacktraceSymbol {
name: Option<Vec<u8>>,
filename: Option<BytesOrWide>,
lineno: Option<u32>,
colno: Option<u32>,
}
enum BytesOrWide {
Bytes(Vec<u8>),
Wide(Vec<u16>),
}
impl Debug for Backtrace {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let capture = match &self.inner {
Inner::Unsupported => return fmt.write_str("<unsupported>"),
Inner::Disabled => return fmt.write_str("<disabled>"),
Inner::Captured(c) => c.force(),
};
let frames = &capture.frames[capture.actual_start..];
write!(fmt, "Backtrace ")?;
let mut dbg = fmt.debug_list();
for frame in frames {
if frame.frame.ip().is_null() {
continue;
}
dbg.entries(&frame.symbols);
}
dbg.finish()
}
}
impl Debug for BacktraceFrame {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut dbg = fmt.debug_list();
dbg.entries(&self.symbols);
dbg.finish()
}
}
impl Debug for BacktraceSymbol {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{{ ")?;
if let Some(fn_name) = self.name.as_ref().map(|b| SymbolName::new(b)) {
write!(fmt, "fn: \"{:#}\"", fn_name)?;
} else {
write!(fmt, "fn: <unknown>")?;
}
if let Some(fname) = self.filename.as_ref() {
write!(fmt, ", file: \"{:?}\"", fname)?;
}
if let Some(line) = self.lineno {
write!(fmt, ", line: {:?}", line)?;
}
write!(fmt, " }}")
}
}
impl Debug for BytesOrWide {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
output_filename(
fmt,
match self {
BytesOrWide::Bytes(w) => BytesOrWideString::Bytes(w),
BytesOrWide::Wide(w) => BytesOrWideString::Wide(w),
},
PrintFmt::Short,
env::current_dir().as_ref().ok(),
)
}
}
impl Backtrace {
fn enabled() -> bool {
static ENABLED: AtomicUsize = AtomicUsize::new(0);
match ENABLED.load(Ordering::Relaxed) {
0 => {}
1 => return false,
_ => return true,
}
let enabled = match env::var_os("RUST_LIB_BACKTRACE") {
Some(s) => s != "0",
None => match env::var_os("RUST_BACKTRACE") {
Some(s) => s != "0",
None => false,
},
};
ENABLED.store(enabled as usize + 1, Ordering::Relaxed);
enabled
}
#[inline(never)] // want to make sure there's a frame here to remove
pub(crate) fn capture() -> Backtrace {
if Backtrace::enabled() {
Backtrace::create(Backtrace::capture as usize)
} else {
let inner = Inner::Disabled;
Backtrace { inner }
}
}
// Capture a backtrace which starts just before the function addressed
// by `ip`
fn create(ip: usize) -> Backtrace {
let mut frames = Vec::new();
let mut actual_start = None;
backtrace::trace(|frame| {
frames.push(BacktraceFrame {
frame: frame.clone(),
symbols: Vec::new(),
});
if frame.symbol_address() as usize == ip && actual_start.is_none() {
actual_start = Some(frames.len() + 1);
}
true
});
// If no frames came out assume that this is an unsupported platform
// since `backtrace` doesn't provide a way of learning this right
// now, and this should be a good enough approximation.
let inner = if frames.is_empty() {
Inner::Unsupported
} else {
Inner::Captured(LazilyResolvedCapture::new(Capture {
actual_start: actual_start.unwrap_or(0),
frames,
resolved: false,
}))
};
Backtrace { inner }
}
pub(crate) fn status(&self) -> BacktraceStatus {
match self.inner {
Inner::Unsupported => BacktraceStatus::Unsupported,
Inner::Disabled => BacktraceStatus::Disabled,
Inner::Captured(_) => BacktraceStatus::Captured,
}
}
}
impl Display for Backtrace {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let capture = match &self.inner {
Inner::Unsupported => return fmt.write_str("unsupported backtrace"),
Inner::Disabled => return fmt.write_str("disabled backtrace"),
Inner::Captured(c) => c.force(),
};
let full = fmt.alternate();
let (frames, style) = if full {
(&capture.frames[..], PrintFmt::Full)
} else {
(&capture.frames[capture.actual_start..], PrintFmt::Short)
};
// When printing paths we try to strip the cwd if it exists,
// otherwise we just print the path as-is. Note that we also only do
// this for the short format, because if it's full we presumably
// want to print everything.
let cwd = env::current_dir();
let mut print_path = move |fmt: &mut fmt::Formatter, path: BytesOrWideString| {
output_filename(fmt, path, style, cwd.as_ref().ok())
};
let mut f = BacktraceFmt::new(fmt, style, &mut print_path);
f.add_context()?;
for frame in frames {
let mut f = f.frame();
if frame.symbols.is_empty() {
f.print_raw(frame.frame.ip(), None, None, None)?;
} else {
for symbol in frame.symbols.iter() {
f.print_raw_with_column(
frame.frame.ip(),
symbol.name.as_ref().map(|b| SymbolName::new(b)),
symbol.filename.as_ref().map(|b| match b {
BytesOrWide::Bytes(w) => BytesOrWideString::Bytes(w),
BytesOrWide::Wide(w) => BytesOrWideString::Wide(w),
}),
symbol.lineno,
symbol.colno,
)?;
}
}
}
f.finish()?;
Ok(())
}
}
struct LazilyResolvedCapture {
sync: Once,
capture: UnsafeCell<Capture>,
}
impl LazilyResolvedCapture {
fn new(capture: Capture) -> Self {
LazilyResolvedCapture {
sync: Once::new(),
capture: UnsafeCell::new(capture),
}
}
fn force(&self) -> &Capture {
self.sync.call_once(|| {
// Safety: This exclusive reference can't overlap with any
// others. `Once` guarantees callers will block until this
// closure returns. `Once` also guarantees only a single caller
// will enter this closure.
unsafe { &mut *self.capture.get() }.resolve();
});
// Safety: This shared reference can't overlap with the exclusive
// reference above.
unsafe { &*self.capture.get() }
}
}
// Safety: Access to the inner value is synchronized using a thread-safe
// `Once`. So long as `Capture` is `Sync`, `LazilyResolvedCapture` is too
unsafe impl Sync for LazilyResolvedCapture where Capture: Sync {}
impl Capture {
fn resolve(&mut self) {
// If we're already resolved, nothing to do!
if self.resolved {
return;
}
self.resolved = true;
for frame in self.frames.iter_mut() {
let symbols = &mut frame.symbols;
let frame = &frame.frame;
backtrace::resolve_frame(frame, |symbol| {
symbols.push(BacktraceSymbol {
name: symbol.name().map(|m| m.as_bytes().to_vec()),
filename: symbol.filename_raw().map(|b| match b {
BytesOrWideString::Bytes(b) => BytesOrWide::Bytes(b.to_owned()),
BytesOrWideString::Wide(b) => BytesOrWide::Wide(b.to_owned()),
}),
lineno: symbol.lineno(),
colno: symbol.colno(),
});
});
}
}
}
// Prints the filename of the backtrace frame.
fn output_filename(
fmt: &mut fmt::Formatter,
bows: BytesOrWideString,
print_fmt: PrintFmt,
cwd: Option<&PathBuf>,
) -> fmt::Result {
let file: Cow<Path> = match bows {
#[cfg(unix)]
BytesOrWideString::Bytes(bytes) => {
use std::os::unix::ffi::OsStrExt;
Path::new(std::ffi::OsStr::from_bytes(bytes)).into()
}
#[cfg(not(unix))]
BytesOrWideString::Bytes(bytes) => {
Path::new(std::str::from_utf8(bytes).unwrap_or("<unknown>")).into()
}
#[cfg(windows)]
BytesOrWideString::Wide(wide) => {
use std::os::windows::ffi::OsStringExt;
Cow::Owned(std::ffi::OsString::from_wide(wide).into())
}
#[cfg(not(windows))]
BytesOrWideString::Wide(_wide) => Path::new("<unknown>").into(),
};
if print_fmt == PrintFmt::Short && file.is_absolute() {
if let Some(cwd) = cwd {
if let Ok(stripped) = file.strip_prefix(&cwd) {
if let Some(s) = stripped.to_str() {
return write!(fmt, ".{}{}", path::MAIN_SEPARATOR, s);
}
}
}
}
Display::fmt(&file.display(), fmt)
}
}
fn _assert_send_sync() {
fn assert<T: Send + Sync>() {}
assert::<Backtrace>();
}
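
Note: capture in this fallback module is opt-in at run time: RUST_LIB_BACKTRACE takes precedence over RUST_BACKTRACE, and any value other than "0" enables it. A small usage sketch under the assumption that anyhow is built with the backtrace feature on a toolchain without std::backtrace; the program and error text are invented:
use anyhow::{Context, Result};
fn main() -> Result<()> {
    // Run with RUST_LIB_BACKTRACE=1 (or RUST_BACKTRACE=1) so that
    // Backtrace::capture() above actually records frames; with both unset or
    // set to "0", status() reports BacktraceStatus::Disabled and nothing is
    // resolved.
    std::fs::read_to_string("/etc/does-not-exist.toml")
        .context("failed to read example config")?;
    Ok(())
}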

102
vendor/anyhow/src/chain.rs vendored Normal file
View file

@ -0,0 +1,102 @@
use self::ChainState::*;
use crate::StdError;
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
use alloc::vec::{self, Vec};
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
pub(crate) use crate::Chain;
#[cfg(all(not(feature = "std"), anyhow_no_core_error))]
pub(crate) struct Chain<'a> {
state: ChainState<'a>,
}
#[derive(Clone)]
pub(crate) enum ChainState<'a> {
Linked {
next: Option<&'a (dyn StdError + 'static)>,
},
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
Buffered {
rest: vec::IntoIter<&'a (dyn StdError + 'static)>,
},
}
impl<'a> Chain<'a> {
#[cold]
pub fn new(head: &'a (dyn StdError + 'static)) -> Self {
Chain {
state: ChainState::Linked { next: Some(head) },
}
}
}
impl<'a> Iterator for Chain<'a> {
type Item = &'a (dyn StdError + 'static);
fn next(&mut self) -> Option<Self::Item> {
match &mut self.state {
Linked { next } => {
let error = (*next)?;
*next = error.source();
Some(error)
}
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
Buffered { rest } => rest.next(),
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.len();
(len, Some(len))
}
}
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
impl DoubleEndedIterator for Chain<'_> {
fn next_back(&mut self) -> Option<Self::Item> {
match &mut self.state {
Linked { mut next } => {
let mut rest = Vec::new();
while let Some(cause) = next {
next = cause.source();
rest.push(cause);
}
let mut rest = rest.into_iter();
let last = rest.next_back();
self.state = Buffered { rest };
last
}
Buffered { rest } => rest.next_back(),
}
}
}
impl ExactSizeIterator for Chain<'_> {
fn len(&self) -> usize {
match &self.state {
Linked { mut next } => {
let mut len = 0;
while let Some(cause) = next {
next = cause.source();
len += 1;
}
len
}
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
Buffered { rest } => rest.len(),
}
}
}
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
impl Default for Chain<'_> {
fn default() -> Self {
Chain {
state: ChainState::Buffered {
rest: Vec::new().into_iter(),
},
}
}
}
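
Note: Chain is the iterator handed out by anyhow::Error::chain(); the DoubleEndedIterator and ExactSizeIterator impls above are what make rev() and len() cheap on it. A short usage sketch (the wrapped I/O error is arbitrary):
use anyhow::Error;
fn main() {
    let io = std::io::Error::new(std::io::ErrorKind::NotFound, "file missing");
    let err = Error::new(io).context("loading settings");
    // Outermost context first, root cause last.
    for cause in err.chain() {
        eprintln!("caused by: {cause}");
    }
    eprintln!("depth: {}", err.chain().len());
    eprintln!("root cause: {}", err.chain().rev().next().unwrap());
}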

193
vendor/anyhow/src/context.rs vendored Normal file
View file

@ -0,0 +1,193 @@
use crate::error::ContextError;
use crate::{Context, Error, StdError};
use core::convert::Infallible;
use core::fmt::{self, Debug, Display, Write};
#[cfg(error_generic_member_access)]
use core::error::Request;
mod ext {
use super::*;
pub trait StdError {
fn ext_context<C>(self, context: C) -> Error
where
C: Display + Send + Sync + 'static;
}
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
impl<E> StdError for E
where
E: crate::StdError + Send + Sync + 'static,
{
fn ext_context<C>(self, context: C) -> Error
where
C: Display + Send + Sync + 'static,
{
let backtrace = backtrace_if_absent!(&self);
Error::from_context(context, self, backtrace)
}
}
impl StdError for Error {
fn ext_context<C>(self, context: C) -> Error
where
C: Display + Send + Sync + 'static,
{
self.context(context)
}
}
}
impl<T, E> Context<T, E> for Result<T, E>
where
E: ext::StdError + Send + Sync + 'static,
{
fn context<C>(self, context: C) -> Result<T, Error>
where
C: Display + Send + Sync + 'static,
{
// Not using map_err to save 2 useless frames off the captured backtrace
// in ext_context.
match self {
Ok(ok) => Ok(ok),
Err(error) => Err(error.ext_context(context)),
}
}
fn with_context<C, F>(self, context: F) -> Result<T, Error>
where
C: Display + Send + Sync + 'static,
F: FnOnce() -> C,
{
match self {
Ok(ok) => Ok(ok),
Err(error) => Err(error.ext_context(context())),
}
}
}
/// ```
/// # type T = ();
/// #
/// use anyhow::{Context, Result};
///
/// fn maybe_get() -> Option<T> {
/// # const IGNORE: &str = stringify! {
/// ...
/// # };
/// # unimplemented!()
/// }
///
/// fn demo() -> Result<()> {
/// let t = maybe_get().context("there is no T")?;
/// # const IGNORE: &str = stringify! {
/// ...
/// # };
/// # unimplemented!()
/// }
/// ```
impl<T> Context<T, Infallible> for Option<T> {
fn context<C>(self, context: C) -> Result<T, Error>
where
C: Display + Send + Sync + 'static,
{
// Not using ok_or_else to save 2 useless frames off the captured
// backtrace.
match self {
Some(ok) => Ok(ok),
None => Err(Error::from_display(context, backtrace!())),
}
}
fn with_context<C, F>(self, context: F) -> Result<T, Error>
where
C: Display + Send + Sync + 'static,
F: FnOnce() -> C,
{
match self {
Some(ok) => Ok(ok),
None => Err(Error::from_display(context(), backtrace!())),
}
}
}
impl<C, E> Debug for ContextError<C, E>
where
C: Display,
E: Debug,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Error")
.field("context", &Quoted(&self.context))
.field("source", &self.error)
.finish()
}
}
impl<C, E> Display for ContextError<C, E>
where
C: Display,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(&self.context, f)
}
}
impl<C, E> StdError for ContextError<C, E>
where
C: Display,
E: StdError + 'static,
{
fn source(&self) -> Option<&(dyn StdError + 'static)> {
Some(&self.error)
}
#[cfg(error_generic_member_access)]
fn provide<'a>(&'a self, request: &mut Request<'a>) {
StdError::provide(&self.error, request);
}
}
impl<C> StdError for ContextError<C, Error>
where
C: Display,
{
fn source(&self) -> Option<&(dyn StdError + 'static)> {
Some(unsafe { crate::ErrorImpl::error(self.error.inner.by_ref()) })
}
#[cfg(error_generic_member_access)]
fn provide<'a>(&'a self, request: &mut Request<'a>) {
Error::provide(&self.error, request);
}
}
struct Quoted<C>(C);
impl<C> Debug for Quoted<C>
where
C: Display,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_char('"')?;
Quoted(&mut *formatter).write_fmt(format_args!("{}", self.0))?;
formatter.write_char('"')?;
Ok(())
}
}
impl Write for Quoted<&mut fmt::Formatter<'_>> {
fn write_str(&mut self, s: &str) -> fmt::Result {
Display::fmt(&s.escape_debug(), self.0)
}
}
pub(crate) mod private {
use super::*;
pub trait Sealed {}
impl<T, E> Sealed for Result<T, E> where E: ext::StdError {}
impl<T> Sealed for Option<T> {}
}
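
Note: both blanket impls above are reached through the public Context trait; the Result path keeps the original error as source(), while the Option path (Infallible) manufactures a fresh error from the message alone. A hedged sketch of typical call sites, with invented names and messages:
use std::collections::HashMap;
use anyhow::{Context, Result};
// Hypothetical helper for illustration only.
fn port_from(env: &HashMap<String, String>) -> Result<u16> {
    // Option<T> path: None becomes an error built from the message.
    let raw = env.get("PORT").context("PORT is not set")?;
    // Result<T, E> path: ParseIntError stays reachable via source(), and the
    // closure form defers building the message until it is actually needed.
    raw.parse::<u16>()
        .with_context(|| format!("PORT has invalid value {raw:?}"))
}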

919
vendor/anyhow/src/ensure.rs vendored Normal file
View file

@ -0,0 +1,919 @@
use crate::Error;
use alloc::string::String;
use core::fmt::{self, Debug, Write};
use core::mem::MaybeUninit;
use core::ptr;
use core::slice;
use core::str;
#[doc(hidden)]
pub trait BothDebug {
fn __dispatch_ensure(self, msg: &'static str) -> Error;
}
impl<A, B> BothDebug for (A, B)
where
A: Debug,
B: Debug,
{
fn __dispatch_ensure(self, msg: &'static str) -> Error {
render(msg, &self.0, &self.1)
}
}
#[doc(hidden)]
pub trait NotBothDebug {
fn __dispatch_ensure(self, msg: &'static str) -> Error;
}
impl<A, B> NotBothDebug for &(A, B) {
fn __dispatch_ensure(self, msg: &'static str) -> Error {
Error::msg(msg)
}
}
struct Buf {
bytes: [MaybeUninit<u8>; 40],
written: usize,
}
impl Buf {
fn new() -> Self {
Buf {
bytes: [MaybeUninit::uninit(); 40],
written: 0,
}
}
fn as_str(&self) -> &str {
unsafe {
str::from_utf8_unchecked(slice::from_raw_parts(
self.bytes.as_ptr().cast::<u8>(),
self.written,
))
}
}
}
impl Write for Buf {
fn write_str(&mut self, s: &str) -> fmt::Result {
if s.bytes().any(|b| b == b' ' || b == b'\n') {
return Err(fmt::Error);
}
let remaining = self.bytes.len() - self.written;
if s.len() > remaining {
return Err(fmt::Error);
}
unsafe {
ptr::copy_nonoverlapping(
s.as_ptr(),
self.bytes.as_mut_ptr().add(self.written).cast::<u8>(),
s.len(),
);
}
self.written += s.len();
Ok(())
}
}
fn render(msg: &'static str, lhs: &dyn Debug, rhs: &dyn Debug) -> Error {
let mut lhs_buf = Buf::new();
if fmt::write(&mut lhs_buf, format_args!("{:?}", lhs)).is_ok() {
let mut rhs_buf = Buf::new();
if fmt::write(&mut rhs_buf, format_args!("{:?}", rhs)).is_ok() {
let lhs_str = lhs_buf.as_str();
let rhs_str = rhs_buf.as_str();
// "{msg} ({lhs} vs {rhs})"
let len = msg.len() + 2 + lhs_str.len() + 4 + rhs_str.len() + 1;
let mut string = String::with_capacity(len);
string.push_str(msg);
string.push_str(" (");
string.push_str(lhs_str);
string.push_str(" vs ");
string.push_str(rhs_str);
string.push(')');
return Error::msg(string);
}
}
Error::msg(msg)
}
#[doc(hidden)]
#[macro_export]
macro_rules! __parse_ensure {
(atom () $bail:tt $fuel:tt {($($rhs:tt)+) ($($lhs:tt)+) $op:tt} $dup:tt $(,)?) => {
$crate::__fancy_ensure!($($lhs)+, $op, $($rhs)+)
};
// low precedence control flow constructs
(0 $stack:tt ($($bail:tt)*) $fuel:tt $parse:tt $dup:tt return $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(0 $stack:tt ($($bail:tt)*) $fuel:tt $parse:tt $dup:tt break $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(0 $stack:tt ($($bail:tt)*) $fuel:tt $parse:tt $dup:tt continue $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(0 $stack:tt ($($bail:tt)*) $fuel:tt $parse:tt $dup:tt yield $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(0 $stack:tt ($($bail:tt)*) $fuel:tt $parse:tt $dup:tt move $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
// unary operators
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($deref:tt $($dup:tt)*) * $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $deref) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($not:tt $($dup:tt)*) ! $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $not) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($neg:tt $($dup:tt)*) - $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $neg) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($let:tt $($dup:tt)*) let $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $let) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($lifetime:tt $colon:tt $($dup:tt)*) $label:lifetime : $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $lifetime $colon) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $mut:tt $($dup:tt)*) &mut $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $and $mut) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $($dup:tt)*) & $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $and) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($andand:tt $mut:tt $($dup:tt)*) &&mut $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $andand $mut) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($andand:tt $($dup:tt)*) && $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $andand) $($parse)*} ($($rest)*) $($rest)*)
};
// control flow constructs
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($if:tt $($dup:tt)*) if $($rest:tt)*) => {
$crate::__parse_ensure!(0 (cond $stack) $bail ($($fuel)*) {($($buf)* $if) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($match:tt $($dup:tt)*) match $($rest:tt)*) => {
$crate::__parse_ensure!(0 (cond $stack) $bail ($($fuel)*) {($($buf)* $match) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($while:tt $($dup:tt)*) while $($rest:tt)*) => {
$crate::__parse_ensure!(0 (cond $stack) $bail ($($fuel)*) {($($buf)* $while) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($for:tt $($dup:tt)*) for $($rest:tt)*) => {
$crate::__parse_ensure!(pat (cond $stack) $bail ($($fuel)*) {($($buf)* $for) $($parse)*} ($($rest)*) $($rest)*)
};
(atom (cond $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($brace:tt $($dup:tt)*) {$($block:tt)*} $($rest:tt)*) => {
$crate::__parse_ensure!(cond $stack $bail ($($fuel)*) {($($buf)* $brace) $($parse)*} ($($rest)*) $($rest)*)
};
(cond $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($else:tt $if:tt $($dup:tt)*) else if $($rest:tt)*) => {
$crate::__parse_ensure!(0 (cond $stack) $bail ($($fuel)*) {($($buf)* $else $if) $($parse)*} ($($rest)*) $($rest)*)
};
(cond $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($else:tt $brace:tt $($dup:tt)*) else {$($block:tt)*} $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $else $brace) $($parse)*} ($($rest)*) $($rest)*)
};
(cond $stack:tt $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) $parse $dup $($rest)*)
};
// atomic expressions
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($paren:tt $($dup:tt)*) ($($content:tt)*) $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $paren) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bracket:tt $($dup:tt)*) [$($array:tt)*] $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $bracket) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($brace:tt $($dup:tt)*) {$($block:tt)*} $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $brace) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($loop:tt $block:tt $($dup:tt)*) loop {$($body:tt)*} $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $loop $block) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($async:tt $block:tt $($dup:tt)*) async {$($body:tt)*} $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $async $block) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($async:tt $move:tt $block:tt $($dup:tt)*) async move {$($body:tt)*} $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $async $move $block) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($unsafe:tt $block:tt $($dup:tt)*) unsafe {$($body:tt)*} $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $unsafe $block) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($const:tt $block:tt $($dup:tt)*) const {$($body:tt)*} $($rest:tt)*) => {
// TODO: this is mostly useless due to https://github.com/rust-lang/rust/issues/86730
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $const $block) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($literal:tt $($dup:tt)*) $lit:literal $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $literal) $($parse)*} ($($rest)*) $($rest)*)
};
// path expressions
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $ident:tt $($dup:tt)*) :: $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!(epath (atom $stack) $bail ($($fuel)*) {($($buf)* $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($ident:tt $($dup:tt)*) $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!(epath (atom $stack) $bail ($($fuel)*) {($($buf)* $ident) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($langle:tt $($dup:tt)*) < $($rest:tt)*) => {
$crate::__parse_ensure!(type (qpath (epath (atom $stack))) $bail ($($fuel)*) {($($buf)* $langle) $($parse)*} ($($rest)*) $($rest)*)
};
(epath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $langle:tt $($dup:tt)*) :: < $($rest:tt)*) => {
$crate::__parse_ensure!(generic (epath $stack) $bail ($($fuel)*) {($($buf)* $colons $langle) $($parse)*} ($($rest)*) $($rest)*)
};
(epath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $langle:tt $($dup:tt)*) :: << $($rest:tt)*) => {
$crate::__parse_ensure!(type (qpath (tpath (arglist (epath $stack)))) $bail ($($fuel)*) {($($buf)* $colons $langle) $($parse)*} ($($rest)*) $($rest)*)
};
(epath $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt :: <- - $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(epath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $larrow:tt $($dup:tt)*) :: <- $lit:literal $($rest:tt)*) => {
$crate::__parse_ensure!(generic (epath $stack) $bail ($($fuel)*) {($($buf)* $colons $larrow) $($parse)*} ($($dup)*) $($dup)*)
};
(epath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $ident:tt $($dup:tt)*) :: $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!(epath $stack $bail ($($fuel)*) {($($buf)* $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
};
(epath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bang:tt $args:tt $($dup:tt)*) ! ($($mac:tt)*) $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bang $args) $($parse)*} ($($rest)*) $($rest)*)
};
(epath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bang:tt $args:tt $($dup:tt)*) ! [$($mac:tt)*] $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bang $args) $($parse)*} ($($rest)*) $($rest)*)
};
(epath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bang:tt $args:tt $($dup:tt)*) ! {$($mac:tt)*} $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bang $args) $($parse)*} ($($rest)*) $($rest)*)
};
(epath (split ($pop:ident $stack:tt)) $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
$crate::__parse_ensure!($pop (split $stack) $bail ($($fuel)*) $parse $dup $($rest)*)
};
(epath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) $parse $dup $($rest)*)
};
// trailer expressions
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($paren:tt $($dup:tt)*) ($($call:tt)*) $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $paren) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bracket:tt $($dup:tt)*) [$($index:tt)*] $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $bracket) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($brace:tt $($dup:tt)*) {$($init:tt)*} $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $brace) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($question:tt $($dup:tt)*) ? $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $question) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($dot:tt $ident:tt $colons:tt $langle:tt $($dup:tt)*) . $i:ident :: < $($rest:tt)*) => {
$crate::__parse_ensure!(generic (atom $stack) $bail ($($fuel)*) {($($buf)* $dot $ident $colons $langle) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($dot:tt $ident:tt $colons:tt $langle:tt $($dup:tt)*) . $i:ident :: << $($rest:tt)*) => {
$crate::__parse_ensure!(type (qpath (tpath (arglist (atom $stack)))) $bail ($($fuel)*) {($($buf)* $dot $ident $colons $langle) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt . $i:ident :: <- - $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($dot:tt $ident:tt $colons:tt $larrow:tt $($dup:tt)*) . $i:ident :: <- $lit:literal $($rest:tt)*) => {
$crate::__parse_ensure!(generic (atom $stack) $bail ($($fuel)*) {($($buf)* $dot $ident $colons $larrow) $($parse)*} ($($dup)*) $($dup)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($dot:tt $field:tt $($dup:tt)*) . $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $dot $field) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt . - $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($dot:tt $index:tt $($dup:tt)*) . $lit:literal $($rest:tt)*) => {
$crate::__parse_ensure!(atom $stack $bail ($($fuel)*) {($($buf)* $dot $index) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($as:tt $($dup:tt)*) as $($rest:tt)*) => {
$crate::__parse_ensure!(type (atom $stack) $bail ($($fuel)*) {($($buf)* $as) $($parse)*} ($($rest)*) $($rest)*)
};
// types
(type ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bracket:tt $($dup:tt)*) [$($content:tt)*] $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bracket) $($parse)*} ($($rest)*) $($rest)*)
};
(type ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($paren:tt $($dup:tt)*) ($($content:tt)*) $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $paren) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($star:tt $const:tt $($dup:tt)*) *const $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $star $const) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($star:tt $mut:tt $($dup:tt)*) *mut $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $star $mut) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $lifetime:tt $mut:tt $($dup:tt)*) & $l:lifetime mut $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and $lifetime $mut) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $mut:tt $($dup:tt)*) & mut $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and $mut) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $lifetime:tt $($dup:tt)*) & $l:lifetime $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and $lifetime) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $($dup:tt)*) & $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $lifetime:tt $mut:tt $($dup:tt)*) && $l:lifetime mut $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and $lifetime $mut) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $mut:tt $($dup:tt)*) && mut $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and $mut) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $lifetime:tt $($dup:tt)*) && $l:lifetime $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and $lifetime) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $($dup:tt)*) && $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $and) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt unsafe extern - $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($unsafe:tt $(extern $($abi:literal)?)? fn $($dup:tt)*) unsafe $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $unsafe) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt extern - $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($extern:tt $abi:tt fn $($dup:tt)*) extern $lit:literal $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $extern $abi) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($extern:tt fn $($dup:tt)*) extern $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $extern) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($fn:tt $paren:tt $arrow:tt $($dup:tt)*) fn ($($args:tt)*) -> $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $fn $paren $arrow) $($parse)*} ($($rest)*) $($rest)*)
};
(type ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($fn:tt $paren:tt $($dup:tt)*) fn ($($args:tt)*) $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $fn $paren) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($impl:tt $($dup:tt)*) impl $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $impl) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($dyn:tt $($dup:tt)*) dyn $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $dyn) $($parse)*} ($($rest)*) $($rest)*)
};
(type ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($wild:tt $($dup:tt)*) _ $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $wild) $($parse)*} ($($rest)*) $($rest)*)
};
(type ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($never:tt $($dup:tt)*) ! $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $never) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($for:tt $langle:tt $($dup:tt)*) for < $($rest:tt)*) => {
$crate::__parse_ensure!(generic (type $stack) $bail ($($fuel)*) {($($buf)* $for $langle) $($parse)*} ($($rest)*) $($rest)*)
};
// path types
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $ident:tt $($dup:tt)*) :: $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!(tpath $stack $bail ($($fuel)*) {($($buf)* $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($ident:tt $($dup:tt)*) $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!(tpath $stack $bail ($($fuel)*) {($($buf)* $ident) $($parse)*} ($($rest)*) $($rest)*)
};
(type $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($langle:tt $($dup:tt)*) < $($rest:tt)*) => {
$crate::__parse_ensure!(type (qpath (tpath $stack)) $bail ($($fuel)*) {($($buf)* $langle) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($langle:tt $($dup:tt)*) < $($rest:tt)*) => {
$crate::__parse_ensure!(generic (tpath $stack) $bail ($($fuel)*) {($($buf)* $langle) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($langle:tt $($dup:tt)*) << $($rest:tt)*) => {
$crate::__parse_ensure!(type (qpath (tpath (arglist (tpath $stack)))) $bail ($($fuel)*) {($($buf)* $langle) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt <- - $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($larrow:tt $($dup:tt)*) <- $lit:literal $($rest:tt)*) => {
$crate::__parse_ensure!(generic (tpath $stack) $bail ($($fuel)*) {($($buf)* $larrow) $($parse)*} ($($dup)*) $($dup)*)
};
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $langle:tt $($dup:tt)*) :: < $($rest:tt)*) => {
$crate::__parse_ensure!(generic (tpath $stack) $bail ($($fuel)*) {($($buf)* $colons $langle) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $langle:tt $($dup:tt)*) :: << $($rest:tt)*) => {
$crate::__parse_ensure!(type (qpath (tpath (arglist (tpath $stack)))) $bail ($($fuel)*) {($($buf)* $colons $langle) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt :: <- - $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $larrow:tt $($dup:tt)*) :: <- $lit:literal $($rest:tt)*) => {
$crate::__parse_ensure!(generic (tpath $stack) $bail ($($fuel)*) {($($buf)* $colons $larrow) $($parse)*} ($($dup)*) $($dup)*)
};
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $ident:tt $($dup:tt)*) :: $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!(tpath $stack $bail ($($fuel)*) {($($buf)* $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($paren:tt $arrow:tt $($dup:tt)*) ($($args:tt)*) -> $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $paren $arrow) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($paren:tt $($dup:tt)*) ($($args:tt)*) $($rest:tt)*) => {
$crate::__parse_ensure!(object $stack $bail ($($fuel)*) {($($buf)* $paren) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $paren:tt $arrow:tt $($dup:tt)*) :: ($($args:tt)*) -> $($rest:tt)*) => {
$crate::__parse_ensure!(type $stack $bail ($($fuel)*) {($($buf)* $colons $paren $arrow) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $paren:tt $($dup:tt)*) :: ($($args:tt)*) $($rest:tt)*) => {
$crate::__parse_ensure!(object $stack $bail ($($fuel)*) {($($buf)* $colons $paren) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bang:tt $args:tt $($dup:tt)*) ! ($($mac:tt)*) $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bang $args) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bang:tt $args:tt $($dup:tt)*) ! [$($mac:tt)*] $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bang $args) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bang:tt $args:tt $($dup:tt)*) ! {$($mac:tt)*} $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $bang $args) $($parse)*} ($($rest)*) $($rest)*)
};
(tpath $stack:tt $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
$crate::__parse_ensure!(object $stack $bail ($($fuel)*) $parse $dup $($rest)*)
};
// qualified paths
(qpath (split ($pop:ident $stack:tt)) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $colons:tt $ident:tt $($dup:tt)*) >> :: $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $rangle $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
};
(qpath ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $colons:tt $ident:tt $($dup:tt)*) > :: $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $rangle $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
};
(qpath $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($as:tt $($dup:tt)*) as $($rest:tt)*) => {
$crate::__parse_ensure!(type (qpath $stack) $bail ($($fuel)*) {($($buf)* $as) $($parse)*} ($($rest)*) $($rest)*)
};
// trait objects
(object (arglist $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($plus:tt $colons:tt $ident:tt $($dup:tt)*) + :: $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!(tpath (arglist $stack) $bail ($($fuel)*) {($($buf)* $plus $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
};
(object (arglist $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($plus:tt $ident:tt $($dup:tt)*) + $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!(tpath (arglist $stack) $bail ($($fuel)*) {($($buf)* $plus $ident) $($parse)*} ($($rest)*) $($rest)*)
};
(object (split ($pop:ident $stack:tt)) $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
$crate::__parse_ensure!($pop (split $stack) $bail ($($fuel)*) $parse $dup $($rest)*)
};
(object ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) $parse $dup $($rest)*)
};
// angle bracketed generic arguments
(generic (split ($pop:ident $stack:tt)) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $($dup:tt)*) >> $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $rangle) $($parse)*} ($($rest)*) $($rest)*)
};
(generic ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $($dup:tt)*) > $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $rangle) $($parse)*} ($($rest)*) $($rest)*)
};
(generic ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $($dup:tt)*) >> $($rest:tt)*) => {
$crate::__parse_ensure!($pop (split $stack) $bail ($($fuel)*) {($($buf)*) $($parse)*} ($rangle $($rest)*) $rangle $($rest)*)
};
(generic $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt - - $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(generic $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($neg:tt $($dup:tt)*) - $lit:literal $($rest:tt)*) => {
$crate::__parse_ensure!(generic $stack $bail ($($fuel)*) {($($buf)* $neg) $($parse)*} ($($dup)*) $($dup)*)
};
(generic $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt - $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(generic $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($literal:tt $($dup:tt)*) $lit:literal $($rest:tt)*) => {
$crate::__parse_ensure!(arglist $stack $bail ($($fuel)*) {($($buf)* $literal) $($parse)*} ($($rest)*) $($rest)*)
};
(generic $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($brace:tt $($dup:tt)*) {$($block:tt)*} $($rest:tt)*) => {
$crate::__parse_ensure!(arglist $stack $bail ($($fuel)*) {($($buf)* $brace) $($parse)*} ($($rest)*) $($rest)*)
};
(generic $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($lifetime:tt $($dup:tt)*) $l:lifetime $($rest:tt)*) => {
$crate::__parse_ensure!(arglist $stack $bail ($($fuel)*) {($($buf)* $lifetime) $($parse)*} ($($rest)*) $($rest)*)
};
(generic $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($assoc:tt $eq:tt $($dup:tt)*) $ident:ident = $($rest:tt)*) => {
$crate::__parse_ensure!(type (arglist $stack) $bail ($($fuel)*) {($($buf)* $assoc $eq) $($parse)*} ($($rest)*) $($rest)*)
};
(generic $stack:tt $bail:tt (~$($fuel:tt)*) $parse:tt $dup:tt $($rest:tt)*) => {
$crate::__parse_ensure!(type (arglist $stack) $bail ($($fuel)*) $parse $dup $($rest)*)
};
(arglist $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($comma:tt $($dup:tt)*) , $($rest:tt)*) => {
$crate::__parse_ensure!(generic $stack $bail ($($fuel)*) {($($buf)* $comma) $($parse)*} ($($rest)*) $($rest)*)
};
(arglist (split ($pop:ident $stack:tt)) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $($dup:tt)*) >> $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)*) $rangle $($parse)*} ($($rest)*) $($rest)*)
};
(arglist ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $($dup:tt)*) > $($rest:tt)*) => {
$crate::__parse_ensure!($pop $stack $bail ($($fuel)*) {($($buf)* $rangle) $($parse)*} ($($rest)*) $($rest)*)
};
(arglist ($pop:ident $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rangle:tt $($dup:tt)*) >> $($rest:tt)*) => {
$crate::__parse_ensure!($pop (split $stack) $bail ($($fuel)*) {($($buf)*) $($parse)*} ($rangle $($rest)*) $rangle $($rest)*)
};
// patterns
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($pipe:tt $($dup:tt)*) | $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $pipe) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($eq:tt $($dup:tt)*) = $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $eq) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($in:tt $($dup:tt)*) in $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $in) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($ref:tt $($dup:tt)*) ref $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $ref) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($mut:tt $($dup:tt)*) mut $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $mut) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($at:tt $($dup:tt)*) @ $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $at) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt - - $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($neg:tt $($dup:tt)*) - $lit:literal $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $neg) $($parse)*} ($($dup)*) $($dup)*)
};
(pat $stack:tt ($($bail:tt)*) (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt - $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($literal:tt $($dup:tt)*) $lit:literal $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $literal) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($range:tt $($dup:tt)*) .. $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $range) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($range:tt $($dup:tt)*) ..= $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $range) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $($dup:tt)*) & $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $and) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($andand:tt $($dup:tt)*) && $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $andand) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($paren:tt $($dup:tt)*) ($($content:tt)*) $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $paren) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bracket:tt $($dup:tt)*) [$($content:tt)*] $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $bracket) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($brace:tt $($dup:tt)*) {$($content:tt)*} $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $brace) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($wild:tt $($dup:tt)*) _ $($rest:tt)*) => {
$crate::__parse_ensure!(pat $stack $bail ($($fuel)*) {($($buf)* $wild) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($colons:tt $ident:tt $($dup:tt)*) :: $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!(epath (pat $stack) $bail ($($fuel)*) {($($buf)* $colons $ident) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($ident:tt $($dup:tt)*) $i:ident $($rest:tt)*) => {
$crate::__parse_ensure!(epath (pat $stack) $bail ($($fuel)*) {($($buf)* $ident) $($parse)*} ($($rest)*) $($rest)*)
};
(pat $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($langle:tt $($dup:tt)*) < $($rest:tt)*) => {
$crate::__parse_ensure!(type (qpath (epath (pat $stack))) $bail ($($fuel)*) {($($buf)* $langle) $($parse)*} ($($rest)*) $($rest)*)
};
// comparison binary operators
(atom () $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($eq:tt $($dup:tt)*) == $($rest:tt)*) => {
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)*) $eq} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($eq:tt $($dup:tt)*) == $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $eq) $($parse)*} ($($rest)*) $($rest)*)
};
(atom () $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($le:tt $($dup:tt)*) <= $($rest:tt)*) => {
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)*) $le} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($le:tt $($dup:tt)*) <= $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $le) $($parse)*} ($($rest)*) $($rest)*)
};
(atom () $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($lt:tt $($dup:tt)*) < $($rest:tt)*) => {
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)*) $lt} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($lt:tt $($dup:tt)*) < $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $lt) $($parse)*} ($($rest)*) $($rest)*)
};
(atom () $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($ne:tt $($dup:tt)*) != $($rest:tt)*) => {
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)*) $ne} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($ne:tt $($dup:tt)*) != $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $ne) $($parse)*} ($($rest)*) $($rest)*)
};
(atom () $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($ge:tt $($dup:tt)*) >= $($rest:tt)*) => {
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)*) $ge} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($ge:tt $($dup:tt)*) >= $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $ge) $($parse)*} ($($rest)*) $($rest)*)
};
(atom (split ()) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} $dup:tt >> $($rest:tt)*) => {
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)* > ) > } ($($rest)*) $($rest)*)
};
(atom () $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($gt:tt $($dup:tt)*) > $($rest:tt)*) => {
$crate::__parse_ensure!(0 () $bail ($($fuel)*) {() $($parse)* ($($buf)*) $gt} ($($rest)*) $($rest)*)
};
(atom (split $stack:tt) $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($rangle:tt $($dup:tt)*) >> $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $rangle) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)+) $($parse:tt)*} ($gt:tt $($dup:tt)*) > $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $gt) $($parse)*} ($($rest)*) $($rest)*)
};
// high precedence binary operators
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($add:tt $($dup:tt)*) + $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $add) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($sub:tt $($dup:tt)*) - $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $sub) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($mul:tt $($dup:tt)*) * $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $mul) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($div:tt $($dup:tt)*) / $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $div) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($rem:tt $($dup:tt)*) % $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $rem) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bitxor:tt $($dup:tt)*) ^ $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $bitxor) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bitand:tt $($dup:tt)*) & $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $bitand) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bitor:tt $($dup:tt)*) | $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $bitor) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($shl:tt $($dup:tt)*) << $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $shl) $($parse)*} ($($rest)*) $($rest)*)
};
(atom $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($shr:tt $($dup:tt)*) >> $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $shr) $($parse)*} ($($rest)*) $($rest)*)
};
// low precedence binary operators
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $($dup:tt)*) && $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $and) $($parse)*} ($($rest)*) $($rest)*)
};
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($or:tt $($dup:tt)*) || $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $or) $($parse)*} ($($rest)*) $($rest)*)
};
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($assign:tt $($dup:tt)*) = $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $assign) $($parse)*} ($($rest)*) $($rest)*)
};
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($addeq:tt $($dup:tt)*) += $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $addeq) $($parse)*} ($($rest)*) $($rest)*)
};
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($subeq:tt $($dup:tt)*) -= $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $subeq) $($parse)*} ($($rest)*) $($rest)*)
};
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($muleq:tt $($dup:tt)*) *= $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $muleq) $($parse)*} ($($rest)*) $($rest)*)
};
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($diveq:tt $($dup:tt)*) /= $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $diveq) $($parse)*} ($($rest)*) $($rest)*)
};
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($remeq:tt $($dup:tt)*) %= $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $remeq) $($parse)*} ($($rest)*) $($rest)*)
};
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bitxoreq:tt $($dup:tt)*) ^= $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $bitxoreq) $($parse)*} ($($rest)*) $($rest)*)
};
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bitandeq:tt $($dup:tt)*) &= $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $bitandeq) $($parse)*} ($($rest)*) $($rest)*)
};
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($bitoreq:tt $($dup:tt)*) |= $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $bitoreq) $($parse)*} ($($rest)*) $($rest)*)
};
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($shleq:tt $($dup:tt)*) <<= $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $shleq) $($parse)*} ($($rest)*) $($rest)*)
};
(atom ($($stack:tt)+) $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($shreq:tt $($dup:tt)*) >>= $($rest:tt)*) => {
$crate::__parse_ensure!(0 ($($stack)*) $bail ($($fuel)*) {($($buf)* $shreq) $($parse)*} ($($rest)*) $($rest)*)
};
// unrecognized expression
($state:tt $stack:tt ($($bail:tt)*) $($rest:tt)*) => {
$crate::__fallback_ensure!($($bail)*)
};
}
#[doc(hidden)]
#[macro_export]
macro_rules! __fancy_ensure {
($lhs:expr, $op:tt, $rhs:expr) => {
match (&$lhs, &$rhs) {
(lhs, rhs) => {
if !(lhs $op rhs) {
#[allow(unused_imports)]
use $crate::__private::{BothDebug, NotBothDebug};
return Err((lhs, rhs).__dispatch_ensure(
$crate::__private::concat!(
"Condition failed: `",
$crate::__private::stringify!($lhs),
" ",
$crate::__private::stringify!($op),
" ",
$crate::__private::stringify!($rhs),
"`",
),
));
}
}
}
};
}
#[doc(hidden)]
#[macro_export]
macro_rules! __fallback_ensure {
($cond:expr $(,)?) => {
if $crate::__private::not($cond) {
return $crate::__private::Err($crate::Error::msg(
$crate::__private::concat!("Condition failed: `", $crate::__private::stringify!($cond), "`")
));
}
};
($cond:expr, $msg:literal $(,)?) => {
if $crate::__private::not($cond) {
return $crate::__private::Err($crate::__anyhow!($msg));
}
};
($cond:expr, $err:expr $(,)?) => {
if $crate::__private::not($cond) {
return $crate::__private::Err($crate::__anyhow!($err));
}
};
($cond:expr, $fmt:expr, $($arg:tt)*) => {
if $crate::__private::not($cond) {
return $crate::__private::Err($crate::__anyhow!($fmt, $($arg)*));
}
};
}
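// Editorial sketch (not part of the vendored crate): a hand-written
// approximation of what `ensure!(a - b <= 10)` expands to once the parser
// above has split it into lhs `a - b`, operator `<=` and rhs `10` and handed
// the pieces to `__fancy_ensure!`. Names below are illustrative only.
#[cfg(test)]
mod fancy_ensure_sketch {
    use crate::Result;

    fn demo(a: i32, b: i32) -> Result<()> {
        // Both operands are evaluated once, borrowed, compared, and on
        // failure interpolated into the "(lhs vs rhs)" suffix of the message.
        match (&(a - b), &10) {
            (lhs, rhs) => {
                if !(lhs <= rhs) {
                    return Err(crate::anyhow!(
                        "Condition failed: `a - b <= 10` ({:?} vs {:?})",
                        lhs,
                        rhs
                    ));
                }
            }
        }
        Ok(())
    }

    #[test]
    fn behaves_like_the_macro() {
        assert!(demo(15, 3).is_err()); // 12 <= 10 fails
        assert!(demo(5, 3).is_ok());
    }
}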

1027
vendor/anyhow/src/error.rs vendored Normal file

File diff suppressed because it is too large

158
vendor/anyhow/src/fmt.rs vendored Normal file
View file

@ -0,0 +1,158 @@
use crate::chain::Chain;
use crate::error::ErrorImpl;
use crate::ptr::Ref;
use core::fmt::{self, Debug, Write};
impl ErrorImpl {
pub(crate) unsafe fn display(this: Ref<Self>, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", unsafe { Self::error(this) })?;
if f.alternate() {
let chain = unsafe { Self::chain(this) };
for cause in chain.skip(1) {
write!(f, ": {}", cause)?;
}
}
Ok(())
}
pub(crate) unsafe fn debug(this: Ref<Self>, f: &mut fmt::Formatter) -> fmt::Result {
let error = unsafe { Self::error(this) };
if f.alternate() {
return Debug::fmt(error, f);
}
write!(f, "{}", error)?;
if let Some(cause) = error.source() {
write!(f, "\n\nCaused by:")?;
let multiple = cause.source().is_some();
for (n, error) in Chain::new(cause).enumerate() {
writeln!(f)?;
let mut indented = Indented {
inner: f,
number: if multiple { Some(n) } else { None },
started: false,
};
write!(indented, "{}", error)?;
}
}
#[cfg(any(std_backtrace, feature = "backtrace"))]
{
use crate::backtrace::BacktraceStatus;
use alloc::string::ToString;
let backtrace = unsafe { Self::backtrace(this) };
if let BacktraceStatus::Captured = backtrace.status() {
let mut backtrace = backtrace.to_string();
write!(f, "\n\n")?;
if backtrace.starts_with("stack backtrace:") {
// Capitalize to match "Caused by:"
backtrace.replace_range(0..1, "S");
} else {
// "stack backtrace:" prefix was removed in
// https://github.com/rust-lang/backtrace-rs/pull/286
writeln!(f, "Stack backtrace:")?;
}
backtrace.truncate(backtrace.trim_end().len());
write!(f, "{}", backtrace)?;
}
}
Ok(())
}
}
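// Writer adapter used by `debug` above: it prefixes the first line of each
// cause with either a right-aligned "    N: " counter (when there are
// multiple causes) or four spaces, and indents continuation lines to match,
// producing the "Caused by:" block shown in the crate-level docs.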
struct Indented<'a, D> {
inner: &'a mut D,
number: Option<usize>,
started: bool,
}
impl<T> Write for Indented<'_, T>
where
T: Write,
{
fn write_str(&mut self, s: &str) -> fmt::Result {
for (i, line) in s.split('\n').enumerate() {
if !self.started {
self.started = true;
match self.number {
Some(number) => write!(self.inner, "{: >5}: ", number)?,
None => self.inner.write_str("    ")?,
}
} else if i > 0 {
self.inner.write_char('\n')?;
if self.number.is_some() {
self.inner.write_str("       ")?;
} else {
self.inner.write_str("    ")?;
}
}
self.inner.write_str(line)?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
use alloc::string::String;
#[test]
fn one_digit() {
let input = "verify\nthis";
let expected = "    2: verify\n       this";
let mut output = String::new();
Indented {
inner: &mut output,
number: Some(2),
started: false,
}
.write_str(input)
.unwrap();
assert_eq!(expected, output);
}
#[test]
fn two_digits() {
let input = "verify\nthis";
let expected = "   12: verify\n       this";
let mut output = String::new();
Indented {
inner: &mut output,
number: Some(12),
started: false,
}
.write_str(input)
.unwrap();
assert_eq!(expected, output);
}
#[test]
fn no_digits() {
let input = "verify\nthis";
let expected = "    verify\n    this";
let mut output = String::new();
Indented {
inner: &mut output,
number: None,
started: false,
}
.write_str(input)
.unwrap();
assert_eq!(expected, output);
}
}

121
vendor/anyhow/src/kind.rs vendored Normal file
View file

@ -0,0 +1,121 @@
// Tagged dispatch mechanism for resolving the behavior of `anyhow!($expr)`.
//
// When anyhow! is given a single expr argument to turn into anyhow::Error, we
// want the resulting Error to pick up the input's implementation of source()
// and backtrace() if it has a std::error::Error impl, otherwise require nothing
// more than Display and Debug.
//
// Expressed in terms of specialization, we want something like:
//
// trait AnyhowNew {
// fn new(self) -> Error;
// }
//
// impl<T> AnyhowNew for T
// where
// T: Display + Debug + Send + Sync + 'static,
// {
// default fn new(self) -> Error {
// /* no std error impl */
// }
// }
//
// impl<T> AnyhowNew for T
// where
// T: std::error::Error + Send + Sync + 'static,
// {
// fn new(self) -> Error {
// /* use std error's source() and backtrace() */
// }
// }
//
// Since specialization is not stable yet, instead we rely on autoref behavior
// of method resolution to perform tagged dispatch. Here we have two traits
// AdhocKind and TraitKind that both have an anyhow_kind() method. AdhocKind is
// implemented whether or not the caller's type has a std error impl, while
// TraitKind is implemented only when a std error impl does exist. The ambiguity
// is resolved by AdhocKind requiring an extra autoref so that it has lower
// precedence.
//
// The anyhow! macro will set up the call in this form:
//
// #[allow(unused_imports)]
// use $crate::__private::{AdhocKind, TraitKind};
// let error = $msg;
// (&error).anyhow_kind().new(error)
use crate::Error;
use core::fmt::{Debug, Display};
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
use crate::StdError;
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
use alloc::boxed::Box;
pub struct Adhoc;
#[doc(hidden)]
pub trait AdhocKind: Sized {
#[inline]
fn anyhow_kind(&self) -> Adhoc {
Adhoc
}
}
impl<T> AdhocKind for &T where T: ?Sized + Display + Debug + Send + Sync + 'static {}
impl Adhoc {
#[cold]
pub fn new<M>(self, message: M) -> Error
where
M: Display + Debug + Send + Sync + 'static,
{
Error::from_adhoc(message, backtrace!())
}
}
pub struct Trait;
#[doc(hidden)]
pub trait TraitKind: Sized {
#[inline]
fn anyhow_kind(&self) -> Trait {
Trait
}
}
impl<E> TraitKind for E where E: Into<Error> {}
impl Trait {
#[cold]
pub fn new<E>(self, error: E) -> Error
where
E: Into<Error>,
{
error.into()
}
}
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
pub struct Boxed;
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
#[doc(hidden)]
pub trait BoxedKind: Sized {
#[inline]
fn anyhow_kind(&self) -> Boxed {
Boxed
}
}
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
impl BoxedKind for Box<dyn StdError + Send + Sync> {}
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
impl Boxed {
#[cold]
pub fn new(self, error: Box<dyn StdError + Send + Sync>) -> Error {
let backtrace = backtrace_if_absent!(&*error);
Error::from_boxed(error, backtrace)
}
}
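// Editorial sketch (not part of the vendored file): how the autoref-based
// dispatch described in the comment at the top of this file resolves in
// practice. `Trait` wins for types convertible into `Error`; anything that is
// only Display + Debug needs one more autoref and lands on `Adhoc`.
#[cfg(all(test, feature = "std"))]
mod dispatch_sketch {
    use super::{AdhocKind, TraitKind};

    #[test]
    fn picks_the_right_kind() {
        // std::io::Error: Into<crate::Error>, so `(&err).anyhow_kind()`
        // resolves through TraitKind without an extra autoref.
        let err = std::io::Error::new(std::io::ErrorKind::Other, "oh no!");
        let _kind: super::Trait = (&err).anyhow_kind();

        // A plain &str has no std error impl, so resolution takes the extra
        // autoref and falls back to AdhocKind.
        let msg = "oh no!";
        let _kind: super::Adhoc = (&msg).anyhow_kind();
    }
}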

732
vendor/anyhow/src/lib.rs vendored Normal file
View file

@ -0,0 +1,732 @@
//! [![github]](https://github.com/dtolnay/anyhow)&ensp;[![crates-io]](https://crates.io/crates/anyhow)&ensp;[![docs-rs]](https://docs.rs/anyhow)
//!
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
//!
//! <br>
//!
//! This library provides [`anyhow::Error`][Error], a trait object based error
//! type for easy idiomatic error handling in Rust applications.
//!
//! <br>
//!
//! # Details
//!
//! - Use `Result<T, anyhow::Error>`, or equivalently `anyhow::Result<T>`, as
//! the return type of any fallible function.
//!
//! Within the function, use `?` to easily propagate any error that implements
//! the [`std::error::Error`] trait.
//!
//! ```
//! # pub trait Deserialize {}
//! #
//! # mod serde_json {
//! # use super::Deserialize;
//! # use std::io;
//! #
//! # pub fn from_str<T: Deserialize>(json: &str) -> io::Result<T> {
//! # unimplemented!()
//! # }
//! # }
//! #
//! # struct ClusterMap;
//! #
//! # impl Deserialize for ClusterMap {}
//! #
//! use anyhow::Result;
//!
//! fn get_cluster_info() -> Result<ClusterMap> {
//! let config = std::fs::read_to_string("cluster.json")?;
//! let map: ClusterMap = serde_json::from_str(&config)?;
//! Ok(map)
//! }
//! #
//! # fn main() {}
//! ```
//!
//! - Attach context to help the person troubleshooting the error understand
//! where things went wrong. A low-level error like "No such file or
//! directory" can be annoying to debug without more context about what higher
//! level step the application was in the middle of.
//!
//! ```
//! # struct It;
//! #
//! # impl It {
//! # fn detach(&self) -> Result<()> {
//! # unimplemented!()
//! # }
//! # }
//! #
//! use anyhow::{Context, Result};
//!
//! fn main() -> Result<()> {
//! # return Ok(());
//! #
//! # const _: &str = stringify! {
//! ...
//! # };
//! #
//! # let it = It;
//! # let path = "./path/to/instrs.json";
//! #
//! it.detach().context("Failed to detach the important thing")?;
//!
//! let content = std::fs::read(path)
//! .with_context(|| format!("Failed to read instrs from {}", path))?;
//! #
//! # const _: &str = stringify! {
//! ...
//! # };
//! #
//! # Ok(())
//! }
//! ```
//!
//! ```console
//! Error: Failed to read instrs from ./path/to/instrs.json
//!
//! Caused by:
//! No such file or directory (os error 2)
//! ```
//!
//! - Downcasting is supported and can be by value, by shared reference, or by
//! mutable reference as needed.
//!
//! ```
//! # use anyhow::anyhow;
//! # use std::fmt::{self, Display};
//! # use std::task::Poll;
//! #
//! # #[derive(Debug)]
//! # enum DataStoreError {
//! # Censored(()),
//! # }
//! #
//! # impl Display for DataStoreError {
//! # fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
//! # unimplemented!()
//! # }
//! # }
//! #
//! # impl std::error::Error for DataStoreError {}
//! #
//! # const REDACTED_CONTENT: () = ();
//! #
//! # let error = anyhow!("...");
//! # let root_cause = &error;
//! #
//! # let ret =
//! // If the error was caused by redaction, then return a
//! // tombstone instead of the content.
//! match root_cause.downcast_ref::<DataStoreError>() {
//! Some(DataStoreError::Censored(_)) => Ok(Poll::Ready(REDACTED_CONTENT)),
//! None => Err(error),
//! }
//! # ;
//! ```
//!
//! - If using Rust &ge; 1.65, a backtrace is captured and printed with the
//! error if the underlying error type does not already provide its own. In
//! order to see backtraces, they must be enabled through the environment
//! variables described in [`std::backtrace`]:
//!
//! - If you want panics and errors to both have backtraces, set
//! `RUST_BACKTRACE=1`;
//! - If you want only errors to have backtraces, set `RUST_LIB_BACKTRACE=1`;
//! - If you want only panics to have backtraces, set `RUST_BACKTRACE=1` and
//! `RUST_LIB_BACKTRACE=0`.
//!
//! [`std::backtrace`]: https://doc.rust-lang.org/std/backtrace/index.html#environment-variables
//!
//! - Anyhow works with any error type that has an impl of `std::error::Error`,
//! including ones defined in your crate. We do not bundle a `derive(Error)`
//! macro but you can write the impls yourself or use a standalone macro like
//! [thiserror].
//!
//! [thiserror]: https://github.com/dtolnay/thiserror
//!
//! ```
//! use thiserror::Error;
//!
//! #[derive(Error, Debug)]
//! pub enum FormatError {
//! #[error("Invalid header (expected {expected:?}, got {found:?})")]
//! InvalidHeader {
//! expected: String,
//! found: String,
//! },
//! #[error("Missing attribute: {0}")]
//! MissingAttribute(String),
//! }
//! ```
//!
//! - One-off error messages can be constructed using the `anyhow!` macro, which
//! supports string interpolation and produces an `anyhow::Error`.
//!
//! ```
//! # use anyhow::{anyhow, Result};
//! #
//! # fn demo() -> Result<()> {
//! # let missing = "...";
//! return Err(anyhow!("Missing attribute: {}", missing));
//! # Ok(())
//! # }
//! ```
//!
//! A `bail!` macro is provided as a shorthand for the same early return.
//!
//! ```
//! # use anyhow::{bail, Result};
//! #
//! # fn demo() -> Result<()> {
//! # let missing = "...";
//! bail!("Missing attribute: {}", missing);
//! # Ok(())
//! # }
//! ```
//!
//! <br>
//!
//! # No-std support
//!
//! In no_std mode, almost all of the same API is available and works the same
//! way. To depend on Anyhow in no_std mode, disable our default enabled "std"
//! feature in Cargo.toml. A global allocator is required.
//!
//! ```toml
//! [dependencies]
//! anyhow = { version = "1.0", default-features = false }
//! ```
//!
//! With versions of Rust older than 1.81, no_std mode may require an additional
//! `.map_err(Error::msg)` when working with a non-Anyhow error type inside a
//! function that returns Anyhow's error type, as the trait that `?`-based error
//! conversions are defined by is only available in std in those old versions.
#![doc(html_root_url = "https://docs.rs/anyhow/1.0.90")]
#![cfg_attr(error_generic_member_access, feature(error_generic_member_access))]
#![no_std]
#![deny(dead_code, unused_imports, unused_mut)]
#![cfg_attr(
not(anyhow_no_unsafe_op_in_unsafe_fn_lint),
deny(unsafe_op_in_unsafe_fn)
)]
#![cfg_attr(anyhow_no_unsafe_op_in_unsafe_fn_lint, allow(unused_unsafe))]
#![allow(
clippy::doc_markdown,
clippy::enum_glob_use,
clippy::explicit_auto_deref,
clippy::extra_unused_type_parameters,
clippy::incompatible_msrv,
clippy::let_underscore_untyped,
clippy::missing_errors_doc,
clippy::missing_panics_doc,
clippy::module_name_repetitions,
clippy::must_use_candidate,
clippy::needless_doctest_main,
clippy::needless_lifetimes,
clippy::new_ret_no_self,
clippy::redundant_else,
clippy::return_self_not_must_use,
clippy::struct_field_names,
clippy::unused_self,
clippy::used_underscore_binding,
clippy::wildcard_imports,
clippy::wrong_self_convention
)]
#[cfg(all(
anyhow_nightly_testing,
feature = "std",
not(error_generic_member_access)
))]
compile_error!("Build script probe failed to compile.");
extern crate alloc;
#[cfg(feature = "std")]
extern crate std;
#[macro_use]
mod backtrace;
mod chain;
mod context;
mod ensure;
mod error;
mod fmt;
mod kind;
mod macros;
mod ptr;
mod wrapper;
use crate::error::ErrorImpl;
use crate::ptr::Own;
use core::fmt::Display;
#[cfg(all(not(feature = "std"), anyhow_no_core_error))]
use core::fmt::Debug;
#[cfg(feature = "std")]
use std::error::Error as StdError;
#[cfg(not(any(feature = "std", anyhow_no_core_error)))]
use core::error::Error as StdError;
#[cfg(all(not(feature = "std"), anyhow_no_core_error))]
trait StdError: Debug + Display {
fn source(&self) -> Option<&(dyn StdError + 'static)> {
None
}
}
#[doc(no_inline)]
pub use anyhow as format_err;
/// The `Error` type, a wrapper around a dynamic error type.
///
/// `Error` works a lot like `Box<dyn std::error::Error>`, but with these
/// differences:
///
/// - `Error` requires that the error is `Send`, `Sync`, and `'static`.
/// - `Error` guarantees that a backtrace is available, even if the underlying
/// error type does not provide one.
/// - `Error` is represented as a narrow pointer &mdash; exactly one word in
/// size instead of two.
///
/// <br>
///
/// # Display representations
///
/// When you print an error object using "{}" or to_string(), only the outermost
/// underlying error or context is printed, not any of the lower level causes.
/// This is exactly as if you had called the Display impl of the error from
/// which you constructed your anyhow::Error.
///
/// ```console
/// Failed to read instrs from ./path/to/instrs.json
/// ```
///
/// To print causes as well using anyhow's default formatting of causes, use the
/// alternate selector "{:#}".
///
/// ```console
/// Failed to read instrs from ./path/to/instrs.json: No such file or directory (os error 2)
/// ```
///
/// The Debug format "{:?}" includes your backtrace if one was captured. Note
/// that this is the representation you get by default if you return an error
/// from `fn main` instead of printing it explicitly yourself.
///
/// ```console
/// Error: Failed to read instrs from ./path/to/instrs.json
///
/// Caused by:
/// No such file or directory (os error 2)
/// ```
///
/// and if there is a backtrace available:
///
/// ```console
/// Error: Failed to read instrs from ./path/to/instrs.json
///
/// Caused by:
/// No such file or directory (os error 2)
///
/// Stack backtrace:
/// 0: <E as anyhow::context::ext::StdError>::ext_context
/// at /git/anyhow/src/backtrace.rs:26
/// 1: core::result::Result<T,E>::map_err
/// at /git/rustc/src/libcore/result.rs:596
/// 2: anyhow::context::<impl anyhow::Context<T,E> for core::result::Result<T,E>>::with_context
/// at /git/anyhow/src/context.rs:58
/// 3: testing::main
/// at src/main.rs:5
/// 4: std::rt::lang_start
/// at /git/rustc/src/libstd/rt.rs:61
/// 5: main
/// 6: __libc_start_main
/// 7: _start
/// ```
///
/// To see a conventional struct-style Debug representation, use "{:#?}".
///
/// ```console
/// Error {
/// context: "Failed to read instrs from ./path/to/instrs.json",
/// source: Os {
/// code: 2,
/// kind: NotFound,
/// message: "No such file or directory",
/// },
/// }
/// ```
///
/// If none of the built-in representations are appropriate and you would prefer
/// to render the error and its cause chain yourself, it can be done something
/// like this:
///
/// ```
/// use anyhow::{Context, Result};
///
/// fn main() {
/// if let Err(err) = try_main() {
/// eprintln!("ERROR: {}", err);
/// err.chain().skip(1).for_each(|cause| eprintln!("because: {}", cause));
/// std::process::exit(1);
/// }
/// }
///
/// fn try_main() -> Result<()> {
/// # const IGNORE: &str = stringify! {
/// ...
/// # };
/// # Ok(())
/// }
/// ```
#[repr(transparent)]
pub struct Error {
inner: Own<ErrorImpl>,
}
/// Iterator of a chain of source errors.
///
/// This type is the iterator returned by [`Error::chain`].
///
/// # Example
///
/// ```
/// use anyhow::Error;
/// use std::io;
///
/// pub fn underlying_io_error_kind(error: &Error) -> Option<io::ErrorKind> {
/// for cause in error.chain() {
/// if let Some(io_error) = cause.downcast_ref::<io::Error>() {
/// return Some(io_error.kind());
/// }
/// }
/// None
/// }
/// ```
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
#[derive(Clone)]
pub struct Chain<'a> {
state: crate::chain::ChainState<'a>,
}
/// `Result<T, Error>`
///
/// This is a reasonable return type to use throughout your application but also
/// for `fn main`; if you do, failures will be printed along with any
/// [context][Context] and a backtrace if one was captured.
///
/// `anyhow::Result` may be used with one *or* two type parameters.
///
/// ```rust
/// use anyhow::Result;
///
/// # const IGNORE: &str = stringify! {
/// fn demo1() -> Result<T> {...}
/// // ^ equivalent to std::result::Result<T, anyhow::Error>
///
/// fn demo2() -> Result<T, OtherError> {...}
/// // ^ equivalent to std::result::Result<T, OtherError>
/// # };
/// ```
///
/// # Example
///
/// ```
/// # pub trait Deserialize {}
/// #
/// # mod serde_json {
/// # use super::Deserialize;
/// # use std::io;
/// #
/// # pub fn from_str<T: Deserialize>(json: &str) -> io::Result<T> {
/// # unimplemented!()
/// # }
/// # }
/// #
/// # #[derive(Debug)]
/// # struct ClusterMap;
/// #
/// # impl Deserialize for ClusterMap {}
/// #
/// use anyhow::Result;
///
/// fn main() -> Result<()> {
/// # return Ok(());
/// let config = std::fs::read_to_string("cluster.json")?;
/// let map: ClusterMap = serde_json::from_str(&config)?;
/// println!("cluster info: {:#?}", map);
/// Ok(())
/// }
/// ```
pub type Result<T, E = Error> = core::result::Result<T, E>;
/// Provides the `context` method for `Result`.
///
/// This trait is sealed and cannot be implemented for types outside of
/// `anyhow`.
///
/// <br>
///
/// # Example
///
/// ```
/// use anyhow::{Context, Result};
/// use std::fs;
/// use std::path::PathBuf;
///
/// pub struct ImportantThing {
/// path: PathBuf,
/// }
///
/// impl ImportantThing {
/// # const IGNORE: &'static str = stringify! {
/// pub fn detach(&mut self) -> Result<()> {...}
/// # };
/// # fn detach(&mut self) -> Result<()> {
/// # unimplemented!()
/// # }
/// }
///
/// pub fn do_it(mut it: ImportantThing) -> Result<Vec<u8>> {
/// it.detach().context("Failed to detach the important thing")?;
///
/// let path = &it.path;
/// let content = fs::read(path)
/// .with_context(|| format!("Failed to read instrs from {}", path.display()))?;
///
/// Ok(content)
/// }
/// ```
///
/// When printed, the outermost context would be printed first and the lower
/// level underlying causes would be enumerated below.
///
/// ```console
/// Error: Failed to read instrs from ./path/to/instrs.json
///
/// Caused by:
/// No such file or directory (os error 2)
/// ```
///
/// Refer to the [Display representations] documentation for other forms in
/// which this context chain can be rendered.
///
/// [Display representations]: Error#display-representations
///
/// <br>
///
/// # Effect on downcasting
///
/// After attaching context of type `C` onto an error of type `E`, the resulting
/// `anyhow::Error` may be downcast to `C` **or** to `E`.
///
/// That is, in codebases that rely on downcasting, Anyhow's context supports
/// both of the following use cases:
///
/// - **Attaching context whose type is insignificant onto errors whose type
/// is used in downcasts.**
///
/// In other error libraries whose context is not designed this way, it can
/// be risky to introduce context to existing code because new context might
/// break existing working downcasts. In Anyhow, any downcast that worked
/// before adding context will continue to work after you add a context, so
/// you should freely add human-readable context to errors wherever it would
/// be helpful.
///
/// ```
/// # use anyhow::bail;
/// # use thiserror::Error;
/// #
/// # #[derive(Error, Debug)]
/// # #[error("???")]
/// # struct SuspiciousError;
/// #
/// # fn helper() -> Result<()> {
/// # bail!(SuspiciousError);
/// # }
/// #
/// use anyhow::{Context, Result};
///
/// fn do_it() -> Result<()> {
/// helper().context("Failed to complete the work")?;
/// # const IGNORE: &str = stringify! {
/// ...
/// # };
/// # unreachable!()
/// }
///
/// fn main() {
/// let err = do_it().unwrap_err();
/// if let Some(e) = err.downcast_ref::<SuspiciousError>() {
/// // If helper() returned SuspiciousError, this downcast will
/// // correctly succeed even with the context in between.
/// # return;
/// }
/// # panic!("expected downcast to succeed");
/// }
/// ```
///
/// - **Attaching context whose type is used in downcasts onto errors whose
/// type is insignificant.**
///
/// Some codebases prefer to use machine-readable context to categorize
/// lower level errors in a way that will be actionable to higher levels of
/// the application.
///
/// ```
/// # use anyhow::bail;
/// # use thiserror::Error;
/// #
/// # #[derive(Error, Debug)]
/// # #[error("???")]
/// # struct HelperFailed;
/// #
/// # fn helper() -> Result<()> {
/// # bail!("no such file or directory");
/// # }
/// #
/// use anyhow::{Context, Result};
///
/// fn do_it() -> Result<()> {
/// helper().context(HelperFailed)?;
/// # const IGNORE: &str = stringify! {
/// ...
/// # };
/// # unreachable!()
/// }
///
/// fn main() {
/// let err = do_it().unwrap_err();
/// if let Some(e) = err.downcast_ref::<HelperFailed>() {
/// // If helper failed, this downcast will succeed because
/// // HelperFailed is the context that has been attached to
/// // that error.
/// # return;
/// }
/// # panic!("expected downcast to succeed");
/// }
/// ```
pub trait Context<T, E>: context::private::Sealed {
/// Wrap the error value with additional context.
fn context<C>(self, context: C) -> Result<T, Error>
where
C: Display + Send + Sync + 'static;
/// Wrap the error value with additional context that is evaluated lazily
/// only once an error does occur.
fn with_context<C, F>(self, f: F) -> Result<T, Error>
where
C: Display + Send + Sync + 'static,
F: FnOnce() -> C;
}
/// Equivalent to Ok::<_, anyhow::Error>(value).
///
/// This simplifies creation of an anyhow::Result in places where type inference
/// cannot deduce the `E` type of the result &mdash; without needing to write
/// `Ok::<_, anyhow::Error>(value)`.
///
/// One might think that `anyhow::Result::Ok(value)` would work in such cases
/// but it does not.
///
/// ```console
/// error[E0282]: type annotations needed for `std::result::Result<i32, E>`
/// --> src/main.rs:11:13
/// |
/// 11 | let _ = anyhow::Result::Ok(1);
/// | - ^^^^^^^^^^^^^^^^^^ cannot infer type for type parameter `E` declared on the enum `Result`
/// | |
/// | consider giving this pattern the explicit type `std::result::Result<i32, E>`, where the type parameter `E` is specified
/// ```
#[allow(non_snake_case)]
pub fn Ok<T>(t: T) -> Result<T> {
Result::Ok(t)
}
// Not public API. Referenced by macro-generated code.
#[doc(hidden)]
pub mod __private {
use self::not::Bool;
use crate::Error;
use alloc::fmt;
use core::fmt::Arguments;
#[doc(hidden)]
pub use crate::ensure::{BothDebug, NotBothDebug};
#[doc(hidden)]
pub use alloc::format;
#[doc(hidden)]
pub use core::result::Result::Err;
#[doc(hidden)]
pub use core::{concat, format_args, stringify};
#[doc(hidden)]
pub mod kind {
#[doc(hidden)]
pub use crate::kind::{AdhocKind, TraitKind};
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
#[doc(hidden)]
pub use crate::kind::BoxedKind;
}
#[doc(hidden)]
#[inline]
#[cold]
pub fn format_err(args: Arguments) -> Error {
#[cfg(anyhow_no_fmt_arguments_as_str)]
let fmt_arguments_as_str = None::<&str>;
#[cfg(not(anyhow_no_fmt_arguments_as_str))]
let fmt_arguments_as_str = args.as_str();
if let Some(message) = fmt_arguments_as_str {
// anyhow!("literal"), can downcast to &'static str
Error::msg(message)
} else {
// anyhow!("interpolate {var}"), can downcast to String
Error::msg(fmt::format(args))
}
}
#[doc(hidden)]
#[inline]
#[cold]
#[must_use]
pub fn must_use(error: Error) -> Error {
error
}
#[doc(hidden)]
#[inline]
pub fn not(cond: impl Bool) -> bool {
cond.not()
}
mod not {
#[doc(hidden)]
pub trait Bool {
fn not(self) -> bool;
}
impl Bool for bool {
#[inline]
fn not(self) -> bool {
!self
}
}
impl Bool for &bool {
#[inline]
fn not(self) -> bool {
!*self
}
}
}
}
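// Editorial sketch (not part of the vendored file): the two branches of
// `__private::format_err` above determine what an `anyhow!` message can later
// be downcast to, as the comments in that function note.
#[cfg(test)]
mod format_err_sketch {
    #[test]
    fn downcast_targets() {
        // No interpolation: format_args!("...").as_str() is Some, so the
        // error keeps the &'static str.
        let plain = crate::anyhow!("oh no!");
        assert_eq!(*plain.downcast_ref::<&str>().unwrap(), "oh no!");

        // An inline capture still matches the `$msg:literal` arm, but
        // as_str() is then None, so the message is formatted into a String.
        let what = "no";
        let formatted = crate::anyhow!("oh {what}!");
        assert_eq!(
            formatted.downcast_ref::<alloc::string::String>().unwrap(),
            "oh no!"
        );
    }
}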

242
vendor/anyhow/src/macros.rs vendored Normal file
View file

@ -0,0 +1,242 @@
/// Return early with an error.
///
/// This macro is equivalent to
/// <code>return Err([anyhow!($args\...)][anyhow!])</code>.
///
/// The surrounding function's or closure's return value is required to be
/// <code>Result&lt;_, [anyhow::Error][crate::Error]&gt;</code>.
///
/// [anyhow!]: crate::anyhow
///
/// # Example
///
/// ```
/// # use anyhow::{bail, Result};
/// #
/// # fn has_permission(user: usize, resource: usize) -> bool {
/// # true
/// # }
/// #
/// # fn main() -> Result<()> {
/// # let user = 0;
/// # let resource = 0;
/// #
/// if !has_permission(user, resource) {
/// bail!("permission denied for accessing {}", resource);
/// }
/// # Ok(())
/// # }
/// ```
///
/// ```
/// # use anyhow::{bail, Result};
/// # use thiserror::Error;
/// #
/// # const MAX_DEPTH: usize = 1;
/// #
/// #[derive(Error, Debug)]
/// enum ScienceError {
/// #[error("recursion limit exceeded")]
/// RecursionLimitExceeded,
/// # #[error("...")]
/// # More = (stringify! {
/// ...
/// # }, 1).1,
/// }
///
/// # fn main() -> Result<()> {
/// # let depth = 0;
/// #
/// if depth > MAX_DEPTH {
/// bail!(ScienceError::RecursionLimitExceeded);
/// }
/// # Ok(())
/// # }
/// ```
#[macro_export]
macro_rules! bail {
($msg:literal $(,)?) => {
return $crate::__private::Err($crate::__anyhow!($msg))
};
($err:expr $(,)?) => {
return $crate::__private::Err($crate::__anyhow!($err))
};
($fmt:expr, $($arg:tt)*) => {
return $crate::__private::Err($crate::__anyhow!($fmt, $($arg)*))
};
}
macro_rules! __ensure {
($ensure:item) => {
/// Return early with an error if a condition is not satisfied.
///
/// This macro is equivalent to
/// <code>if !$cond { return Err([anyhow!($args\...)][anyhow!]); }</code>.
///
/// The surrounding function's or closure's return value is required to be
/// <code>Result&lt;_, [anyhow::Error][crate::Error]&gt;</code>.
///
/// Analogously to `assert!`, `ensure!` takes a condition and exits the function
/// if the condition fails. Unlike `assert!`, `ensure!` returns an `Error`
/// rather than panicking.
///
/// [anyhow!]: crate::anyhow
///
/// # Example
///
/// ```
/// # use anyhow::{ensure, Result};
/// #
/// # fn main() -> Result<()> {
/// # let user = 0;
/// #
/// ensure!(user == 0, "only user 0 is allowed");
/// # Ok(())
/// # }
/// ```
///
/// ```
/// # use anyhow::{ensure, Result};
/// # use thiserror::Error;
/// #
/// # const MAX_DEPTH: usize = 1;
/// #
/// #[derive(Error, Debug)]
/// enum ScienceError {
/// #[error("recursion limit exceeded")]
/// RecursionLimitExceeded,
/// # #[error("...")]
/// # More = (stringify! {
/// ...
/// # }, 1).1,
/// }
///
/// # fn main() -> Result<()> {
/// # let depth = 0;
/// #
/// ensure!(depth <= MAX_DEPTH, ScienceError::RecursionLimitExceeded);
/// # Ok(())
/// # }
/// ```
$ensure
};
}
#[cfg(doc)]
__ensure![
#[macro_export]
macro_rules! ensure {
($cond:expr $(,)?) => {
if !$cond {
return $crate::__private::Err($crate::Error::msg(
$crate::__private::concat!("Condition failed: `", $crate::__private::stringify!($cond), "`")
));
}
};
($cond:expr, $msg:literal $(,)?) => {
if !$cond {
return $crate::__private::Err($crate::__anyhow!($msg));
}
};
($cond:expr, $err:expr $(,)?) => {
if !$cond {
return $crate::__private::Err($crate::__anyhow!($err));
}
};
($cond:expr, $fmt:expr, $($arg:tt)*) => {
if !$cond {
return $crate::__private::Err($crate::__anyhow!($fmt, $($arg)*));
}
};
}
];
#[cfg(not(doc))]
__ensure![
#[macro_export]
macro_rules! ensure {
($($tt:tt)*) => {
$crate::__parse_ensure!(
/* state */ 0
/* stack */ ()
/* bail */ ($($tt)*)
/* fuel */ (~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~ ~~~~~~~~~~)
/* parse */ {()}
/* dup */ ($($tt)*)
/* rest */ $($tt)*
)
};
}
];
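// Editorial note (not in the upstream source): in the non-doc expansion
// above, `__parse_ensure!` is a tt-muncher. The `fuel` argument of 120 `~`
// tokens bounds its recursion: each parsing step consumes one `~`, and once
// the fuel runs out no arm matches and the catch-all rule falls back to
// `__fallback_ensure!`, which still checks the condition but omits the
// "(lhs vs rhs)" detail. The duplicated `dup`/`rest` token streams let each
// rule capture the next token as a `$:tt` from one copy while matching it
// literally against the other.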
/// Construct an ad-hoc error from a string or existing non-`anyhow` error
/// value.
///
/// This evaluates to an [`Error`][crate::Error]. It can take either just a
/// string, or a format string with arguments. It also can take any custom type
/// which implements `Debug` and `Display`.
///
/// If called with a single argument whose type implements `std::error::Error`
/// (in addition to `Debug` and `Display`, which are always required), then that
/// Error impl's `source` is preserved as the `source` of the resulting
/// `anyhow::Error`.
///
/// # Example
///
/// ```
/// # type V = ();
/// #
/// use anyhow::{anyhow, Result};
///
/// fn lookup(key: &str) -> Result<V> {
/// if key.len() != 16 {
/// return Err(anyhow!("key length must be 16 characters, got {:?}", key));
/// }
///
/// // ...
/// # Ok(())
/// }
/// ```
#[macro_export]
macro_rules! anyhow {
($msg:literal $(,)?) => {
$crate::__private::must_use({
let error = $crate::__private::format_err($crate::__private::format_args!($msg));
error
})
};
($err:expr $(,)?) => {
$crate::__private::must_use({
use $crate::__private::kind::*;
let error = match $err {
error => (&error).anyhow_kind().new(error),
};
error
})
};
($fmt:expr, $($arg:tt)*) => {
$crate::Error::msg($crate::__private::format!($fmt, $($arg)*))
};
}
// Not public API. This is used in the implementation of some of the other
// macros, in which the must_use call is not needed because the value is known
// to be used.
#[doc(hidden)]
#[macro_export]
macro_rules! __anyhow {
($msg:literal $(,)?) => ({
let error = $crate::__private::format_err($crate::__private::format_args!($msg));
error
});
($err:expr $(,)?) => ({
use $crate::__private::kind::*;
let error = match $err {
error => (&error).anyhow_kind().new(error),
};
error
});
($fmt:expr, $($arg:tt)*) => {
$crate::Error::msg($crate::__private::format!($fmt, $($arg)*))
};
}

199
vendor/anyhow/src/ptr.rs vendored Normal file
View file

@ -0,0 +1,199 @@
use alloc::boxed::Box;
use core::marker::PhantomData;
use core::ptr::NonNull;
#[repr(transparent)]
pub struct Own<T>
where
T: ?Sized,
{
pub ptr: NonNull<T>,
}
unsafe impl<T> Send for Own<T> where T: ?Sized {}
unsafe impl<T> Sync for Own<T> where T: ?Sized {}
impl<T> Copy for Own<T> where T: ?Sized {}
impl<T> Clone for Own<T>
where
T: ?Sized,
{
fn clone(&self) -> Self {
*self
}
}
impl<T> Own<T>
where
T: ?Sized,
{
pub fn new(ptr: Box<T>) -> Self {
Own {
ptr: unsafe { NonNull::new_unchecked(Box::into_raw(ptr)) },
}
}
pub fn cast<U: CastTo>(self) -> Own<U::Target> {
Own {
ptr: self.ptr.cast(),
}
}
pub unsafe fn boxed(self) -> Box<T> {
unsafe { Box::from_raw(self.ptr.as_ptr()) }
}
pub fn by_ref(&self) -> Ref<T> {
Ref {
ptr: self.ptr,
lifetime: PhantomData,
}
}
pub fn by_mut(&mut self) -> Mut<T> {
Mut {
ptr: self.ptr,
lifetime: PhantomData,
}
}
}
#[repr(transparent)]
pub struct Ref<'a, T>
where
T: ?Sized,
{
pub ptr: NonNull<T>,
lifetime: PhantomData<&'a T>,
}
impl<'a, T> Copy for Ref<'a, T> where T: ?Sized {}
impl<'a, T> Clone for Ref<'a, T>
where
T: ?Sized,
{
fn clone(&self) -> Self {
*self
}
}
impl<'a, T> Ref<'a, T>
where
T: ?Sized,
{
pub fn new(ptr: &'a T) -> Self {
Ref {
ptr: NonNull::from(ptr),
lifetime: PhantomData,
}
}
#[cfg(not(anyhow_no_ptr_addr_of))]
pub fn from_raw(ptr: NonNull<T>) -> Self {
Ref {
ptr,
lifetime: PhantomData,
}
}
pub fn cast<U: CastTo>(self) -> Ref<'a, U::Target> {
Ref {
ptr: self.ptr.cast(),
lifetime: PhantomData,
}
}
#[cfg(not(anyhow_no_ptr_addr_of))]
pub fn by_mut(self) -> Mut<'a, T> {
Mut {
ptr: self.ptr,
lifetime: PhantomData,
}
}
#[cfg(not(anyhow_no_ptr_addr_of))]
pub fn as_ptr(self) -> *const T {
self.ptr.as_ptr() as *const T
}
pub unsafe fn deref(self) -> &'a T {
unsafe { &*self.ptr.as_ptr() }
}
}
#[repr(transparent)]
pub struct Mut<'a, T>
where
T: ?Sized,
{
pub ptr: NonNull<T>,
lifetime: PhantomData<&'a mut T>,
}
impl<'a, T> Copy for Mut<'a, T> where T: ?Sized {}
impl<'a, T> Clone for Mut<'a, T>
where
T: ?Sized,
{
fn clone(&self) -> Self {
*self
}
}
impl<'a, T> Mut<'a, T>
where
T: ?Sized,
{
#[cfg(anyhow_no_ptr_addr_of)]
pub fn new(ptr: &'a mut T) -> Self {
Mut {
ptr: NonNull::from(ptr),
lifetime: PhantomData,
}
}
pub fn cast<U: CastTo>(self) -> Mut<'a, U::Target> {
Mut {
ptr: self.ptr.cast(),
lifetime: PhantomData,
}
}
#[cfg(not(anyhow_no_ptr_addr_of))]
pub fn by_ref(self) -> Ref<'a, T> {
Ref {
ptr: self.ptr,
lifetime: PhantomData,
}
}
pub fn extend<'b>(self) -> Mut<'b, T> {
Mut {
ptr: self.ptr,
lifetime: PhantomData,
}
}
pub unsafe fn deref_mut(self) -> &'a mut T {
unsafe { &mut *self.ptr.as_ptr() }
}
}
impl<'a, T> Mut<'a, T> {
pub unsafe fn read(self) -> T {
unsafe { self.ptr.as_ptr().read() }
}
}
// Force turbofish on all calls of `.cast::<U>()`.
pub trait CastTo {
type Target;
}
impl<T> CastTo for T {
type Target = T;
}
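// Editorial note (not in the upstream source): these wrappers let error.rs
// juggle a single NonNull pointer. `Own` records ownership of a Box that has
// been leaked into a raw pointer, `Ref`/`Mut` record borrows of it, and
// `cast::<U>()` (forced to turbofish via `CastTo`) reinterprets the pointee
// between the type-erased view and the concrete error's representation,
// which is what keeps `anyhow::Error` one word wide.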

84
vendor/anyhow/src/wrapper.rs vendored Normal file
View file

@ -0,0 +1,84 @@
use crate::StdError;
use core::fmt::{self, Debug, Display};
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
use alloc::boxed::Box;
#[cfg(error_generic_member_access)]
use core::error::Request;
#[repr(transparent)]
pub struct MessageError<M>(pub M);
impl<M> Debug for MessageError<M>
where
M: Display + Debug,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(&self.0, f)
}
}
impl<M> Display for MessageError<M>
where
M: Display + Debug,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(&self.0, f)
}
}
impl<M> StdError for MessageError<M> where M: Display + Debug + 'static {}
#[repr(transparent)]
pub struct DisplayError<M>(pub M);
impl<M> Debug for DisplayError<M>
where
M: Display,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(&self.0, f)
}
}
impl<M> Display for DisplayError<M>
where
M: Display,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(&self.0, f)
}
}
impl<M> StdError for DisplayError<M> where M: Display + 'static {}
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
#[repr(transparent)]
pub struct BoxedError(pub Box<dyn StdError + Send + Sync>);
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
impl Debug for BoxedError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(&self.0, f)
}
}
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
impl Display for BoxedError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(&self.0, f)
}
}
#[cfg(any(feature = "std", not(anyhow_no_core_error)))]
impl StdError for BoxedError {
fn source(&self) -> Option<&(dyn StdError + 'static)> {
self.0.source()
}
#[cfg(error_generic_member_access)]
fn provide<'a>(&'a self, request: &mut Request<'a>) {
self.0.provide(request);
}
}
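// Editorial note (not in the upstream source): these newtypes let plain
// messages and boxed errors participate as `StdError` values. `MessageError`
// forwards both Debug and Display to the wrapped message, `DisplayError`
// renders Debug via Display as well, and `BoxedError` forwards source() and,
// when available, provide() from the boxed error it wraps.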

14
vendor/anyhow/tests/common/mod.rs vendored Normal file
View file

@ -0,0 +1,14 @@
use anyhow::{bail, Result};
use std::io;
pub fn bail_literal() -> Result<()> {
bail!("oh no!");
}
pub fn bail_fmt() -> Result<()> {
bail!("{} {}!", "oh", "no");
}
pub fn bail_error() -> Result<()> {
bail!(io::Error::new(io::ErrorKind::Other, "oh no!"));
}

7
vendor/anyhow/tests/compiletest.rs vendored Normal file
View file

@ -0,0 +1,7 @@
#[rustversion::attr(not(nightly), ignore = "requires nightly")]
#[cfg_attr(miri, ignore = "incompatible with miri")]
#[test]
fn ui() {
let t = trybuild::TestCases::new();
t.compile_fail("tests/ui/*.rs");
}

53
vendor/anyhow/tests/drop/mod.rs vendored Normal file
View file

@ -0,0 +1,53 @@
#![allow(clippy::module_name_repetitions)]
use std::error::Error as StdError;
use std::fmt::{self, Display};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
#[derive(Debug)]
pub struct Flag {
atomic: Arc<AtomicBool>,
}
impl Flag {
pub fn new() -> Self {
Flag {
atomic: Arc::new(AtomicBool::new(false)),
}
}
pub fn get(&self) -> bool {
self.atomic.load(Ordering::Relaxed)
}
}
#[derive(Debug)]
pub struct DetectDrop {
has_dropped: Flag,
}
impl DetectDrop {
pub fn new(has_dropped: &Flag) -> Self {
DetectDrop {
has_dropped: Flag {
atomic: Arc::clone(&has_dropped.atomic),
},
}
}
}
impl StdError for DetectDrop {}
impl Display for DetectDrop {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "oh no!")
}
}
impl Drop for DetectDrop {
fn drop(&mut self) {
let already_dropped = self.has_dropped.atomic.swap(true, Ordering::Relaxed);
assert!(!already_dropped);
}
}

34
vendor/anyhow/tests/test_autotrait.rs vendored Normal file
View file

@ -0,0 +1,34 @@
#![allow(clippy::extra_unused_type_parameters)]
use anyhow::Error;
use std::panic::{RefUnwindSafe, UnwindSafe};
#[test]
fn test_send() {
fn assert_send<T: Send>() {}
assert_send::<Error>();
}
#[test]
fn test_sync() {
fn assert_sync<T: Sync>() {}
assert_sync::<Error>();
}
#[test]
fn test_unwind_safe() {
fn assert_unwind_safe<T: UnwindSafe>() {}
assert_unwind_safe::<Error>();
}
#[test]
fn test_ref_unwind_safe() {
fn assert_ref_unwind_safe<T: RefUnwindSafe>() {}
assert_ref_unwind_safe::<Error>();
}
#[test]
fn test_unpin() {
fn assert_unpin<T: Unpin>() {}
assert_unpin::<Error>();
}

15
vendor/anyhow/tests/test_backtrace.rs vendored Normal file
View file

@ -0,0 +1,15 @@
#![allow(clippy::let_underscore_untyped)]
#[rustversion::not(nightly)]
#[ignore = "requires nightly"]
#[test]
fn test_backtrace() {}
#[rustversion::nightly]
#[test]
fn test_backtrace() {
use anyhow::anyhow;
let error = anyhow!("oh no!");
let _ = error.backtrace();
}

45
vendor/anyhow/tests/test_boxed.rs vendored Normal file
View file

@ -0,0 +1,45 @@
#![allow(
// Clippy bug: https://github.com/rust-lang/rust-clippy/issues/7422
clippy::nonstandard_macro_braces,
)]
use anyhow::anyhow;
use std::error::Error as StdError;
use std::io;
use thiserror::Error;
#[derive(Error, Debug)]
#[error("outer")]
struct MyError {
source: io::Error,
}
#[test]
fn test_boxed_str() {
let error = Box::<dyn StdError + Send + Sync>::from("oh no!");
let error = anyhow!(error);
assert_eq!("oh no!", error.to_string());
assert_eq!(
"oh no!",
error
.downcast_ref::<Box<dyn StdError + Send + Sync>>()
.unwrap()
.to_string()
);
}
#[test]
fn test_boxed_thiserror() {
let error = MyError {
source: io::Error::new(io::ErrorKind::Other, "oh no!"),
};
let error = anyhow!(error);
assert_eq!("oh no!", error.source().unwrap().to_string());
}
#[test]
fn test_boxed_anyhow() {
let error = anyhow!("oh no!").context("it failed");
let error = anyhow!(error);
assert_eq!("oh no!", error.source().unwrap().to_string());
}

69
vendor/anyhow/tests/test_chain.rs vendored Normal file
View file

@ -0,0 +1,69 @@
use anyhow::{anyhow, Chain, Error};
fn error() -> Error {
anyhow!({ 0 }).context(1).context(2).context(3)
}
#[test]
fn test_iter() {
let e = error();
let mut chain = e.chain();
assert_eq!("3", chain.next().unwrap().to_string());
assert_eq!("2", chain.next().unwrap().to_string());
assert_eq!("1", chain.next().unwrap().to_string());
assert_eq!("0", chain.next().unwrap().to_string());
assert!(chain.next().is_none());
assert!(chain.next_back().is_none());
}
#[test]
fn test_rev() {
let e = error();
let mut chain = e.chain().rev();
assert_eq!("0", chain.next().unwrap().to_string());
assert_eq!("1", chain.next().unwrap().to_string());
assert_eq!("2", chain.next().unwrap().to_string());
assert_eq!("3", chain.next().unwrap().to_string());
assert!(chain.next().is_none());
assert!(chain.next_back().is_none());
}
#[test]
fn test_len() {
let e = error();
let mut chain = e.chain();
assert_eq!(4, chain.len());
assert_eq!((4, Some(4)), chain.size_hint());
assert_eq!("3", chain.next().unwrap().to_string());
assert_eq!(3, chain.len());
assert_eq!((3, Some(3)), chain.size_hint());
assert_eq!("0", chain.next_back().unwrap().to_string());
assert_eq!(2, chain.len());
assert_eq!((2, Some(2)), chain.size_hint());
assert_eq!("2", chain.next().unwrap().to_string());
assert_eq!(1, chain.len());
assert_eq!((1, Some(1)), chain.size_hint());
assert_eq!("1", chain.next_back().unwrap().to_string());
assert_eq!(0, chain.len());
assert_eq!((0, Some(0)), chain.size_hint());
assert!(chain.next().is_none());
}
#[test]
fn test_default() {
let mut c = Chain::default();
assert!(c.next().is_none());
}
#[test]
#[allow(clippy::redundant_clone)]
fn test_clone() {
let e = error();
let mut chain = e.chain().clone();
assert_eq!("3", chain.next().unwrap().to_string());
assert_eq!("2", chain.next().unwrap().to_string());
assert_eq!("1", chain.next().unwrap().to_string());
assert_eq!("0", chain.next().unwrap().to_string());
assert!(chain.next().is_none());
assert!(chain.next_back().is_none());
}

172
vendor/anyhow/tests/test_context.rs vendored Normal file
View file

@ -0,0 +1,172 @@
#![allow(
// Clippy bug: https://github.com/rust-lang/rust-clippy/issues/7422
clippy::nonstandard_macro_braces,
)]
mod drop;
use crate::drop::{DetectDrop, Flag};
use anyhow::{Context, Error, Result};
use std::fmt::{self, Display};
use thiserror::Error;
// https://github.com/dtolnay/anyhow/issues/18
#[test]
fn test_inference() -> Result<()> {
let x = "1";
let y: u32 = x.parse().context("...")?;
assert_eq!(y, 1);
Ok(())
}
macro_rules! context_type {
($name:ident) => {
#[derive(Debug)]
struct $name {
message: &'static str,
#[allow(dead_code)]
drop: DetectDrop,
}
impl Display for $name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(self.message)
}
}
};
}
context_type!(HighLevel);
context_type!(MidLevel);
#[derive(Error, Debug)]
#[error("{message}")]
struct LowLevel {
message: &'static str,
drop: DetectDrop,
}
struct Dropped {
low: Flag,
mid: Flag,
high: Flag,
}
impl Dropped {
fn none(&self) -> bool {
!self.low.get() && !self.mid.get() && !self.high.get()
}
fn all(&self) -> bool {
self.low.get() && self.mid.get() && self.high.get()
}
}
fn make_chain() -> (Error, Dropped) {
let dropped = Dropped {
low: Flag::new(),
mid: Flag::new(),
high: Flag::new(),
};
let low = LowLevel {
message: "no such file or directory",
drop: DetectDrop::new(&dropped.low),
};
// impl Context for Result<T, E>
let mid = Err::<(), LowLevel>(low)
.context(MidLevel {
message: "failed to load config",
drop: DetectDrop::new(&dropped.mid),
})
.unwrap_err();
// impl Context for Result<T, Error>
let high = Err::<(), Error>(mid)
.context(HighLevel {
message: "failed to start server",
drop: DetectDrop::new(&dropped.high),
})
.unwrap_err();
(high, dropped)
}
#[test]
fn test_downcast_ref() {
let (err, dropped) = make_chain();
assert!(!err.is::<String>());
assert!(err.downcast_ref::<String>().is_none());
assert!(err.is::<HighLevel>());
let high = err.downcast_ref::<HighLevel>().unwrap();
assert_eq!(high.to_string(), "failed to start server");
assert!(err.is::<MidLevel>());
let mid = err.downcast_ref::<MidLevel>().unwrap();
assert_eq!(mid.to_string(), "failed to load config");
assert!(err.is::<LowLevel>());
let low = err.downcast_ref::<LowLevel>().unwrap();
assert_eq!(low.to_string(), "no such file or directory");
assert!(dropped.none());
drop(err);
assert!(dropped.all());
}
#[test]
fn test_downcast_high() {
let (err, dropped) = make_chain();
let err = err.downcast::<HighLevel>().unwrap();
assert!(!dropped.high.get());
assert!(dropped.low.get() && dropped.mid.get());
drop(err);
assert!(dropped.all());
}
#[test]
fn test_downcast_mid() {
let (err, dropped) = make_chain();
let err = err.downcast::<MidLevel>().unwrap();
assert!(!dropped.mid.get());
assert!(dropped.low.get() && dropped.high.get());
drop(err);
assert!(dropped.all());
}
#[test]
fn test_downcast_low() {
let (err, dropped) = make_chain();
let err = err.downcast::<LowLevel>().unwrap();
assert!(!dropped.low.get());
assert!(dropped.mid.get() && dropped.high.get());
drop(err);
assert!(dropped.all());
}
#[test]
fn test_unsuccessful_downcast() {
let (err, dropped) = make_chain();
let err = err.downcast::<String>().unwrap_err();
assert!(dropped.none());
drop(err);
assert!(dropped.all());
}
#[test]
fn test_root_cause() {
let (err, _) = make_chain();
assert_eq!(err.root_cause().to_string(), "no such file or directory");
}

46
vendor/anyhow/tests/test_convert.rs vendored Normal file
View file

@ -0,0 +1,46 @@
#![allow(clippy::unnecessary_wraps)]
mod drop;
use self::drop::{DetectDrop, Flag};
use anyhow::{Error, Result};
use std::error::Error as StdError;
#[test]
fn test_convert() {
let has_dropped = Flag::new();
let error = Error::new(DetectDrop::new(&has_dropped));
let box_dyn = Box::<dyn StdError>::from(error);
assert_eq!("oh no!", box_dyn.to_string());
drop(box_dyn);
assert!(has_dropped.get());
}
#[test]
fn test_convert_send() {
let has_dropped = Flag::new();
let error = Error::new(DetectDrop::new(&has_dropped));
let box_dyn = Box::<dyn StdError + Send>::from(error);
assert_eq!("oh no!", box_dyn.to_string());
drop(box_dyn);
assert!(has_dropped.get());
}
#[test]
fn test_convert_send_sync() {
let has_dropped = Flag::new();
let error = Error::new(DetectDrop::new(&has_dropped));
let box_dyn = Box::<dyn StdError + Send + Sync>::from(error);
assert_eq!("oh no!", box_dyn.to_string());
drop(box_dyn);
assert!(has_dropped.get());
}
#[test]
fn test_question_mark() -> Result<(), Box<dyn StdError>> {
fn f() -> Result<()> {
Ok(())
}
f()?;
Ok(())
}

123
vendor/anyhow/tests/test_downcast.rs vendored Normal file
View file

@ -0,0 +1,123 @@
#![allow(clippy::assertions_on_result_states, clippy::wildcard_imports)]
mod common;
mod drop;
use self::common::*;
use self::drop::{DetectDrop, Flag};
use anyhow::Error;
use std::error::Error as StdError;
use std::fmt::{self, Display};
use std::io;
#[test]
fn test_downcast() {
assert_eq!(
"oh no!",
bail_literal().unwrap_err().downcast::<&str>().unwrap(),
);
assert_eq!(
"oh no!",
bail_fmt().unwrap_err().downcast::<String>().unwrap(),
);
assert_eq!(
"oh no!",
bail_error()
.unwrap_err()
.downcast::<io::Error>()
.unwrap()
.to_string(),
);
}
#[test]
fn test_downcast_ref() {
assert_eq!(
"oh no!",
*bail_literal().unwrap_err().downcast_ref::<&str>().unwrap(),
);
assert_eq!(
"oh no!",
bail_fmt().unwrap_err().downcast_ref::<String>().unwrap(),
);
assert_eq!(
"oh no!",
bail_error()
.unwrap_err()
.downcast_ref::<io::Error>()
.unwrap()
.to_string(),
);
}
#[test]
fn test_downcast_mut() {
assert_eq!(
"oh no!",
*bail_literal().unwrap_err().downcast_mut::<&str>().unwrap(),
);
assert_eq!(
"oh no!",
bail_fmt().unwrap_err().downcast_mut::<String>().unwrap(),
);
assert_eq!(
"oh no!",
bail_error()
.unwrap_err()
.downcast_mut::<io::Error>()
.unwrap()
.to_string(),
);
let mut bailed = bail_fmt().unwrap_err();
*bailed.downcast_mut::<String>().unwrap() = "clobber".to_string();
assert_eq!(bailed.downcast_ref::<String>().unwrap(), "clobber");
assert_eq!(bailed.downcast_mut::<String>().unwrap(), "clobber");
assert_eq!(bailed.downcast::<String>().unwrap(), "clobber");
}
#[test]
fn test_drop() {
let has_dropped = Flag::new();
let error = Error::new(DetectDrop::new(&has_dropped));
drop(error.downcast::<DetectDrop>().unwrap());
assert!(has_dropped.get());
}
#[test]
fn test_as_ref() {
let error = bail_error().unwrap_err();
let ref_dyn: &dyn StdError = error.as_ref();
assert_eq!("oh no!", ref_dyn.to_string());
let ref_dyn_send_sync: &(dyn StdError + Send + Sync) = error.as_ref();
assert_eq!("oh no!", ref_dyn_send_sync.to_string());
}
#[test]
fn test_large_alignment() {
#[repr(align(64))]
#[derive(Debug)]
struct LargeAlignedError(&'static str);
impl Display for LargeAlignedError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(self.0)
}
}
impl StdError for LargeAlignedError {}
let error = Error::new(LargeAlignedError("oh no!"));
assert_eq!(
"oh no!",
error.downcast_ref::<LargeAlignedError>().unwrap().0
);
}
#[test]
fn test_unsuccessful_downcast() {
let mut error = bail_error().unwrap_err();
assert!(error.downcast_ref::<&str>().is_none());
assert!(error.downcast_mut::<&str>().is_none());
assert!(error.downcast::<&str>().is_err());
}

741
vendor/anyhow/tests/test_ensure.rs vendored Normal file
View file

@ -0,0 +1,741 @@
#![allow(
clippy::bool_to_int_with_if,
clippy::char_lit_as_u8,
clippy::deref_addrof,
clippy::diverging_sub_expression,
clippy::erasing_op,
clippy::extra_unused_type_parameters,
clippy::if_same_then_else,
clippy::ifs_same_cond,
clippy::ignored_unit_patterns,
clippy::items_after_statements,
clippy::let_and_return,
clippy::let_underscore_untyped,
clippy::match_bool,
clippy::needless_else,
clippy::never_loop,
clippy::overly_complex_bool_expr,
clippy::redundant_closure_call,
clippy::redundant_pattern_matching,
clippy::too_many_lines,
clippy::unit_arg,
clippy::unnecessary_cast,
clippy::while_immutable_condition,
clippy::zero_ptr,
irrefutable_let_patterns
)]
use self::Enum::Generic;
use anyhow::{anyhow, ensure, Chain, Error, Result};
use std::fmt::{self, Debug};
use std::iter;
use std::marker::{PhantomData, PhantomData as P};
use std::mem;
use std::ops::Add;
use std::ptr;
struct S;
impl<T> Add<T> for S {
type Output = bool;
fn add(self, rhs: T) -> Self::Output {
let _ = rhs;
false
}
}
trait Trait: Sized {
const V: usize = 0;
fn t(self, i: i32) -> i32 {
i
}
}
impl<T> Trait for T {}
enum Enum<T: ?Sized> {
#[allow(dead_code)]
Thing(PhantomData<T>),
Generic,
}
impl<T: ?Sized> PartialEq for Enum<T> {
fn eq(&self, rhs: &Self) -> bool {
mem::discriminant(self) == mem::discriminant(rhs)
}
}
impl<T: ?Sized> Debug for Enum<T> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("Generic")
}
}
#[track_caller]
fn assert_err<T: Debug>(result: impl FnOnce() -> Result<T>, expected: &'static str) {
let actual = result().unwrap_err().to_string();
// In general different rustc versions will format the interpolated lhs and
// rhs $:expr fragment with insignificant differences in whitespace or
// punctuation, so we check the message in full against nightly and do just
// a cursory test on older toolchains.
if rustversion::cfg!(nightly) && !cfg!(miri) {
assert_eq!(actual, expected);
} else {
assert_eq!(actual.contains(" vs "), expected.contains(" vs "));
}
}
#[test]
fn test_recursion() {
// Must not blow the default #[recursion_limit], which is 128.
#[rustfmt::skip]
let test = || Ok(ensure!(
false | false | false | false | false | false | false | false | false |
false | false | false | false | false | false | false | false | false |
false | false | false | false | false | false | false | false | false |
false | false | false | false | false | false | false | false | false |
false | false | false | false | false | false | false | false | false |
false | false | false | false | false | false | false | false | false |
false | false | false | false | false | false | false | false | false
));
test().unwrap_err();
}
#[test]
fn test_low_precedence_control_flow() {
#[allow(unreachable_code)]
let test = || {
let val = loop {
// Break has lower precedence than the comparison operators so the
// expression here is `S + (break (1 == 1))`. It would be bad if the
// ensure macro partitioned this input into `(S + break 1) == (1)`
// because that means a different thing than what was written.
ensure!(S + break 1 == 1);
};
Ok(val)
};
assert!(test().unwrap());
}
#[test]
fn test_low_precedence_binary_operator() {
// Must not partition as `false == (true && false)`.
let test = || Ok(ensure!(false == true && false));
assert_err(test, "Condition failed: `false == true && false`");
// But outside the root level, it is fine.
let test = || Ok(ensure!(while false == true && false {} < ()));
assert_err(
test,
"Condition failed: `while false == true && false {} < ()` (() vs ())",
);
let a = 15;
let b = 3;
let test = || Ok(ensure!(a <= b || a - b <= 10));
assert_err(test, "Condition failed: `a <= b || a - b <= 10`");
}
#[test]
fn test_high_precedence_binary_operator() {
let a = 15;
let b = 3;
let test = || Ok(ensure!(a - b <= 10));
assert_err(test, "Condition failed: `a - b <= 10` (12 vs 10)");
}
#[test]
fn test_closure() {
// Must not partition as `(S + move) || (1 == 1)` by treating move as an
// identifier, nor as `(S + move || 1) == (1)` by misinterpreting the
// closure precedence.
let test = || Ok(ensure!(S + move || 1 == 1));
assert_err(test, "Condition failed: `S + move || 1 == 1`");
let test = || Ok(ensure!(S + || 1 == 1));
assert_err(test, "Condition failed: `S + || 1 == 1`");
// Must not partition as `S + ((move | ()) | 1) == 1` by treating those
// pipes as bitwise-or.
let test = || Ok(ensure!(S + move |()| 1 == 1));
assert_err(test, "Condition failed: `S + move |()| 1 == 1`");
let test = || Ok(ensure!(S + |()| 1 == 1));
assert_err(test, "Condition failed: `S + |()| 1 == 1`");
}
#[test]
fn test_unary() {
let mut x = &1;
let test = || Ok(ensure!(*x == 2));
assert_err(test, "Condition failed: `*x == 2` (1 vs 2)");
let test = || Ok(ensure!(!x == 1));
assert_err(test, "Condition failed: `!x == 1` (-2 vs 1)");
let test = || Ok(ensure!(-x == 1));
assert_err(test, "Condition failed: `-x == 1` (-1 vs 1)");
let test = || Ok(ensure!(&x == &&2));
assert_err(test, "Condition failed: `&x == &&2` (1 vs 2)");
let test = || Ok(ensure!(&mut x == *&&mut &2));
assert_err(test, "Condition failed: `&mut x == *&&mut &2` (1 vs 2)");
}
#[test]
fn test_if() {
#[rustfmt::skip]
let test = || Ok(ensure!(if false {}.t(1) == 2));
assert_err(test, "Condition failed: `if false {}.t(1) == 2` (1 vs 2)");
#[rustfmt::skip]
let test = || Ok(ensure!(if false {} else {}.t(1) == 2));
assert_err(
test,
"Condition failed: `if false {} else {}.t(1) == 2` (1 vs 2)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(if false {} else if false {}.t(1) == 2));
assert_err(
test,
"Condition failed: `if false {} else if false {}.t(1) == 2` (1 vs 2)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(if let 1 = 2 {}.t(1) == 2));
assert_err(
test,
"Condition failed: `if let 1 = 2 {}.t(1) == 2` (1 vs 2)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(if let 1 | 2 = 2 {}.t(1) == 2));
assert_err(
test,
"Condition failed: `if let 1 | 2 = 2 {}.t(1) == 2` (1 vs 2)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(if let | 1 | 2 = 2 {}.t(1) == 2));
assert_err(
test,
"Condition failed: `if let | 1 | 2 = 2 {}.t(1) == 2` (1 vs 2)",
);
}
#[test]
fn test_loop() {
#[rustfmt::skip]
let test = || Ok(ensure!(1 + loop { break 1 } == 1));
assert_err(
test,
"Condition failed: `1 + loop { break 1 } == 1` (2 vs 1)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(1 + 'a: loop { break 'a 1 } == 1));
assert_err(
test,
"Condition failed: `1 + 'a: loop { break 'a 1 } == 1` (2 vs 1)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(while false {}.t(1) == 2));
assert_err(
test,
"Condition failed: `while false {}.t(1) == 2` (1 vs 2)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(while let None = Some(1) {}.t(1) == 2));
assert_err(
test,
"Condition failed: `while let None = Some(1) {}.t(1) == 2` (1 vs 2)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(for _x in iter::once(0) {}.t(1) == 2));
assert_err(
test,
"Condition failed: `for _x in iter::once(0) {}.t(1) == 2` (1 vs 2)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(for | _x in iter::once(0) {}.t(1) == 2));
assert_err(
test,
"Condition failed: `for | _x in iter::once(0) {}.t(1) == 2` (1 vs 2)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(for true | false in iter::empty() {}.t(1) == 2));
assert_err(
test,
"Condition failed: `for true | false in iter::empty() {}.t(1) == 2` (1 vs 2)",
);
}
#[test]
fn test_match() {
#[rustfmt::skip]
let test = || Ok(ensure!(match 1 == 1 { true => 1, false => 0 } == 2));
assert_err(
test,
"Condition failed: `match 1 == 1 { true => 1, false => 0 } == 2` (1 vs 2)",
);
}
#[test]
fn test_atom() {
let test = || Ok(ensure!([false, false].len() > 3));
assert_err(
test,
"Condition failed: `[false, false].len() > 3` (2 vs 3)",
);
#[rustfmt::skip]
let test = || Ok(ensure!({ let x = 1; x } >= 3));
assert_err(test, "Condition failed: `{ let x = 1; x } >= 3` (1 vs 3)");
let test = || Ok(ensure!(S + async { 1 } == true));
assert_err(
test,
"Condition failed: `S + async { 1 } == true` (false vs true)",
);
let test = || Ok(ensure!(S + async move { 1 } == true));
assert_err(
test,
"Condition failed: `S + async move { 1 } == true` (false vs true)",
);
let x = &1;
let test = || Ok(ensure!(S + unsafe { ptr::read(x) } == true));
assert_err(
test,
"Condition failed: `S + unsafe { ptr::read(x) } == true` (false vs true)",
);
}
#[test]
fn test_path() {
let test = || Ok(ensure!(crate::S.t(1) == 2));
assert_err(test, "Condition failed: `crate::S.t(1) == 2` (1 vs 2)");
let test = || Ok(ensure!(::anyhow::Error::root_cause.t(1) == 2));
assert_err(
test,
"Condition failed: `::anyhow::Error::root_cause.t(1) == 2` (1 vs 2)",
);
let test = || Ok(ensure!(Error::msg::<&str>.t(1) == 2));
assert_err(
test,
"Condition failed: `Error::msg::<&str>.t(1) == 2` (1 vs 2)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(Error::msg::<&str,>.t(1) == 2));
assert_err(
test,
"Condition failed: `Error::msg::<&str,>.t(1) == 2` (1 vs 2)",
);
let test = || Ok(ensure!(Error::msg::<<str as ToOwned>::Owned>.t(1) == 2));
assert_err(
test,
"Condition failed: `Error::msg::<<str as ToOwned>::Owned>.t(1) == 2` (1 vs 2)",
);
let test = || Ok(ensure!(Chain::<'static>::new.t(1) == 2));
assert_err(
test,
"Condition failed: `Chain::<'static>::new.t(1) == 2` (1 vs 2)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(Chain::<'static,>::new.t(1) == 2));
assert_err(
test,
"Condition failed: `Chain::<'static,>::new.t(1) == 2` (1 vs 2)",
);
fn f<const I: isize>() {}
let test = || Ok(ensure!(f::<1>() != ()));
assert_err(test, "Condition failed: `f::<1>() != ()` (() vs ())");
let test = || Ok(ensure!(f::<-1>() != ()));
assert_err(test, "Condition failed: `f::<-1>() != ()` (() vs ())");
fn g<T, const I: isize>() {}
let test = || Ok(ensure!(g::<u8, 1>() != ()));
assert_err(test, "Condition failed: `g::<u8, 1>() != ()` (() vs ())");
let test = || Ok(ensure!(g::<u8, -1>() != ()));
assert_err(test, "Condition failed: `g::<u8, -1>() != ()` (() vs ())");
#[derive(PartialOrd, PartialEq, Debug)]
enum E<'a, T> {
#[allow(dead_code)]
T(&'a T),
U,
}
#[rustfmt::skip]
let test = || Ok(ensure!(E::U::<>>E::U::<u8>));
assert_err(test, "Condition failed: `E::U::<> > E::U::<u8>` (U vs U)");
#[rustfmt::skip]
let test = || Ok(ensure!(E::U::<u8>>E::U));
assert_err(test, "Condition failed: `E::U::<u8> > E::U` (U vs U)");
#[rustfmt::skip]
let test = || Ok(ensure!(E::U::<u8,>>E::U));
assert_err(test, "Condition failed: `E::U::<u8,> > E::U` (U vs U)");
let test = || Ok(ensure!(Generic::<dyn Debug + Sync> != Generic));
assert_err(
test,
"Condition failed: `Generic::<dyn Debug + Sync> != Generic` (Generic vs Generic)",
);
let test = || Ok(ensure!(Generic::<dyn Fn() + Sync> != Generic));
assert_err(
test,
"Condition failed: `Generic::<dyn Fn() + Sync> != Generic` (Generic vs Generic)",
);
#[rustfmt::skip]
let test = || {
Ok(ensure!(
Generic::<dyn Fn::() + ::std::marker::Sync> != Generic
))
};
assert_err(
test,
"Condition failed: `Generic::<dyn Fn::() + ::std::marker::Sync> != Generic` (Generic vs Generic)",
);
}
#[test]
fn test_macro() {
let test = || Ok(ensure!(anyhow!("...").to_string().len() <= 1));
assert_err(
test,
"Condition failed: `anyhow!(\"...\").to_string().len() <= 1` (3 vs 1)",
);
let test = || Ok(ensure!(vec![1].len() < 1));
assert_err(test, "Condition failed: `vec![1].len() < 1` (1 vs 1)");
let test = || Ok(ensure!(stringify! {} != ""));
assert_err(
test,
"Condition failed: `stringify! {} != \"\"` (\"\" vs \"\")",
);
}
#[test]
fn test_trailer() {
let test = || Ok(ensure!((|| 1)() == 2));
assert_err(test, "Condition failed: `(|| 1)() == 2` (1 vs 2)");
let test = || Ok(ensure!(b"hmm"[1] == b'c'));
assert_err(test, "Condition failed: `b\"hmm\"[1] == b'c'` (109 vs 99)");
let test = || Ok(ensure!(PhantomData::<u8> {} != PhantomData));
assert_err(
test,
"Condition failed: `PhantomData::<u8> {} != PhantomData` (PhantomData<u8> vs PhantomData<u8>)",
);
let result = Ok::<_, Error>(1);
let test = || Ok(ensure!(result? == 2));
assert_err(test, "Condition failed: `result? == 2` (1 vs 2)");
let test = || Ok(ensure!((2, 3).1 == 2));
assert_err(test, "Condition failed: `(2, 3).1 == 2` (3 vs 2)");
#[rustfmt::skip]
let test = || Ok(ensure!((2, (3, 4)). 1.1 == 2));
assert_err(test, "Condition failed: `(2, (3, 4)).1.1 == 2` (4 vs 2)");
let err = anyhow!("");
let test = || Ok(ensure!(err.is::<&str>() == false));
assert_err(
test,
"Condition failed: `err.is::<&str>() == false` (true vs false)",
);
let test = || Ok(ensure!(err.is::<<str as ToOwned>::Owned>() == true));
assert_err(
test,
"Condition failed: `err.is::<<str as ToOwned>::Owned>() == true` (false vs true)",
);
}
#[test]
fn test_whitespace() {
#[derive(Debug)]
pub struct Point {
#[allow(dead_code)]
pub x: i32,
#[allow(dead_code)]
pub y: i32,
}
let point = Point { x: 0, y: 0 };
let test = || Ok(ensure!("" == format!("{:#?}", point)));
assert_err(
test,
"Condition failed: `\"\" == format!(\"{:#?}\", point)`",
);
}
#[test]
fn test_too_long() {
let test = || Ok(ensure!("" == "x".repeat(10)));
assert_err(
test,
"Condition failed: `\"\" == \"x\".repeat(10)` (\"\" vs \"xxxxxxxxxx\")",
);
let test = || Ok(ensure!("" == "x".repeat(80)));
assert_err(test, "Condition failed: `\"\" == \"x\".repeat(80)`");
}
#[test]
fn test_as() {
let test = || Ok(ensure!('\0' as u8 > 1));
assert_err(test, "Condition failed: `'\\0' as u8 > 1` (0 vs 1)");
let test = || Ok(ensure!('\0' as ::std::primitive::u8 > 1));
assert_err(
test,
"Condition failed: `'\\0' as ::std::primitive::u8 > 1` (0 vs 1)",
);
let test = || Ok(ensure!(&[0] as &[i32] == [1]));
assert_err(
test,
"Condition failed: `&[0] as &[i32] == [1]` ([0] vs [1])",
);
let test = || Ok(ensure!(0 as *const () as *mut _ == 1 as *mut ()));
assert_err(
test,
"Condition failed: `0 as *const () as *mut _ == 1 as *mut ()` (0x0 vs 0x1)",
);
let s = "";
let test = || Ok(ensure!(s as &str != s));
assert_err(test, "Condition failed: `s as &str != s` (\"\" vs \"\")");
let test = || Ok(ensure!(&s as &&str != &s));
assert_err(test, "Condition failed: `&s as &&str != &s` (\"\" vs \"\")");
let test = || Ok(ensure!(s as &'static str != s));
assert_err(
test,
"Condition failed: `s as &'static str != s` (\"\" vs \"\")",
);
let test = || Ok(ensure!(&s as &&'static str != &s));
assert_err(
test,
"Condition failed: `&s as &&'static str != &s` (\"\" vs \"\")",
);
let m: &mut str = Default::default();
let test = || Ok(ensure!(m as &mut str != s));
assert_err(
test,
"Condition failed: `m as &mut str != s` (\"\" vs \"\")",
);
let test = || Ok(ensure!(&m as &&mut str != &s));
assert_err(
test,
"Condition failed: `&m as &&mut str != &s` (\"\" vs \"\")",
);
let test = || Ok(ensure!(&m as &&'static mut str != &s));
assert_err(
test,
"Condition failed: `&m as &&'static mut str != &s` (\"\" vs \"\")",
);
let f = || {};
let test = || Ok(ensure!(f as fn() as usize * 0 != 0));
assert_err(
test,
"Condition failed: `f as fn() as usize * 0 != 0` (0 vs 0)",
);
let test = || Ok(ensure!(f as fn() -> () as usize * 0 != 0));
assert_err(
test,
"Condition failed: `f as fn() -> () as usize * 0 != 0` (0 vs 0)",
);
let test = || Ok(ensure!(f as for<'a> fn() as usize * 0 != 0));
assert_err(
test,
"Condition failed: `f as for<'a> fn() as usize * 0 != 0` (0 vs 0)",
);
let test = || Ok(ensure!(f as unsafe fn() as usize * 0 != 0));
assert_err(
test,
"Condition failed: `f as unsafe fn() as usize * 0 != 0` (0 vs 0)",
);
#[rustfmt::skip]
let test = || Ok(ensure!(f as extern "Rust" fn() as usize * 0 != 0));
assert_err(
test,
"Condition failed: `f as extern \"Rust\" fn() as usize * 0 != 0` (0 vs 0)",
);
extern "C" fn extern_fn() {}
#[rustfmt::skip]
let test = || Ok(ensure!(extern_fn as extern fn() as usize * 0 != 0));
assert_err(
test,
"Condition failed: `extern_fn as extern fn() as usize * 0 != 0` (0 vs 0)",
);
let f = || -> ! { panic!() };
let test = || Ok(ensure!(f as fn() -> ! as usize * 0 != 0));
assert_err(
test,
"Condition failed: `f as fn() -> ! as usize * 0 != 0` (0 vs 0)",
);
trait EqDebug<T>: PartialEq<T> + Debug {
type Assoc;
}
impl<S, T> EqDebug<T> for S
where
S: PartialEq<T> + Debug,
{
type Assoc = bool;
}
let test = || Ok(ensure!(&0 as &dyn EqDebug<i32, Assoc = bool> != &0));
assert_err(
test,
"Condition failed: `&0 as &dyn EqDebug<i32, Assoc = bool> != &0` (0 vs 0)",
);
let test = || {
Ok(ensure!(
PhantomData as PhantomData<<i32 as ToOwned>::Owned> != PhantomData
))
};
assert_err(
test,
"Condition failed: `PhantomData as PhantomData<<i32 as ToOwned>::Owned> != PhantomData` (PhantomData<i32> vs PhantomData<i32>)",
);
macro_rules! int {
(...) => {
u8
};
}
let test = || Ok(ensure!(0 as int!(...) != 0));
assert_err(test, "Condition failed: `0 as int!(...) != 0` (0 vs 0)");
let test = || Ok(ensure!(0 as int![...] != 0));
assert_err(test, "Condition failed: `0 as int![...] != 0` (0 vs 0)");
let test = || Ok(ensure!(0 as int! {...} != 0));
assert_err(test, "Condition failed: `0 as int! { ... } != 0` (0 vs 0)");
}
#[test]
fn test_pat() {
let test = || Ok(ensure!(if let ref mut _x @ 0 = 0 { 0 } else { 1 } == 1));
assert_err(
test,
"Condition failed: `if let ref mut _x @ 0 = 0 { 0 } else { 1 } == 1` (0 vs 1)",
);
let test = || Ok(ensure!(if let -1..=1 = 0 { 0 } else { 1 } == 1));
assert_err(
test,
"Condition failed: `if let -1..=1 = 0 { 0 } else { 1 } == 1` (0 vs 1)",
);
let test = || Ok(ensure!(if let &0 = &0 { 0 } else { 1 } == 1));
assert_err(
test,
"Condition failed: `if let &0 = &0 { 0 } else { 1 } == 1` (0 vs 1)",
);
let test = || Ok(ensure!(if let &&0 = &&0 { 0 } else { 1 } == 1));
assert_err(
test,
"Condition failed: `if let &&0 = &&0 { 0 } else { 1 } == 1` (0 vs 1)",
);
let test = || Ok(ensure!(if let &mut 0 = &mut 0 { 0 } else { 1 } == 1));
assert_err(
test,
"Condition failed: `if let &mut 0 = &mut 0 { 0 } else { 1 } == 1` (0 vs 1)",
);
let test = || Ok(ensure!(if let &&mut 0 = &&mut 0 { 0 } else { 1 } == 1));
assert_err(
test,
"Condition failed: `if let &&mut 0 = &&mut 0 { 0 } else { 1 } == 1` (0 vs 1)",
);
let test = || Ok(ensure!(if let (0, 1) = (0, 1) { 0 } else { 1 } == 1));
assert_err(
test,
"Condition failed: `if let (0, 1) = (0, 1) { 0 } else { 1 } == 1` (0 vs 1)",
);
let test = || Ok(ensure!(if let [0] = b"\0" { 0 } else { 1 } == 1));
assert_err(
test,
"Condition failed: `if let [0] = b\"\\0\" { 0 } else { 1 } == 1` (0 vs 1)",
);
let p = PhantomData::<u8>;
let test = || Ok(ensure!(if let P::<u8> {} = p { 0 } else { 1 } == 1));
assert_err(
test,
"Condition failed: `if let P::<u8> {} = p { 0 } else { 1 } == 1` (0 vs 1)",
);
let test = || Ok(ensure!(if let ::std::marker::PhantomData = p {} != ()));
assert_err(
test,
"Condition failed: `if let ::std::marker::PhantomData = p {} != ()` (() vs ())",
);
let test = || Ok(ensure!(if let <S as Trait>::V = 0 { 0 } else { 1 } == 1));
assert_err(
test,
"Condition failed: `if let <S as Trait>::V = 0 { 0 } else { 1 } == 1` (0 vs 1)",
);
let test = || Ok(ensure!(for _ in iter::once(()) {} != ()));
assert_err(
test,
"Condition failed: `for _ in iter::once(()) {} != ()` (() vs ())",
);
let test = || Ok(ensure!(if let stringify!(x) = "x" { 0 } else { 1 } == 1));
assert_err(
test,
"Condition failed: `if let stringify!(x) = \"x\" { 0 } else { 1 } == 1` (0 vs 1)",
);
}

18
vendor/anyhow/tests/test_ffi.rs vendored Normal file
View file

@ -0,0 +1,18 @@
#![deny(improper_ctypes, improper_ctypes_definitions)]
use anyhow::anyhow;
#[no_mangle]
pub extern "C" fn anyhow1(err: anyhow::Error) {
println!("{:?}", err);
}
#[no_mangle]
pub extern "C" fn anyhow2(err: &mut Option<anyhow::Error>) {
*err = Some(anyhow!("ffi error"));
}
#[no_mangle]
pub extern "C" fn anyhow3() -> Option<anyhow::Error> {
Some(anyhow!("ffi error"))
}

93
vendor/anyhow/tests/test_fmt.rs vendored Normal file
View file

@ -0,0 +1,93 @@
use anyhow::{bail, Context, Result};
use std::io;
fn f() -> Result<()> {
bail!(io::Error::new(io::ErrorKind::PermissionDenied, "oh no!"));
}
fn g() -> Result<()> {
f().context("f failed")
}
fn h() -> Result<()> {
g().context("g failed")
}
const EXPECTED_ALTDISPLAY_F: &str = "oh no!";
const EXPECTED_ALTDISPLAY_G: &str = "f failed: oh no!";
const EXPECTED_ALTDISPLAY_H: &str = "g failed: f failed: oh no!";
const EXPECTED_DEBUG_F: &str = "oh no!";
const EXPECTED_DEBUG_G: &str = "\
f failed
Caused by:
oh no!\
";
const EXPECTED_DEBUG_H: &str = "\
g failed
Caused by:
0: f failed
1: oh no!\
";
const EXPECTED_ALTDEBUG_F: &str = "\
Custom {
kind: PermissionDenied,
error: \"oh no!\",
}\
";
const EXPECTED_ALTDEBUG_G: &str = "\
Error {
context: \"f failed\",
source: Custom {
kind: PermissionDenied,
error: \"oh no!\",
},
}\
";
const EXPECTED_ALTDEBUG_H: &str = "\
Error {
context: \"g failed\",
source: Error {
context: \"f failed\",
source: Custom {
kind: PermissionDenied,
error: \"oh no!\",
},
},
}\
";
#[test]
fn test_display() {
assert_eq!("g failed", h().unwrap_err().to_string());
}
#[test]
fn test_altdisplay() {
assert_eq!(EXPECTED_ALTDISPLAY_F, format!("{:#}", f().unwrap_err()));
assert_eq!(EXPECTED_ALTDISPLAY_G, format!("{:#}", g().unwrap_err()));
assert_eq!(EXPECTED_ALTDISPLAY_H, format!("{:#}", h().unwrap_err()));
}
#[test]
fn test_debug() {
assert_eq!(EXPECTED_DEBUG_F, format!("{:?}", f().unwrap_err()));
assert_eq!(EXPECTED_DEBUG_G, format!("{:?}", g().unwrap_err()));
assert_eq!(EXPECTED_DEBUG_H, format!("{:?}", h().unwrap_err()));
}
#[test]
fn test_altdebug() {
assert_eq!(EXPECTED_ALTDEBUG_F, format!("{:#?}", f().unwrap_err()));
assert_eq!(EXPECTED_ALTDEBUG_G, format!("{:#?}", g().unwrap_err()));
assert_eq!(EXPECTED_ALTDEBUG_H, format!("{:#?}", h().unwrap_err()));
}

96
vendor/anyhow/tests/test_macros.rs vendored Normal file
View file

@ -0,0 +1,96 @@
#![allow(
clippy::assertions_on_result_states,
clippy::eq_op,
clippy::incompatible_msrv, // https://github.com/rust-lang/rust-clippy/issues/12257
clippy::items_after_statements,
clippy::match_single_binding,
clippy::needless_pass_by_value,
clippy::shadow_unrelated,
clippy::wildcard_imports
)]
mod common;
use self::common::*;
use anyhow::{anyhow, ensure, Result};
use std::cell::Cell;
use std::future;
#[test]
fn test_messages() {
assert_eq!("oh no!", bail_literal().unwrap_err().to_string());
assert_eq!("oh no!", bail_fmt().unwrap_err().to_string());
assert_eq!("oh no!", bail_error().unwrap_err().to_string());
}
#[test]
fn test_ensure() {
let f = || {
ensure!(1 + 1 == 2, "This is correct");
Ok(())
};
assert!(f().is_ok());
let v = 1;
let f = || {
ensure!(v + v == 2, "This is correct, v: {}", v);
Ok(())
};
assert!(f().is_ok());
let f = || {
ensure!(v + v == 1, "This is not correct, v: {}", v);
Ok(())
};
assert!(f().is_err());
let f = || {
ensure!(v + v == 1);
Ok(())
};
assert_eq!(
f().unwrap_err().to_string(),
"Condition failed: `v + v == 1` (2 vs 1)",
);
}
#[test]
fn test_ensure_nonbool() -> Result<()> {
struct Struct {
condition: bool,
}
let s = Struct { condition: true };
match &s {
Struct { condition } => ensure!(condition), // &bool
}
Ok(())
}
#[test]
fn test_temporaries() {
fn require_send_sync(_: impl Send + Sync) {}
require_send_sync(async {
// If anyhow hasn't dropped any temporary format_args it creates by the
// time it's done evaluating, those will stick around until the
// semicolon, which is on the other side of the await point, making the
// enclosing future non-Send.
future::ready(anyhow!("...")).await;
});
fn message(cell: Cell<&str>) -> &str {
cell.get()
}
require_send_sync(async {
future::ready(anyhow!(message(Cell::new("...")))).await;
});
}
#[test]
fn test_brace_escape() {
let err = anyhow!("unterminated ${{..}} expression");
assert_eq!("unterminated ${..} expression", err.to_string());
}

30
vendor/anyhow/tests/test_repr.rs vendored Normal file
View file

@ -0,0 +1,30 @@
#![allow(clippy::extra_unused_type_parameters)]
mod drop;
use self::drop::{DetectDrop, Flag};
use anyhow::Error;
use std::mem;
#[test]
fn test_error_size() {
assert_eq!(mem::size_of::<Error>(), mem::size_of::<usize>());
}
#[test]
fn test_null_pointer_optimization() {
assert_eq!(mem::size_of::<Result<(), Error>>(), mem::size_of::<usize>());
}
#[test]
fn test_autotraits() {
fn assert<E: Unpin + Send + Sync + 'static>() {}
assert::<Error>();
}
#[test]
fn test_drop() {
let has_dropped = Flag::new();
drop(Error::new(DetectDrop::new(&has_dropped)));
assert!(has_dropped.get());
}

62
vendor/anyhow/tests/test_source.rs vendored Normal file
View file

@ -0,0 +1,62 @@
use anyhow::anyhow;
use std::error::Error as StdError;
use std::fmt::{self, Display};
use std::io;
#[derive(Debug)]
enum TestError {
Io(io::Error),
}
impl Display for TestError {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match self {
TestError::Io(e) => Display::fmt(e, formatter),
}
}
}
impl StdError for TestError {
fn source(&self) -> Option<&(dyn StdError + 'static)> {
match self {
TestError::Io(io) => Some(io),
}
}
}
#[test]
fn test_literal_source() {
let error = anyhow!("oh no!");
assert!(error.source().is_none());
}
#[test]
fn test_variable_source() {
let msg = "oh no!";
let error = anyhow!(msg);
assert!(error.source().is_none());
let msg = msg.to_owned();
let error = anyhow!(msg);
assert!(error.source().is_none());
}
#[test]
fn test_fmt_source() {
let error = anyhow!("{} {}!", "oh", "no");
assert!(error.source().is_none());
}
#[test]
fn test_io_source() {
let io = io::Error::new(io::ErrorKind::Other, "oh no!");
let error = anyhow!(TestError::Io(io));
assert_eq!("oh no!", error.source().unwrap().to_string());
}
#[test]
fn test_anyhow_from_anyhow() {
let error = anyhow!("oh no!").context("context");
let error = anyhow!(error);
assert_eq!("oh no!", error.source().unwrap().to_string());
}

8
vendor/anyhow/tests/ui/chained-comparison.rs vendored Normal file
View file

@ -0,0 +1,8 @@
use anyhow::{ensure, Result};
fn main() -> Result<()> {
// `ensure!` must not partition this into `(false) == (false == true)`
// because Rust doesn't ordinarily allow this form of expression.
ensure!(false == false == true);
Ok(())
}

10
vendor/anyhow/tests/ui/chained-comparison.stderr vendored Normal file
View file

@ -0,0 +1,10 @@
error: comparison operators cannot be chained
--> tests/ui/chained-comparison.rs:6:19
|
6 | ensure!(false == false == true);
| ^^ ^^
|
help: split the comparison into two
|
6 | ensure!(false == false && false == true);
| ++++++++

6
vendor/anyhow/tests/ui/empty-ensure.rs vendored Normal file
View file

@ -0,0 +1,6 @@
use anyhow::{ensure, Result};
fn main() -> Result<()> {
ensure!();
Ok(())
}

12
vendor/anyhow/tests/ui/empty-ensure.stderr vendored Normal file
View file

@ -0,0 +1,12 @@
error: unexpected end of macro invocation
--> tests/ui/empty-ensure.rs:4:5
|
4 | ensure!();
| ^^^^^^^^^ missing tokens in macro arguments
|
note: while trying to match meta-variable `$cond:expr`
--> src/ensure.rs
|
| ($cond:expr $(,)?) => {
| ^^^^^^^^^^
= note: this error originates in the macro `$crate::__parse_ensure` which comes from the expansion of the macro `ensure` (in Nightly builds, run with -Z macro-backtrace for more info)

40
vendor/anyhow/tests/ui/ensure-nonbool.rs vendored Normal file
View file

@ -0,0 +1,40 @@
use anyhow::{ensure, Result};
use std::ops::{Deref, Not};
struct Bool(bool);
struct DerefBool(bool);
struct NotBool(bool);
impl Deref for DerefBool {
type Target = bool;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Not for NotBool {
type Output = bool;
fn not(self) -> Self::Output {
!self.0
}
}
fn main() -> Result<()> {
ensure!("...");
let mut s = Bool(true);
match &mut s {
Bool(cond) => ensure!(cond),
}
let db = DerefBool(true);
ensure!(db);
ensure!(&db);
let nb = NotBool(true);
ensure!(nb);
Ok(())
}

91
vendor/anyhow/tests/ui/ensure-nonbool.stderr vendored Normal file
View file

@ -0,0 +1,91 @@
error[E0277]: the trait bound `&str: __private::not::Bool` is not satisfied
--> tests/ui/ensure-nonbool.rs:25:13
|
25 | ensure!("...");
| --------^^^^^-
| | |
| | the trait `__private::not::Bool` is not implemented for `&str`
| required by a bound introduced by this call
|
= help: the following other types implement trait `__private::not::Bool`:
&bool
bool
note: required by a bound in `anyhow::__private::not`
--> src/lib.rs
|
| pub fn not(cond: impl Bool) -> bool {
| ^^^^ required by this bound in `not`
error[E0277]: the trait bound `&mut bool: __private::not::Bool` is not satisfied
--> tests/ui/ensure-nonbool.rs:29:31
|
29 | Bool(cond) => ensure!(cond),
| --------^^^^-
| | |
| | the trait `__private::not::Bool` is not implemented for `&mut bool`
| required by a bound introduced by this call
|
= help: the following other types implement trait `__private::not::Bool`:
&bool
bool
= note: `__private::not::Bool` is implemented for `&bool`, but not for `&mut bool`
note: required by a bound in `anyhow::__private::not`
--> src/lib.rs
|
| pub fn not(cond: impl Bool) -> bool {
| ^^^^ required by this bound in `not`
error[E0277]: the trait bound `DerefBool: __private::not::Bool` is not satisfied
--> tests/ui/ensure-nonbool.rs:33:13
|
33 | ensure!(db);
| --------^^-
| | |
| | the trait `__private::not::Bool` is not implemented for `DerefBool`
| required by a bound introduced by this call
|
= help: the following other types implement trait `__private::not::Bool`:
&bool
bool
note: required by a bound in `anyhow::__private::not`
--> src/lib.rs
|
| pub fn not(cond: impl Bool) -> bool {
| ^^^^ required by this bound in `not`
error[E0277]: the trait bound `&DerefBool: __private::not::Bool` is not satisfied
--> tests/ui/ensure-nonbool.rs:34:13
|
34 | ensure!(&db);
| --------^^^-
| | |
| | the trait `__private::not::Bool` is not implemented for `&DerefBool`
| required by a bound introduced by this call
|
note: required by a bound in `anyhow::__private::not`
--> src/lib.rs
|
| pub fn not(cond: impl Bool) -> bool {
| ^^^^ required by this bound in `not`
help: consider dereferencing here
|
34 | ensure!(&*db);
| +
error[E0277]: the trait bound `NotBool: __private::not::Bool` is not satisfied
--> tests/ui/ensure-nonbool.rs:37:13
|
37 | ensure!(nb);
| --------^^-
| | |
| | the trait `__private::not::Bool` is not implemented for `NotBool`
| required by a bound introduced by this call
|
= help: the following other types implement trait `__private::not::Bool`:
&bool
bool
note: required by a bound in `anyhow::__private::not`
--> src/lib.rs
|
| pub fn not(cond: impl Bool) -> bool {
| ^^^^ required by this bound in `not`

11
vendor/anyhow/tests/ui/must-use.rs vendored Normal file
View file

@ -0,0 +1,11 @@
#![deny(unused_must_use)]
use anyhow::anyhow;
fn main() -> anyhow::Result<()> {
if true {
// meant to write bail!
anyhow!("it failed");
}
Ok(())
}

12
vendor/anyhow/tests/ui/must-use.stderr vendored Normal file
View file

@ -0,0 +1,12 @@
error: unused return value of `anyhow::__private::must_use` that must be used
--> tests/ui/must-use.rs:8:9
|
8 | anyhow!("it failed");
| ^^^^^^^^^^^^^^^^^^^^
|
note: the lint level is defined here
--> tests/ui/must-use.rs:1:9
|
1 | #![deny(unused_must_use)]
| ^^^^^^^^^^^^^^^
= note: this error originates in the macro `anyhow` (in Nightly builds, run with -Z macro-backtrace for more info)

8
vendor/anyhow/tests/ui/no-impl.rs vendored Normal file
View file

@ -0,0 +1,8 @@
use anyhow::anyhow;
#[derive(Debug)]
struct Error;
fn main() {
let _ = anyhow!(Error);
}

32
vendor/anyhow/tests/ui/no-impl.stderr vendored Normal file
View file

@ -0,0 +1,32 @@
error[E0599]: the method `anyhow_kind` exists for reference `&Error`, but its trait bounds were not satisfied
--> tests/ui/no-impl.rs:7:13
|
4 | struct Error;
| ------------ doesn't satisfy `Error: Into<anyhow::Error>`, `Error: anyhow::kind::TraitKind` or `Error: std::fmt::Display`
...
7 | let _ = anyhow!(Error);
| ^^^^^^^^^^^^^^ method cannot be called on `&Error` due to unsatisfied trait bounds
|
= note: the following trait bounds were not satisfied:
`Error: Into<anyhow::Error>`
which is required by `Error: anyhow::kind::TraitKind`
`Error: std::fmt::Display`
which is required by `&Error: anyhow::kind::AdhocKind`
`&Error: Into<anyhow::Error>`
which is required by `&Error: anyhow::kind::TraitKind`
note: the traits `Into` and `std::fmt::Display` must be implemented
--> $RUST/core/src/fmt/mod.rs
|
| pub trait Display {
| ^^^^^^^^^^^^^^^^^
|
::: $RUST/core/src/convert/mod.rs
|
| pub trait Into<T>: Sized {
| ^^^^^^^^^^^^^^^^^^^^^^^^
= help: items from traits can only be used if the trait is implemented and in scope
= note: the following traits define an item `anyhow_kind`, perhaps you need to implement one of them:
candidate #1: `anyhow::kind::AdhocKind`
candidate #2: `anyhow::kind::BoxedKind`
candidate #3: `anyhow::kind::TraitKind`
= note: this error originates in the macro `anyhow` (in Nightly builds, run with -Z macro-backtrace for more info)

5
vendor/anyhow/tests/ui/temporary-value.rs vendored Normal file
View file

@ -0,0 +1,5 @@
use anyhow::anyhow;
fn main() {
let _ = anyhow!(&String::new());
}

9
vendor/anyhow/tests/ui/temporary-value.stderr vendored Normal file
View file

@ -0,0 +1,9 @@
error[E0716]: temporary value dropped while borrowed
--> tests/ui/temporary-value.rs:4:22
|
4 | let _ = anyhow!(&String::new());
| ---------^^^^^^^^^^^^^-
| | |
| | creates a temporary value which is freed while still in use
| temporary value is freed at the end of this statement
| argument requires that borrow lasts for `'static`

5
vendor/anyhow/tests/ui/wrong-interpolation.rs vendored Normal file
View file

@ -0,0 +1,5 @@
use anyhow::{bail, Result};
fn main() -> Result<()> {
bail!("{} not found");
}

5
vendor/anyhow/tests/ui/wrong-interpolation.stderr vendored Normal file
View file

@ -0,0 +1,5 @@
error: 1 positional argument in format string, but no arguments were given
--> tests/ui/wrong-interpolation.rs:4:12
|
4 | bail!("{} not found");
| ^^

1
vendor/base64/.cargo-checksum.json vendored Normal file
View file

@ -0,0 +1 @@
{"files":{"Cargo.lock":"cee37732975a1ffc1f956d3d05b6edf1baec72841cfabc384a21b02b3bfa0275","Cargo.toml":"52bee6a418e14918d37058fd15fccfd0f417a06fe4f9668b6f97866bf7f991e3","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0dd882e53de11566d50f8e8e2d5a651bcf3fabee4987d70f306233cf39094ba7","README.md":"df01f5b4317d601e7de86743f9818aec9196abf9e298f5e47679b7a966ecd945","RELEASE-NOTES.md":"997a5193317a8bff266ecfe4f015ba070b782b6df7d3a1738b9b52584d57f9c6","benches/benchmarks.rs":"cebbcc8649e760e569c6be04f5e727aee2c2568ced7faab580fc0aa0d0426d26","clippy.toml":"b26be4d15ed059985ce6994f11817fd7562046f46e460a0dc64dbb71cfc246d1","examples/base64.rs":"b75ead2199a9b4389c69fe6f1ae988176a263b8fc84e7a4fea1d7e5a41592078","icon_CLion.svg":"cffa044ba75cb998ee3306991dc4a3755ec2f39ab95ddd4b74bc21988389020f","src/alphabet.rs":"5de2beb8fcccb078c61cac2c0477ebbde145122d6c10a0f7ea2e57e8159318e0","src/chunked_encoder.rs":"edfdbb9a4329b80fb2c769ada81e234e00839e0fa85faaa70bacf40ce12e951c","src/decode.rs":"b046a72d62eaac58dc42efcf7848d9d96d022f6594e851cf87074b77ce45c04a","src/display.rs":"31bf3e19274a0b80dd8948a81ea535944f756ef5b88736124c940f5fe1e8c71c","src/encode.rs":"44ddcc162f3fe9817b6e857dda0a3b9197b90a657e5f71c44aacabf5431ccf7d","src/engine/general_purpose/decode.rs":"d865b057e5788e7fefd189cf57ec913df263e6a0742dfa52513f587e14fa1a92","src/engine/general_purpose/decode_suffix.rs":"689688f7bf442b232d3b9f56a1b41c56d9393ace88556a165c224b93dd19b74e","src/engine/general_purpose/mod.rs":"901760a7f5721ec3bafad5fea6251f57de0f767ecb2e1e2fdfe64d661404ec34","src/engine/mod.rs":"5e4a6c0e86417f3b62350264ef383f91e9864390f7c315d786ecd8e9c920ee9f","src/engine/naive.rs":"70de29d909c3fe7918d2965782088b05047b8b6e30d1d2bf11ba073d3f8633ff","src/engine/tests.rs":"2cc8d1431f40f5b9c3ad8970e6fb73bba8be3f2317553dd026539f41908aaa19","src/lib.rs":"c4db7bd31ace78aec2ecd151cef3ad90dfdc76097ba12027bde79d3c82612f7c","src/prelude.rs":"c1587138e5301ac797c5c362cb3638649b33f79c20c16db6f38ad44330540752","src/read/decoder.rs":"00aaa0553a54fcf12762658c4e56663a9705cc30c07af30976291e6f69d78c3d","src/read/decoder_tests.rs":"66ec39bf6e86f21f4db1afd6c5cd63d4a4931ab896b9c38de25d99b803804bbf","src/read/mod.rs":"e0b714eda02d16b1ffa6f78fd09b2f963e01c881b1f7c17b39db4e904be5e746","src/tests.rs":"90cb9f8a1ccb7c4ddc4f8618208e0031fc97e0df0e5aa466d6a5cf45d25967d8","src/write/encoder.rs":"c889c853249220fe2ddaeb77ee6e2ee2945f7db88cd6658ef89ff71b81255ea8","src/write/encoder_string_writer.rs":"0326c9d120369b9bbc35697b5b9b141bed24283374c93d5af1052eb042e47799","src/write/encoder_tests.rs":"28695a485b17cf5db73656aae5d90127f726e02c6d70efd83e5ab53a4cc17b38","src/write/mod.rs":"73cd98dadc9d712b3fefd9449d97e825e097397441b90588e0051e4d3b0911b9","tests/encode.rs":"5309f4538b1df611436f7bfba7409c725161b6f841b1bbf8d9890ae185de7d88","tests/tests.rs":"78efcf0dc4bb6ae52f7a91fcad89e44e4dce578224c36b4e6c1c306459be8500"},"package":"72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"}

1515
vendor/base64/Cargo.lock generated vendored Normal file

File diff suppressed because it is too large

85
vendor/base64/Cargo.toml vendored Normal file
View file

@ -0,0 +1,85 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
rust-version = "1.48.0"
name = "base64"
version = "0.22.1"
authors = ["Marshall Pierce <marshall@mpierce.org>"]
description = "encodes and decodes base64 as bytes or utf8"
documentation = "https://docs.rs/base64"
readme = "README.md"
keywords = [
"base64",
"utf8",
"encode",
"decode",
"no_std",
]
categories = ["encoding"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/marshallpierce/rust-base64"
[package.metadata.docs.rs]
rustdoc-args = ["--generate-link-to-definition"]
[profile.bench]
debug = 2
[profile.test]
opt-level = 3
[[example]]
name = "base64"
required-features = ["std"]
[[test]]
name = "tests"
required-features = ["alloc"]
[[test]]
name = "encode"
required-features = ["alloc"]
[[bench]]
name = "benchmarks"
harness = false
required-features = ["std"]
[dev-dependencies.clap]
version = "3.2.25"
features = ["derive"]
[dev-dependencies.criterion]
version = "0.4.0"
[dev-dependencies.once_cell]
version = "1"
[dev-dependencies.rand]
version = "0.8.5"
features = ["small_rng"]
[dev-dependencies.rstest]
version = "0.13.0"
[dev-dependencies.rstest_reuse]
version = "0.6.0"
[dev-dependencies.strum]
version = "0.25"
features = ["derive"]
[features]
alloc = []
default = ["std"]
std = ["alloc"]

201
vendor/base64/LICENSE-APACHE vendored Normal file
View file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

21
vendor/base64/LICENSE-MIT vendored Normal file
View file

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2015 Alice Maz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

154
vendor/base64/README.md vendored Normal file
View file

@ -0,0 +1,154 @@
# [base64](https://crates.io/crates/base64)
[![](https://img.shields.io/crates/v/base64.svg)](https://crates.io/crates/base64) [![Docs](https://docs.rs/base64/badge.svg)](https://docs.rs/base64) [![CircleCI](https://circleci.com/gh/marshallpierce/rust-base64/tree/master.svg?style=shield)](https://circleci.com/gh/marshallpierce/rust-base64/tree/master) [![codecov](https://codecov.io/gh/marshallpierce/rust-base64/branch/master/graph/badge.svg)](https://codecov.io/gh/marshallpierce/rust-base64) [![unsafe forbidden](https://img.shields.io/badge/unsafe-forbidden-success.svg)](https://github.com/rust-secure-code/safety-dance/)
<a href="https://www.jetbrains.com/?from=rust-base64"><img src="/icon_CLion.svg" height="40px"/></a>
Made with CLion. Thanks to JetBrains for supporting open source!
It's base64. What more could anyone want?
This library's goals are to be *correct* and *fast*. It's thoroughly tested and widely used. It exposes functionality at
multiple levels of abstraction so you can choose the level of convenience vs performance that you want,
e.g. `decode_engine_slice` decodes into an existing `&mut [u8]` and is pretty fast (2.6GiB/s for a 3 KiB input),
whereas `decode_engine` allocates a new `Vec<u8>` and returns it, which might be more convenient in some cases, but is
slower (although still fast enough for almost any purpose) at 2.1 GiB/s.
See the [docs](https://docs.rs/base64) for all the details.
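For a first orientation, here is a minimal sketch of the convenient, allocating level of the `Engine` API (assuming the predefined `STANDARD` engine from 0.21+; shown only as an illustration):
```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn main() {
    // Encode allocates and returns a String; decode returns a Vec<u8>.
    let encoded = STANDARD.encode(b"hello world");
    assert_eq!(encoded, "aGVsbG8gd29ybGQ=");

    let decoded = STANDARD.decode(&encoded).expect("valid base64");
    assert_eq!(decoded, b"hello world");
}
```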
## FAQ
### I need to decode base64 with whitespace/null bytes/other random things interspersed in it. What should I do?
Remove non-base64 characters from your input before decoding.
If you have a `Vec` of base64, [retain](https://doc.rust-lang.org/std/vec/struct.Vec.html#method.retain) can be used to
strip out whatever you need removed.
If you have a `Read` (e.g. reading a file or network socket), there are various approaches.
- Use [iter_read](https://crates.io/crates/iter-read) together with `Read`'s `bytes()` to filter out unwanted bytes.
- Implement `Read` with a `read()` impl that delegates to your actual `Read`, and then drops any bytes you don't want.
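For the `Vec` case above, a small sketch (standard alphabet assumed; anything that is not a base64 symbol or `=` padding is simply dropped before decoding):
```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn decode_lenient(mut input: Vec<u8>) -> Result<Vec<u8>, base64::DecodeError> {
    // Keep only the standard alphabet plus padding, then decode as usual.
    input.retain(|&b| b.is_ascii_alphanumeric() || matches!(b, b'+' | b'/' | b'='));
    STANDARD.decode(&input)
}
```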
### I need to line-wrap base64, e.g. for MIME/PEM.
[line-wrap](https://crates.io/crates/line-wrap) does just that.
### I want canonical base64 encoding/decoding.
First, don't do this. You should no more expect Base64 to be canonical than you should expect compression algorithms to
produce canonical output across all usage in the wild (hint: they don't).
However, [people are drawn to their own destruction like moths to a flame](https://eprint.iacr.org/2022/361), so here we
are.
There are two opportunities for non-canonical encoding (and thus, detection of the same during decoding): the final bits
of the last encoded token in two or three token suffixes, and the `=` token used to inflate the suffix to a full four
tokens.
The trailing bits issue is unavoidable: with 6 bits available in each encoded token, 1 input byte takes 2 tokens,
with the second one having some bits unused. Same for two input bytes: 16 bits, but 3 tokens have 18 bits. Unless we
decide to stop shipping whole bytes around, we're stuck with those extra bits that a sneaky or buggy encoder might set
to 1 instead of 0.
The `=` pad bytes, on the other hand, are entirely a self-own by the Base64 standard. They do not affect decoding other
than to provide an opportunity to say "that padding is incorrect". Exabytes of storage and transfer have no doubt been
wasted on pointless `=` bytes. Somehow we all seem to be quite comfortable with, say, hex-encoded data just stopping
when it's done rather than requiring a confirmation that the author of the encoder could count to four. Anyway, there
are two ways to make pad bytes predictable: require canonical padding to the next multiple of four bytes as per the RFC,
or, if you control all producers and consumers, save a few bytes by requiring no padding (especially applicable to the
url-safe alphabet).
All `Engine` implementations must at a minimum support treating non-canonical padding of both types as an error, and
optionally may allow other behaviors.
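With the predefined engines this is directly visible: the padded `STANDARD` engine rejects input whose padding is not canonical (illustrative sketch):
```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn main() {
    assert!(STANDARD.decode("aGk=").is_ok()); // canonical padding
    assert!(STANDARD.decode("aGk").is_err()); // missing `=` is rejected
}
```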
## Rust version compatibility
The minimum supported Rust version is 1.48.0.
# Contributing
Contributions are very welcome. However, because this library is used widely, and in security-sensitive contexts, all
PRs will be carefully scrutinized. Beyond that, this sort of low level library simply needs to be 100% correct. Nobody
wants to chase bugs in encoding of any sort.
All this means that it takes me a fair amount of time to review each PR, so it might take quite a while to carve out the
free time to give each PR the attention it deserves. I will get to everyone eventually!
## Developing
Benchmarks are in `benches/`.
```bash
cargo bench
```
## no_std
This crate supports no_std. By default the crate targets std via the `std` feature. You can deactivate
the `default-features` to target `core` instead. In that case you lose out on all the functionality revolving
around `std::io`, `std::error::Error`, and heap allocations. There is an additional `alloc` feature that you can activate
to bring back the support for heap allocations.
## Profiling
On Linux, you can use [perf](https://perf.wiki.kernel.org/index.php/Main_Page) for profiling. Then compile the
benchmarks with `cargo bench --no-run`.
Run the benchmark binary with `perf` (shown here filtering to one particular benchmark, which will make the results
easier to read). `perf` is only available to the root user on most systems as it fiddles with event counters in your
CPU, so use `sudo`. We need to run the actual benchmark binary, hence the path into `target`. You can see the actual
full path with `cargo bench -v`; it will print out the commands it runs. If you use the exact path
that `bench` outputs, make sure you get the one that's for the benchmarks, not the tests. You may also want
to `cargo clean` so you have only one `benchmarks-` binary (they tend to accumulate).
```bash
sudo perf record target/release/deps/benchmarks-* --bench decode_10mib_reuse
```
Then analyze the results, again with perf:
```bash
sudo perf annotate -l
```
You'll see a bunch of interleaved rust source and assembly like this. The section with `lib.rs:327` is telling us that
4.02% of samples saw the `movzbl` aka bit shift as the active instruction. However, this percentage is not as exact as
it seems due to a phenomenon called *skid*. Basically, a consequence of how fancy modern CPUs are is that this sort of
instruction profiling is inherently inaccurate, especially in branch-heavy code.
```text
lib.rs:322 0.70 : 10698: mov %rdi,%rax
2.82 : 1069b: shr $0x38,%rax
: if morsel == decode_tables::INVALID_VALUE {
: bad_byte_index = input_index;
: break;
: };
: accum = (morsel as u64) << 58;
lib.rs:327 4.02 : 1069f: movzbl (%r9,%rax,1),%r15d
: // fast loop of 8 bytes at a time
: while input_index < length_of_full_chunks {
: let mut accum: u64;
:
: let input_chunk = BigEndian::read_u64(&input_bytes[input_index..(input_index + 8)]);
: morsel = decode_table[(input_chunk >> 56) as usize];
lib.rs:322 3.68 : 106a4: cmp $0xff,%r15
: if morsel == decode_tables::INVALID_VALUE {
0.00 : 106ab: je 1090e <base64::decode_config_buf::hbf68a45fefa299c1+0x46e>
```
## Fuzzing
This uses [cargo-fuzz](https://github.com/rust-fuzz/cargo-fuzz). See `fuzz/fuzzers` for the available fuzzing scripts.
To run, use an invocation like these:
```bash
cargo +nightly fuzz run roundtrip
cargo +nightly fuzz run roundtrip_no_pad
cargo +nightly fuzz run roundtrip_random_config -- -max_len=10240
cargo +nightly fuzz run decode_random
```
## License
This project is dual-licensed under MIT and Apache 2.0.

271
vendor/base64/RELEASE-NOTES.md vendored Normal file
View file

@ -0,0 +1,271 @@
# 0.22.1
- Correct the symbols used for the predefined `alphabet::BIN_HEX`.
# 0.22.0
- `DecodeSliceError::OutputSliceTooSmall` is now conservative rather than precise. That is, the error will only occur if the decoded output _cannot_ fit, meaning that `Engine::decode_slice` can now be used with exactly-sized output slices. As part of this, `Engine::internal_decode` now returns `DecodeSliceError` instead of `DecodeError`, but that is not expected to affect any external callers.
- `DecodeError::InvalidLength` now refers specifically to the _number of valid symbols_ being invalid (i.e. `len % 4 == 1`), rather than just the number of input bytes. This avoids confusing scenarios when based on interpretation you could make a case for either `InvalidLength` or `InvalidByte` being appropriate.
- Decoding is somewhat faster (5-10%)
# 0.21.7
- Support getting an alphabet's contents as a str via `Alphabet::as_str()`
# 0.21.6
- Improved introductory documentation and example
# 0.21.5
- Add `Debug` and `Clone` impls for the general purpose Engine
# 0.21.4
- Make `encoded_len` `const`, allowing the creation of arrays sized to encode compile-time-known data lengths
# 0.21.3
- Implement `source` instead of `cause` on Error types
- Roll back MSRV to 1.48.0 so Debian can continue to live in a time warp
- Slightly faster chunked encoding for short inputs
- Decrease binary size
# 0.21.2
- Rollback MSRV to 1.57.0 -- only dev dependencies need 1.60, not the main code
# 0.21.1
- Remove the possibility of panicking during decoded length calculations
- `DecoderReader` no longer sometimes erroneously ignores
padding [#226](https://github.com/marshallpierce/rust-base64/issues/226)
## Breaking changes
- `Engine.internal_decode` return type changed
- Update MSRV to 1.60.0
# 0.21.0
## Migration
### Functions
| < 0.20 function | 0.21 equivalent |
|-------------------------|-------------------------------------------------------------------------------------|
| `encode()` | `engine::general_purpose::STANDARD.encode()` or `prelude::BASE64_STANDARD.encode()` |
| `encode_config()` | `engine.encode()` |
| `encode_config_buf()` | `engine.encode_string()` |
| `encode_config_slice()` | `engine.encode_slice()` |
| `decode()` | `engine::general_purpose::STANDARD.decode()` or `prelude::BASE64_STANDARD.decode()` |
| `decode_config()` | `engine.decode()` |
| `decode_config_buf()` | `engine.decode_vec()` |
| `decode_config_slice()` | `engine.decode_slice()` |
The short-lived 0.20 functions were the 0.13 functions with `config` replaced with `engine`.
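As a sketch of the mapping in the table above (assuming the predefined `STANDARD` engine):
```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn roundtrip(data: &[u8]) -> Vec<u8> {
    let text = STANDARD.encode(data);           // was `encode()` / `encode_config()`
    STANDARD.decode(&text).expect("roundtrip")  // was `decode()` / `decode_config()`
}
```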
### Padding
If applicable, use the preset engines `engine::STANDARD`, `engine::STANDARD_NO_PAD`, `engine::URL_SAFE`,
or `engine::URL_SAFE_NO_PAD`.
The `NO_PAD` ones require that padding is absent when decoding, and the others require that
canonical padding is present.
If you need the < 0.20 behavior that did not care about padding, or want to recreate < 0.20.0's predefined `Config`s
precisely, see the following table.
| 0.13.1 Config | 0.20.0+ alphabet | `encode_padding` | `decode_padding_mode` |
|-----------------|------------------|------------------|-----------------------|
| STANDARD | STANDARD | true | Indifferent |
| STANDARD_NO_PAD | STANDARD | false | Indifferent |
| URL_SAFE | URL_SAFE | true | Indifferent |
| URL_SAFE_NO_PAD | URL_SAFE | false | Indifferent |
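For example, a padding-indifferent standard engine roughly matching the old 0.13 `STANDARD` row can be built from the settings in the table (a sketch using the names re-exported from `base64::engine`):
```rust
use base64::{
    alphabet,
    engine::{DecodePaddingMode, GeneralPurpose, GeneralPurposeConfig},
    Engine as _,
};

fn main() {
    // encode_padding = true, decode_padding_mode = Indifferent, as in the STANDARD row.
    let config = GeneralPurposeConfig::new()
        .with_encode_padding(true)
        .with_decode_padding_mode(DecodePaddingMode::Indifferent);
    let lenient = GeneralPurpose::new(&alphabet::STANDARD, config);

    // Both padded and unpadded input decode successfully.
    assert!(lenient.decode("aGk=").is_ok());
    assert!(lenient.decode("aGk").is_ok());
}
```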
# 0.21.0-rc.1
- Restore the ability to decode into a slice of precisely the correct length with `Engine.decode_slice_unchecked`.
- Add `Engine` as a `pub use` in `prelude`.
# 0.21.0-beta.2
## Breaking changes
- Re-exports of preconfigured engines in `engine` are removed in favor of `base64::prelude::...` that are better suited
to those who wish to `use` the entire path to a name.
# 0.21.0-beta.1
## Breaking changes
- `FastPortable` was only meant to be an interim name, and shouldn't have shipped in 0.20. It is now `GeneralPurpose` to
make its intended usage more clear.
- `GeneralPurpose` and its config are now `pub use`'d in the `engine` module for convenience.
- Change a few `from()` functions to be `new()`. `from()` causes confusing compiler errors because of confusion
with `From::from`, and is a little misleading because some of those invocations are not very cheap as one would
usually expect from a `from` call.
- `encode*` and `decode*` top level functions are now methods on `Engine`.
- `DEFAULT_ENGINE` was replaced by `engine::general_purpose::STANDARD`
- Predefined engine consts `engine::general_purpose::{STANDARD, STANDARD_NO_PAD, URL_SAFE, URL_SAFE_NO_PAD}`
- These are `pub use`d into `engine` as well
- The `*_slice` decode/encode functions now return an error instead of panicking when the output slice is too small
- As part of this, there is no longer a public way to decode into a slice _exactly_ the size needed for inputs that
aren't multiples of 4 tokens. If rounding the decode buffer up to a multiple of 3 bytes (at most 2 extra bytes) is
a problem, file an issue.
## Other changes
- `decoded_len_estimate()` is provided to make it easy to size decode buffers correctly.
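A minimal sketch of the intended usage, based on the `decoded_len_estimate` and `decode_slice` signatures in the sources vendored below; the input string is just an example:
```rust
use base64::{decoded_len_estimate, engine::general_purpose::STANDARD, Engine as _};

fn main() {
    let encoded = "aGVsbG8gd29ybGQ="; // "hello world"
    // Conservative estimate: may be up to 2 bytes larger than the actual decoded length.
    let mut buf = vec![0u8; decoded_len_estimate(encoded.len())];
    let written = STANDARD
        .decode_slice(encoded, &mut buf)
        .expect("buffer sized from the estimate is large enough");
    assert_eq!(b"hello world".as_slice(), &buf[..written]);
}
```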
# 0.20.0
## Breaking changes
- Update MSRV to 1.57.0
- Decoding can now either ignore padding, require correct padding, or require no padding. The default is to require
correct padding.
- The `NO_PAD` config now requires that padding be absent when decoding.
## 0.20.0-alpha.1
### Breaking changes
- Extended the `Config` concept into the `Engine` abstraction, allowing the user to pick different encoding / decoding
implementations.
- What was formerly the only algorithm is now the `FastPortable` engine, so named because it's portable (works on
any CPU) and relatively fast.
- This opens the door to a portable constant-time
implementation ([#153](https://github.com/marshallpierce/rust-base64/pull/153),
presumably `ConstantTimePortable`?) for security-sensitive applications that need side-channel resistance, and
CPU-specific SIMD implementations for more speed.
- Standard base64 per the RFC is available via `DEFAULT_ENGINE`. To use different alphabets or other settings
(padding, etc.), create your own engine instance.
- `CharacterSet` is now `Alphabet` (per the RFC), and allows creating custom alphabets. The corresponding tables that
were previously code-generated are now built dynamically.
- Since there are already multiple breaking changes, various functions are renamed to be more consistent and
discoverable.
- MSRV is now 1.47.0 to allow various things to use `const fn`.
- `DecoderReader` now owns its inner reader, and can expose it via `into_inner()`. For symmetry, `EncoderWriter` can do
the same with its writer.
- `encoded_len` is now public so you can size encode buffers precisely.
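For instance, a minimal sketch of sizing an encode buffer exactly, assuming the 0.21 signature shown in the vendored `encode.rs` below (`encoded_len(bytes_len, padding) -> Option<usize>`) and its crate-root re-export:
```rust
use base64::{encoded_len, engine::general_purpose::STANDARD, Engine as _};

fn main() {
    let input = b"any binary payload"; // placeholder input
    // None is only returned if the encoded length would overflow usize.
    let len = encoded_len(input.len(), true).expect("length fits in usize");
    let mut buf = vec![0u8; len];
    let written = STANDARD
        .encode_slice(input, &mut buf)
        .expect("buffer was sized exactly for the encoded output");
    assert_eq!(len, written);
}
```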
# 0.13.1
- More precise decode buffer sizing, avoiding unnecessary allocation in `decode_config`.
# 0.13.0
- Config methods are const
- Added `EncoderStringWriter` to allow encoding directly to a String
- `EncoderWriter` now owns its delegate writer rather than keeping a reference to it (though refs still work)
- As a consequence, it is now possible to extract the delegate writer from an `EncoderWriter` via `finish()`, which
returns `Result<W>` instead of `Result<()>`. If you were calling `finish()` explicitly, you will now need to
use `let _ = foo.finish()` instead of just `foo.finish()` to avoid a warning about the unused value.
- When decoding input that has both an invalid length and an invalid symbol as the last byte, `InvalidByte` will be
emitted instead of `InvalidLength` to make the problem more obvious.
# 0.12.2
- Add `BinHex` alphabet
# 0.12.1
- Add `Bcrypt` alphabet
# 0.12.0
- A `Read` implementation (`DecoderReader`) to let users transparently decode data from a b64 input source
- IMAP's modified b64 alphabet
- Relaxed type restrictions to just `AsRef<[u8]>` for main `encode*`/`decode*` functions
- A minor performance improvement in encoding
# 0.11.0
- Minimum rust version 1.34.0
- `no_std` is now supported via the two new features `alloc` and `std`.
# 0.10.1
- Minimum rust version 1.27.2
- Fix bug in streaming encoding ([#90](https://github.com/marshallpierce/rust-base64/pull/90)): if the underlying writer
didn't write all the bytes given to it, the remaining bytes would not be retried later. See the docs
on `EncoderWriter::write`.
- Make it configurable whether or not to return an error when decoding detects excess trailing bits.
# 0.10.0
- Remove line wrapping. Line wrapping was never a great conceptual fit in this library, and other features (streaming
encoding, etc) either couldn't support it or could support only special cases of it with a great increase in
complexity. Line wrapping has been pulled out into a [line-wrap](https://crates.io/crates/line-wrap) crate, so it's
still available if you need it.
- `Base64Display` creation no longer uses a `Result` because it can't fail, which means its helper methods for common
configs that `unwrap()` for you are no longer needed
- Add a streaming encoder `Write` impl to transparently base64 as you write.
- Remove the remaining `unsafe` code.
- Remove whitespace stripping to simplify `no_std` support. No out of the box configs use it, and it's trivial to do
yourself if needed: `filter(|b| !b" \n\t\r\x0b\x0c".contains(b))` (a short sketch follows this list).
- Detect invalid trailing symbols when decoding and return an error rather than silently ignoring them.
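A minimal sketch of that do-it-yourself whitespace stripping, written against the current 0.21 engine API rather than the 0.10 free functions:
```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn main() {
    let input = b"aGVsbG8g\nd29ybGQ=\n"; // base64 with embedded whitespace
    // Strip the whitespace bytes the old behavior used to ignore, then decode.
    let cleaned: Vec<u8> = input
        .iter()
        .copied()
        .filter(|b| !b" \n\t\r\x0b\x0c".contains(b))
        .collect();
    let decoded = STANDARD.decode(cleaned).expect("valid base64 after stripping");
    assert_eq!(b"hello world".as_slice(), decoded.as_slice());
}
```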
# 0.9.3
- Update safemem
# 0.9.2
- Derive `Clone` for `DecodeError`.
# 0.9.1
- Add support for `crypt(3)`'s base64 variant.
# 0.9.0
- `decode_config_slice` function for no-allocation decoding, analogous to `encode_config_slice`
- Decode performance optimization
# 0.8.0
- `encode_config_slice` function for no-allocation encoding
# 0.7.0
- `STANDARD_NO_PAD` config
- `Base64Display` heap-free wrapper for use in format strings, etc
# 0.6.0
- Decode performance improvements
- Use `unsafe` in fewer places
- Added fuzzers
# 0.5.2
- Avoid usize overflow when calculating length
- Better line wrapping performance
# 0.5.1
- Temporarily disable line wrapping
- Add Apache 2.0 license
# 0.5.0
- MIME support, including configurable line endings and line wrapping
- Removed `decode_ws`
- Renamed `Base64Error` to `DecodeError`
# 0.4.1
- Allow decoding an `AsRef<[u8]>` instead of just a `&str`
# 0.4.0
- Configurable padding
- Encode performance improvements
# 0.3.0
- Added encode/decode functions that do not allocate their own storage
- Decode performance improvements
- Extraneous padding bytes are no longer ignored. Now, an error will be returned.

238
vendor/base64/benches/benchmarks.rs vendored Normal file
View file

@ -0,0 +1,238 @@
#[macro_use]
extern crate criterion;
use base64::{
display,
engine::{general_purpose::STANDARD, Engine},
write,
};
use criterion::{black_box, Bencher, BenchmarkId, Criterion, Throughput};
use rand::{Rng, SeedableRng};
use std::io::{self, Read, Write};
fn do_decode_bench(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
fill(&mut v);
let encoded = STANDARD.encode(&v);
b.iter(|| {
let orig = STANDARD.decode(&encoded);
black_box(&orig);
});
}
fn do_decode_bench_reuse_buf(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
fill(&mut v);
let encoded = STANDARD.encode(&v);
let mut buf = Vec::new();
b.iter(|| {
STANDARD.decode_vec(&encoded, &mut buf).unwrap();
black_box(&buf);
buf.clear();
});
}
fn do_decode_bench_slice(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
fill(&mut v);
let encoded = STANDARD.encode(&v);
let mut buf = vec![0; size];
b.iter(|| {
STANDARD.decode_slice(&encoded, &mut buf).unwrap();
black_box(&buf);
});
}
fn do_decode_bench_stream(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
fill(&mut v);
let encoded = STANDARD.encode(&v);
let mut buf = vec![0; size];
buf.truncate(0);
b.iter(|| {
let mut cursor = io::Cursor::new(&encoded[..]);
let mut decoder = base64::read::DecoderReader::new(&mut cursor, &STANDARD);
decoder.read_to_end(&mut buf).unwrap();
buf.clear();
black_box(&buf);
});
}
fn do_encode_bench(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
b.iter(|| {
let e = STANDARD.encode(&v);
black_box(&e);
});
}
fn do_encode_bench_display(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
b.iter(|| {
let e = format!("{}", display::Base64Display::new(&v, &STANDARD));
black_box(&e);
});
}
fn do_encode_bench_reuse_buf(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
let mut buf = String::new();
b.iter(|| {
STANDARD.encode_string(&v, &mut buf);
buf.clear();
});
}
fn do_encode_bench_slice(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
// conservative estimate of encoded size
let mut buf = vec![0; v.len() * 2];
b.iter(|| STANDARD.encode_slice(&v, &mut buf).unwrap());
}
fn do_encode_bench_stream(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
let mut buf = Vec::with_capacity(size * 2);
b.iter(|| {
buf.clear();
let mut stream_enc = write::EncoderWriter::new(&mut buf, &STANDARD);
stream_enc.write_all(&v).unwrap();
stream_enc.flush().unwrap();
});
}
fn do_encode_bench_string_stream(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
b.iter(|| {
let mut stream_enc = write::EncoderStringWriter::new(&STANDARD);
stream_enc.write_all(&v).unwrap();
stream_enc.flush().unwrap();
let _ = stream_enc.into_inner();
});
}
fn do_encode_bench_string_reuse_buf_stream(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
let mut buf = String::new();
b.iter(|| {
buf.clear();
let mut stream_enc = write::EncoderStringWriter::from_consumer(&mut buf, &STANDARD);
stream_enc.write_all(&v).unwrap();
stream_enc.flush().unwrap();
let _ = stream_enc.into_inner();
});
}
fn fill(v: &mut Vec<u8>) {
let cap = v.capacity();
// weak randomness is plenty; we just want to not be completely friendly to the branch predictor
let mut r = rand::rngs::SmallRng::from_entropy();
while v.len() < cap {
v.push(r.gen::<u8>());
}
}
const BYTE_SIZES: [usize; 5] = [3, 50, 100, 500, 3 * 1024];
// Benchmarks over these byte sizes take longer so we will run fewer samples to
// keep the benchmark runtime reasonable.
const LARGE_BYTE_SIZES: [usize; 3] = [3 * 1024 * 1024, 10 * 1024 * 1024, 30 * 1024 * 1024];
fn encode_benchmarks(c: &mut Criterion, label: &str, byte_sizes: &[usize]) {
let mut group = c.benchmark_group(label);
group
.warm_up_time(std::time::Duration::from_millis(500))
.measurement_time(std::time::Duration::from_secs(3));
for size in byte_sizes {
group
.throughput(Throughput::Bytes(*size as u64))
.bench_with_input(BenchmarkId::new("encode", size), size, do_encode_bench)
.bench_with_input(
BenchmarkId::new("encode_display", size),
size,
do_encode_bench_display,
)
.bench_with_input(
BenchmarkId::new("encode_reuse_buf", size),
size,
do_encode_bench_reuse_buf,
)
.bench_with_input(
BenchmarkId::new("encode_slice", size),
size,
do_encode_bench_slice,
)
.bench_with_input(
BenchmarkId::new("encode_reuse_buf_stream", size),
size,
do_encode_bench_stream,
)
.bench_with_input(
BenchmarkId::new("encode_string_stream", size),
size,
do_encode_bench_string_stream,
)
.bench_with_input(
BenchmarkId::new("encode_string_reuse_buf_stream", size),
size,
do_encode_bench_string_reuse_buf_stream,
);
}
group.finish();
}
fn decode_benchmarks(c: &mut Criterion, label: &str, byte_sizes: &[usize]) {
let mut group = c.benchmark_group(label);
for size in byte_sizes {
group
.warm_up_time(std::time::Duration::from_millis(500))
.measurement_time(std::time::Duration::from_secs(3))
.throughput(Throughput::Bytes(*size as u64))
.bench_with_input(BenchmarkId::new("decode", size), size, do_decode_bench)
.bench_with_input(
BenchmarkId::new("decode_reuse_buf", size),
size,
do_decode_bench_reuse_buf,
)
.bench_with_input(
BenchmarkId::new("decode_slice", size),
size,
do_decode_bench_slice,
)
.bench_with_input(
BenchmarkId::new("decode_stream", size),
size,
do_decode_bench_stream,
);
}
group.finish();
}
fn bench(c: &mut Criterion) {
encode_benchmarks(c, "encode_small_input", &BYTE_SIZES[..]);
encode_benchmarks(c, "encode_large_input", &LARGE_BYTE_SIZES[..]);
decode_benchmarks(c, "decode_small_input", &BYTE_SIZES[..]);
decode_benchmarks(c, "decode_large_input", &LARGE_BYTE_SIZES[..]);
}
criterion_group!(benches, bench);
criterion_main!(benches);

1
vendor/base64/clippy.toml vendored Normal file
View file

@ -0,0 +1 @@
msrv = "1.48.0"

81
vendor/base64/examples/base64.rs vendored Normal file
View file

@ -0,0 +1,81 @@
use std::fs::File;
use std::io::{self, Read};
use std::path::PathBuf;
use std::process;
use base64::{alphabet, engine, read, write};
use clap::Parser;
#[derive(Clone, Debug, Parser, strum::EnumString, Default)]
#[strum(serialize_all = "kebab-case")]
enum Alphabet {
#[default]
Standard,
UrlSafe,
}
/// Base64 encode or decode FILE (or standard input), to standard output.
#[derive(Debug, Parser)]
struct Opt {
/// Decode the base64-encoded input (default: encode the input as base64).
#[structopt(short = 'd', long = "decode")]
decode: bool,
/// The encoding alphabet: "standard" (default) or "url-safe".
#[structopt(long = "alphabet")]
alphabet: Option<Alphabet>,
/// Omit padding characters while encoding, and reject them while decoding.
#[structopt(short = 'p', long = "no-padding")]
no_padding: bool,
/// The file to encode or decode.
#[structopt(name = "FILE", parse(from_os_str))]
file: Option<PathBuf>,
}
fn main() {
let opt = Opt::parse();
let stdin;
let mut input: Box<dyn Read> = match opt.file {
None => {
stdin = io::stdin();
Box::new(stdin.lock())
}
Some(ref f) if f.as_os_str() == "-" => {
stdin = io::stdin();
Box::new(stdin.lock())
}
Some(f) => Box::new(File::open(f).unwrap()),
};
let alphabet = opt.alphabet.unwrap_or_default();
let engine = engine::GeneralPurpose::new(
&match alphabet {
Alphabet::Standard => alphabet::STANDARD,
Alphabet::UrlSafe => alphabet::URL_SAFE,
},
match opt.no_padding {
true => engine::general_purpose::NO_PAD,
false => engine::general_purpose::PAD,
},
);
let stdout = io::stdout();
let mut stdout = stdout.lock();
let r = if opt.decode {
let mut decoder = read::DecoderReader::new(&mut input, &engine);
io::copy(&mut decoder, &mut stdout)
} else {
let mut encoder = write::EncoderWriter::new(&mut stdout, &engine);
io::copy(&mut input, &mut encoder)
};
if let Err(e) = r {
eprintln!(
"Base64 {} failed with {}",
if opt.decode { "decode" } else { "encode" },
e
);
process::exit(1);
}
}

34
vendor/base64/icon_CLion.svg vendored Normal file
View file

@ -0,0 +1,34 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 128 128">
<defs>
<linearGradient id="linear-gradient" x1="40.69" y1="-676.56" x2="83.48" y2="-676.56" gradientTransform="matrix(1, 0, 0, -1, 0, -648.86)" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#ed358c"/>
<stop offset="0.16" stop-color="#e9388c"/>
<stop offset="0.3" stop-color="#de418c"/>
<stop offset="0.43" stop-color="#cc508c"/>
<stop offset="0.57" stop-color="#b2658d"/>
<stop offset="0.7" stop-color="#90808d"/>
<stop offset="0.83" stop-color="#67a18e"/>
<stop offset="0.95" stop-color="#37c78f"/>
<stop offset="1" stop-color="#22d88f"/>
</linearGradient>
<linearGradient id="linear-gradient-2" x1="32.58" y1="-665.27" x2="13.76" y2="-791.59" gradientTransform="matrix(1, 0, 0, -1, 0, -648.86)" gradientUnits="userSpaceOnUse">
<stop offset="0.09" stop-color="#22d88f"/>
<stop offset="0.9" stop-color="#029de0"/>
</linearGradient>
<linearGradient id="linear-gradient-3" x1="116.68" y1="-660.66" x2="-12.09" y2="-796.66" xlink:href="#linear-gradient-2"/>
<linearGradient id="linear-gradient-4" x1="73.35" y1="-739.1" x2="122.29" y2="-746.06" xlink:href="#linear-gradient-2"/>
</defs>
<title>icon_CLion</title>
<g>
<polygon points="49.2 51.8 40.6 55.4 48.4 0 77.8 16.2 49.2 51.8" fill="url(#linear-gradient)"/>
<polygon points="44.6 76.8 48.8 0 11.8 23.2 0 94 44.6 76.8" fill="url(#linear-gradient-2)"/>
<polygon points="125.4 38.4 109 4.8 77.8 16.2 55 41.4 0 94 41.6 124.4 93.6 77.2 125.4 38.4" fill="url(#linear-gradient-3)"/>
<polygon points="53.8 54.6 46.6 98.4 75.8 121 107.8 128 128 82.4 53.8 54.6" fill="url(#linear-gradient-4)"/>
</g>
<g>
<rect x="24" y="24" width="80" height="80"/>
<rect x="31.6" y="89" width="30" height="5" fill="#fff"/>
<path d="M31,51.2h0A16.83,16.83,0,0,1,48.2,34c6.2,0,10,2,13,5.2l-4.6,5.4c-2.6-2.4-5.2-3.8-8.4-3.8-5.6,0-9.6,4.6-9.6,10.4h0c0,5.6,4,10.4,9.6,10.4,3.8,0,6.2-1.6,8.8-3.8l4.6,4.6c-3.4,3.6-7.2,6-13.6,6A17,17,0,0,1,31,51.2" fill="#fff"/>
<path d="M66.6,34.4H74v27H88.4v6.2H66.6V34.4Z" fill="#fff"/>
</g>
</svg>


285
vendor/base64/src/alphabet.rs vendored Normal file
View file

@ -0,0 +1,285 @@
//! Provides [Alphabet] and constants for alphabets commonly used in the wild.
use crate::PAD_BYTE;
use core::{convert, fmt};
#[cfg(any(feature = "std", test))]
use std::error;
const ALPHABET_SIZE: usize = 64;
/// An alphabet defines the 64 ASCII characters (symbols) used for base64.
///
/// Common alphabets are provided as constants, and custom alphabets
/// can be made via `from_str` or the `TryFrom<str>` implementation.
///
/// # Examples
///
/// Building and using a custom Alphabet:
///
/// ```
/// let custom = base64::alphabet::Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/").unwrap();
///
/// let engine = base64::engine::GeneralPurpose::new(
/// &custom,
/// base64::engine::general_purpose::PAD);
/// ```
///
/// Building a const:
///
/// ```
/// use base64::alphabet::Alphabet;
///
/// static CUSTOM: Alphabet = {
/// // Result::unwrap() isn't const yet, but panic!() is OK
/// match Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/") {
/// Ok(x) => x,
/// Err(_) => panic!("creation of alphabet failed"),
/// }
/// };
/// ```
///
/// Building lazily:
///
/// ```
/// use base64::{
/// alphabet::Alphabet,
/// engine::{general_purpose::GeneralPurpose, GeneralPurposeConfig},
/// };
/// use once_cell::sync::Lazy;
///
/// static CUSTOM: Lazy<Alphabet> = Lazy::new(||
/// Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/").unwrap()
/// );
/// ```
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Alphabet {
pub(crate) symbols: [u8; ALPHABET_SIZE],
}
impl Alphabet {
/// Performs no checks so that it can be const.
/// Used only for known-valid strings.
const fn from_str_unchecked(alphabet: &str) -> Self {
let mut symbols = [0_u8; ALPHABET_SIZE];
let source_bytes = alphabet.as_bytes();
// a way to copy that's allowed in const fn
let mut index = 0;
while index < ALPHABET_SIZE {
symbols[index] = source_bytes[index];
index += 1;
}
Self { symbols }
}
/// Create an `Alphabet` from a string of 64 unique printable ASCII bytes.
///
/// The `=` byte is not allowed as it is used for padding.
pub const fn new(alphabet: &str) -> Result<Self, ParseAlphabetError> {
let bytes = alphabet.as_bytes();
if bytes.len() != ALPHABET_SIZE {
return Err(ParseAlphabetError::InvalidLength);
}
{
let mut index = 0;
while index < ALPHABET_SIZE {
let byte = bytes[index];
// must be ascii printable. 127 (DEL) is commonly considered printable
// for some reason but clearly unsuitable for base64.
if !(byte >= 32_u8 && byte <= 126_u8) {
return Err(ParseAlphabetError::UnprintableByte(byte));
}
// = is assumed to be padding, so cannot be used as a symbol
if byte == PAD_BYTE {
return Err(ParseAlphabetError::ReservedByte(byte));
}
// Check for duplicates while staying within what const allows.
// It's n^2, but only over 64 hot bytes, and only once, so it's likely in the single digit
// microsecond range.
let mut probe_index = 0;
while probe_index < ALPHABET_SIZE {
if probe_index == index {
probe_index += 1;
continue;
}
let probe_byte = bytes[probe_index];
if byte == probe_byte {
return Err(ParseAlphabetError::DuplicatedByte(byte));
}
probe_index += 1;
}
index += 1;
}
}
Ok(Self::from_str_unchecked(alphabet))
}
/// Create a `&str` from the symbols in the `Alphabet`
pub fn as_str(&self) -> &str {
core::str::from_utf8(&self.symbols).unwrap()
}
}
impl convert::TryFrom<&str> for Alphabet {
type Error = ParseAlphabetError;
fn try_from(value: &str) -> Result<Self, Self::Error> {
Self::new(value)
}
}
/// Possible errors when constructing an [Alphabet] from a `str`.
#[derive(Debug, Eq, PartialEq)]
pub enum ParseAlphabetError {
/// Alphabets must be 64 ASCII bytes
InvalidLength,
/// All bytes must be unique
DuplicatedByte(u8),
/// All bytes must be printable (in the range `[32, 126]`).
UnprintableByte(u8),
/// `=` cannot be used
ReservedByte(u8),
}
impl fmt::Display for ParseAlphabetError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::InvalidLength => write!(f, "Invalid length - must be 64 bytes"),
Self::DuplicatedByte(b) => write!(f, "Duplicated byte: {:#04x}", b),
Self::UnprintableByte(b) => write!(f, "Unprintable byte: {:#04x}", b),
Self::ReservedByte(b) => write!(f, "Reserved byte: {:#04x}", b),
}
}
}
#[cfg(any(feature = "std", test))]
impl error::Error for ParseAlphabetError {}
/// The standard alphabet (with `+` and `/`) specified in [RFC 4648][].
///
/// [RFC 4648]: https://datatracker.ietf.org/doc/html/rfc4648#section-4
pub const STANDARD: Alphabet = Alphabet::from_str_unchecked(
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",
);
/// The URL-safe alphabet (with `-` and `_`) specified in [RFC 4648][].
///
/// [RFC 4648]: https://datatracker.ietf.org/doc/html/rfc4648#section-5
pub const URL_SAFE: Alphabet = Alphabet::from_str_unchecked(
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_",
);
/// The `crypt(3)` alphabet (with `.` and `/` as the _first_ two characters).
///
/// Not standardized, but folk wisdom on the net asserts that this alphabet is what crypt uses.
pub const CRYPT: Alphabet = Alphabet::from_str_unchecked(
"./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
);
/// The bcrypt alphabet.
pub const BCRYPT: Alphabet = Alphabet::from_str_unchecked(
"./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
);
/// The alphabet used in IMAP-modified UTF-7 (with `+` and `,`).
///
/// See [RFC 3501](https://tools.ietf.org/html/rfc3501#section-5.1.3)
pub const IMAP_MUTF7: Alphabet = Alphabet::from_str_unchecked(
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+,",
);
/// The alphabet used in BinHex 4.0 files.
///
/// See [BinHex 4.0 Definition](http://files.stairways.com/other/binhex-40-specs-info.txt)
pub const BIN_HEX: Alphabet = Alphabet::from_str_unchecked(
"!\"#$%&'()*+,-012345689@ABCDEFGHIJKLMNPQRSTUVXYZ[`abcdefhijklmpqr",
);
#[cfg(test)]
mod tests {
use crate::alphabet::*;
use core::convert::TryFrom as _;
#[test]
fn detects_duplicate_start() {
assert_eq!(
ParseAlphabetError::DuplicatedByte(b'A'),
Alphabet::new("AACDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/")
.unwrap_err()
);
}
#[test]
fn detects_duplicate_end() {
assert_eq!(
ParseAlphabetError::DuplicatedByte(b'/'),
Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789//")
.unwrap_err()
);
}
#[test]
fn detects_duplicate_middle() {
assert_eq!(
ParseAlphabetError::DuplicatedByte(b'Z'),
Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZZbcdefghijklmnopqrstuvwxyz0123456789+/")
.unwrap_err()
);
}
#[test]
fn detects_length() {
assert_eq!(
ParseAlphabetError::InvalidLength,
Alphabet::new(
"xxxxxxxxxABCDEFGHIJKLMNOPQRSTUVWXYZZbcdefghijklmnopqrstuvwxyz0123456789+/",
)
.unwrap_err()
);
}
#[test]
fn detects_padding() {
assert_eq!(
ParseAlphabetError::ReservedByte(b'='),
Alphabet::new("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+=")
.unwrap_err()
);
}
#[test]
fn detects_unprintable() {
// form feed
assert_eq!(
ParseAlphabetError::UnprintableByte(0xc),
Alphabet::new("\x0cBCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/")
.unwrap_err()
);
}
#[test]
fn same_as_unchecked() {
assert_eq!(
STANDARD,
Alphabet::try_from("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/")
.unwrap()
);
}
#[test]
fn str_same_as_input() {
let alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
let a = Alphabet::try_from(alphabet).unwrap();
assert_eq!(alphabet, a.as_str())
}
}

172
vendor/base64/src/chunked_encoder.rs vendored Normal file
View file

@ -0,0 +1,172 @@
use crate::{
encode::add_padding,
engine::{Config, Engine},
};
#[cfg(any(feature = "alloc", test))]
use alloc::string::String;
#[cfg(any(feature = "alloc", test))]
use core::str;
/// The output mechanism for ChunkedEncoder's encoded bytes.
pub trait Sink {
type Error;
/// Handle a chunk of encoded base64 data (as UTF-8 bytes)
fn write_encoded_bytes(&mut self, encoded: &[u8]) -> Result<(), Self::Error>;
}
/// A base64 encoder that emits encoded bytes in chunks without heap allocation.
pub struct ChunkedEncoder<'e, E: Engine + ?Sized> {
engine: &'e E,
}
impl<'e, E: Engine + ?Sized> ChunkedEncoder<'e, E> {
pub fn new(engine: &'e E) -> ChunkedEncoder<'e, E> {
ChunkedEncoder { engine }
}
pub fn encode<S: Sink>(&self, bytes: &[u8], sink: &mut S) -> Result<(), S::Error> {
const BUF_SIZE: usize = 1024;
const CHUNK_SIZE: usize = BUF_SIZE / 4 * 3;
let mut buf = [0; BUF_SIZE];
for chunk in bytes.chunks(CHUNK_SIZE) {
let mut len = self.engine.internal_encode(chunk, &mut buf);
if chunk.len() != CHUNK_SIZE && self.engine.config().encode_padding() {
// Final, potentially partial, chunk.
// Only need to consider if padding is needed on a partial chunk since full chunk
// is a multiple of 3, which therefore won't be padded.
// Pad output to multiple of four bytes if required by config.
len += add_padding(len, &mut buf[len..]);
}
sink.write_encoded_bytes(&buf[..len])?;
}
Ok(())
}
}
// A really simple sink that just appends to a string
#[cfg(any(feature = "alloc", test))]
pub(crate) struct StringSink<'a> {
string: &'a mut String,
}
#[cfg(any(feature = "alloc", test))]
impl<'a> StringSink<'a> {
pub(crate) fn new(s: &mut String) -> StringSink {
StringSink { string: s }
}
}
#[cfg(any(feature = "alloc", test))]
impl<'a> Sink for StringSink<'a> {
type Error = ();
fn write_encoded_bytes(&mut self, s: &[u8]) -> Result<(), Self::Error> {
self.string.push_str(str::from_utf8(s).unwrap());
Ok(())
}
}
#[cfg(test)]
pub mod tests {
use rand::{
distributions::{Distribution, Uniform},
Rng, SeedableRng,
};
use crate::{
alphabet::STANDARD,
engine::general_purpose::{GeneralPurpose, GeneralPurposeConfig, PAD},
tests::random_engine,
};
use super::*;
#[test]
fn chunked_encode_empty() {
assert_eq!("", chunked_encode_str(&[], PAD));
}
#[test]
fn chunked_encode_intermediate_fast_loop() {
// > 8 bytes input, will enter the pretty fast loop
assert_eq!("Zm9vYmFyYmF6cXV4", chunked_encode_str(b"foobarbazqux", PAD));
}
#[test]
fn chunked_encode_fast_loop() {
// > 32 bytes input, will enter the uber fast loop
assert_eq!(
"Zm9vYmFyYmF6cXV4cXV1eGNvcmdlZ3JhdWx0Z2FycGx5eg==",
chunked_encode_str(b"foobarbazquxquuxcorgegraultgarplyz", PAD)
);
}
#[test]
fn chunked_encode_slow_loop_only() {
// < 8 bytes input, slow loop only
assert_eq!("Zm9vYmFy", chunked_encode_str(b"foobar", PAD));
}
#[test]
fn chunked_encode_matches_normal_encode_random_string_sink() {
let helper = StringSinkTestHelper;
chunked_encode_matches_normal_encode_random(&helper);
}
pub fn chunked_encode_matches_normal_encode_random<S: SinkTestHelper>(sink_test_helper: &S) {
let mut input_buf: Vec<u8> = Vec::new();
let mut output_buf = String::new();
let mut rng = rand::rngs::SmallRng::from_entropy();
let input_len_range = Uniform::new(1, 10_000);
for _ in 0..20_000 {
input_buf.clear();
output_buf.clear();
let buf_len = input_len_range.sample(&mut rng);
for _ in 0..buf_len {
input_buf.push(rng.gen());
}
let engine = random_engine(&mut rng);
let chunk_encoded_string = sink_test_helper.encode_to_string(&engine, &input_buf);
engine.encode_string(&input_buf, &mut output_buf);
assert_eq!(output_buf, chunk_encoded_string, "input len={}", buf_len);
}
}
fn chunked_encode_str(bytes: &[u8], config: GeneralPurposeConfig) -> String {
let mut s = String::new();
let mut sink = StringSink::new(&mut s);
let engine = GeneralPurpose::new(&STANDARD, config);
let encoder = ChunkedEncoder::new(&engine);
encoder.encode(bytes, &mut sink).unwrap();
s
}
// An abstraction around sinks so that we can have tests that apply easily to any sink implementation
pub trait SinkTestHelper {
fn encode_to_string<E: Engine>(&self, engine: &E, bytes: &[u8]) -> String;
}
struct StringSinkTestHelper;
impl SinkTestHelper for StringSinkTestHelper {
fn encode_to_string<E: Engine>(&self, engine: &E, bytes: &[u8]) -> String {
let encoder = ChunkedEncoder::new(engine);
let mut s = String::new();
let mut sink = StringSink::new(&mut s);
encoder.encode(bytes, &mut sink).unwrap();
s
}
}
}

386
vendor/base64/src/decode.rs vendored Normal file
View file

@ -0,0 +1,386 @@
use crate::engine::{general_purpose::STANDARD, DecodeEstimate, Engine};
#[cfg(any(feature = "alloc", test))]
use alloc::vec::Vec;
use core::fmt;
#[cfg(any(feature = "std", test))]
use std::error;
/// Errors that can occur while decoding.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum DecodeError {
/// An invalid byte was found in the input. The offset and offending byte are provided.
///
/// Padding characters (`=`) interspersed in the encoded form are invalid, as they may only
/// be present as the last 0-2 bytes of input.
///
/// This error may also indicate that extraneous trailing input bytes are present, causing
/// otherwise valid padding to no longer be the last bytes of input.
InvalidByte(usize, u8),
/// The length of the input, as measured in valid base64 symbols, is invalid.
/// There must be 2-4 symbols in the last input quad.
InvalidLength(usize),
/// The last non-padding input symbol's encoded 6 bits have nonzero bits that will be discarded.
/// This is indicative of corrupted or truncated Base64.
/// Unlike [DecodeError::InvalidByte], which reports symbols that aren't in the alphabet,
/// this error is for symbols that are in the alphabet but represent nonsensical encodings.
InvalidLastSymbol(usize, u8),
/// The nature of the padding was not as configured: absent or incorrect when it must be
/// canonical, or present when it must be absent, etc.
InvalidPadding,
}
impl fmt::Display for DecodeError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Self::InvalidByte(index, byte) => {
write!(f, "Invalid symbol {}, offset {}.", byte, index)
}
Self::InvalidLength(len) => write!(f, "Invalid input length: {}", len),
Self::InvalidLastSymbol(index, byte) => {
write!(f, "Invalid last symbol {}, offset {}.", byte, index)
}
Self::InvalidPadding => write!(f, "Invalid padding"),
}
}
}
#[cfg(any(feature = "std", test))]
impl error::Error for DecodeError {}
/// Errors that can occur while decoding into a slice.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum DecodeSliceError {
/// A [DecodeError] occurred
DecodeError(DecodeError),
/// The provided slice is too small.
OutputSliceTooSmall,
}
impl fmt::Display for DecodeSliceError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::DecodeError(e) => write!(f, "DecodeError: {}", e),
Self::OutputSliceTooSmall => write!(f, "Output slice too small"),
}
}
}
#[cfg(any(feature = "std", test))]
impl error::Error for DecodeSliceError {
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
match self {
DecodeSliceError::DecodeError(e) => Some(e),
DecodeSliceError::OutputSliceTooSmall => None,
}
}
}
impl From<DecodeError> for DecodeSliceError {
fn from(e: DecodeError) -> Self {
DecodeSliceError::DecodeError(e)
}
}
/// Decode base64 using the [`STANDARD` engine](STANDARD).
///
/// See [Engine::decode].
#[deprecated(since = "0.21.0", note = "Use Engine::decode")]
#[cfg(any(feature = "alloc", test))]
pub fn decode<T: AsRef<[u8]>>(input: T) -> Result<Vec<u8>, DecodeError> {
STANDARD.decode(input)
}
/// Decode from string reference as octets using the specified [Engine].
///
/// See [Engine::decode].
///Returns a `Result` containing a `Vec<u8>`.
#[deprecated(since = "0.21.0", note = "Use Engine::decode")]
#[cfg(any(feature = "alloc", test))]
pub fn decode_engine<E: Engine, T: AsRef<[u8]>>(
input: T,
engine: &E,
) -> Result<Vec<u8>, DecodeError> {
engine.decode(input)
}
/// Decode from string reference as octets.
///
/// See [Engine::decode_vec].
#[cfg(any(feature = "alloc", test))]
#[deprecated(since = "0.21.0", note = "Use Engine::decode_vec")]
pub fn decode_engine_vec<E: Engine, T: AsRef<[u8]>>(
input: T,
buffer: &mut Vec<u8>,
engine: &E,
) -> Result<(), DecodeError> {
engine.decode_vec(input, buffer)
}
/// Decode the input into the provided output slice.
///
/// See [Engine::decode_slice].
#[deprecated(since = "0.21.0", note = "Use Engine::decode_slice")]
pub fn decode_engine_slice<E: Engine, T: AsRef<[u8]>>(
input: T,
output: &mut [u8],
engine: &E,
) -> Result<usize, DecodeSliceError> {
engine.decode_slice(input, output)
}
/// Returns a conservative estimate of the decoded size of `encoded_len` base64 symbols (rounded up
/// to the next group of 3 decoded bytes).
///
/// The resulting length will be a safe choice for the size of a decode buffer, but may have up to
/// 2 trailing bytes that won't end up being needed.
///
/// # Examples
///
/// ```
/// use base64::decoded_len_estimate;
///
/// assert_eq!(3, decoded_len_estimate(1));
/// assert_eq!(3, decoded_len_estimate(2));
/// assert_eq!(3, decoded_len_estimate(3));
/// assert_eq!(3, decoded_len_estimate(4));
/// // start of the next quad of encoded symbols
/// assert_eq!(6, decoded_len_estimate(5));
/// ```
pub fn decoded_len_estimate(encoded_len: usize) -> usize {
STANDARD
.internal_decoded_len_estimate(encoded_len)
.decoded_len_estimate()
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{
alphabet,
engine::{general_purpose, Config, GeneralPurpose},
tests::{assert_encode_sanity, random_engine},
};
use rand::{
distributions::{Distribution, Uniform},
Rng, SeedableRng,
};
#[test]
fn decode_into_nonempty_vec_doesnt_clobber_existing_prefix() {
let mut orig_data = Vec::new();
let mut encoded_data = String::new();
let mut decoded_with_prefix = Vec::new();
let mut decoded_without_prefix = Vec::new();
let mut prefix = Vec::new();
let prefix_len_range = Uniform::new(0, 1000);
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
orig_data.clear();
encoded_data.clear();
decoded_with_prefix.clear();
decoded_without_prefix.clear();
prefix.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
let engine = random_engine(&mut rng);
engine.encode_string(&orig_data, &mut encoded_data);
assert_encode_sanity(&encoded_data, engine.config().encode_padding(), input_len);
let prefix_len = prefix_len_range.sample(&mut rng);
// fill the buf with a prefix
for _ in 0..prefix_len {
prefix.push(rng.gen());
}
decoded_with_prefix.resize(prefix_len, 0);
decoded_with_prefix.copy_from_slice(&prefix);
// decode into the non-empty buf
engine
.decode_vec(&encoded_data, &mut decoded_with_prefix)
.unwrap();
// also decode into the empty buf
engine
.decode_vec(&encoded_data, &mut decoded_without_prefix)
.unwrap();
assert_eq!(
prefix_len + decoded_without_prefix.len(),
decoded_with_prefix.len()
);
assert_eq!(orig_data, decoded_without_prefix);
// append plain decode onto prefix
prefix.append(&mut decoded_without_prefix);
assert_eq!(prefix, decoded_with_prefix);
}
}
#[test]
fn decode_slice_doesnt_clobber_existing_prefix_or_suffix() {
do_decode_slice_doesnt_clobber_existing_prefix_or_suffix(|e, input, output| {
e.decode_slice(input, output).unwrap()
})
}
#[test]
fn decode_slice_unchecked_doesnt_clobber_existing_prefix_or_suffix() {
do_decode_slice_doesnt_clobber_existing_prefix_or_suffix(|e, input, output| {
e.decode_slice_unchecked(input, output).unwrap()
})
}
#[test]
fn decode_engine_estimation_works_for_various_lengths() {
let engine = GeneralPurpose::new(&alphabet::STANDARD, general_purpose::NO_PAD);
for num_prefix_quads in 0..100 {
for suffix in &["AA", "AAA", "AAAA"] {
let mut prefix = "AAAA".repeat(num_prefix_quads);
prefix.push_str(suffix);
// make sure no overflow (and thus a panic) occurs
let res = engine.decode(prefix);
assert!(res.is_ok());
}
}
}
#[test]
fn decode_slice_output_length_errors() {
for num_quads in 1..100 {
let input = "AAAA".repeat(num_quads);
let mut vec = vec![0; (num_quads - 1) * 3];
assert_eq!(
DecodeSliceError::OutputSliceTooSmall,
STANDARD.decode_slice(&input, &mut vec).unwrap_err()
);
vec.push(0);
assert_eq!(
DecodeSliceError::OutputSliceTooSmall,
STANDARD.decode_slice(&input, &mut vec).unwrap_err()
);
vec.push(0);
assert_eq!(
DecodeSliceError::OutputSliceTooSmall,
STANDARD.decode_slice(&input, &mut vec).unwrap_err()
);
vec.push(0);
// now it works
assert_eq!(
num_quads * 3,
STANDARD.decode_slice(&input, &mut vec).unwrap()
);
}
}
fn do_decode_slice_doesnt_clobber_existing_prefix_or_suffix<
F: Fn(&GeneralPurpose, &[u8], &mut [u8]) -> usize,
>(
call_decode: F,
) {
let mut orig_data = Vec::new();
let mut encoded_data = String::new();
let mut decode_buf = Vec::new();
let mut decode_buf_copy: Vec<u8> = Vec::new();
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
orig_data.clear();
encoded_data.clear();
decode_buf.clear();
decode_buf_copy.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
let engine = random_engine(&mut rng);
engine.encode_string(&orig_data, &mut encoded_data);
assert_encode_sanity(&encoded_data, engine.config().encode_padding(), input_len);
// fill the buffer with random garbage, long enough to have some room before and after
for _ in 0..5000 {
decode_buf.push(rng.gen());
}
// keep a copy for later comparison
decode_buf_copy.extend(decode_buf.iter());
let offset = 1000;
// decode into the non-empty buf
let decode_bytes_written =
call_decode(&engine, encoded_data.as_bytes(), &mut decode_buf[offset..]);
assert_eq!(orig_data.len(), decode_bytes_written);
assert_eq!(
orig_data,
&decode_buf[offset..(offset + decode_bytes_written)]
);
assert_eq!(&decode_buf_copy[0..offset], &decode_buf[0..offset]);
assert_eq!(
&decode_buf_copy[offset + decode_bytes_written..],
&decode_buf[offset + decode_bytes_written..]
);
}
}
}
#[allow(deprecated)]
#[cfg(test)]
mod coverage_gaming {
use super::*;
use std::error::Error;
#[test]
fn decode_error() {
let _ = format!("{:?}", DecodeError::InvalidPadding.clone());
let _ = format!(
"{} {} {} {}",
DecodeError::InvalidByte(0, 0),
DecodeError::InvalidLength(0),
DecodeError::InvalidLastSymbol(0, 0),
DecodeError::InvalidPadding,
);
}
#[test]
fn decode_slice_error() {
let _ = format!("{:?}", DecodeSliceError::OutputSliceTooSmall.clone());
let _ = format!(
"{} {}",
DecodeSliceError::OutputSliceTooSmall,
DecodeSliceError::DecodeError(DecodeError::InvalidPadding)
);
let _ = DecodeSliceError::OutputSliceTooSmall.source();
let _ = DecodeSliceError::DecodeError(DecodeError::InvalidPadding).source();
}
#[test]
fn deprecated_fns() {
let _ = decode("");
let _ = decode_engine("", &crate::prelude::BASE64_STANDARD);
let _ = decode_engine_vec("", &mut Vec::new(), &crate::prelude::BASE64_STANDARD);
let _ = decode_engine_slice("", &mut [], &crate::prelude::BASE64_STANDARD);
}
#[test]
fn decoded_len_est() {
assert_eq!(3, decoded_len_estimate(4));
}
}

88
vendor/base64/src/display.rs vendored Normal file
View file

@ -0,0 +1,88 @@
//! Enables base64'd output anywhere you might use a `Display` implementation, like a format string.
//!
//! ```
//! use base64::{display::Base64Display, engine::general_purpose::STANDARD};
//!
//! let data = vec![0x0, 0x1, 0x2, 0x3];
//! let wrapper = Base64Display::new(&data, &STANDARD);
//!
//! assert_eq!("base64: AAECAw==", format!("base64: {}", wrapper));
//! ```
use super::chunked_encoder::ChunkedEncoder;
use crate::engine::Engine;
use core::fmt::{Display, Formatter};
use core::{fmt, str};
/// A convenience wrapper for base64'ing bytes into a format string without heap allocation.
pub struct Base64Display<'a, 'e, E: Engine> {
bytes: &'a [u8],
chunked_encoder: ChunkedEncoder<'e, E>,
}
impl<'a, 'e, E: Engine> Base64Display<'a, 'e, E> {
/// Create a `Base64Display` with the provided engine.
pub fn new(bytes: &'a [u8], engine: &'e E) -> Base64Display<'a, 'e, E> {
Base64Display {
bytes,
chunked_encoder: ChunkedEncoder::new(engine),
}
}
}
impl<'a, 'e, E: Engine> Display for Base64Display<'a, 'e, E> {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
let mut sink = FormatterSink { f: formatter };
self.chunked_encoder.encode(self.bytes, &mut sink)
}
}
struct FormatterSink<'a, 'b: 'a> {
f: &'a mut Formatter<'b>,
}
impl<'a, 'b: 'a> super::chunked_encoder::Sink for FormatterSink<'a, 'b> {
type Error = fmt::Error;
fn write_encoded_bytes(&mut self, encoded: &[u8]) -> Result<(), Self::Error> {
// Avoid unsafe. If max performance is needed, write your own display wrapper that uses
// unsafe here to gain about 10-15%.
self.f
.write_str(str::from_utf8(encoded).expect("base64 data was not utf8"))
}
}
#[cfg(test)]
mod tests {
use super::super::chunked_encoder::tests::{
chunked_encode_matches_normal_encode_random, SinkTestHelper,
};
use super::*;
use crate::engine::general_purpose::STANDARD;
#[test]
fn basic_display() {
assert_eq!(
"~$Zm9vYmFy#*",
format!("~${}#*", Base64Display::new(b"foobar", &STANDARD))
);
assert_eq!(
"~$Zm9vYmFyZg==#*",
format!("~${}#*", Base64Display::new(b"foobarf", &STANDARD))
);
}
#[test]
fn display_encode_matches_normal_encode() {
let helper = DisplaySinkTestHelper;
chunked_encode_matches_normal_encode_random(&helper);
}
struct DisplaySinkTestHelper;
impl SinkTestHelper for DisplaySinkTestHelper {
fn encode_to_string<E: Engine>(&self, engine: &E, bytes: &[u8]) -> String {
format!("{}", Base64Display::new(bytes, engine))
}
}
}

492
vendor/base64/src/encode.rs vendored Normal file
View file

@ -0,0 +1,492 @@
#[cfg(any(feature = "alloc", test))]
use alloc::string::String;
use core::fmt;
#[cfg(any(feature = "std", test))]
use std::error;
#[cfg(any(feature = "alloc", test))]
use crate::engine::general_purpose::STANDARD;
use crate::engine::{Config, Engine};
use crate::PAD_BYTE;
/// Encode arbitrary octets as base64 using the [`STANDARD` engine](STANDARD).
///
/// See [Engine::encode].
#[allow(unused)]
#[deprecated(since = "0.21.0", note = "Use Engine::encode")]
#[cfg(any(feature = "alloc", test))]
pub fn encode<T: AsRef<[u8]>>(input: T) -> String {
STANDARD.encode(input)
}
///Encode arbitrary octets as base64 using the provided `Engine` into a new `String`.
///
/// See [Engine::encode].
#[allow(unused)]
#[deprecated(since = "0.21.0", note = "Use Engine::encode")]
#[cfg(any(feature = "alloc", test))]
pub fn encode_engine<E: Engine, T: AsRef<[u8]>>(input: T, engine: &E) -> String {
engine.encode(input)
}
///Encode arbitrary octets as base64 into a supplied `String`.
///
/// See [Engine::encode_string].
#[allow(unused)]
#[deprecated(since = "0.21.0", note = "Use Engine::encode_string")]
#[cfg(any(feature = "alloc", test))]
pub fn encode_engine_string<E: Engine, T: AsRef<[u8]>>(
input: T,
output_buf: &mut String,
engine: &E,
) {
engine.encode_string(input, output_buf)
}
/// Encode arbitrary octets as base64 into a supplied slice.
///
/// See [Engine::encode_slice].
#[allow(unused)]
#[deprecated(since = "0.21.0", note = "Use Engine::encode_slice")]
pub fn encode_engine_slice<E: Engine, T: AsRef<[u8]>>(
input: T,
output_buf: &mut [u8],
engine: &E,
) -> Result<usize, EncodeSliceError> {
engine.encode_slice(input, output_buf)
}
/// B64-encode and pad (if configured).
///
/// This helper exists to avoid recalculating encoded_size, which is relatively expensive on short
/// inputs.
///
/// `encoded_size` is the encoded size calculated for `input`.
///
/// `output` must be of size `encoded_size`.
///
/// All bytes in `output` will be written to since it is exactly the size of the output.
pub(crate) fn encode_with_padding<E: Engine + ?Sized>(
input: &[u8],
output: &mut [u8],
engine: &E,
expected_encoded_size: usize,
) {
debug_assert_eq!(expected_encoded_size, output.len());
let b64_bytes_written = engine.internal_encode(input, output);
let padding_bytes = if engine.config().encode_padding() {
add_padding(b64_bytes_written, &mut output[b64_bytes_written..])
} else {
0
};
let encoded_bytes = b64_bytes_written
.checked_add(padding_bytes)
.expect("usize overflow when calculating b64 length");
debug_assert_eq!(expected_encoded_size, encoded_bytes);
}
/// Calculate the base64 encoded length for a given input length, optionally including any
/// appropriate padding bytes.
///
/// Returns `None` if the encoded length can't be represented in `usize`. This will happen for
/// input lengths in approximately the top quarter of the range of `usize`.
pub const fn encoded_len(bytes_len: usize, padding: bool) -> Option<usize> {
let rem = bytes_len % 3;
let complete_input_chunks = bytes_len / 3;
// `?` is disallowed in const, and `let Some(_) = _ else` requires 1.65.0, whereas this
// messier syntax works on 1.48
let complete_chunk_output =
if let Some(complete_chunk_output) = complete_input_chunks.checked_mul(4) {
complete_chunk_output
} else {
return None;
};
if rem > 0 {
if padding {
complete_chunk_output.checked_add(4)
} else {
let encoded_rem = match rem {
1 => 2,
// only other possible remainder is 2
// can't use a separate _ => unreachable!() in const fns in ancient rust versions
_ => 3,
};
complete_chunk_output.checked_add(encoded_rem)
}
} else {
Some(complete_chunk_output)
}
}
/// Write padding characters.
/// `unpadded_output_len` is the size of the unpadded but base64 encoded data.
/// `output` is the slice where padding should be written, of length at least 2.
///
/// Returns the number of padding bytes written.
pub(crate) fn add_padding(unpadded_output_len: usize, output: &mut [u8]) -> usize {
let pad_bytes = (4 - (unpadded_output_len % 4)) % 4;
// for just a couple bytes, this has better performance than using
// .fill(), or iterating over mutable refs, which call memset()
#[allow(clippy::needless_range_loop)]
for i in 0..pad_bytes {
output[i] = PAD_BYTE;
}
pad_bytes
}
/// Errors that can occur while encoding into a slice.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum EncodeSliceError {
/// The provided slice is too small.
OutputSliceTooSmall,
}
impl fmt::Display for EncodeSliceError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::OutputSliceTooSmall => write!(f, "Output slice too small"),
}
}
}
#[cfg(any(feature = "std", test))]
impl error::Error for EncodeSliceError {}
#[cfg(test)]
mod tests {
use super::*;
use crate::{
alphabet,
engine::general_purpose::{GeneralPurpose, NO_PAD, STANDARD},
tests::{assert_encode_sanity, random_config, random_engine},
};
use rand::{
distributions::{Distribution, Uniform},
Rng, SeedableRng,
};
use std::str;
const URL_SAFE_NO_PAD_ENGINE: GeneralPurpose = GeneralPurpose::new(&alphabet::URL_SAFE, NO_PAD);
#[test]
fn encoded_size_correct_standard() {
assert_encoded_length(0, 0, &STANDARD, true);
assert_encoded_length(1, 4, &STANDARD, true);
assert_encoded_length(2, 4, &STANDARD, true);
assert_encoded_length(3, 4, &STANDARD, true);
assert_encoded_length(4, 8, &STANDARD, true);
assert_encoded_length(5, 8, &STANDARD, true);
assert_encoded_length(6, 8, &STANDARD, true);
assert_encoded_length(7, 12, &STANDARD, true);
assert_encoded_length(8, 12, &STANDARD, true);
assert_encoded_length(9, 12, &STANDARD, true);
assert_encoded_length(54, 72, &STANDARD, true);
assert_encoded_length(55, 76, &STANDARD, true);
assert_encoded_length(56, 76, &STANDARD, true);
assert_encoded_length(57, 76, &STANDARD, true);
assert_encoded_length(58, 80, &STANDARD, true);
}
#[test]
fn encoded_size_correct_no_pad() {
assert_encoded_length(0, 0, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(1, 2, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(2, 3, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(3, 4, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(4, 6, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(5, 7, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(6, 8, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(7, 10, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(8, 11, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(9, 12, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(54, 72, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(55, 74, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(56, 75, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(57, 76, &URL_SAFE_NO_PAD_ENGINE, false);
assert_encoded_length(58, 78, &URL_SAFE_NO_PAD_ENGINE, false);
}
#[test]
fn encoded_size_overflow() {
assert_eq!(None, encoded_len(usize::MAX, true));
}
#[test]
fn encode_engine_string_into_nonempty_buffer_doesnt_clobber_prefix() {
let mut orig_data = Vec::new();
let mut prefix = String::new();
let mut encoded_data_no_prefix = String::new();
let mut encoded_data_with_prefix = String::new();
let mut decoded = Vec::new();
let prefix_len_range = Uniform::new(0, 1000);
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
orig_data.clear();
prefix.clear();
encoded_data_no_prefix.clear();
encoded_data_with_prefix.clear();
decoded.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
let prefix_len = prefix_len_range.sample(&mut rng);
for _ in 0..prefix_len {
// getting convenient random single-byte printable chars that aren't base64 is
// annoying
prefix.push('#');
}
encoded_data_with_prefix.push_str(&prefix);
let engine = random_engine(&mut rng);
engine.encode_string(&orig_data, &mut encoded_data_no_prefix);
engine.encode_string(&orig_data, &mut encoded_data_with_prefix);
assert_eq!(
encoded_data_no_prefix.len() + prefix_len,
encoded_data_with_prefix.len()
);
assert_encode_sanity(
&encoded_data_no_prefix,
engine.config().encode_padding(),
input_len,
);
assert_encode_sanity(
&encoded_data_with_prefix[prefix_len..],
engine.config().encode_padding(),
input_len,
);
// append plain encode onto prefix
prefix.push_str(&encoded_data_no_prefix);
assert_eq!(prefix, encoded_data_with_prefix);
engine
.decode_vec(&encoded_data_no_prefix, &mut decoded)
.unwrap();
assert_eq!(orig_data, decoded);
}
}
#[test]
fn encode_engine_slice_into_nonempty_buffer_doesnt_clobber_suffix() {
let mut orig_data = Vec::new();
let mut encoded_data = Vec::new();
let mut encoded_data_original_state = Vec::new();
let mut decoded = Vec::new();
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
orig_data.clear();
encoded_data.clear();
encoded_data_original_state.clear();
decoded.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
// plenty of existing garbage in the encoded buffer
for _ in 0..10 * input_len {
encoded_data.push(rng.gen());
}
encoded_data_original_state.extend_from_slice(&encoded_data);
let engine = random_engine(&mut rng);
let encoded_size = encoded_len(input_len, engine.config().encode_padding()).unwrap();
assert_eq!(
encoded_size,
engine.encode_slice(&orig_data, &mut encoded_data).unwrap()
);
assert_encode_sanity(
str::from_utf8(&encoded_data[0..encoded_size]).unwrap(),
engine.config().encode_padding(),
input_len,
);
assert_eq!(
&encoded_data[encoded_size..],
&encoded_data_original_state[encoded_size..]
);
engine
.decode_vec(&encoded_data[0..encoded_size], &mut decoded)
.unwrap();
assert_eq!(orig_data, decoded);
}
}
#[test]
fn encode_to_slice_random_valid_utf8() {
let mut input = Vec::new();
let mut output = Vec::new();
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
input.clear();
output.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
input.push(rng.gen());
}
let config = random_config(&mut rng);
let engine = random_engine(&mut rng);
// fill up the output buffer with garbage
let encoded_size = encoded_len(input_len, config.encode_padding()).unwrap();
for _ in 0..encoded_size {
output.push(rng.gen());
}
let orig_output_buf = output.clone();
let bytes_written = engine.internal_encode(&input, &mut output);
// make sure the part beyond bytes_written is the same garbage it was before
assert_eq!(orig_output_buf[bytes_written..], output[bytes_written..]);
// make sure the encoded bytes are UTF-8
let _ = str::from_utf8(&output[0..bytes_written]).unwrap();
}
}
#[test]
fn encode_with_padding_random_valid_utf8() {
let mut input = Vec::new();
let mut output = Vec::new();
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
input.clear();
output.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
input.push(rng.gen());
}
let engine = random_engine(&mut rng);
// fill up the output buffer with garbage
let encoded_size = encoded_len(input_len, engine.config().encode_padding()).unwrap();
for _ in 0..encoded_size + 1000 {
output.push(rng.gen());
}
let orig_output_buf = output.clone();
encode_with_padding(&input, &mut output[0..encoded_size], &engine, encoded_size);
// make sure the part beyond b64 is the same garbage it was before
assert_eq!(orig_output_buf[encoded_size..], output[encoded_size..]);
// make sure the encoded bytes are UTF-8
let _ = str::from_utf8(&output[0..encoded_size]).unwrap();
}
}
#[test]
fn add_padding_random_valid_utf8() {
let mut output = Vec::new();
let mut rng = rand::rngs::SmallRng::from_entropy();
// cover our bases for length % 4
for unpadded_output_len in 0..20 {
output.clear();
// fill output with random
for _ in 0..100 {
output.push(rng.gen());
}
let orig_output_buf = output.clone();
let bytes_written = add_padding(unpadded_output_len, &mut output);
// make sure the part beyond bytes_written is the same garbage it was before
assert_eq!(orig_output_buf[bytes_written..], output[bytes_written..]);
// make sure the encoded bytes are UTF-8
let _ = str::from_utf8(&output[0..bytes_written]).unwrap();
}
}
fn assert_encoded_length<E: Engine>(
input_len: usize,
enc_len: usize,
engine: &E,
padded: bool,
) {
assert_eq!(enc_len, encoded_len(input_len, padded).unwrap());
let mut bytes: Vec<u8> = Vec::new();
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..input_len {
bytes.push(rng.gen());
}
let encoded = engine.encode(&bytes);
assert_encode_sanity(&encoded, padded, input_len);
assert_eq!(enc_len, encoded.len());
}
#[test]
fn encode_imap() {
assert_eq!(
&GeneralPurpose::new(&alphabet::IMAP_MUTF7, NO_PAD).encode(b"\xFB\xFF"),
&GeneralPurpose::new(&alphabet::STANDARD, NO_PAD)
.encode(b"\xFB\xFF")
.replace('/', ",")
);
}
}

View file

@ -0,0 +1,357 @@
use crate::{
engine::{general_purpose::INVALID_VALUE, DecodeEstimate, DecodeMetadata, DecodePaddingMode},
DecodeError, DecodeSliceError, PAD_BYTE,
};
#[doc(hidden)]
pub struct GeneralPurposeEstimate {
/// input len % 4
rem: usize,
conservative_decoded_len: usize,
}
impl GeneralPurposeEstimate {
pub(crate) fn new(encoded_len: usize) -> Self {
let rem = encoded_len % 4;
Self {
rem,
conservative_decoded_len: (encoded_len / 4 + (rem > 0) as usize) * 3,
}
}
}
impl DecodeEstimate for GeneralPurposeEstimate {
fn decoded_len_estimate(&self) -> usize {
self.conservative_decoded_len
}
}
/// Helper to avoid duplicating num_chunks calculation, which is costly on short inputs.
/// Returns the decode metadata, or an error.
// We're on the fragile edge of compiler heuristics here. If this is not inlined, slow. If this is
// inlined(always), a different slow. plain ol' inline makes the benchmarks happiest at the moment,
// but this is fragile and the best setting changes with only minor code modifications.
#[inline]
pub(crate) fn decode_helper(
input: &[u8],
estimate: GeneralPurposeEstimate,
output: &mut [u8],
decode_table: &[u8; 256],
decode_allow_trailing_bits: bool,
padding_mode: DecodePaddingMode,
) -> Result<DecodeMetadata, DecodeSliceError> {
let input_complete_nonterminal_quads_len =
complete_quads_len(input, estimate.rem, output.len(), decode_table)?;
const UNROLLED_INPUT_CHUNK_SIZE: usize = 32;
const UNROLLED_OUTPUT_CHUNK_SIZE: usize = UNROLLED_INPUT_CHUNK_SIZE / 4 * 3;
let input_complete_quads_after_unrolled_chunks_len =
input_complete_nonterminal_quads_len % UNROLLED_INPUT_CHUNK_SIZE;
let input_unrolled_loop_len =
input_complete_nonterminal_quads_len - input_complete_quads_after_unrolled_chunks_len;
// chunks of 32 bytes
for (chunk_index, chunk) in input[..input_unrolled_loop_len]
.chunks_exact(UNROLLED_INPUT_CHUNK_SIZE)
.enumerate()
{
let input_index = chunk_index * UNROLLED_INPUT_CHUNK_SIZE;
let chunk_output = &mut output[chunk_index * UNROLLED_OUTPUT_CHUNK_SIZE
..(chunk_index + 1) * UNROLLED_OUTPUT_CHUNK_SIZE];
decode_chunk_8(
&chunk[0..8],
input_index,
decode_table,
&mut chunk_output[0..6],
)?;
decode_chunk_8(
&chunk[8..16],
input_index + 8,
decode_table,
&mut chunk_output[6..12],
)?;
decode_chunk_8(
&chunk[16..24],
input_index + 16,
decode_table,
&mut chunk_output[12..18],
)?;
decode_chunk_8(
&chunk[24..32],
input_index + 24,
decode_table,
&mut chunk_output[18..24],
)?;
}
// remaining quads, except for the last possibly partial one, as it may have padding
let output_unrolled_loop_len = input_unrolled_loop_len / 4 * 3;
let output_complete_quad_len = input_complete_nonterminal_quads_len / 4 * 3;
{
let output_after_unroll = &mut output[output_unrolled_loop_len..output_complete_quad_len];
for (chunk_index, chunk) in input
[input_unrolled_loop_len..input_complete_nonterminal_quads_len]
.chunks_exact(4)
.enumerate()
{
let chunk_output = &mut output_after_unroll[chunk_index * 3..chunk_index * 3 + 3];
decode_chunk_4(
chunk,
input_unrolled_loop_len + chunk_index * 4,
decode_table,
chunk_output,
)?;
}
}
super::decode_suffix::decode_suffix(
input,
input_complete_nonterminal_quads_len,
output,
output_complete_quad_len,
decode_table,
decode_allow_trailing_bits,
padding_mode,
)
}
/// Returns the length of the input prefix made up of complete quads, excluding the final quad even if it is complete.
///
/// Returns an error if the output len is not big enough for decoding those complete quads, or if
/// the input % 4 == 1, and that last byte is an invalid value other than a pad byte.
///
/// - `input` is the base64 input
/// - `input_len_rem` is input len % 4
/// - `output_len` is the length of the output slice
pub(crate) fn complete_quads_len(
input: &[u8],
input_len_rem: usize,
output_len: usize,
decode_table: &[u8; 256],
) -> Result<usize, DecodeSliceError> {
debug_assert!(input.len() % 4 == input_len_rem);
// detect a trailing invalid byte, like a newline, as a user convenience
if input_len_rem == 1 {
let last_byte = input[input.len() - 1];
// exclude pad bytes; might be part of padding that extends from earlier in the input
if last_byte != PAD_BYTE && decode_table[usize::from(last_byte)] == INVALID_VALUE {
return Err(DecodeError::InvalidByte(input.len() - 1, last_byte).into());
}
};
// skip last quad, even if it's complete, as it may have padding
let input_complete_nonterminal_quads_len = input
.len()
.saturating_sub(input_len_rem)
// if rem was 0, subtract 4 to avoid padding
.saturating_sub((input_len_rem == 0) as usize * 4);
debug_assert!(
input.is_empty() || (1..=4).contains(&(input.len() - input_complete_nonterminal_quads_len))
);
// check that everything except the last quad handled by decode_suffix will fit
if output_len < input_complete_nonterminal_quads_len / 4 * 3 {
return Err(DecodeSliceError::OutputSliceTooSmall);
};
Ok(input_complete_nonterminal_quads_len)
}
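The length arithmetic in `complete_quads_len` is easy to misread, so here is a minimal sketch of just the "skip the last quad" part (a simplification that omits the error checks above; the helper name is illustrative):

```rust
// Length of input handled by the bulk decode loops; the final quad (or partial
// quad) is always left for decode_suffix.
fn nonterminal_quads_len(input_len: usize) -> usize {
    let rem = input_len % 4;
    input_len
        .saturating_sub(rem)                     // drop the trailing partial quad, if any
        .saturating_sub((rem == 0) as usize * 4) // if there was none, hold back one full quad
}

fn main() {
    assert_eq!(nonterminal_quads_len(0), 0);  // empty input
    assert_eq!(nonterminal_quads_len(8), 4);  // the last complete quad goes to decode_suffix
    assert_eq!(nonterminal_quads_len(10), 8); // rem == 2, the two leftovers go to decode_suffix
}
```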
/// Decode 8 bytes of input into 6 bytes of output.
///
/// `input` is the 8 bytes to decode.
/// `index_at_start_of_input` is the offset in the overall input (used for reporting errors
/// accurately)
/// `decode_table` is the lookup table for the particular base64 alphabet.
/// `output` will have its first 6 bytes overwritten
// yes, really inline (worth 30-50% speedup)
#[inline(always)]
fn decode_chunk_8(
input: &[u8],
index_at_start_of_input: usize,
decode_table: &[u8; 256],
output: &mut [u8],
) -> Result<(), DecodeError> {
let morsel = decode_table[usize::from(input[0])];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(index_at_start_of_input, input[0]));
}
let mut accum = u64::from(morsel) << 58;
let morsel = decode_table[usize::from(input[1])];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 1,
input[1],
));
}
accum |= u64::from(morsel) << 52;
let morsel = decode_table[usize::from(input[2])];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 2,
input[2],
));
}
accum |= u64::from(morsel) << 46;
let morsel = decode_table[usize::from(input[3])];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 3,
input[3],
));
}
accum |= u64::from(morsel) << 40;
let morsel = decode_table[usize::from(input[4])];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 4,
input[4],
));
}
accum |= u64::from(morsel) << 34;
let morsel = decode_table[usize::from(input[5])];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 5,
input[5],
));
}
accum |= u64::from(morsel) << 28;
let morsel = decode_table[usize::from(input[6])];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 6,
input[6],
));
}
accum |= u64::from(morsel) << 22;
let morsel = decode_table[usize::from(input[7])];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 7,
input[7],
));
}
accum |= u64::from(morsel) << 16;
output[..6].copy_from_slice(&accum.to_be_bytes()[..6]);
Ok(())
}
/// Like [decode_chunk_8] but for 4 bytes of input and 3 bytes of output.
#[inline(always)]
fn decode_chunk_4(
input: &[u8],
index_at_start_of_input: usize,
decode_table: &[u8; 256],
output: &mut [u8],
) -> Result<(), DecodeError> {
let morsel = decode_table[usize::from(input[0])];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(index_at_start_of_input, input[0]));
}
let mut accum = u32::from(morsel) << 26;
let morsel = decode_table[usize::from(input[1])];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 1,
input[1],
));
}
accum |= u32::from(morsel) << 20;
let morsel = decode_table[usize::from(input[2])];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 2,
input[2],
));
}
accum |= u32::from(morsel) << 14;
let morsel = decode_table[usize::from(input[3])];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 3,
input[3],
));
}
accum |= u32::from(morsel) << 8;
output[..3].copy_from_slice(&accum.to_be_bytes()[..3]);
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use crate::engine::general_purpose::STANDARD;
#[test]
fn decode_chunk_8_writes_only_6_bytes() {
let input = b"Zm9vYmFy"; // "foobar"
let mut output = [0_u8, 1, 2, 3, 4, 5, 6, 7];
decode_chunk_8(&input[..], 0, &STANDARD.decode_table, &mut output).unwrap();
assert_eq!(&vec![b'f', b'o', b'o', b'b', b'a', b'r', 6, 7], &output);
}
#[test]
fn decode_chunk_4_writes_only_3_bytes() {
let input = b"Zm9v"; // "foobar"
let mut output = [0_u8, 1, 2, 3];
decode_chunk_4(&input[..], 0, &STANDARD.decode_table, &mut output).unwrap();
assert_eq!(&vec![b'f', b'o', b'o', 3], &output);
}
#[test]
fn estimate_short_lengths() {
for (range, decoded_len_estimate) in [
(0..=0, 0),
(1..=4, 3),
(5..=8, 6),
(9..=12, 9),
(13..=16, 12),
(17..=20, 15),
] {
for encoded_len in range {
let estimate = GeneralPurposeEstimate::new(encoded_len);
assert_eq!(decoded_len_estimate, estimate.decoded_len_estimate());
}
}
}
#[test]
fn estimate_via_u128_inflation() {
// cover both ends of usize
(0..1000)
.chain(usize::MAX - 1000..=usize::MAX)
.for_each(|encoded_len| {
// inflate to 128 bit type to be able to safely use the easy formulas
let len_128 = encoded_len as u128;
let estimate = GeneralPurposeEstimate::new(encoded_len);
assert_eq!(
(len_128 + 3) / 4 * 3,
estimate.conservative_decoded_len as u128
);
})
}
}

View file

@ -0,0 +1,162 @@
use crate::{
engine::{general_purpose::INVALID_VALUE, DecodeMetadata, DecodePaddingMode},
DecodeError, DecodeSliceError, PAD_BYTE,
};
/// Decode the last 0-4 bytes, checking for trailing set bits and padding per the provided
/// parameters.
///
/// Returns the decode metadata representing the total number of bytes decoded, including the ones
/// indicated as already written by `output_index`.
pub(crate) fn decode_suffix(
input: &[u8],
input_index: usize,
output: &mut [u8],
mut output_index: usize,
decode_table: &[u8; 256],
decode_allow_trailing_bits: bool,
padding_mode: DecodePaddingMode,
) -> Result<DecodeMetadata, DecodeSliceError> {
debug_assert!((input.len() - input_index) <= 4);
// Decode any leftovers that might not be a complete input chunk of 4 bytes.
// Use a u32 as a stack-resident 4 byte buffer.
let mut morsels_in_leftover = 0;
let mut padding_bytes_count = 0;
// offset from input_index
let mut first_padding_offset: usize = 0;
let mut last_symbol = 0_u8;
let mut morsels = [0_u8; 4];
for (leftover_index, &b) in input[input_index..].iter().enumerate() {
// '=' padding
if b == PAD_BYTE {
// There can be bad padding bytes in a few ways:
// 1 - Padding with non-padding characters after it
// 2 - Padding after zero or one characters in the current quad (should only
// be after 2 or 3 chars)
// 3 - More than two characters of padding. If 3 or 4 padding chars
// are in the same quad, that implies it will be caught by #2.
// If it spreads from one quad to another, it will be an invalid byte
// in the first quad.
// 4 - Non-canonical padding -- 1 byte when it should be 2, etc.
// Per config, non-canonical but still functional non- or partially-padded base64
// may be treated as an error condition.
if leftover_index < 2 {
// Check for error #2.
// Either the previous byte was padding, in which case we would have already hit
// this case, or it wasn't, in which case this is the first such error.
debug_assert!(
leftover_index == 0 || (leftover_index == 1 && padding_bytes_count == 0)
);
let bad_padding_index = input_index + leftover_index;
return Err(DecodeError::InvalidByte(bad_padding_index, b).into());
}
if padding_bytes_count == 0 {
first_padding_offset = leftover_index;
}
padding_bytes_count += 1;
continue;
}
// Check for case #1.
// To make '=' handling consistent with the main loop, don't allow
// non-suffix '=' in trailing chunk either. Report error as first
// erroneous padding.
if padding_bytes_count > 0 {
return Err(
DecodeError::InvalidByte(input_index + first_padding_offset, PAD_BYTE).into(),
);
}
last_symbol = b;
// can use up to 4 * 6 = 24 bits of the u32, if the last chunk has no padding.
// Pack the leftovers from left to right.
let morsel = decode_table[b as usize];
if morsel == INVALID_VALUE {
return Err(DecodeError::InvalidByte(input_index + leftover_index, b).into());
}
morsels[morsels_in_leftover] = morsel;
morsels_in_leftover += 1;
}
// If there was 1 trailing byte, and it was valid, and we got to this point without hitting
// an invalid byte, now we can report invalid length
if !input.is_empty() && morsels_in_leftover < 2 {
return Err(DecodeError::InvalidLength(input_index + morsels_in_leftover).into());
}
match padding_mode {
DecodePaddingMode::Indifferent => { /* everything we care about was already checked */ }
DecodePaddingMode::RequireCanonical => {
// allow empty input
if (padding_bytes_count + morsels_in_leftover) % 4 != 0 {
return Err(DecodeError::InvalidPadding.into());
}
}
DecodePaddingMode::RequireNone => {
if padding_bytes_count > 0 {
// check at the end to make sure we let the cases of padding that should be InvalidByte
// get hit
return Err(DecodeError::InvalidPadding.into());
}
}
}
// When encoding 1 trailing byte (e.g. 0xFF), 2 base64 bytes ("/w") are needed.
// / is the symbol for 63 (0x3F, bottom 6 bits all set) and w is 48 (0x30, top 2 bits
// of bottom 6 bits set).
// When decoding two symbols back to one trailing byte, any final symbol higher than
// w would still decode to the original byte because we only care about the top two
// bits in the bottom 6, but would be a non-canonical encoding. So, we calculate a
// mask based on how many bits are used for just the canonical encoding, and optionally
// error if any other bits are set. In the example of one encoded byte -> 2 symbols,
// 2 symbols can technically encode 12 bits, but the last 4 are non-canonical, and
// useless since there are no more symbols to provide the necessary 4 additional bits
// to finish the second original byte.
let leftover_bytes_to_append = morsels_in_leftover * 6 / 8;
// Pack the (up to 3) complete output bytes into the high bytes of the u32,
// left to right.
let mut leftover_num = (u32::from(morsels[0]) << 26)
| (u32::from(morsels[1]) << 20)
| (u32::from(morsels[2]) << 14)
| (u32::from(morsels[3]) << 8);
// if there are bits set outside the bits we care about, last symbol encodes trailing bits that
// will not be included in the output
let mask = !0_u32 >> (leftover_bytes_to_append * 8);
if !decode_allow_trailing_bits && (leftover_num & mask) != 0 {
// last morsel is at `morsels_in_leftover` - 1
return Err(DecodeError::InvalidLastSymbol(
input_index + morsels_in_leftover - 1,
last_symbol,
)
.into());
}
// Strangely, this approach benchmarks better than writing bytes one at a time,
// or copy_from_slice into output.
for _ in 0..leftover_bytes_to_append {
let hi_byte = (leftover_num >> 24) as u8;
leftover_num <<= 8;
*output
.get_mut(output_index)
.ok_or(DecodeSliceError::OutputSliceTooSmall)? = hi_byte;
output_index += 1;
}
Ok(DecodeMetadata::new(
output_index,
if padding_bytes_count > 0 {
Some(input_index + first_padding_offset)
} else {
None
},
))
}
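To make the canonical-trailing-bits discussion in the comment above concrete, here is a hedged usage sketch against the crate's public API (assuming this vendored `base64` builds as usual): `/w==` is the canonical encoding of the single byte `0xFF`, while `/x==` sets one of the four unused trailing bits.

```rust
use base64::{alphabet, engine, engine::general_purpose, Engine as _};

fn main() {
    // Canonical: only the top 2 bits of the second symbol carry data.
    assert_eq!(general_purpose::STANDARD.decode("/w==").unwrap(), vec![0xFF_u8]);

    // Non-canonical trailing bits are rejected by the default config...
    assert!(general_purpose::STANDARD.decode("/x==").is_err());

    // ...but accepted (and still decoded to 0xFF) when trailing bits are allowed.
    let forgiving = engine::GeneralPurpose::new(
        &alphabet::STANDARD,
        engine::GeneralPurposeConfig::new().with_decode_allow_trailing_bits(true),
    );
    assert_eq!(forgiving.decode("/x==").unwrap(), vec![0xFF_u8]);
}
```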

View file

@ -0,0 +1,352 @@
//! Provides the [GeneralPurpose] engine and associated config types.
use crate::{
alphabet,
alphabet::Alphabet,
engine::{Config, DecodeMetadata, DecodePaddingMode},
DecodeSliceError,
};
use core::convert::TryInto;
pub(crate) mod decode;
pub(crate) mod decode_suffix;
pub use decode::GeneralPurposeEstimate;
pub(crate) const INVALID_VALUE: u8 = 255;
/// A general-purpose base64 engine.
///
/// - It uses no vector CPU instructions, so it will work on any system.
/// - It is reasonably fast (~2-3GiB/s).
/// - It is not constant-time, though, so it is vulnerable to timing side-channel attacks. For loading cryptographic keys, etc, it is suggested to use the forthcoming constant-time implementation.
#[derive(Debug, Clone)]
pub struct GeneralPurpose {
encode_table: [u8; 64],
decode_table: [u8; 256],
config: GeneralPurposeConfig,
}
impl GeneralPurpose {
/// Create a `GeneralPurpose` engine from an [Alphabet].
///
/// While not very expensive to initialize, ideally these should be cached
/// if the engine will be used repeatedly.
pub const fn new(alphabet: &Alphabet, config: GeneralPurposeConfig) -> Self {
Self {
encode_table: encode_table(alphabet),
decode_table: decode_table(alphabet),
config,
}
}
}
impl super::Engine for GeneralPurpose {
type Config = GeneralPurposeConfig;
type DecodeEstimate = GeneralPurposeEstimate;
fn internal_encode(&self, input: &[u8], output: &mut [u8]) -> usize {
let mut input_index: usize = 0;
const BLOCKS_PER_FAST_LOOP: usize = 4;
const LOW_SIX_BITS: u64 = 0x3F;
// we read 8 bytes at a time (u64) but only actually consume 6 of those bytes. Thus, we need
// 2 trailing bytes to be available to read.
let last_fast_index = input.len().saturating_sub(BLOCKS_PER_FAST_LOOP * 6 + 2);
let mut output_index = 0;
if last_fast_index > 0 {
while input_index <= last_fast_index {
// Major performance wins from letting the optimizer do the bounds check once, mostly
// on the output side
let input_chunk =
&input[input_index..(input_index + (BLOCKS_PER_FAST_LOOP * 6 + 2))];
let output_chunk =
&mut output[output_index..(output_index + BLOCKS_PER_FAST_LOOP * 8)];
// Hand-unrolling for 32 vs 16 or 8 bytes yields performance about equivalent
// to unsafe pointer code on a Xeon E5-1650v3. 64 byte unrolling was slightly better for
// large inputs but significantly worse for 50-byte input, unsurprisingly. I suspect
// that it's a not uncommon use case to encode smallish chunks of data (e.g. a 64-byte
// SHA-512 digest), so it would be nice if that fit in the unrolled loop at least once.
// Plus, single-digit percentage performance differences might well be quite different
// on different hardware.
let input_u64 = read_u64(&input_chunk[0..]);
output_chunk[0] = self.encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
output_chunk[1] = self.encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
output_chunk[2] = self.encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
output_chunk[3] = self.encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
output_chunk[4] = self.encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
output_chunk[5] = self.encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
output_chunk[6] = self.encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
output_chunk[7] = self.encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];
let input_u64 = read_u64(&input_chunk[6..]);
output_chunk[8] = self.encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
output_chunk[9] = self.encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
output_chunk[10] = self.encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
output_chunk[11] = self.encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
output_chunk[12] = self.encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
output_chunk[13] = self.encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
output_chunk[14] = self.encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
output_chunk[15] = self.encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];
let input_u64 = read_u64(&input_chunk[12..]);
output_chunk[16] = self.encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
output_chunk[17] = self.encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
output_chunk[18] = self.encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
output_chunk[19] = self.encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
output_chunk[20] = self.encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
output_chunk[21] = self.encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
output_chunk[22] = self.encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
output_chunk[23] = self.encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];
let input_u64 = read_u64(&input_chunk[18..]);
output_chunk[24] = self.encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
output_chunk[25] = self.encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
output_chunk[26] = self.encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
output_chunk[27] = self.encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
output_chunk[28] = self.encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
output_chunk[29] = self.encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
output_chunk[30] = self.encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
output_chunk[31] = self.encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];
output_index += BLOCKS_PER_FAST_LOOP * 8;
input_index += BLOCKS_PER_FAST_LOOP * 6;
}
}
// Encode what's left after the fast loop.
const LOW_SIX_BITS_U8: u8 = 0x3F;
let rem = input.len() % 3;
let start_of_rem = input.len() - rem;
// start at the first index not handled by fast loop, which may be 0.
while input_index < start_of_rem {
let input_chunk = &input[input_index..(input_index + 3)];
let output_chunk = &mut output[output_index..(output_index + 4)];
output_chunk[0] = self.encode_table[(input_chunk[0] >> 2) as usize];
output_chunk[1] = self.encode_table
[((input_chunk[0] << 4 | input_chunk[1] >> 4) & LOW_SIX_BITS_U8) as usize];
output_chunk[2] = self.encode_table
[((input_chunk[1] << 2 | input_chunk[2] >> 6) & LOW_SIX_BITS_U8) as usize];
output_chunk[3] = self.encode_table[(input_chunk[2] & LOW_SIX_BITS_U8) as usize];
input_index += 3;
output_index += 4;
}
if rem == 2 {
output[output_index] = self.encode_table[(input[start_of_rem] >> 2) as usize];
output[output_index + 1] =
self.encode_table[((input[start_of_rem] << 4 | input[start_of_rem + 1] >> 4)
& LOW_SIX_BITS_U8) as usize];
output[output_index + 2] =
self.encode_table[((input[start_of_rem + 1] << 2) & LOW_SIX_BITS_U8) as usize];
output_index += 3;
} else if rem == 1 {
output[output_index] = self.encode_table[(input[start_of_rem] >> 2) as usize];
output[output_index + 1] =
self.encode_table[((input[start_of_rem] << 4) & LOW_SIX_BITS_U8) as usize];
output_index += 2;
}
output_index
}
fn internal_decoded_len_estimate(&self, input_len: usize) -> Self::DecodeEstimate {
GeneralPurposeEstimate::new(input_len)
}
fn internal_decode(
&self,
input: &[u8],
output: &mut [u8],
estimate: Self::DecodeEstimate,
) -> Result<DecodeMetadata, DecodeSliceError> {
decode::decode_helper(
input,
estimate,
output,
&self.decode_table,
self.config.decode_allow_trailing_bits,
self.config.decode_padding_mode,
)
}
fn config(&self) -> &Self::Config {
&self.config
}
}
/// Returns a table mapping a 6-bit index to the ASCII byte encoding of the index
pub(crate) const fn encode_table(alphabet: &Alphabet) -> [u8; 64] {
// the encode table is just the alphabet:
// 6-bit index lookup -> printable byte
let mut encode_table = [0_u8; 64];
{
let mut index = 0;
while index < 64 {
encode_table[index] = alphabet.symbols[index];
index += 1;
}
}
encode_table
}
/// Returns a table mapping base64 bytes as the lookup index to either:
/// - [INVALID_VALUE] for bytes that aren't members of the alphabet
/// - a byte whose lower 6 bits are the value that was encoded into the index byte
pub(crate) const fn decode_table(alphabet: &Alphabet) -> [u8; 256] {
let mut decode_table = [INVALID_VALUE; 256];
// Since the table is full of `INVALID_VALUE` already, we only need to overwrite
// the parts that are valid.
let mut index = 0;
while index < 64 {
// The index in the alphabet is the 6-bit value we care about.
// Since the index is in 0-63, it is safe to cast to u8.
decode_table[alphabet.symbols[index] as usize] = index as u8;
index += 1;
}
decode_table
}
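As a quick sanity check of the relationship between the two tables above, here is a standalone sketch (independent of the crate's private functions; `build_tables` is an illustrative name) that builds both from a 64-symbol alphabet and verifies they invert each other:

```rust
const INVALID_VALUE: u8 = 255;

// Mirror of the table construction above, for a caller-supplied 64-symbol alphabet.
fn build_tables(symbols: &[u8; 64]) -> ([u8; 64], [u8; 256]) {
    let mut decode = [INVALID_VALUE; 256];
    for (index, &symbol) in symbols.iter().enumerate() {
        decode[symbol as usize] = index as u8;
    }
    (*symbols, decode)
}

fn main() {
    let standard = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    let (encode, decode) = build_tables(standard);
    for index in 0..64_u8 {
        // decode_table is the inverse of encode_table for every 6-bit value.
        assert_eq!(decode[encode[index as usize] as usize], index);
    }
}
```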
#[inline]
fn read_u64(s: &[u8]) -> u64 {
u64::from_be_bytes(s[..8].try_into().unwrap())
}
/// Contains configuration parameters for base64 encoding and decoding.
///
/// ```
/// # use base64::engine::GeneralPurposeConfig;
/// let config = GeneralPurposeConfig::new()
/// .with_encode_padding(false);
/// // further customize using `.with_*` methods as needed
/// ```
///
/// The constants [PAD] and [NO_PAD] cover most use cases.
///
/// To specify the characters used, see [Alphabet].
#[derive(Clone, Copy, Debug)]
pub struct GeneralPurposeConfig {
encode_padding: bool,
decode_allow_trailing_bits: bool,
decode_padding_mode: DecodePaddingMode,
}
impl GeneralPurposeConfig {
/// Create a new config with `padding` = `true`, `decode_allow_trailing_bits` = `false`, and
/// `decode_padding_mode = DecodePaddingMode::RequireCanonical`.
///
/// This probably matches most people's expectations, but consider disabling padding to save
/// a few bytes unless you specifically need it for compatibility with some legacy system.
pub const fn new() -> Self {
Self {
// RFC states that padding must be applied by default
encode_padding: true,
decode_allow_trailing_bits: false,
decode_padding_mode: DecodePaddingMode::RequireCanonical,
}
}
/// Create a new config based on `self` with an updated `padding` setting.
///
/// If `padding` is `true`, encoding will append either 1 or 2 `=` padding characters as needed
/// to produce an output whose length is a multiple of 4.
///
/// Padding is not needed for correct decoding and only serves to waste bytes, but it's in the
/// [spec](https://datatracker.ietf.org/doc/html/rfc4648#section-3.2).
///
/// For new applications, consider not using padding if the decoders you're using don't require
/// padding to be present.
pub const fn with_encode_padding(self, padding: bool) -> Self {
Self {
encode_padding: padding,
..self
}
}
/// Create a new config based on `self` with an updated `decode_allow_trailing_bits` setting.
///
/// Most users will not need to configure this. It's useful if you need to decode base64
/// produced by a buggy encoder that has bits set in the unused space on the last base64
/// character as per [forgiving-base64 decode](https://infra.spec.whatwg.org/#forgiving-base64-decode).
/// If invalid trailing bits are present and this is `true`, those bits will
/// be silently ignored, else `DecodeError::InvalidLastSymbol` will be emitted.
pub const fn with_decode_allow_trailing_bits(self, allow: bool) -> Self {
Self {
decode_allow_trailing_bits: allow,
..self
}
}
/// Create a new config based on `self` with an updated `decode_padding_mode` setting.
///
/// Padding is not useful in terms of representing encoded data -- it makes no difference to
/// the decoder if padding is present or not, so if you have some un-padded input to decode, it
/// is perfectly fine to use `DecodePaddingMode::Indifferent` to prevent errors from being
/// emitted.
///
/// However, since in practice
/// [people who learned nothing from BER vs DER seem to expect base64 to have one canonical encoding](https://eprint.iacr.org/2022/361),
/// the default setting is the stricter `DecodePaddingMode::RequireCanonical`.
///
/// Or, if "canonical" in your circumstance means _no_ padding rather than padding to the
/// next multiple of four, there's `DecodePaddingMode::RequireNone`.
pub const fn with_decode_padding_mode(self, mode: DecodePaddingMode) -> Self {
Self {
decode_padding_mode: mode,
..self
}
}
}
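A short usage sketch of `with_decode_padding_mode` (assuming the vendored crate compiles as-is): an `Indifferent` engine accepts both the padded and the unpadded form of the same data, which neither `PAD` nor `NO_PAD` does.

```rust
use base64::{
    alphabet,
    engine::{DecodePaddingMode, GeneralPurpose, GeneralPurposeConfig},
    Engine as _,
};

fn main() {
    let lenient = GeneralPurpose::new(
        &alphabet::STANDARD,
        GeneralPurposeConfig::new().with_decode_padding_mode(DecodePaddingMode::Indifferent),
    );
    // "fo" encodes canonically as "Zm8="; the Indifferent engine takes either spelling.
    assert_eq!(lenient.decode("Zm8=").unwrap(), b"fo".to_vec());
    assert_eq!(lenient.decode("Zm8").unwrap(), b"fo".to_vec());
}
```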
impl Default for GeneralPurposeConfig {
/// Delegates to [GeneralPurposeConfig::new].
fn default() -> Self {
Self::new()
}
}
impl Config for GeneralPurposeConfig {
fn encode_padding(&self) -> bool {
self.encode_padding
}
}
/// A [GeneralPurpose] engine using the [alphabet::STANDARD] base64 alphabet and [PAD] config.
pub const STANDARD: GeneralPurpose = GeneralPurpose::new(&alphabet::STANDARD, PAD);
/// A [GeneralPurpose] engine using the [alphabet::STANDARD] base64 alphabet and [NO_PAD] config.
pub const STANDARD_NO_PAD: GeneralPurpose = GeneralPurpose::new(&alphabet::STANDARD, NO_PAD);
/// A [GeneralPurpose] engine using the [alphabet::URL_SAFE] base64 alphabet and [PAD] config.
pub const URL_SAFE: GeneralPurpose = GeneralPurpose::new(&alphabet::URL_SAFE, PAD);
/// A [GeneralPurpose] engine using the [alphabet::URL_SAFE] base64 alphabet and [NO_PAD] config.
pub const URL_SAFE_NO_PAD: GeneralPurpose = GeneralPurpose::new(&alphabet::URL_SAFE, NO_PAD);
/// Include padding bytes when encoding, and require that they be present when decoding.
///
/// This is the standard per the base64 RFC, but consider using [NO_PAD] instead as padding serves
/// little purpose in practice.
pub const PAD: GeneralPurposeConfig = GeneralPurposeConfig::new();
/// Don't add padding when encoding, and require no padding when decoding.
pub const NO_PAD: GeneralPurposeConfig = GeneralPurposeConfig::new()
.with_encode_padding(false)
.with_decode_padding_mode(DecodePaddingMode::RequireNone);
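A brief round trip using two of the pre-built engines above, as a hedged sketch: `URL_SAFE_NO_PAD` swaps `+`/`/` for `-`/`_` and omits the trailing `=`.

```rust
use base64::{
    engine::general_purpose::{STANDARD, URL_SAFE_NO_PAD},
    Engine as _,
};

fn main() {
    let raw = b"\xFF\xEC\x20\x55\0";
    // Standard alphabet with padding vs. URL-safe alphabet without padding.
    assert_eq!(STANDARD.encode(raw), "/+wgVQA=");
    assert_eq!(URL_SAFE_NO_PAD.encode(raw), "_-wgVQA");
    // Both spellings decode back to the original bytes.
    assert_eq!(URL_SAFE_NO_PAD.decode("_-wgVQA").unwrap(), raw.to_vec());
}
```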

478
vendor/base64/src/engine/mod.rs vendored Normal file
View file

@ -0,0 +1,478 @@
//! Provides the [Engine] abstraction and out of the box implementations.
#[cfg(any(feature = "alloc", test))]
use crate::chunked_encoder;
use crate::{
encode::{encode_with_padding, EncodeSliceError},
encoded_len, DecodeError, DecodeSliceError,
};
#[cfg(any(feature = "alloc", test))]
use alloc::vec::Vec;
#[cfg(any(feature = "alloc", test))]
use alloc::{string::String, vec};
pub mod general_purpose;
#[cfg(test)]
mod naive;
#[cfg(test)]
mod tests;
pub use general_purpose::{GeneralPurpose, GeneralPurposeConfig};
/// An `Engine` provides low-level encoding and decoding operations that all other higher-level parts of the API use. Users of the library will generally not need to implement this.
///
/// Different implementations offer different characteristics. The library currently ships with
/// [GeneralPurpose] that offers good speed and works on any CPU, with more choices
/// coming later, like a constant-time one when side channel resistance is called for, and vendor-specific vectorized ones for more speed.
///
/// See [general_purpose::STANDARD_NO_PAD] if you just want standard base64. Otherwise, when possible, it's
/// recommended to store the engine in a `const` so that references to it won't pose any lifetime
/// issues, and to avoid repeating the cost of engine setup.
///
/// Since almost nobody will need to implement `Engine`, docs for internal methods are hidden.
// When adding an implementation of Engine, include them in the engine test suite:
// - add an implementation of [engine::tests::EngineWrapper]
// - add the implementation to the `all_engines` macro
// All tests run on all engines listed in the macro.
pub trait Engine: Send + Sync {
/// The config type used by this engine
type Config: Config;
/// The decode estimate used by this engine
type DecodeEstimate: DecodeEstimate;
/// This is not meant to be called directly; it is only for `Engine` implementors.
/// See the other `encode*` functions on this trait.
///
/// Encode the `input` bytes into the `output` buffer based on the mapping in `encode_table`.
///
/// `output` will be long enough to hold the encoded data.
///
/// Returns the number of bytes written.
///
/// No padding should be written; that is handled separately.
///
/// Must not write any bytes into the output slice other than the encoded data.
#[doc(hidden)]
fn internal_encode(&self, input: &[u8], output: &mut [u8]) -> usize;
/// This is not meant to be called directly; it is only for `Engine` implementors.
///
/// As an optimization to prevent the decoded length from being calculated twice, it is
/// sometimes helpful to have a conservative estimate of the decoded size before doing the
/// decoding, so this calculation is done separately and passed to [Engine::decode()] as needed.
#[doc(hidden)]
fn internal_decoded_len_estimate(&self, input_len: usize) -> Self::DecodeEstimate;
/// This is not meant to be called directly; it is only for `Engine` implementors.
/// See the other `decode*` functions on this trait.
///
/// Decode `input` base64 bytes into the `output` buffer.
///
/// `decode_estimate` is the result of [Engine::internal_decoded_len_estimate()], which is passed in to avoid
/// calculating it again (expensive on short inputs).
///
/// Each complete 4-byte chunk of encoded data decodes to 3 bytes of decoded data, but this
/// function must also handle the final possibly partial chunk.
/// If the input length is not a multiple of 4, or uses padding bytes to reach a multiple of 4,
/// the trailing 2 or 3 bytes must decode to 1 or 2 bytes, respectively, as per the
/// [RFC](https://tools.ietf.org/html/rfc4648#section-3.5).
///
/// Decoding must not write any bytes into the output slice other than the decoded data.
///
/// Non-canonical trailing bits in the final tokens or non-canonical padding must be reported as
/// errors unless the engine is configured otherwise.
#[doc(hidden)]
fn internal_decode(
&self,
input: &[u8],
output: &mut [u8],
decode_estimate: Self::DecodeEstimate,
) -> Result<DecodeMetadata, DecodeSliceError>;
/// Returns the config for this engine.
fn config(&self) -> &Self::Config;
/// Encode arbitrary octets as base64 using the provided `Engine`.
/// Returns a `String`.
///
/// # Example
///
/// ```rust
/// use base64::{Engine as _, engine::{self, general_purpose}, alphabet};
///
/// let b64 = general_purpose::STANDARD.encode(b"hello world~");
/// println!("{}", b64);
///
/// const CUSTOM_ENGINE: engine::GeneralPurpose =
/// engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::NO_PAD);
///
/// let b64_url = CUSTOM_ENGINE.encode(b"hello internet~");
/// ```
#[cfg(any(feature = "alloc", test))]
#[inline]
fn encode<T: AsRef<[u8]>>(&self, input: T) -> String {
fn inner<E>(engine: &E, input_bytes: &[u8]) -> String
where
E: Engine + ?Sized,
{
let encoded_size = encoded_len(input_bytes.len(), engine.config().encode_padding())
.expect("integer overflow when calculating buffer size");
let mut buf = vec![0; encoded_size];
encode_with_padding(input_bytes, &mut buf[..], engine, encoded_size);
String::from_utf8(buf).expect("Invalid UTF8")
}
inner(self, input.as_ref())
}
/// Encode arbitrary octets as base64 into a supplied `String`.
/// Writes into the supplied `String`, which may allocate if its internal buffer isn't big enough.
///
/// # Example
///
/// ```rust
/// use base64::{Engine as _, engine::{self, general_purpose}, alphabet};
/// const CUSTOM_ENGINE: engine::GeneralPurpose =
/// engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::NO_PAD);
///
/// fn main() {
/// let mut buf = String::new();
/// general_purpose::STANDARD.encode_string(b"hello world~", &mut buf);
/// println!("{}", buf);
///
/// buf.clear();
/// CUSTOM_ENGINE.encode_string(b"hello internet~", &mut buf);
/// println!("{}", buf);
/// }
/// ```
#[cfg(any(feature = "alloc", test))]
#[inline]
fn encode_string<T: AsRef<[u8]>>(&self, input: T, output_buf: &mut String) {
fn inner<E>(engine: &E, input_bytes: &[u8], output_buf: &mut String)
where
E: Engine + ?Sized,
{
let mut sink = chunked_encoder::StringSink::new(output_buf);
chunked_encoder::ChunkedEncoder::new(engine)
.encode(input_bytes, &mut sink)
.expect("Writing to a String shouldn't fail");
}
inner(self, input.as_ref(), output_buf)
}
/// Encode arbitrary octets as base64 into a supplied slice.
/// Writes into the supplied output buffer.
///
/// This is useful if you wish to avoid allocation entirely (e.g. encoding into a stack-resident
/// or statically-allocated buffer).
///
/// # Example
///
#[cfg_attr(feature = "alloc", doc = "```")]
#[cfg_attr(not(feature = "alloc"), doc = "```ignore")]
/// use base64::{Engine as _, engine::general_purpose};
/// let s = b"hello internet!";
/// let mut buf = Vec::new();
/// // make sure we'll have a slice big enough for base64 + padding
/// buf.resize(s.len() * 4 / 3 + 4, 0);
///
/// let bytes_written = general_purpose::STANDARD.encode_slice(s, &mut buf).unwrap();
///
/// // shorten our vec down to just what was written
/// buf.truncate(bytes_written);
///
/// assert_eq!(s, general_purpose::STANDARD.decode(&buf).unwrap().as_slice());
/// ```
#[inline]
fn encode_slice<T: AsRef<[u8]>>(
&self,
input: T,
output_buf: &mut [u8],
) -> Result<usize, EncodeSliceError> {
fn inner<E>(
engine: &E,
input_bytes: &[u8],
output_buf: &mut [u8],
) -> Result<usize, EncodeSliceError>
where
E: Engine + ?Sized,
{
let encoded_size = encoded_len(input_bytes.len(), engine.config().encode_padding())
.expect("usize overflow when calculating buffer size");
if output_buf.len() < encoded_size {
return Err(EncodeSliceError::OutputSliceTooSmall);
}
let b64_output = &mut output_buf[0..encoded_size];
encode_with_padding(input_bytes, b64_output, engine, encoded_size);
Ok(encoded_size)
}
inner(self, input.as_ref(), output_buf)
}
/// Decode the input into a new `Vec`.
///
/// # Example
///
/// ```rust
/// use base64::{Engine as _, alphabet, engine::{self, general_purpose}};
///
/// let bytes = general_purpose::STANDARD
/// .decode("aGVsbG8gd29ybGR+Cg==").unwrap();
/// println!("{:?}", bytes);
///
/// // custom engine setup
/// let bytes_url = engine::GeneralPurpose::new(
/// &alphabet::URL_SAFE,
/// general_purpose::NO_PAD)
/// .decode("aGVsbG8gaW50ZXJuZXR-Cg").unwrap();
/// println!("{:?}", bytes_url);
/// ```
#[cfg(any(feature = "alloc", test))]
#[inline]
fn decode<T: AsRef<[u8]>>(&self, input: T) -> Result<Vec<u8>, DecodeError> {
fn inner<E>(engine: &E, input_bytes: &[u8]) -> Result<Vec<u8>, DecodeError>
where
E: Engine + ?Sized,
{
let estimate = engine.internal_decoded_len_estimate(input_bytes.len());
let mut buffer = vec![0; estimate.decoded_len_estimate()];
let bytes_written = engine
.internal_decode(input_bytes, &mut buffer, estimate)
.map_err(|e| match e {
DecodeSliceError::DecodeError(e) => e,
DecodeSliceError::OutputSliceTooSmall => {
unreachable!("Vec is sized conservatively")
}
})?
.decoded_len;
buffer.truncate(bytes_written);
Ok(buffer)
}
inner(self, input.as_ref())
}
/// Decode the `input` into the supplied `buffer`.
///
/// Writes into the supplied `Vec`, which may allocate if its internal buffer isn't big enough.
/// Returns a `Result` containing an empty tuple, aka `()`.
///
/// # Example
///
/// ```rust
/// use base64::{Engine as _, alphabet, engine::{self, general_purpose}};
/// const CUSTOM_ENGINE: engine::GeneralPurpose =
/// engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::PAD);
///
/// fn main() {
/// use base64::Engine;
/// let mut buffer = Vec::<u8>::new();
/// // with the default engine
/// general_purpose::STANDARD
/// .decode_vec("aGVsbG8gd29ybGR+Cg==", &mut buffer,).unwrap();
/// println!("{:?}", buffer);
///
/// buffer.clear();
///
/// // with a custom engine
/// CUSTOM_ENGINE.decode_vec(
/// "aGVsbG8gaW50ZXJuZXR-Cg==",
/// &mut buffer,
/// ).unwrap();
/// println!("{:?}", buffer);
/// }
/// ```
#[cfg(any(feature = "alloc", test))]
#[inline]
fn decode_vec<T: AsRef<[u8]>>(
&self,
input: T,
buffer: &mut Vec<u8>,
) -> Result<(), DecodeError> {
fn inner<E>(engine: &E, input_bytes: &[u8], buffer: &mut Vec<u8>) -> Result<(), DecodeError>
where
E: Engine + ?Sized,
{
let starting_output_len = buffer.len();
let estimate = engine.internal_decoded_len_estimate(input_bytes.len());
let total_len_estimate = estimate
.decoded_len_estimate()
.checked_add(starting_output_len)
.expect("Overflow when calculating output buffer length");
buffer.resize(total_len_estimate, 0);
let buffer_slice = &mut buffer.as_mut_slice()[starting_output_len..];
let bytes_written = engine
.internal_decode(input_bytes, buffer_slice, estimate)
.map_err(|e| match e {
DecodeSliceError::DecodeError(e) => e,
DecodeSliceError::OutputSliceTooSmall => {
unreachable!("Vec is sized conservatively")
}
})?
.decoded_len;
buffer.truncate(starting_output_len + bytes_written);
Ok(())
}
inner(self, input.as_ref(), buffer)
}
/// Decode the input into the provided output slice.
///
/// Returns the number of bytes written to the slice, or an error if `output` is smaller than
/// the estimated decoded length.
///
/// This will not write any bytes past exactly what is decoded (no stray garbage bytes at the end).
///
/// See [crate::decoded_len_estimate] for calculating buffer sizes.
///
/// See [Engine::decode_slice_unchecked] for a version that panics instead of returning an error
/// if the output buffer is too small.
#[inline]
fn decode_slice<T: AsRef<[u8]>>(
&self,
input: T,
output: &mut [u8],
) -> Result<usize, DecodeSliceError> {
fn inner<E>(
engine: &E,
input_bytes: &[u8],
output: &mut [u8],
) -> Result<usize, DecodeSliceError>
where
E: Engine + ?Sized,
{
engine
.internal_decode(
input_bytes,
output,
engine.internal_decoded_len_estimate(input_bytes.len()),
)
.map(|dm| dm.decoded_len)
}
inner(self, input.as_ref(), output)
}
/// Decode the input into the provided output slice.
///
/// Returns the number of bytes written to the slice.
///
/// This will not write any bytes past exactly what is decoded (no stray garbage bytes at the end).
///
/// See [crate::decoded_len_estimate] for calculating buffer sizes.
///
/// See [Engine::decode_slice] for a version that returns an error instead of panicking if the output
/// buffer is too small.
///
/// # Panics
///
/// Panics if the provided output buffer is too small for the decoded data.
#[inline]
fn decode_slice_unchecked<T: AsRef<[u8]>>(
&self,
input: T,
output: &mut [u8],
) -> Result<usize, DecodeError> {
fn inner<E>(engine: &E, input_bytes: &[u8], output: &mut [u8]) -> Result<usize, DecodeError>
where
E: Engine + ?Sized,
{
engine
.internal_decode(
input_bytes,
output,
engine.internal_decoded_len_estimate(input_bytes.len()),
)
.map(|dm| dm.decoded_len)
.map_err(|e| match e {
DecodeSliceError::DecodeError(e) => e,
DecodeSliceError::OutputSliceTooSmall => {
panic!("Output slice is too small")
}
})
}
inner(self, input.as_ref(), output)
}
}
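As a usage sketch for `decode_slice` (hedged; it relies only on the methods declared above): decoding into a caller-owned buffer returns the number of bytes written, and an undersized buffer yields an error rather than a panic.

```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn main() {
    // "aGVsbG8=" is the standard encoding of "hello".
    let mut buf = [0_u8; 16];
    let n = STANDARD.decode_slice(b"aGVsbG8=", &mut buf).unwrap();
    assert_eq!(&buf[..n], b"hello");

    // Too small for the decoded data: reported as an error, no panic.
    let mut tiny = [0_u8; 2];
    assert!(STANDARD.decode_slice(b"aGVsbG8=", &mut tiny).is_err());
}
```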
/// The minimal level of configuration that engines must support.
pub trait Config {
/// Returns `true` if padding should be added after the encoded output.
///
/// Padding is added outside the engine's encode() since the engine may be used
/// to encode only a chunk of the overall output, so it can't always know when
/// the output is "done" and would therefore need padding (if configured).
// It could be provided as a separate parameter when encoding, but that feels like
// leaking an implementation detail to the user, and it's hopefully more convenient
// to have to only pass one thing (the engine) to any part of the API.
fn encode_padding(&self) -> bool;
}
/// The decode estimate used by an engine implementation. Users do not need to interact with this;
/// it is only for engine implementors.
///
/// Implementors may store relevant data here when constructing this to avoid having to calculate
/// them again during actual decoding.
pub trait DecodeEstimate {
/// Returns a conservative (err on the side of too big) estimate of the decoded length to use
/// for pre-allocating buffers, etc.
///
/// The estimate must be no larger than the next largest complete triple of decoded bytes.
/// That is, the final quad of tokens to decode may be assumed to be complete with no padding.
fn decoded_len_estimate(&self) -> usize;
}
/// Controls how pad bytes are handled when decoding.
///
/// Each [Engine] must support at least the behavior indicated by
/// [DecodePaddingMode::RequireCanonical], and may support other modes.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DecodePaddingMode {
/// Canonical padding is allowed, but any smaller number of padding bytes (including none) is also accepted.
Indifferent,
/// Padding must be canonical (0, 1, or 2 `=` as needed to produce a 4 byte suffix).
RequireCanonical,
/// Padding must be absent -- for when you want predictable padding, without any wasted bytes.
RequireNone,
}
/// Metadata about the result of a decode operation
#[derive(PartialEq, Eq, Debug)]
pub struct DecodeMetadata {
/// Number of decoded bytes output
pub(crate) decoded_len: usize,
/// Offset of the first padding byte in the input, if any
pub(crate) padding_offset: Option<usize>,
}
impl DecodeMetadata {
pub(crate) fn new(decoded_bytes: usize, padding_index: Option<usize>) -> Self {
Self {
decoded_len: decoded_bytes,
padding_offset: padding_index,
}
}
}

195
vendor/base64/src/engine/naive.rs vendored Normal file
View file

@ -0,0 +1,195 @@
use crate::{
alphabet::Alphabet,
engine::{
general_purpose::{self, decode_table, encode_table},
Config, DecodeEstimate, DecodeMetadata, DecodePaddingMode, Engine,
},
DecodeError, DecodeSliceError,
};
use std::ops::{BitAnd, BitOr, Shl, Shr};
/// Comparatively simple implementation that can be used as something to compare against in tests
pub struct Naive {
encode_table: [u8; 64],
decode_table: [u8; 256],
config: NaiveConfig,
}
impl Naive {
const ENCODE_INPUT_CHUNK_SIZE: usize = 3;
const DECODE_INPUT_CHUNK_SIZE: usize = 4;
pub const fn new(alphabet: &Alphabet, config: NaiveConfig) -> Self {
Self {
encode_table: encode_table(alphabet),
decode_table: decode_table(alphabet),
config,
}
}
fn decode_byte_into_u32(&self, offset: usize, byte: u8) -> Result<u32, DecodeError> {
let decoded = self.decode_table[byte as usize];
if decoded == general_purpose::INVALID_VALUE {
return Err(DecodeError::InvalidByte(offset, byte));
}
Ok(decoded as u32)
}
}
impl Engine for Naive {
type Config = NaiveConfig;
type DecodeEstimate = NaiveEstimate;
fn internal_encode(&self, input: &[u8], output: &mut [u8]) -> usize {
// complete chunks first
const LOW_SIX_BITS: u32 = 0x3F;
let rem = input.len() % Self::ENCODE_INPUT_CHUNK_SIZE;
// will never underflow
let complete_chunk_len = input.len() - rem;
let mut input_index = 0_usize;
let mut output_index = 0_usize;
if let Some(last_complete_chunk_index) =
complete_chunk_len.checked_sub(Self::ENCODE_INPUT_CHUNK_SIZE)
{
while input_index <= last_complete_chunk_index {
let chunk = &input[input_index..input_index + Self::ENCODE_INPUT_CHUNK_SIZE];
// populate low 24 bits from 3 bytes
let chunk_int: u32 =
(chunk[0] as u32).shl(16) | (chunk[1] as u32).shl(8) | (chunk[2] as u32);
// encode 4x 6-bit output bytes
output[output_index] = self.encode_table[chunk_int.shr(18) as usize];
output[output_index + 1] =
self.encode_table[chunk_int.shr(12_u8).bitand(LOW_SIX_BITS) as usize];
output[output_index + 2] =
self.encode_table[chunk_int.shr(6_u8).bitand(LOW_SIX_BITS) as usize];
output[output_index + 3] =
self.encode_table[chunk_int.bitand(LOW_SIX_BITS) as usize];
input_index += Self::ENCODE_INPUT_CHUNK_SIZE;
output_index += 4;
}
}
// then leftovers
if rem == 2 {
let chunk = &input[input_index..input_index + 2];
// high six bits of chunk[0]
output[output_index] = self.encode_table[chunk[0].shr(2) as usize];
// bottom 2 bits of [0], high 4 bits of [1]
output[output_index + 1] =
self.encode_table[(chunk[0].shl(4_u8).bitor(chunk[1].shr(4_u8)) as u32)
.bitand(LOW_SIX_BITS) as usize];
// bottom 4 bits of [1], with the 2 bottom bits as zero
output[output_index + 2] =
self.encode_table[(chunk[1].shl(2_u8) as u32).bitand(LOW_SIX_BITS) as usize];
output_index += 3;
} else if rem == 1 {
let byte = input[input_index];
output[output_index] = self.encode_table[byte.shr(2) as usize];
output[output_index + 1] =
self.encode_table[(byte.shl(4_u8) as u32).bitand(LOW_SIX_BITS) as usize];
output_index += 2;
}
output_index
}
fn internal_decoded_len_estimate(&self, input_len: usize) -> Self::DecodeEstimate {
NaiveEstimate::new(input_len)
}
fn internal_decode(
&self,
input: &[u8],
output: &mut [u8],
estimate: Self::DecodeEstimate,
) -> Result<DecodeMetadata, DecodeSliceError> {
let complete_nonterminal_quads_len = general_purpose::decode::complete_quads_len(
input,
estimate.rem,
output.len(),
&self.decode_table,
)?;
const BOTTOM_BYTE: u32 = 0xFF;
for (chunk_index, chunk) in input[..complete_nonterminal_quads_len]
.chunks_exact(4)
.enumerate()
{
let input_index = chunk_index * 4;
let output_index = chunk_index * 3;
let decoded_int: u32 = self.decode_byte_into_u32(input_index, chunk[0])?.shl(18)
| self
.decode_byte_into_u32(input_index + 1, chunk[1])?
.shl(12)
| self.decode_byte_into_u32(input_index + 2, chunk[2])?.shl(6)
| self.decode_byte_into_u32(input_index + 3, chunk[3])?;
output[output_index] = decoded_int.shr(16_u8).bitand(BOTTOM_BYTE) as u8;
output[output_index + 1] = decoded_int.shr(8_u8).bitand(BOTTOM_BYTE) as u8;
output[output_index + 2] = decoded_int.bitand(BOTTOM_BYTE) as u8;
}
general_purpose::decode_suffix::decode_suffix(
input,
complete_nonterminal_quads_len,
output,
complete_nonterminal_quads_len / 4 * 3,
&self.decode_table,
self.config.decode_allow_trailing_bits,
self.config.decode_padding_mode,
)
}
fn config(&self) -> &Self::Config {
&self.config
}
}
pub struct NaiveEstimate {
/// remainder from dividing input by `Naive::DECODE_CHUNK_SIZE`
rem: usize,
/// Length of input that is in complete `Naive::DECODE_CHUNK_SIZE`-length chunks
complete_chunk_len: usize,
}
impl NaiveEstimate {
fn new(input_len: usize) -> Self {
let rem = input_len % Naive::DECODE_INPUT_CHUNK_SIZE;
let complete_chunk_len = input_len - rem;
Self {
rem,
complete_chunk_len,
}
}
}
impl DecodeEstimate for NaiveEstimate {
fn decoded_len_estimate(&self) -> usize {
((self.complete_chunk_len / 4) + ((self.rem > 0) as usize)) * 3
}
}
#[derive(Clone, Copy, Debug)]
pub struct NaiveConfig {
pub encode_padding: bool,
pub decode_allow_trailing_bits: bool,
pub decode_padding_mode: DecodePaddingMode,
}
impl Config for NaiveConfig {
fn encode_padding(&self) -> bool {
self.encode_padding
}
}
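The bit shuffling in `internal_encode` above can be checked against this standalone sketch of the 3-byte to four-sextet split, using the `"foo"` → `"Zm9v"` example from the crate docs (`split_into_sextets` is an illustrative name, not part of the crate):

```rust
// Split one 3-byte chunk into the four 6-bit alphabet indices the encoder emits.
fn split_into_sextets(chunk: [u8; 3]) -> [u8; 4] {
    let n = (u32::from(chunk[0]) << 16) | (u32::from(chunk[1]) << 8) | u32::from(chunk[2]);
    [
        ((n >> 18) & 0x3F) as u8,
        ((n >> 12) & 0x3F) as u8,
        ((n >> 6) & 0x3F) as u8,
        (n & 0x3F) as u8,
    ]
}

fn main() {
    // "foo" encodes to "Zm9v"; in the standard alphabet those symbols sit at
    // indices 25 ('Z'), 38 ('m'), 61 ('9') and 47 ('v').
    assert_eq!(split_into_sextets(*b"foo"), [25, 38, 61, 47]);
}
```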

1579
vendor/base64/src/engine/tests.rs vendored Normal file

File diff suppressed because it is too large

277
vendor/base64/src/lib.rs vendored Normal file
View file

@ -0,0 +1,277 @@
//! Correct, fast, and configurable [base64][] decoding and encoding. Base64
//! transports binary data efficiently in contexts where only plain text is
//! allowed.
//!
//! [base64]: https://developer.mozilla.org/en-US/docs/Glossary/Base64
//!
//! # Usage
//!
//! Use an [`Engine`] to decode or encode base64, configured with the base64
//! alphabet and padding behavior best suited to your application.
//!
//! ## Engine setup
//!
//! There is more than one way to encode a stream of bytes as “base64”.
//! Different applications use different encoding
//! [alphabets][alphabet::Alphabet] and
//! [padding behaviors][engine::general_purpose::GeneralPurposeConfig].
//!
//! ### Encoding alphabet
//!
//! Almost all base64 [alphabets][alphabet::Alphabet] use `A-Z`, `a-z`, and
//! `0-9`, which gives nearly 64 characters (26 + 26 + 10 = 62), but they differ
//! in their choice of the final 2.
//!
//! Most applications use the [standard][alphabet::STANDARD] alphabet specified
//! in [RFC 4648][rfc-alphabet]. If thats all you need, you can get started
//! quickly by using the pre-configured
//! [`STANDARD`][engine::general_purpose::STANDARD] engine, which is also available
//! in the [`prelude`] module as shown here, if you prefer a minimal `use`
//! footprint.
//!
#![cfg_attr(feature = "alloc", doc = "```")]
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
//! use base64::prelude::*;
//!
//! # fn main() -> Result<(), base64::DecodeError> {
//! assert_eq!(BASE64_STANDARD.decode(b"+uwgVQA=")?, b"\xFA\xEC\x20\x55\0");
//! assert_eq!(BASE64_STANDARD.encode(b"\xFF\xEC\x20\x55\0"), "/+wgVQA=");
//! # Ok(())
//! # }
//! ```
//!
//! [rfc-alphabet]: https://datatracker.ietf.org/doc/html/rfc4648#section-4
//!
//! Other common alphabets are available in the [`alphabet`] module.
//!
//! #### URL-safe alphabet
//!
//! The standard alphabet uses `+` and `/` as its two non-alphanumeric tokens,
//! which cannot be safely used in URLs without encoding them as `%2B` and
//! `%2F`.
//!
//! To avoid that, some applications use a [“URL-safe” alphabet][alphabet::URL_SAFE],
//! which uses `-` and `_` instead. To use that alternative alphabet, use the
//! [`URL_SAFE`][engine::general_purpose::URL_SAFE] engine. This example doesn't
//! use [`prelude`] to show what a more explicit `use` would look like.
//!
#![cfg_attr(feature = "alloc", doc = "```")]
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
//! use base64::{engine::general_purpose::URL_SAFE, Engine as _};
//!
//! # fn main() -> Result<(), base64::DecodeError> {
//! assert_eq!(URL_SAFE.decode(b"-uwgVQA=")?, b"\xFA\xEC\x20\x55\0");
//! assert_eq!(URL_SAFE.encode(b"\xFF\xEC\x20\x55\0"), "_-wgVQA=");
//! # Ok(())
//! # }
//! ```
//!
//! ### Padding characters
//!
//! Each base64 character represents 6 bits (2⁶ = 64) of the original binary
//! data, and every 3 bytes of input binary data will encode to 4 base64
//! characters (8 bits × 3 = 6 bits × 4 = 24 bits).
//!
//! When the input is not an even multiple of 3 bytes in length, [canonical][]
//! base64 encoders insert padding characters at the end, so that the output
//! length is always a multiple of 4:
//!
//! [canonical]: https://datatracker.ietf.org/doc/html/rfc4648#section-3.5
//!
#![cfg_attr(feature = "alloc", doc = "```")]
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
//! use base64::{engine::general_purpose::STANDARD, Engine as _};
//!
//! assert_eq!(STANDARD.encode(b""), "");
//! assert_eq!(STANDARD.encode(b"f"), "Zg==");
//! assert_eq!(STANDARD.encode(b"fo"), "Zm8=");
//! assert_eq!(STANDARD.encode(b"foo"), "Zm9v");
//! ```
//!
//! Canonical encoding ensures that base64 encodings will be exactly the same,
//! byte-for-byte, regardless of input length. But the `=` padding characters
//! arent necessary for decoding, and they may be omitted by using a
//! [`NO_PAD`][engine::general_purpose::NO_PAD] configuration:
//!
#![cfg_attr(feature = "alloc", doc = "```")]
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
//! use base64::{engine::general_purpose::STANDARD_NO_PAD, Engine as _};
//!
//! assert_eq!(STANDARD_NO_PAD.encode(b""), "");
//! assert_eq!(STANDARD_NO_PAD.encode(b"f"), "Zg");
//! assert_eq!(STANDARD_NO_PAD.encode(b"fo"), "Zm8");
//! assert_eq!(STANDARD_NO_PAD.encode(b"foo"), "Zm9v");
//! ```
//!
//! The pre-configured `NO_PAD` engines will reject inputs containing padding
//! `=` characters. To encode without padding and still accept padding while
//! decoding, create an [engine][engine::general_purpose::GeneralPurpose] with
//! that [padding mode][engine::DecodePaddingMode].
//!
#![cfg_attr(feature = "alloc", doc = "```")]
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
//! # use base64::{engine::general_purpose::STANDARD_NO_PAD, Engine as _};
//! assert_eq!(STANDARD_NO_PAD.decode(b"Zm8="), Err(base64::DecodeError::InvalidPadding));
//! ```
//!
//! ### Further customization
//!
//! Decoding and encoding behavior can be customized by creating an
//! [engine][engine::GeneralPurpose] with an [alphabet][alphabet::Alphabet] and
//! [padding configuration][engine::GeneralPurposeConfig]:
//!
#![cfg_attr(feature = "alloc", doc = "```")]
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
//! use base64::{engine, alphabet, Engine as _};
//!
//! // bizarro-world base64: +/ as the first symbols instead of the last
//! let alphabet =
//! alphabet::Alphabet::new("+/ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")
//! .unwrap();
//!
//! // a very weird config that encodes with padding but requires no padding when decoding...?
//! let crazy_config = engine::GeneralPurposeConfig::new()
//! .with_decode_allow_trailing_bits(true)
//! .with_encode_padding(true)
//! .with_decode_padding_mode(engine::DecodePaddingMode::RequireNone);
//!
//! let crazy_engine = engine::GeneralPurpose::new(&alphabet, crazy_config);
//!
//! let encoded = crazy_engine.encode(b"abc 123");
//!
//! ```
//!
//! ## Memory allocation
//!
//! The [decode][Engine::decode()] and [encode][Engine::encode()] engine methods
//! allocate memory for their results: `decode` returns a `Vec<u8>` and
//! `encode` returns a `String`. To instead decode or encode into a buffer that
//! you allocated, use one of the alternative methods:
//!
//! #### Decoding
//!
//! | Method | Output | Allocates memory |
//! | -------------------------- | ----------------------------- | ----------------------------- |
//! | [`Engine::decode`] | returns a new `Vec<u8>` | always |
//! | [`Engine::decode_vec`] | appends to provided `Vec<u8>` | if `Vec` lacks capacity |
//! | [`Engine::decode_slice`]   | writes to provided `&[u8]`    | never                         |
//!
//! #### Encoding
//!
//! | Method | Output | Allocates memory |
//! | -------------------------- | ---------------------------- | ------------------------------ |
//! | [`Engine::encode`] | returns a new `String` | always |
//! | [`Engine::encode_string`] | appends to provided `String` | if `String` lacks capacity |
//! | [`Engine::encode_slice`] | writes to provided `&[u8]` | never |
//!
//! ## Input and output
//!
//! The `base64` crate can [decode][Engine::decode()] and
//! [encode][Engine::encode()] values in memory, or
//! [`DecoderReader`][read::DecoderReader] and
//! [`EncoderWriter`][write::EncoderWriter] provide streaming decoding and
//! encoding for any [readable][std::io::Read] or [writable][std::io::Write]
//! byte stream.
//!
//! #### Decoding
//!
#![cfg_attr(feature = "std", doc = "```")]
#![cfg_attr(not(feature = "std"), doc = "```ignore")]
//! # use std::io;
//! use base64::{engine::general_purpose::STANDARD, read::DecoderReader};
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let mut input = io::stdin();
//! let mut decoder = DecoderReader::new(&mut input, &STANDARD);
//! io::copy(&mut decoder, &mut io::stdout())?;
//! # Ok(())
//! # }
//! ```
//!
//! #### Encoding
//!
#![cfg_attr(feature = "std", doc = "```")]
#![cfg_attr(not(feature = "std"), doc = "```ignore")]
//! # use std::io;
//! use base64::{engine::general_purpose::STANDARD, write::EncoderWriter};
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let mut output = io::stdout();
//! let mut encoder = EncoderWriter::new(&mut output, &STANDARD);
//! io::copy(&mut io::stdin(), &mut encoder)?;
//! # Ok(())
//! # }
//! ```
//!
//! #### Display
//!
//! If you only need a base64 representation for implementing the
//! [`Display`][std::fmt::Display] trait, use
//! [`Base64Display`][display::Base64Display]:
//!
//! ```
//! use base64::{display::Base64Display, engine::general_purpose::STANDARD};
//!
//! let value = Base64Display::new(b"\0\x01\x02\x03", &STANDARD);
//! assert_eq!("base64: AAECAw==", format!("base64: {}", value));
//! ```
//!
//! # Panics
//!
//! If length calculations result in overflowing `usize`, a panic will result.
#![cfg_attr(feature = "cargo-clippy", allow(clippy::cast_lossless))]
#![deny(
missing_docs,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_results,
variant_size_differences
)]
#![forbid(unsafe_code)]
// Allow globally until https://github.com/rust-lang/rust-clippy/issues/8768 is resolved.
// The desired state is to allow it only for the rstest_reuse import.
#![allow(clippy::single_component_path_imports)]
#![cfg_attr(not(any(feature = "std", test)), no_std)]
#[cfg(any(feature = "alloc", test))]
extern crate alloc;
// has to be included at top level because of the way rstest_reuse defines its macros
#[cfg(test)]
use rstest_reuse;
mod chunked_encoder;
pub mod display;
#[cfg(any(feature = "std", test))]
pub mod read;
#[cfg(any(feature = "std", test))]
pub mod write;
pub mod engine;
pub use engine::Engine;
pub mod alphabet;
mod encode;
#[allow(deprecated)]
#[cfg(any(feature = "alloc", test))]
pub use crate::encode::{encode, encode_engine, encode_engine_string};
#[allow(deprecated)]
pub use crate::encode::{encode_engine_slice, encoded_len, EncodeSliceError};
mod decode;
#[allow(deprecated)]
#[cfg(any(feature = "alloc", test))]
pub use crate::decode::{decode, decode_engine, decode_engine_vec};
#[allow(deprecated)]
pub use crate::decode::{decode_engine_slice, decoded_len_estimate, DecodeError, DecodeSliceError};
pub mod prelude;
#[cfg(test)]
mod tests;
const PAD_BYTE: u8 = b'=';
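To round out the allocation table in the module docs above, a small hedged sketch of reusing caller-owned buffers with `encode_string` and `decode_vec`:

```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn main() {
    // One String and one Vec are reused across iterations; once they have grown
    // large enough, later calls do not need to allocate again.
    let mut encoded = String::new();
    let mut decoded = Vec::new();
    for msg in [&b"first message"[..], &b"second message"[..]] {
        encoded.clear();
        decoded.clear();
        STANDARD.encode_string(msg, &mut encoded);
        STANDARD.decode_vec(encoded.as_bytes(), &mut decoded).unwrap();
        assert_eq!(decoded.as_slice(), msg);
    }
}
```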

20
vendor/base64/src/prelude.rs vendored Normal file
View file

@ -0,0 +1,20 @@
//! Preconfigured engines for common use cases.
//!
//! These are re-exports of `const` engines in [crate::engine::general_purpose], renamed with a `BASE64_`
//! prefix for those who prefer to `use` a name directly rather than qualify it with its module path.
//!
//! # Examples
//!
#![cfg_attr(feature = "alloc", doc = "```")]
#![cfg_attr(not(feature = "alloc"), doc = "```ignore")]
//! use base64::prelude::{Engine as _, BASE64_STANDARD_NO_PAD};
//!
//! assert_eq!("c29tZSBieXRlcw", &BASE64_STANDARD_NO_PAD.encode(b"some bytes"));
//! ```
pub use crate::engine::Engine;
pub use crate::engine::general_purpose::STANDARD as BASE64_STANDARD;
pub use crate::engine::general_purpose::STANDARD_NO_PAD as BASE64_STANDARD_NO_PAD;
pub use crate::engine::general_purpose::URL_SAFE as BASE64_URL_SAFE;
pub use crate::engine::general_purpose::URL_SAFE_NO_PAD as BASE64_URL_SAFE_NO_PAD;

335
vendor/base64/src/read/decoder.rs vendored Normal file
View file

@ -0,0 +1,335 @@
use crate::{engine::Engine, DecodeError, DecodeSliceError, PAD_BYTE};
use std::{cmp, fmt, io};
// This should be large, but it has to fit on the stack.
pub(crate) const BUF_SIZE: usize = 1024;
// 4 bytes of base64 data encode 3 bytes of raw data (modulo padding).
const BASE64_CHUNK_SIZE: usize = 4;
const DECODED_CHUNK_SIZE: usize = 3;
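// Worked example: a completely full 1024-byte b64 buffer therefore decodes to
// at most 1024 / 4 * 3 = 768 bytes of raw data.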
/// A `Read` implementation that decodes base64 data read from an underlying reader.
///
/// # Examples
///
/// ```
/// use std::io::Read;
/// use std::io::Cursor;
/// use base64::engine::general_purpose;
///
/// // use a cursor as the simplest possible `Read` -- in real code this is probably a file, etc.
/// let mut wrapped_reader = Cursor::new(b"YXNkZg==");
/// let mut decoder = base64::read::DecoderReader::new(
/// &mut wrapped_reader,
/// &general_purpose::STANDARD);
///
/// // handle errors as you normally would
/// let mut result = Vec::new();
/// decoder.read_to_end(&mut result).unwrap();
///
/// assert_eq!(b"asdf", &result[..]);
///
/// ```
pub struct DecoderReader<'e, E: Engine, R: io::Read> {
engine: &'e E,
/// Where b64 data is read from
inner: R,
/// Holds b64 data read from the delegate reader.
b64_buffer: [u8; BUF_SIZE],
/// The start of the pending buffered data in `b64_buffer`.
b64_offset: usize,
/// The amount of buffered b64 data after `b64_offset` in `b64_buffer`.
b64_len: usize,
/// Since the caller may provide us with a buffer of size 1 or 2 that's too small to copy a
/// decoded chunk into, we have to be able to hang on to a few decoded bytes.
/// Technically we only need to hold 2 bytes, but then we'd need a separate temporary buffer to
/// decode 3 bytes into and then juggle copying one byte into the provided read buf and the rest
/// into here, which seems like a lot of complexity for 1 extra byte of storage.
decoded_chunk_buffer: [u8; DECODED_CHUNK_SIZE],
/// Index of start of decoded data in `decoded_chunk_buffer`
decoded_offset: usize,
/// Length of decoded data after `decoded_offset` in `decoded_chunk_buffer`
decoded_len: usize,
/// Input length consumed so far.
/// Used to provide accurate offsets in errors
input_consumed_len: usize,
/// Offset of previously seen padding, if any.
padding_offset: Option<usize>,
}
// exclude b64_buffer as it's uselessly large
impl<'e, E: Engine, R: io::Read> fmt::Debug for DecoderReader<'e, E, R> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("DecoderReader")
.field("b64_offset", &self.b64_offset)
.field("b64_len", &self.b64_len)
.field("decoded_chunk_buffer", &self.decoded_chunk_buffer)
.field("decoded_offset", &self.decoded_offset)
.field("decoded_len", &self.decoded_len)
.field("input_consumed_len", &self.input_consumed_len)
.field("padding_offset", &self.padding_offset)
.finish()
}
}
impl<'e, E: Engine, R: io::Read> DecoderReader<'e, E, R> {
/// Create a new decoder that will read from the provided reader `r`.
pub fn new(reader: R, engine: &'e E) -> Self {
DecoderReader {
engine,
inner: reader,
b64_buffer: [0; BUF_SIZE],
b64_offset: 0,
b64_len: 0,
decoded_chunk_buffer: [0; DECODED_CHUNK_SIZE],
decoded_offset: 0,
decoded_len: 0,
input_consumed_len: 0,
padding_offset: None,
}
}
/// Write as much as possible of the decoded buffer into the target buffer.
/// Must only be called when there is something to write and space to write into.
/// Returns a Result with the number of (decoded) bytes copied.
fn flush_decoded_buf(&mut self, buf: &mut [u8]) -> io::Result<usize> {
debug_assert!(self.decoded_len > 0);
debug_assert!(!buf.is_empty());
let copy_len = cmp::min(self.decoded_len, buf.len());
debug_assert!(copy_len > 0);
debug_assert!(copy_len <= self.decoded_len);
buf[..copy_len].copy_from_slice(
&self.decoded_chunk_buffer[self.decoded_offset..self.decoded_offset + copy_len],
);
self.decoded_offset += copy_len;
self.decoded_len -= copy_len;
debug_assert!(self.decoded_len < DECODED_CHUNK_SIZE);
Ok(copy_len)
}
/// Read into the remaining space in the buffer after the current contents.
/// Must only be called when there is space to read into in the buffer.
/// Returns the number of bytes read.
fn read_from_delegate(&mut self) -> io::Result<usize> {
debug_assert!(self.b64_offset + self.b64_len < BUF_SIZE);
let read = self
.inner
.read(&mut self.b64_buffer[self.b64_offset + self.b64_len..])?;
self.b64_len += read;
debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE);
Ok(read)
}
/// Decode the requested number of bytes from the b64 buffer into the provided buffer. It's the
/// caller's responsibility to choose the number of b64 bytes to decode correctly.
///
/// Returns a Result with the number of decoded bytes written to `buf`.
///
/// # Panics
///
/// Panics if `buf` is too small.
fn decode_to_buf(&mut self, b64_len_to_decode: usize, buf: &mut [u8]) -> io::Result<usize> {
debug_assert!(self.b64_len >= b64_len_to_decode);
debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE);
debug_assert!(!buf.is_empty());
let b64_to_decode = &self.b64_buffer[self.b64_offset..self.b64_offset + b64_len_to_decode];
let decode_metadata = self
.engine
.internal_decode(
b64_to_decode,
buf,
self.engine.internal_decoded_len_estimate(b64_len_to_decode),
)
.map_err(|dse| match dse {
DecodeSliceError::DecodeError(de) => {
match de {
DecodeError::InvalidByte(offset, byte) => {
match (byte, self.padding_offset) {
// if there was padding in a previous block of decoding that happened to
// be correct, and we now find more padding that happens to be incorrect,
// to be consistent with non-reader decodes, record the error at the first
// padding
(PAD_BYTE, Some(first_pad_offset)) => {
DecodeError::InvalidByte(first_pad_offset, PAD_BYTE)
}
_ => {
DecodeError::InvalidByte(self.input_consumed_len + offset, byte)
}
}
}
DecodeError::InvalidLength(len) => {
DecodeError::InvalidLength(self.input_consumed_len + len)
}
DecodeError::InvalidLastSymbol(offset, byte) => {
DecodeError::InvalidLastSymbol(self.input_consumed_len + offset, byte)
}
DecodeError::InvalidPadding => DecodeError::InvalidPadding,
}
}
DecodeSliceError::OutputSliceTooSmall => {
unreachable!("buf is sized correctly in calling code")
}
})
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
if let Some(offset) = self.padding_offset {
// we've already seen padding
if decode_metadata.decoded_len > 0 {
// we read more after already finding padding; report error at first padding byte
return Err(io::Error::new(
io::ErrorKind::InvalidData,
DecodeError::InvalidByte(offset, PAD_BYTE),
));
}
}
self.padding_offset = self.padding_offset.or(decode_metadata
.padding_offset
.map(|offset| self.input_consumed_len + offset));
self.input_consumed_len += b64_len_to_decode;
self.b64_offset += b64_len_to_decode;
self.b64_len -= b64_len_to_decode;
debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE);
Ok(decode_metadata.decoded_len)
}
/// Unwraps this `DecoderReader`, returning the base reader which it reads base64 encoded
/// input from.
///
/// Because `DecoderReader` performs internal buffering, the state of the inner reader is
/// unspecified. This function is mainly provided because the inner reader type may provide
/// additional functionality beyond the `Read` implementation which may still be useful.
pub fn into_inner(self) -> R {
self.inner
}
}
impl<'e, E: Engine, R: io::Read> io::Read for DecoderReader<'e, E, R> {
/// Decode input from the wrapped reader.
///
/// Under non-error circumstances, this returns `Ok` with the value being the number of bytes
/// written in `buf`.
///
/// Where possible, this function buffers base64 to minimize the number of read() calls to the
/// delegate reader.
///
/// # Errors
///
/// Any errors emitted by the delegate reader are returned. Decoding errors due to invalid
/// base64 are also possible, and will have `io::ErrorKind::InvalidData`.
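///
/// A brief sketch of the error behavior (the input is deliberately not valid base64):
///
/// ```
/// use std::io::{Cursor, Read};
/// use base64::{engine::general_purpose::STANDARD, read::DecoderReader};
///
/// let mut wrapped = Cursor::new(b"!!!not base64!!!");
/// let mut decoder = DecoderReader::new(&mut wrapped, &STANDARD);
/// let mut out = Vec::new();
/// let err = decoder.read_to_end(&mut out).unwrap_err();
/// assert_eq!(std::io::ErrorKind::InvalidData, err.kind());
/// ```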
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
if buf.is_empty() {
return Ok(0);
}
// offset == BUF_SIZE when we copied it all last time
debug_assert!(self.b64_offset <= BUF_SIZE);
debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE);
debug_assert!(if self.b64_offset == BUF_SIZE {
self.b64_len == 0
} else {
self.b64_len <= BUF_SIZE
});
debug_assert!(if self.decoded_len == 0 {
// can be = when we were able to copy the complete chunk
self.decoded_offset <= DECODED_CHUNK_SIZE
} else {
self.decoded_offset < DECODED_CHUNK_SIZE
});
// We shouldn't ever decode into decoded_buffer when we can't immediately write at least one
// byte into the provided buf, so the effective length should only be 3 momentarily between
// when we decode and when we copy into the target buffer.
debug_assert!(self.decoded_len < DECODED_CHUNK_SIZE);
debug_assert!(self.decoded_len + self.decoded_offset <= DECODED_CHUNK_SIZE);
if self.decoded_len > 0 {
// we have a few leftover decoded bytes; flush that rather than pull in more b64
self.flush_decoded_buf(buf)
} else {
let mut at_eof = false;
while self.b64_len < BASE64_CHUNK_SIZE {
// Copy any bytes we have to the start of the buffer.
self.b64_buffer
.copy_within(self.b64_offset..self.b64_offset + self.b64_len, 0);
self.b64_offset = 0;
// then fill in more data
let read = self.read_from_delegate()?;
if read == 0 {
// we never read into an empty buf, so 0 => we've hit EOF
at_eof = true;
break;
}
}
if self.b64_len == 0 {
debug_assert!(at_eof);
// we must be at EOF, and we have no data left to decode
return Ok(0);
};
debug_assert!(if at_eof {
// if we are at eof, we may not have a complete chunk
self.b64_len > 0
} else {
// otherwise, we must have at least one chunk
self.b64_len >= BASE64_CHUNK_SIZE
});
debug_assert_eq!(0, self.decoded_len);
if buf.len() < DECODED_CHUNK_SIZE {
// caller requested an annoyingly short read
// have to write to a tmp buf first to avoid double mutable borrow
let mut decoded_chunk = [0_u8; DECODED_CHUNK_SIZE];
// if we are at eof, could have less than BASE64_CHUNK_SIZE, in which case we have
// to assume that these last few tokens are, in fact, valid (i.e. must be 2-4 b64
// tokens, not 1, since 1 token can't decode to 1 byte).
let to_decode = cmp::min(self.b64_len, BASE64_CHUNK_SIZE);
let decoded = self.decode_to_buf(to_decode, &mut decoded_chunk[..])?;
self.decoded_chunk_buffer[..decoded].copy_from_slice(&decoded_chunk[..decoded]);
self.decoded_offset = 0;
self.decoded_len = decoded;
// can be less than 3 on last block due to padding
debug_assert!(decoded <= 3);
self.flush_decoded_buf(buf)
} else {
let b64_bytes_that_can_decode_into_buf = (buf.len() / DECODED_CHUNK_SIZE)
.checked_mul(BASE64_CHUNK_SIZE)
.expect("too many chunks");
debug_assert!(b64_bytes_that_can_decode_into_buf >= BASE64_CHUNK_SIZE);
let b64_bytes_available_to_decode = if at_eof {
self.b64_len
} else {
// only use complete chunks
self.b64_len - self.b64_len % 4
};
let actual_decode_len = cmp::min(
b64_bytes_that_can_decode_into_buf,
b64_bytes_available_to_decode,
);
self.decode_to_buf(actual_decode_len, buf)
}
}
}
}

Some files were not shown because too many files have changed in this diff