RPM build fix (reverts CI changes, which will need to be un-reverted or made conditional) and vendor Rust dependencies to make builds much faster in any CI system.

Adam Ierymenko 2022-06-08 07:32:16 -04:00
parent 373ca30269
commit d5ca4e5f52
12611 changed files with 2898014 additions and 284 deletions


@@ -1,3 +1,9 @@
[source.crates-io]
replace-with = "vendored-sources"
[source.vendored-sources]
directory = "vendor"
[target.x86_64-apple-darwin]
rustflags=["-C", "link-arg=-mmacosx-version-min=10.13"]

zeroidc/Cargo.lock generated (575 changed lines)

File diff suppressed because it is too large.

zeroidc/vendor/ansi_term/.cargo-checksum.json vendored Normal file

@@ -0,0 +1 @@
{"files":{"Cargo.lock":"31bb7b361278d99a00595cbd916c444e6fd193b5f0b1ea0cf2d9454440739501","Cargo.toml":"4ca681d6949661455ac88541ffa68ebc7db50cb2b6e9a2134e6d0687da4997c3","LICENCE":"2762990c7fbba9d550802a2593c1d857dcd52596bb0f9f192a97e9a7ac5f4f9e","README.md":"8d983e1bb3cc99724010d9073a5be6452cd49bd57a877525fd0a5dd41e6591d5","examples/256_colours.rs":"5f2845068bc2d93cff4a61f18ffa44fbbbc91be771dfd686d537d343f37041da","examples/basic_colours.rs":"d610795f3743d10d90ec4e5ab32cc09fb16640896cecd2f93fca434a0920397c","examples/rgb_colours.rs":"8399e5131e959a56c932036b790e601fb4ad658856112daf87f933889b443f2c","src/ansi.rs":"988fb87936064fa006fcc9474ac62099c8d6e98d38bb80cec2cd864066482a08","src/debug.rs":"61343f8bf13695020102c033aeaacd9ccd3ec830eacbf9011127e61829451d20","src/difference.rs":"9b4b8f91c72932bfda262abdceff0ec124a5a8dd27d07bd4d2e5e7889135c6c9","src/display.rs":"c04f2397d1d1d86a5e2188c2840c505cb0baeaf9706a88d4bbe56eadc67811b9","src/lib.rs":"b85df4b9b8832cda777db049efa2ec84b9847438fa3feaf8540e597ce2532a47","src/style.rs":"1042fc973f5ea8bbb2a2faec334aad530520b53edc9b3296174ae38c1060490b","src/util.rs":"07c127f732887573a1c9126fc0288e13e7a8f1f803513b95e50aac2905171b0d","src/windows.rs":"7ce7dd6738b9728fcd3908c284b6f29a9bdfb34af761b4c7385cf7e3e1b20e64","src/write.rs":"c9ec03764ad1ecea8b680243c9cafc5e70919fcea7500cc18246ffd8f6bb4b33"},"package":"d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"}

zeroidc/vendor/ansi_term/Cargo.lock generated vendored Normal file

@@ -0,0 +1,168 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
name = "aho-corasick"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ansi_term"
version = "0.12.1"
dependencies = [
"doc-comment 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "doc-comment"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "itoa"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "memchr"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "proc-macro2"
version = "0.4.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "quote"
version = "0.6.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
"thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex-syntax"
version = "0.6.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "ryu"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "serde"
version = "1.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"serde_derive 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "serde_derive"
version = "1.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.39 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "serde_json"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "syn"
version = "0.15.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "thread_local"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "unicode-xid"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
"checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d"
"checksum doc-comment 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "923dea538cea0aa3025e8685b20d6ee21ef99c4f77e954a30febbaac5ec73a97"
"checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f"
"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
"checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
"checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
"checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
"checksum regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88c3d9193984285d544df4a30c23a4e62ead42edf70a4452ceb76dac1ce05c26"
"checksum regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b143cceb2ca5e56d5671988ef8b15615733e7ee16cd348e064333b251b89343f"
"checksum ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c92464b447c0ee8c4fb3824ecc8383b81717b9f1e74ba2e72540aef7b9f82997"
"checksum serde 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)" = "076a696fdea89c19d3baed462576b8f6d663064414b5c793642da8dfeb99475b"
"checksum serde_derive 1.0.94 (registry+https://github.com/rust-lang/crates.io-index)" = "ef45eb79d6463b22f5f9e16d283798b7c0175ba6050bc25c1a946c122727fe7b"
"checksum serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)" = "051c49229f282f7c6f3813f8286cc1e3323e8051823fce42c7ea80fe13521704"
"checksum syn 0.15.39 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d960b829a55e56db167e861ddb43602c003c7be0bee1d345021703fac2fb7c"
"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
"checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0"
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

zeroidc/vendor/ansi_term/Cargo.toml vendored Normal file

@@ -0,0 +1,43 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "ansi_term"
version = "0.12.1"
authors = ["ogham@bsago.me", "Ryan Scheel (Havvy) <ryan.havvy@gmail.com>", "Josh Triplett <josh@joshtriplett.org>"]
description = "Library for ANSI terminal colours and styles (bold, underline)"
homepage = "https://github.com/ogham/rust-ansi-term"
documentation = "https://docs.rs/ansi_term"
readme = "README.md"
license = "MIT"
repository = "https://github.com/ogham/rust-ansi-term"
[lib]
name = "ansi_term"
[dependencies.serde]
version = "1.0.90"
features = ["derive"]
optional = true
[dev-dependencies.doc-comment]
version = "0.3"
[dev-dependencies.regex]
version = "1.1.9"
[dev-dependencies.serde_json]
version = "1.0.39"
[features]
derive_serde_style = ["serde"]
[target."cfg(target_os=\"windows\")".dependencies.winapi]
version = "0.3.4"
features = ["consoleapi", "errhandlingapi", "fileapi", "handleapi", "processenv"]

zeroidc/vendor/ansi_term/LICENCE vendored Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Benjamin Sago
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

zeroidc/vendor/ansi_term/README.md vendored Normal file

@@ -0,0 +1,183 @@
# rust-ansi-term [![ansi-term on crates.io](http://meritbadge.herokuapp.com/ansi-term)](https://crates.io/crates/ansi_term) [![Build status](https://img.shields.io/travis/ogham/rust-ansi-term/master.svg?style=flat)](https://travis-ci.org/ogham/rust-ansi-term) [![Build status](https://img.shields.io/appveyor/ci/ogham/rust-ansi-term/master.svg?style=flat&logo=AppVeyor&logoColor=silver)](https://ci.appveyor.com/project/ogham/rust-ansi-term) [![Coverage status](https://coveralls.io/repos/ogham/rust-ansi-term/badge.svg?branch=master&service=github)](https://coveralls.io/github/ogham/rust-ansi-term?branch=master)
This is a library for controlling colours and formatting, such as red bold text or blue underlined text, on ANSI terminals.
### [View the Rustdoc](https://docs.rs/ansi_term/)
# Installation
This crate works with [Cargo](http://crates.io). Add the following to your `Cargo.toml` dependencies section:
```toml
[dependencies]
ansi_term = "0.12"
```
## Basic usage
There are three main types in this crate that you need to be concerned with: `ANSIString`, `Style`, and `Colour`.
A `Style` holds stylistic information: foreground and background colours, whether the text should be bold, or blinking, or other properties.
The `Colour` enum represents the available colours.
And an `ANSIString` is a string paired with a `Style`.
`Color` is also available as an alias to `Colour`.
To format a string, call the `paint` method on a `Style` or a `Colour`, passing in the string you want to format as the argument.
For example, here's how to get some red text:
```rust
use ansi_term::Colour::Red;
println!("This is in red: {}", Red.paint("a red string"));
```
It's important to note that the `paint` method does *not* actually return a string with the ANSI control characters surrounding it.
Instead, it returns an `ANSIString` value that has a `Display` implementation that, when formatted, returns the characters.
This allows strings to be printed with a minimum of `String` allocations being performed behind the scenes.
If you *do* want to get at the escape codes, then you can convert the `ANSIString` to a string as you would any other `Display` value:
```rust
use ansi_term::Colour::Red;
let red_string = Red.paint("a red string").to_string();
```
**Note for Windows 10 users:** On Windows 10, the application must enable ANSI support first:
```rust,ignore
let enabled = ansi_term::enable_ansi_support();
```
## Bold, underline, background, and other styles
For anything more complex than plain foreground colour changes, you need to construct `Style` values themselves, rather than beginning with a `Colour`.
You can do this by chaining methods based on a new `Style`, created with `Style::new()`.
Each method creates a new style that has that specific property set.
For example:
```rust
use ansi_term::Style;
println!("How about some {} and {}?",
Style::new().bold().paint("bold"),
Style::new().underline().paint("underline"));
```
For brevity, these methods have also been implemented for `Colour` values, so you can give your styles a foreground colour without having to begin with an empty `Style` value:
```rust
use ansi_term::Colour::{Blue, Yellow};
println!("Demonstrating {} and {}!",
Blue.bold().paint("blue bold"),
Yellow.underline().paint("yellow underline"));
println!("Yellow on blue: {}", Yellow.on(Blue).paint("wow!"));
```
The complete list of styles you can use is:
`bold`, `dimmed`, `italic`, `underline`, `blink`, `reverse`, `hidden`, `strikethrough`, and `on` for background colours.
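As a quick illustration, several of these can be chained on a single `Colour` before calling `paint`:
```rust
use ansi_term::Colour::{Cyan, White};
println!("{}", Cyan.bold().underline().on(White).paint("bold underlined cyan on white"));
```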
In some cases, you may find it easier to change the foreground on an existing `Style` rather than starting from the appropriate `Colour`.
You can do this using the `fg` method:
```rust
use ansi_term::Style;
use ansi_term::Colour::{Blue, Cyan, Yellow};
println!("Yellow on blue: {}", Style::new().on(Blue).fg(Yellow).paint("yow!"));
println!("Also yellow on blue: {}", Cyan.on(Blue).fg(Yellow).paint("zow!"));
```
You can turn a `Colour` into a `Style` with the `normal` method.
This will produce the exact same `ANSIString` as if you just used the `paint` method on the `Colour` directly, but it's useful in certain cases: for example, you may have a method that returns `Styles`, and need to represent both the “red bold” and “red, but not bold” styles with values of the same type. The `Style` struct also has a `Default` implementation if you want to have a style with *nothing* set.
```rust
use ansi_term::Style;
use ansi_term::Colour::Red;
Red.normal().paint("yet another red string");
Style::default().paint("a completely regular string");
```
## Extended colours
You can access the extended range of 256 colours by using the `Colour::Fixed` variant, which takes an argument of the colour number to use.
This can be included wherever you would use a `Colour`:
```rust
use ansi_term::Colour::Fixed;
Fixed(134).paint("A sort of light purple");
Fixed(221).on(Fixed(124)).paint("Mustard in the ketchup");
```
The first sixteen of these values are the same as the normal and bold standard colour variants.
There's nothing stopping you from using these as `Fixed` colours instead, but there's nothing to be gained by doing so either.
You can also access full 24-bit colour by using the `Colour::RGB` variant, which takes separate `u8` arguments for red, green, and blue:
```rust
use ansi_term::Colour::RGB;
RGB(70, 130, 180).paint("Steel blue");
```
## Combining successive coloured strings
The benefit of writing ANSI escape codes to the terminal is that they *stack*: you do not need to end every coloured string with a reset code if the text that follows it is of a similar style.
For example, if you want to have some blue text followed by some blue bold text, it's possible to send the ANSI code for blue, followed by the ANSI code for bold, and finishing with a reset code without having to have an extra one between the two strings.
This crate can optimise the ANSI codes that get printed in situations like this, making life easier for your terminal renderer.
The `ANSIStrings` struct takes a slice of several `ANSIString` values, and will iterate over each of them, printing only the codes for the styles that need to be updated as part of its formatting routine.
The following code snippet uses this to enclose a binary number displayed in red bold text inside some red, but not bold, brackets:
```rust
use ansi_term::Colour::Red;
use ansi_term::{ANSIString, ANSIStrings};
let some_value = format!("{:b}", 42);
let strings: &[ANSIString<'static>] = &[
Red.paint("["),
Red.bold().paint(some_value),
Red.paint("]"),
];
println!("Value: {}", ANSIStrings(strings));
```
There are several things to note here.
Firstly, the `paint` method can take *either* an owned `String` or a borrowed `&str`.
Internally, an `ANSIString` holds a copy-on-write (`Cow`) string value to deal with both owned and borrowed strings at the same time.
This is used here to display a `String`, the result of the `format!` call, using the same mechanism as some statically-available `&str` slices.
Secondly, the `ANSIStrings` value works in the same way as its singular counterpart, with a `Display` implementation that only performs the formatting when required.
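To illustrate the first of those points, `paint` happily accepts either form:
```rust
use ansi_term::Colour::Blue;
let owned: String = String::from("an owned String");
let borrowed: &str = "a borrowed string slice";
// Both calls work because `paint` accepts anything convertible into a Cow<str>.
println!("{} and {}", Blue.paint(owned), Blue.paint(borrowed));
```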
## Byte strings
This library also supports formatting `[u8]` byte strings; this supports applications working with text in an unknown encoding.
`Style` and `Colour` support painting `[u8]` values, resulting in an `ANSIByteString`.
This type does not implement `Display`, as it may not contain UTF-8, but it does provide a method `write_to` to write the result to any value that implements `Write`:
```rust
use ansi_term::Colour::Green;
Green.paint("user data".as_bytes()).write_to(&mut std::io::stdout()).unwrap();
```
Similarly, the type `ANSIByteStrings` supports writing a list of `ANSIByteString` values with minimal escape sequences:
```rust
use ansi_term::Colour::Green;
use ansi_term::ANSIByteStrings;
ANSIByteStrings(&[
Green.paint("user data 1\n".as_bytes()),
Green.bold().paint("user data 2\n".as_bytes()),
]).write_to(&mut std::io::stdout()).unwrap();
```

zeroidc/vendor/ansi_term/examples/256_colours.rs vendored Normal file

@@ -0,0 +1,73 @@
extern crate ansi_term;
use ansi_term::Colour;
// This example prints out the 256 colours.
// They're arranged like this:
//
// - 0 to 7 are the eight standard colours.
// - 8 to 15 are the eight bold colours.
// - 16 to 231 are six blocks of six-by-six colour squares.
// - 232 to 255 are shades of grey.
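// For example, in the first three squares below, the cell at
// (square, row, column) = (1, 2, 3) maps to colour 16 + 1 * 36 + 2 * 6 + 3 = 67.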
fn main() {
// First two lines
for c in 0..8 {
glow(c, c != 0);
print!(" ");
}
print!("\n");
for c in 8..16 {
glow(c, c != 8);
print!(" ");
}
print!("\n\n");
// Six lines of the first three squares
for row in 0..6 {
for square in 0..3 {
for column in 0..6 {
glow(16 + square * 36 + row * 6 + column, row >= 3);
print!(" ");
}
print!(" ");
}
print!("\n");
}
print!("\n");
// Six more lines of the other three squares
for row in 0..6 {
for square in 0..3 {
for column in 0..6 {
glow(124 + square * 36 + row * 6 + column, row >= 3);
print!(" ");
}
print!(" ");
}
print!("\n");
}
print!("\n");
// The last greyscale lines
for c in 232..=243 {
glow(c, false);
print!(" ");
}
print!("\n");
for c in 244..=255 {
glow(c, true);
print!(" ");
}
print!("\n");
}
fn glow(c: u8, light_bg: bool) {
let base = if light_bg { Colour::Black } else { Colour::White };
let style = base.on(Colour::Fixed(c));
print!("{}", style.paint(&format!(" {:3} ", c)));
}

zeroidc/vendor/ansi_term/examples/basic_colours.rs vendored Normal file

@@ -0,0 +1,18 @@
extern crate ansi_term;
use ansi_term::{Style, Colour::*};
// This example prints out the 16 basic colours.
fn main() {
let normal = Style::default();
println!("{} {}", normal.paint("Normal"), normal.bold().paint("bold"));
println!("{} {}", Black.paint("Black"), Black.bold().paint("bold"));
println!("{} {}", Red.paint("Red"), Red.bold().paint("bold"));
println!("{} {}", Green.paint("Green"), Green.bold().paint("bold"));
println!("{} {}", Yellow.paint("Yellow"), Yellow.bold().paint("bold"));
println!("{} {}", Blue.paint("Blue"), Blue.bold().paint("bold"));
println!("{} {}", Purple.paint("Purple"), Purple.bold().paint("bold"));
println!("{} {}", Cyan.paint("Cyan"), Cyan.bold().paint("bold"));
println!("{} {}", White.paint("White"), White.bold().paint("bold"));
}

zeroidc/vendor/ansi_term/examples/rgb_colours.rs vendored Normal file

@@ -0,0 +1,23 @@
extern crate ansi_term;
use ansi_term::{Style, Colour};
// This example prints out a colour gradient in a grid by calculating each
// character's red, green, and blue components, and using 24-bit colour codes
// to display them.
const WIDTH: i32 = 80;
const HEIGHT: i32 = 24;
fn main() {
for row in 0 .. HEIGHT {
for col in 0 .. WIDTH {
let r = (row * 255 / HEIGHT) as u8;
let g = (col * 255 / WIDTH) as u8;
let b = 128;
print!("{}", Style::default().on(Colour::RGB(r, g, b)).paint(" "));
}
print!("\n");
}
}

zeroidc/vendor/ansi_term/src/ansi.rs vendored Normal file

@@ -0,0 +1,374 @@
use style::{Colour, Style};
use std::fmt;
use write::AnyWrite;
// ---- generating ANSI codes ----
impl Style {
/// Write any bytes that go *before* a piece of text to the given writer.
fn write_prefix<W: AnyWrite + ?Sized>(&self, f: &mut W) -> Result<(), W::Error> {
// If there are actually no styles here, then don't write *any* codes
// as the prefix. An empty ANSI code may not affect the terminal
// output at all, but a user may just want a code-free string.
if self.is_plain() {
return Ok(());
}
// Write the codes' prefix, then write numbers, separated by
// semicolons, for each text style we want to apply.
write!(f, "\x1B[")?;
let mut written_anything = false;
{
let mut write_char = |c| {
if written_anything { write!(f, ";")?; }
written_anything = true;
write!(f, "{}", c)?;
Ok(())
};
if self.is_bold { write_char('1')? }
if self.is_dimmed { write_char('2')? }
if self.is_italic { write_char('3')? }
if self.is_underline { write_char('4')? }
if self.is_blink { write_char('5')? }
if self.is_reverse { write_char('7')? }
if self.is_hidden { write_char('8')? }
if self.is_strikethrough { write_char('9')? }
}
// The foreground and background colours, if specified, need to be
// handled specially because the number codes are more complicated.
// (see `write_background_code` and `write_foreground_code`)
if let Some(bg) = self.background {
if written_anything { write!(f, ";")?; }
written_anything = true;
bg.write_background_code(f)?;
}
if let Some(fg) = self.foreground {
if written_anything { write!(f, ";")?; }
fg.write_foreground_code(f)?;
}
// All the codes end with an `m`, because reasons.
write!(f, "m")?;
Ok(())
}
/// Write any bytes that go *after* a piece of text to the given writer.
fn write_suffix<W: AnyWrite + ?Sized>(&self, f: &mut W) -> Result<(), W::Error> {
if self.is_plain() {
Ok(())
}
else {
write!(f, "{}", RESET)
}
}
}
/// The code to send to reset all styles and return to `Style::default()`.
pub static RESET: &str = "\x1B[0m";
impl Colour {
fn write_foreground_code<W: AnyWrite + ?Sized>(&self, f: &mut W) -> Result<(), W::Error> {
match *self {
Colour::Black => write!(f, "30"),
Colour::Red => write!(f, "31"),
Colour::Green => write!(f, "32"),
Colour::Yellow => write!(f, "33"),
Colour::Blue => write!(f, "34"),
Colour::Purple => write!(f, "35"),
Colour::Cyan => write!(f, "36"),
Colour::White => write!(f, "37"),
Colour::Fixed(num) => write!(f, "38;5;{}", &num),
Colour::RGB(r,g,b) => write!(f, "38;2;{};{};{}", &r, &g, &b),
}
}
fn write_background_code<W: AnyWrite + ?Sized>(&self, f: &mut W) -> Result<(), W::Error> {
match *self {
Colour::Black => write!(f, "40"),
Colour::Red => write!(f, "41"),
Colour::Green => write!(f, "42"),
Colour::Yellow => write!(f, "43"),
Colour::Blue => write!(f, "44"),
Colour::Purple => write!(f, "45"),
Colour::Cyan => write!(f, "46"),
Colour::White => write!(f, "47"),
Colour::Fixed(num) => write!(f, "48;5;{}", &num),
Colour::RGB(r,g,b) => write!(f, "48;2;{};{};{}", &r, &g, &b),
}
}
}
/// Like `ANSIString`, but only displays the style prefix.
///
/// This type implements the `Display` trait, meaning it can be written to a
/// `std::fmt` formatting without doing any extra allocation, and written to a
/// string with the `.to_string()` method. For examples, see
/// [`Style::prefix`](struct.Style.html#method.prefix).
#[derive(Clone, Copy, Debug)]
pub struct Prefix(Style);
/// Like `ANSIString`, but only displays the difference between two
/// styles.
///
/// This type implements the `Display` trait, meaning it can be written to a
/// `std::fmt` formatting without doing any extra allocation, and written to a
/// string with the `.to_string()` method. For examples, see
/// [`Style::infix`](struct.Style.html#method.infix).
#[derive(Clone, Copy, Debug)]
pub struct Infix(Style, Style);
/// Like `ANSIString`, but only displays the style suffix.
///
/// This type implements the `Display` trait, meaning it can be written to a
/// `std::fmt` formatting without doing any extra allocation, and written to a
/// string with the `.to_string()` method. For examples, see
/// [`Style::suffix`](struct.Style.html#method.suffix).
#[derive(Clone, Copy, Debug)]
pub struct Suffix(Style);
impl Style {
/// The prefix bytes for this style. These are the bytes that tell the
/// terminal to use a different colour or font style.
///
/// # Examples
///
/// ```
/// use ansi_term::{Style, Colour::Blue};
///
/// let style = Style::default().bold();
/// assert_eq!("\x1b[1m",
/// style.prefix().to_string());
///
/// let style = Blue.bold();
/// assert_eq!("\x1b[1;34m",
/// style.prefix().to_string());
///
/// let style = Style::default();
/// assert_eq!("",
/// style.prefix().to_string());
/// ```
pub fn prefix(self) -> Prefix {
Prefix(self)
}
/// The infix bytes between this style and `next` style. These are the bytes
/// that tell the terminal to change the style to `next`. These may include
/// a reset followed by the next colour and style, depending on the two styles.
///
/// # Examples
///
/// ```
/// use ansi_term::{Style, Colour::Green};
///
/// let style = Style::default().bold();
/// assert_eq!("\x1b[32m",
/// style.infix(Green.bold()).to_string());
///
/// let style = Green.normal();
/// assert_eq!("\x1b[1m",
/// style.infix(Green.bold()).to_string());
///
/// let style = Style::default();
/// assert_eq!("",
/// style.infix(style).to_string());
/// ```
pub fn infix(self, next: Style) -> Infix {
Infix(self, next)
}
/// The suffix for this style. These are the bytes that tell the terminal
/// to reset back to its normal colour and font style.
///
/// # Examples
///
/// ```
/// use ansi_term::{Style, Colour::Green};
///
/// let style = Style::default().bold();
/// assert_eq!("\x1b[0m",
/// style.suffix().to_string());
///
/// let style = Green.normal().bold();
/// assert_eq!("\x1b[0m",
/// style.suffix().to_string());
///
/// let style = Style::default();
/// assert_eq!("",
/// style.suffix().to_string());
/// ```
pub fn suffix(self) -> Suffix {
Suffix(self)
}
}
impl Colour {
/// The prefix bytes for this colour as a `Style`. These are the bytes
/// that tell the terminal to use a different colour or font style.
///
/// See also [`Style::prefix`](struct.Style.html#method.prefix).
///
/// # Examples
///
/// ```
/// use ansi_term::Colour::Green;
///
/// assert_eq!("\x1b[32m",
/// Green.prefix().to_string());
/// ```
pub fn prefix(self) -> Prefix {
Prefix(self.normal())
}
/// The infix bytes between this colour and `next` colour. These are the bytes
/// that tell the terminal to use the `next` colour, or to do nothing if
/// the two colours are equal.
///
/// See also [`Style::infix`](struct.Style.html#method.infix).
///
/// # Examples
///
/// ```
/// use ansi_term::Colour::{Red, Yellow};
///
/// assert_eq!("\x1b[33m",
/// Red.infix(Yellow).to_string());
/// ```
pub fn infix(self, next: Colour) -> Infix {
Infix(self.normal(), next.normal())
}
/// The suffix for this colour as a `Style`. These are the bytes that
/// tell the terminal to reset back to its normal colour and font style.
///
/// See also [`Style::suffix`](struct.Style.html#method.suffix).
///
/// # Examples
///
/// ```
/// use ansi_term::Colour::Purple;
///
/// assert_eq!("\x1b[0m",
/// Purple.suffix().to_string());
/// ```
pub fn suffix(self) -> Suffix {
Suffix(self.normal())
}
}
impl fmt::Display for Prefix {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let f: &mut fmt::Write = f;
self.0.write_prefix(f)
}
}
impl fmt::Display for Infix {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use difference::Difference;
match Difference::between(&self.0, &self.1) {
Difference::ExtraStyles(style) => {
let f: &mut fmt::Write = f;
style.write_prefix(f)
},
Difference::Reset => {
let f: &mut fmt::Write = f;
write!(f, "{}{}", RESET, self.1.prefix())
},
Difference::NoDifference => {
Ok(()) // nothing to write
},
}
}
}
impl fmt::Display for Suffix {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let f: &mut fmt::Write = f;
self.0.write_suffix(f)
}
}
#[cfg(test)]
mod test {
use style::Style;
use style::Colour::*;
macro_rules! test {
($name: ident: $style: expr; $input: expr => $result: expr) => {
#[test]
fn $name() {
assert_eq!($style.paint($input).to_string(), $result.to_string());
let mut v = Vec::new();
$style.paint($input.as_bytes()).write_to(&mut v).unwrap();
assert_eq!(v.as_slice(), $result.as_bytes());
}
};
}
test!(plain: Style::default(); "text/plain" => "text/plain");
test!(red: Red; "hi" => "\x1B[31mhi\x1B[0m");
test!(black: Black.normal(); "hi" => "\x1B[30mhi\x1B[0m");
test!(yellow_bold: Yellow.bold(); "hi" => "\x1B[1;33mhi\x1B[0m");
test!(yellow_bold_2: Yellow.normal().bold(); "hi" => "\x1B[1;33mhi\x1B[0m");
test!(blue_underline: Blue.underline(); "hi" => "\x1B[4;34mhi\x1B[0m");
test!(green_bold_ul: Green.bold().underline(); "hi" => "\x1B[1;4;32mhi\x1B[0m");
test!(green_bold_ul_2: Green.underline().bold(); "hi" => "\x1B[1;4;32mhi\x1B[0m");
test!(purple_on_white: Purple.on(White); "hi" => "\x1B[47;35mhi\x1B[0m");
test!(purple_on_white_2: Purple.normal().on(White); "hi" => "\x1B[47;35mhi\x1B[0m");
test!(yellow_on_blue: Style::new().on(Blue).fg(Yellow); "hi" => "\x1B[44;33mhi\x1B[0m");
test!(yellow_on_blue_2: Cyan.on(Blue).fg(Yellow); "hi" => "\x1B[44;33mhi\x1B[0m");
test!(cyan_bold_on_white: Cyan.bold().on(White); "hi" => "\x1B[1;47;36mhi\x1B[0m");
test!(cyan_ul_on_white: Cyan.underline().on(White); "hi" => "\x1B[4;47;36mhi\x1B[0m");
test!(cyan_bold_ul_on_white: Cyan.bold().underline().on(White); "hi" => "\x1B[1;4;47;36mhi\x1B[0m");
test!(cyan_ul_bold_on_white: Cyan.underline().bold().on(White); "hi" => "\x1B[1;4;47;36mhi\x1B[0m");
test!(fixed: Fixed(100); "hi" => "\x1B[38;5;100mhi\x1B[0m");
test!(fixed_on_purple: Fixed(100).on(Purple); "hi" => "\x1B[45;38;5;100mhi\x1B[0m");
test!(fixed_on_fixed: Fixed(100).on(Fixed(200)); "hi" => "\x1B[48;5;200;38;5;100mhi\x1B[0m");
test!(rgb: RGB(70,130,180); "hi" => "\x1B[38;2;70;130;180mhi\x1B[0m");
test!(rgb_on_blue: RGB(70,130,180).on(Blue); "hi" => "\x1B[44;38;2;70;130;180mhi\x1B[0m");
test!(blue_on_rgb: Blue.on(RGB(70,130,180)); "hi" => "\x1B[48;2;70;130;180;34mhi\x1B[0m");
test!(rgb_on_rgb: RGB(70,130,180).on(RGB(5,10,15)); "hi" => "\x1B[48;2;5;10;15;38;2;70;130;180mhi\x1B[0m");
test!(bold: Style::new().bold(); "hi" => "\x1B[1mhi\x1B[0m");
test!(underline: Style::new().underline(); "hi" => "\x1B[4mhi\x1B[0m");
test!(bunderline: Style::new().bold().underline(); "hi" => "\x1B[1;4mhi\x1B[0m");
test!(dimmed: Style::new().dimmed(); "hi" => "\x1B[2mhi\x1B[0m");
test!(italic: Style::new().italic(); "hi" => "\x1B[3mhi\x1B[0m");
test!(blink: Style::new().blink(); "hi" => "\x1B[5mhi\x1B[0m");
test!(reverse: Style::new().reverse(); "hi" => "\x1B[7mhi\x1B[0m");
test!(hidden: Style::new().hidden(); "hi" => "\x1B[8mhi\x1B[0m");
test!(stricken: Style::new().strikethrough(); "hi" => "\x1B[9mhi\x1B[0m");
#[test]
fn test_infix() {
assert_eq!(Style::new().dimmed().infix(Style::new()).to_string(), "\x1B[0m");
assert_eq!(White.dimmed().infix(White.normal()).to_string(), "\x1B[0m\x1B[37m");
assert_eq!(White.normal().infix(White.bold()).to_string(), "\x1B[1m");
assert_eq!(White.normal().infix(Blue.normal()).to_string(), "\x1B[34m");
assert_eq!(Blue.bold().infix(Blue.bold()).to_string(), "");
}
}

zeroidc/vendor/ansi_term/src/debug.rs vendored Normal file

@@ -0,0 +1,134 @@
use std::fmt;
use style::Style;
/// Styles have a special `Debug` implementation that only shows the fields that
/// are set. Fields that haven't been touched aren't included in the output.
///
/// This behaviour gets bypassed when using the alternate formatting mode
/// `format!("{:#?}")`.
///
/// use ansi_term::Colour::{Red, Blue};
/// assert_eq!("Style { fg(Red), on(Blue), bold, italic }",
/// format!("{:?}", Red.on(Blue).bold().italic()));
impl fmt::Debug for Style {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
if fmt.alternate() {
fmt.debug_struct("Style")
.field("foreground", &self.foreground)
.field("background", &self.background)
.field("blink", &self.is_blink)
.field("bold", &self.is_bold)
.field("dimmed", &self.is_dimmed)
.field("hidden", &self.is_hidden)
.field("italic", &self.is_italic)
.field("reverse", &self.is_reverse)
.field("strikethrough", &self.is_strikethrough)
.field("underline", &self.is_underline)
.finish()
}
else if self.is_plain() {
fmt.write_str("Style {}")
}
else {
fmt.write_str("Style { ")?;
let mut written_anything = false;
if let Some(fg) = self.foreground {
if written_anything { fmt.write_str(", ")? }
written_anything = true;
write!(fmt, "fg({:?})", fg)?
}
if let Some(bg) = self.background {
if written_anything { fmt.write_str(", ")? }
written_anything = true;
write!(fmt, "on({:?})", bg)?
}
{
let mut write_flag = |name| {
if written_anything { fmt.write_str(", ")? }
written_anything = true;
fmt.write_str(name)
};
if self.is_blink { write_flag("blink")? }
if self.is_bold { write_flag("bold")? }
if self.is_dimmed { write_flag("dimmed")? }
if self.is_hidden { write_flag("hidden")? }
if self.is_italic { write_flag("italic")? }
if self.is_reverse { write_flag("reverse")? }
if self.is_strikethrough { write_flag("strikethrough")? }
if self.is_underline { write_flag("underline")? }
}
write!(fmt, " }}")
}
}
}
#[cfg(test)]
mod test {
use style::Colour::*;
use style::Style;
fn style() -> Style {
Style::new()
}
macro_rules! test {
($name: ident: $obj: expr => $result: expr) => {
#[test]
fn $name() {
assert_eq!($result, format!("{:?}", $obj));
}
};
}
test!(empty: style() => "Style {}");
test!(bold: style().bold() => "Style { bold }");
test!(italic: style().italic() => "Style { italic }");
test!(both: style().bold().italic() => "Style { bold, italic }");
test!(red: Red.normal() => "Style { fg(Red) }");
test!(redblue: Red.normal().on(RGB(3, 2, 4)) => "Style { fg(Red), on(RGB(3, 2, 4)) }");
test!(everything:
Red.on(Blue).blink().bold().dimmed().hidden().italic().reverse().strikethrough().underline() =>
"Style { fg(Red), on(Blue), blink, bold, dimmed, hidden, italic, reverse, strikethrough, underline }");
#[test]
fn long_and_detailed() {
extern crate regex;
let expected_debug = "Style { fg(Blue), bold }";
let expected_pretty_repat = r##"(?x)
Style\s+\{\s+
foreground:\s+Some\(\s+
Blue,?\s+
\),\s+
background:\s+None,\s+
blink:\s+false,\s+
bold:\s+true,\s+
dimmed:\s+false,\s+
hidden:\s+false,\s+
italic:\s+false,\s+
reverse:\s+false,\s+
strikethrough:\s+
false,\s+
underline:\s+false,?\s+
\}"##;
let re = regex::Regex::new(expected_pretty_repat).unwrap();
let style = Blue.bold();
let style_fmt_debug = format!("{:?}", style);
let style_fmt_pretty = format!("{:#?}", style);
println!("style_fmt_debug:\n{}", style_fmt_debug);
println!("style_fmt_pretty:\n{}", style_fmt_pretty);
assert_eq!(expected_debug, style_fmt_debug);
assert!(re.is_match(&style_fmt_pretty));
}
}

zeroidc/vendor/ansi_term/src/difference.rs vendored Normal file

@@ -0,0 +1,179 @@
use super::Style;
/// When printing out one coloured string followed by another, use one of
/// these rules to figure out which *extra* control codes need to be sent.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Difference {
/// Print out the control codes specified by this style to end up looking
/// like the second string's styles.
ExtraStyles(Style),
/// Converting between these two is impossible, so just send a reset
/// command and then the second string's styles.
Reset,
/// The before style is exactly the same as the after style, so no further
/// control codes need to be printed.
NoDifference,
}
impl Difference {
/// Compute the 'style difference' required to turn an existing style into
/// the given, second style.
///
/// For example, to turn green text into green bold text, it's redundant
/// to write a reset command then a second green+bold command, instead of
/// just writing one bold command. This method should see that both styles
/// use the foreground colour green, and reduce it to a single command.
///
/// This method returns an enum value because it's not actually always
/// possible to turn one style into another: for example, text could be
/// made bold and underlined, but you can't remove the bold property
/// without also removing the underline property. So when this has to
/// happen, this function returns `Reset`, meaning that the entire set of
/// styles should be reset and begun again.
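///
/// For example, going from `Green.normal()` to `Green.bold()` only needs the
/// extra bold code (`ExtraStyles`), while going from `Green.bold()` back to
/// `Green.normal()` requires a `Reset` first; see the tests at the bottom of
/// this file.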
pub fn between(first: &Style, next: &Style) -> Difference {
use self::Difference::*;
// XXX(Havvy): This algorithm is kind of hard to replicate without
// having the Plain/Foreground enum variants, so I'm just leaving
// it commented out for now, and defaulting to Reset.
if first == next {
return NoDifference;
}
// Cannot un-bold, so must Reset.
if first.is_bold && !next.is_bold {
return Reset;
}
if first.is_dimmed && !next.is_dimmed {
return Reset;
}
if first.is_italic && !next.is_italic {
return Reset;
}
// Cannot un-underline, so must Reset.
if first.is_underline && !next.is_underline {
return Reset;
}
if first.is_blink && !next.is_blink {
return Reset;
}
if first.is_reverse && !next.is_reverse {
return Reset;
}
if first.is_hidden && !next.is_hidden {
return Reset;
}
if first.is_strikethrough && !next.is_strikethrough {
return Reset;
}
// Cannot go from foreground to no foreground, so must Reset.
if first.foreground.is_some() && next.foreground.is_none() {
return Reset;
}
// Cannot go from background to no background, so must Reset.
if first.background.is_some() && next.background.is_none() {
return Reset;
}
let mut extra_styles = Style::default();
if first.is_bold != next.is_bold {
extra_styles.is_bold = true;
}
if first.is_dimmed != next.is_dimmed {
extra_styles.is_dimmed = true;
}
if first.is_italic != next.is_italic {
extra_styles.is_italic = true;
}
if first.is_underline != next.is_underline {
extra_styles.is_underline = true;
}
if first.is_blink != next.is_blink {
extra_styles.is_blink = true;
}
if first.is_reverse != next.is_reverse {
extra_styles.is_reverse = true;
}
if first.is_hidden != next.is_hidden {
extra_styles.is_hidden = true;
}
if first.is_strikethrough != next.is_strikethrough {
extra_styles.is_strikethrough = true;
}
if first.foreground != next.foreground {
extra_styles.foreground = next.foreground;
}
if first.background != next.background {
extra_styles.background = next.background;
}
ExtraStyles(extra_styles)
}
}
#[cfg(test)]
mod test {
use super::*;
use super::Difference::*;
use style::Colour::*;
use style::Style;
fn style() -> Style {
Style::new()
}
macro_rules! test {
($name: ident: $first: expr; $next: expr => $result: expr) => {
#[test]
fn $name() {
assert_eq!($result, Difference::between(&$first, &$next));
}
};
}
test!(nothing: Green.normal(); Green.normal() => NoDifference);
test!(uppercase: Green.normal(); Green.bold() => ExtraStyles(style().bold()));
test!(lowercase: Green.bold(); Green.normal() => Reset);
test!(nothing2: Green.bold(); Green.bold() => NoDifference);
test!(colour_change: Red.normal(); Blue.normal() => ExtraStyles(Blue.normal()));
test!(addition_of_blink: style(); style().blink() => ExtraStyles(style().blink()));
test!(addition_of_dimmed: style(); style().dimmed() => ExtraStyles(style().dimmed()));
test!(addition_of_hidden: style(); style().hidden() => ExtraStyles(style().hidden()));
test!(addition_of_reverse: style(); style().reverse() => ExtraStyles(style().reverse()));
test!(addition_of_strikethrough: style(); style().strikethrough() => ExtraStyles(style().strikethrough()));
test!(removal_of_strikethrough: style().strikethrough(); style() => Reset);
test!(removal_of_reverse: style().reverse(); style() => Reset);
test!(removal_of_hidden: style().hidden(); style() => Reset);
test!(removal_of_dimmed: style().dimmed(); style() => Reset);
test!(removal_of_blink: style().blink(); style() => Reset);
}

zeroidc/vendor/ansi_term/src/display.rs vendored Normal file

@@ -0,0 +1,296 @@
use std::borrow::Cow;
use std::fmt;
use std::io;
use std::ops::Deref;
use ansi::RESET;
use difference::Difference;
use style::{Style, Colour};
use write::AnyWrite;
/// An `ANSIGenericString` includes a generic string type and a `Style` to
/// display that string. `ANSIString` and `ANSIByteString` are aliases for
/// this type on `str` and `\[u8]`, respectively.
#[derive(PartialEq, Debug)]
pub struct ANSIGenericString<'a, S: 'a + ToOwned + ?Sized>
where <S as ToOwned>::Owned: fmt::Debug {
style: Style,
string: Cow<'a, S>,
}
/// Cloning an `ANSIGenericString` will clone its underlying string.
///
/// # Examples
///
/// ```
/// use ansi_term::ANSIString;
///
/// let plain_string = ANSIString::from("a plain string");
/// let clone_string = plain_string.clone();
/// assert_eq!(clone_string, plain_string);
/// ```
impl<'a, S: 'a + ToOwned + ?Sized> Clone for ANSIGenericString<'a, S>
where <S as ToOwned>::Owned: fmt::Debug {
fn clone(&self) -> ANSIGenericString<'a, S> {
ANSIGenericString {
style: self.style,
string: self.string.clone(),
}
}
}
// You might think that the hand-written Clone impl above is the same as the
// one that gets generated with #[derive]. But it's not *quite* the same!
//
// `str` is not Clone, and the derived Clone implementation puts a Clone
// constraint on the S type parameter (generated using --pretty=expanded):
//
// ↓_________________↓
// impl <'a, S: ::std::clone::Clone + 'a + ToOwned + ?Sized> ::std::clone::Clone
// for ANSIGenericString<'a, S> where
// <S as ToOwned>::Owned: fmt::Debug { ... }
//
// This resulted in compile errors when you tried to derive Clone on a type
// that used it:
//
// #[derive(PartialEq, Debug, Clone, Default)]
// pub struct TextCellContents(Vec<ANSIString<'static>>);
// ^^^^^^^^^^^^^^^^^^^^^^^^^
// error[E0277]: the trait `std::clone::Clone` is not implemented for `str`
//
// The hand-written impl above can ignore that constraint and still compile.
/// An ANSI String is a string coupled with the `Style` to display it
/// in a terminal.
///
/// Although not technically a string itself, it can be turned into
/// one with the `to_string` method.
///
/// # Examples
///
/// ```
/// use ansi_term::ANSIString;
/// use ansi_term::Colour::Red;
///
/// let red_string = Red.paint("a red string");
/// println!("{}", red_string);
/// ```
///
/// ```
/// use ansi_term::ANSIString;
///
/// let plain_string = ANSIString::from("a plain string");
/// assert_eq!(&*plain_string, "a plain string");
/// ```
pub type ANSIString<'a> = ANSIGenericString<'a, str>;
/// An `ANSIByteString` represents a formatted series of bytes. Use
/// `ANSIByteString` when styling text with an unknown encoding.
pub type ANSIByteString<'a> = ANSIGenericString<'a, [u8]>;
impl<'a, I, S: 'a + ToOwned + ?Sized> From<I> for ANSIGenericString<'a, S>
where I: Into<Cow<'a, S>>,
<S as ToOwned>::Owned: fmt::Debug {
fn from(input: I) -> ANSIGenericString<'a, S> {
ANSIGenericString {
string: input.into(),
style: Style::default(),
}
}
}
impl<'a, S: 'a + ToOwned + ?Sized> ANSIGenericString<'a, S>
where <S as ToOwned>::Owned: fmt::Debug {
/// Directly access the style
pub fn style_ref(&self) -> &Style {
&self.style
}
/// Directly access the style mutably
pub fn style_ref_mut(&mut self) -> &mut Style {
&mut self.style
}
}
impl<'a, S: 'a + ToOwned + ?Sized> Deref for ANSIGenericString<'a, S>
where <S as ToOwned>::Owned: fmt::Debug {
type Target = S;
fn deref(&self) -> &S {
self.string.deref()
}
}
/// A set of `ANSIGenericString`s collected together, in order to be
/// written with a minimum of control characters.
#[derive(Debug, PartialEq)]
pub struct ANSIGenericStrings<'a, S: 'a + ToOwned + ?Sized>
(pub &'a [ANSIGenericString<'a, S>])
where <S as ToOwned>::Owned: fmt::Debug, S: PartialEq;
/// A set of `ANSIString`s collected together, in order to be written with a
/// minimum of control characters.
pub type ANSIStrings<'a> = ANSIGenericStrings<'a, str>;
/// A function to construct an `ANSIStrings` instance.
#[allow(non_snake_case)]
pub fn ANSIStrings<'a>(arg: &'a [ANSIString<'a>]) -> ANSIStrings<'a> {
ANSIGenericStrings(arg)
}
/// A set of `ANSIByteString`s collected together, in order to be
/// written with a minimum of control characters.
pub type ANSIByteStrings<'a> = ANSIGenericStrings<'a, [u8]>;
/// A function to construct an `ANSIByteStrings` instance.
#[allow(non_snake_case)]
pub fn ANSIByteStrings<'a>(arg: &'a [ANSIByteString<'a>]) -> ANSIByteStrings<'a> {
ANSIGenericStrings(arg)
}
// ---- paint functions ----
impl Style {
/// Paints the given text with this style, returning an ANSI string.
#[must_use]
pub fn paint<'a, I, S: 'a + ToOwned + ?Sized>(self, input: I) -> ANSIGenericString<'a, S>
where I: Into<Cow<'a, S>>,
<S as ToOwned>::Owned: fmt::Debug {
ANSIGenericString {
string: input.into(),
style: self,
}
}
}
impl Colour {
/// Paints the given text with this colour, returning an ANSI string.
/// This is a short-cut so you don't have to use `Blue.normal()` just
/// to get blue text.
///
/// ```
/// use ansi_term::Colour::Blue;
/// println!("{}", Blue.paint("da ba dee"));
/// ```
#[must_use]
pub fn paint<'a, I, S: 'a + ToOwned + ?Sized>(self, input: I) -> ANSIGenericString<'a, S>
where I: Into<Cow<'a, S>>,
<S as ToOwned>::Owned: fmt::Debug {
ANSIGenericString {
string: input.into(),
style: self.normal(),
}
}
}
// ---- writers for individual ANSI strings ----
impl<'a> fmt::Display for ANSIString<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let w: &mut fmt::Write = f;
self.write_to_any(w)
}
}
impl<'a> ANSIByteString<'a> {
/// Write an `ANSIByteString` to an `io::Write`. This writes the escape
/// sequences for the associated `Style` around the bytes.
pub fn write_to<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
let w: &mut io::Write = w;
self.write_to_any(w)
}
}
impl<'a, S: 'a + ToOwned + ?Sized> ANSIGenericString<'a, S>
where <S as ToOwned>::Owned: fmt::Debug, &'a S: AsRef<[u8]> {
fn write_to_any<W: AnyWrite<wstr=S> + ?Sized>(&self, w: &mut W) -> Result<(), W::Error> {
write!(w, "{}", self.style.prefix())?;
w.write_str(self.string.as_ref())?;
write!(w, "{}", self.style.suffix())
}
}
// ---- writers for combined ANSI strings ----
impl<'a> fmt::Display for ANSIStrings<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let f: &mut fmt::Write = f;
self.write_to_any(f)
}
}
impl<'a> ANSIByteStrings<'a> {
/// Write `ANSIByteStrings` to an `io::Write`. This writes the minimal
/// escape sequences for the associated `Style`s around each set of
/// bytes.
pub fn write_to<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
let w: &mut io::Write = w;
self.write_to_any(w)
}
}
impl<'a, S: 'a + ToOwned + ?Sized + PartialEq> ANSIGenericStrings<'a, S>
where <S as ToOwned>::Owned: fmt::Debug, &'a S: AsRef<[u8]> {
fn write_to_any<W: AnyWrite<wstr=S> + ?Sized>(&self, w: &mut W) -> Result<(), W::Error> {
use self::Difference::*;
let first = match self.0.first() {
None => return Ok(()),
Some(f) => f,
};
write!(w, "{}", first.style.prefix())?;
w.write_str(first.string.as_ref())?;
for window in self.0.windows(2) {
match Difference::between(&window[0].style, &window[1].style) {
ExtraStyles(style) => write!(w, "{}", style.prefix())?,
Reset => write!(w, "{}{}", RESET, window[1].style.prefix())?,
NoDifference => {/* Do nothing! */},
}
w.write_str(&window[1].string)?;
}
// Write the final reset string after all of the ANSIStrings have been
// written, *except* if the last one has no styles, because it would
// have already been written by this point.
if let Some(last) = self.0.last() {
if !last.style.is_plain() {
write!(w, "{}", RESET)?;
}
}
Ok(())
}
}
// ---- tests ----
#[cfg(test)]
mod tests {
pub use super::super::ANSIStrings;
pub use style::Style;
pub use style::Colour::*;
#[test]
fn no_control_codes_for_plain() {
let one = Style::default().paint("one");
let two = Style::default().paint("two");
let output = format!("{}", ANSIStrings( &[ one, two ] ));
assert_eq!(&*output, "onetwo");
}
}

zeroidc/vendor/ansi_term/src/lib.rs vendored Normal file

@@ -0,0 +1,271 @@
//! This is a library for controlling colours and formatting, such as
//! red bold text or blue underlined text, on ANSI terminals.
//!
//!
//! ## Basic usage
//!
//! There are three main types in this crate that you need to be
//! concerned with: [`ANSIString`], [`Style`], and [`Colour`].
//!
//! A `Style` holds stylistic information: foreground and background colours,
//! whether the text should be bold, or blinking, or other properties. The
//! [`Colour`] enum represents the available colours. And an [`ANSIString`] is a
//! string paired with a [`Style`].
//!
//! [`Color`] is also available as an alias to `Colour`.
//!
//! To format a string, call the `paint` method on a `Style` or a `Colour`,
//! passing in the string you want to format as the argument. For example,
//! here's how to get some red text:
//!
//! ```
//! use ansi_term::Colour::Red;
//!
//! println!("This is in red: {}", Red.paint("a red string"));
//! ```
//!
//! It's important to note that the `paint` method does *not* actually return a
//! string with the ANSI control characters surrounding it. Instead, it returns
//! an [`ANSIString`] value that has a [`Display`] implementation that, when
//! formatted, returns the characters. This allows strings to be printed with a
//! minimum of [`String`] allocations being performed behind the scenes.
//!
//! If you *do* want to get at the escape codes, then you can convert the
//! [`ANSIString`] to a string as you would any other `Display` value:
//!
//! ```
//! use ansi_term::Colour::Red;
//!
//! let red_string = Red.paint("a red string").to_string();
//! ```
//!
//!
//! ## Bold, underline, background, and other styles
//!
//! For anything more complex than plain foreground colour changes, you need to
//! construct `Style` values themselves, rather than beginning with a `Colour`.
//! You can do this by chaining methods based on a new `Style`, created with
//! [`Style::new()`]. Each method creates a new style that has that specific
//! property set. For example:
//!
//! ```
//! use ansi_term::Style;
//!
//! println!("How about some {} and {}?",
//! Style::new().bold().paint("bold"),
//! Style::new().underline().paint("underline"));
//! ```
//!
//! For brevity, these methods have also been implemented for `Colour` values,
//! so you can give your styles a foreground colour without having to begin with
//! an empty `Style` value:
//!
//! ```
//! use ansi_term::Colour::{Blue, Yellow};
//!
//! println!("Demonstrating {} and {}!",
//! Blue.bold().paint("blue bold"),
//! Yellow.underline().paint("yellow underline"));
//!
//! println!("Yellow on blue: {}", Yellow.on(Blue).paint("wow!"));
//! ```
//!
//! The complete list of styles you can use is: [`bold`], [`dimmed`], [`italic`],
//! [`underline`], [`blink`], [`reverse`], [`hidden`], [`strikethrough`], and [`on`] for
//! background colours.
//!
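//! As a quick illustration, several of these can be chained on a single
//! `Colour` before calling `paint`:
//!
//! ```
//! use ansi_term::Colour::{Cyan, White};
//!
//! println!("{}", Cyan.bold().underline().on(White).paint("bold underlined cyan on white"));
//! ```
//!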
//! In some cases, you may find it easier to change the foreground on an
//! existing `Style` rather than starting from the appropriate `Colour`.
//! You can do this using the [`fg`] method:
//!
//! ```
//! use ansi_term::Style;
//! use ansi_term::Colour::{Blue, Cyan, Yellow};
//!
//! println!("Yellow on blue: {}", Style::new().on(Blue).fg(Yellow).paint("yow!"));
//! println!("Also yellow on blue: {}", Cyan.on(Blue).fg(Yellow).paint("zow!"));
//! ```
//!
//! You can turn a `Colour` into a `Style` with the [`normal`] method.
//! This will produce the exact same `ANSIString` as if you just used the
//! `paint` method on the `Colour` directly, but it's useful in certain cases:
//! for example, you may have a method that returns `Styles`, and need to
//! represent both the “red bold” and “red, but not bold” styles with values of
//! the same type. The `Style` struct also has a [`Default`] implementation if you
//! want to have a style with *nothing* set.
//!
//! ```
//! use ansi_term::Style;
//! use ansi_term::Colour::Red;
//!
//! Red.normal().paint("yet another red string");
//! Style::default().paint("a completely regular string");
//! ```
//!
//!
//! ## Extended colours
//!
//! You can access the extended range of 256 colours by using the `Colour::Fixed`
//! variant, which takes an argument of the colour number to use. This can be
//! included wherever you would use a `Colour`:
//!
//! ```
//! use ansi_term::Colour::Fixed;
//!
//! Fixed(134).paint("A sort of light purple");
//! Fixed(221).on(Fixed(124)).paint("Mustard in the ketchup");
//! ```
//!
//! The first sixteen of these values are the same as the normal and bold
//! standard colour variants. There's nothing stopping you from using these as
//! `Fixed` colours instead, but there's nothing to be gained by doing so
//! either.
//!
//! You can also access full 24-bit colour by using the `Colour::RGB` variant,
//! which takes separate `u8` arguments for red, green, and blue:
//!
//! ```
//! use ansi_term::Colour::RGB;
//!
//! RGB(70, 130, 180).paint("Steel blue");
//! ```
//!
//! ## Combining successive coloured strings
//!
//! The benefit of writing ANSI escape codes to the terminal is that they
//! *stack*: you do not need to end every coloured string with a reset code if
//! the text that follows it is of a similar style. For example, if you want to
//! have some blue text followed by some blue bold text, it's possible to send
//! the ANSI code for blue, followed by the ANSI code for bold, and finishing
//! with a reset code without having to have an extra one between the two
//! strings.
//!
//! This crate can optimise the ANSI codes that get printed in situations like
//! this, making life easier for your terminal renderer. The [`ANSIStrings`]
//! type takes a slice of several [`ANSIString`] values, and will iterate over
//! each of them, printing only the codes for the styles that need to be updated
//! as part of its formatting routine.
//!
//! The following code snippet uses this to enclose a binary number displayed in
//! red bold text inside some red, but not bold, brackets:
//!
//! ```
//! use ansi_term::Colour::Red;
//! use ansi_term::{ANSIString, ANSIStrings};
//!
//! let some_value = format!("{:b}", 42);
//! let strings: &[ANSIString<'static>] = &[
//! Red.paint("["),
//! Red.bold().paint(some_value),
//! Red.paint("]"),
//! ];
//!
//! println!("Value: {}", ANSIStrings(strings));
//! ```
//!
//! There are several things to note here. Firstly, the [`paint`] method can take
//! *either* an owned [`String`] or a borrowed [`&str`]. Internally, an [`ANSIString`]
//! holds a copy-on-write ([`Cow`]) string value to deal with both owned and
//! borrowed strings at the same time. This is used here to display a `String`,
//! the result of the `format!` call, using the same mechanism as some
//! statically-available `&str` slices. Secondly, the [`ANSIStrings`] value
//! works in the same way as its singular counterpart, with a [`Display`]
//! implementation that only performs the formatting when required.
//!
//! ## Byte strings
//!
//! This library also supports formatting `[u8]` byte strings; this supports
//! applications working with text in an unknown encoding. [`Style`] and
//! [`Colour`] support painting `[u8]` values, resulting in an [`ANSIByteString`].
//! This type does not implement [`Display`], as it may not contain UTF-8, but
//! it does provide a method [`write_to`] to write the result to any value that
//! implements [`Write`]:
//!
//! ```
//! use ansi_term::Colour::Green;
//!
//! Green.paint("user data".as_bytes()).write_to(&mut std::io::stdout()).unwrap();
//! ```
//!
//! Similarly, the type [`ANSIByteStrings`] supports writing a list of
//! [`ANSIByteString`] values with minimal escape sequences:
//!
//! ```
//! use ansi_term::Colour::Green;
//! use ansi_term::ANSIByteStrings;
//!
//! ANSIByteStrings(&[
//! Green.paint("user data 1\n".as_bytes()),
//! Green.bold().paint("user data 2\n".as_bytes()),
//! ]).write_to(&mut std::io::stdout()).unwrap();
//! ```
//!
//! [`Cow`]: https://doc.rust-lang.org/std/borrow/enum.Cow.html
//! [`Display`]: https://doc.rust-lang.org/std/fmt/trait.Display.html
//! [`Default`]: https://doc.rust-lang.org/std/default/trait.Default.html
//! [`String`]: https://doc.rust-lang.org/std/string/struct.String.html
//! [`&str`]: https://doc.rust-lang.org/std/primitive.str.html
//! [`Write`]: https://doc.rust-lang.org/std/io/trait.Write.html
//! [`Style`]: struct.Style.html
//! [`Style::new()`]: struct.Style.html#method.new
//! [`Color`]: enum.Color.html
//! [`Colour`]: enum.Colour.html
//! [`ANSIString`]: type.ANSIString.html
//! [`ANSIStrings`]: type.ANSIStrings.html
//! [`ANSIByteString`]: type.ANSIByteString.html
//! [`ANSIByteStrings`]: type.ANSIByteStrings.html
//! [`write_to`]: type.ANSIByteString.html#method.write_to
//! [`paint`]: struct.Style.html#method.paint
//! [`normal`]: enum.Colour.html#method.normal
//!
//! [`bold`]: struct.Style.html#method.bold
//! [`dimmed`]: struct.Style.html#method.dimmed
//! [`italic`]: struct.Style.html#method.italic
//! [`underline`]: struct.Style.html#method.underline
//! [`blink`]: struct.Style.html#method.blink
//! [`reverse`]: struct.Style.html#method.reverse
//! [`hidden`]: struct.Style.html#method.hidden
//! [`strikethrough`]: struct.Style.html#method.strikethrough
//! [`fg`]: struct.Style.html#method.fg
//! [`on`]: struct.Style.html#method.on
#![crate_name = "ansi_term"]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![warn(missing_copy_implementations)]
#![warn(missing_docs)]
#![warn(trivial_casts, trivial_numeric_casts)]
#![warn(unused_extern_crates, unused_qualifications)]
#[cfg(target_os="windows")]
extern crate winapi;
#[cfg(test)]
#[macro_use]
extern crate doc_comment;
#[cfg(test)]
doctest!("../README.md");
mod ansi;
pub use ansi::{Prefix, Infix, Suffix};
mod style;
pub use style::{Colour, Style};
/// Color is a type alias for `Colour`.
pub use Colour as Color;
mod difference;
mod display;
pub use display::*;
mod write;
mod windows;
pub use windows::*;
mod util;
pub use util::*;
mod debug;

521
zeroidc/vendor/ansi_term/src/style.rs vendored Normal file
View File

@ -0,0 +1,521 @@
/// A style is a collection of properties that can format a string
/// using ANSI escape codes.
///
/// # Examples
///
/// ```
/// use ansi_term::{Style, Colour};
///
/// let style = Style::new().bold().on(Colour::Black);
/// println!("{}", style.paint("Bold on black"));
/// ```
#[derive(PartialEq, Clone, Copy)]
#[cfg_attr(feature = "derive_serde_style", derive(serde::Deserialize, serde::Serialize))]
pub struct Style {
/// The style's foreground colour, if it has one.
pub foreground: Option<Colour>,
/// The style's background colour, if it has one.
pub background: Option<Colour>,
/// Whether this style is bold.
pub is_bold: bool,
/// Whether this style is dimmed.
pub is_dimmed: bool,
/// Whether this style is italic.
pub is_italic: bool,
/// Whether this style is underlined.
pub is_underline: bool,
/// Whether this style is blinking.
pub is_blink: bool,
/// Whether this style has reverse colours.
pub is_reverse: bool,
/// Whether this style is hidden.
pub is_hidden: bool,
/// Whether this style is struckthrough.
pub is_strikethrough: bool
}
impl Style {
/// Creates a new Style with no properties set.
///
/// # Examples
///
/// ```
/// use ansi_term::Style;
///
/// let style = Style::new();
/// println!("{}", style.paint("hi"));
/// ```
pub fn new() -> Style {
Style::default()
}
/// Returns a `Style` with the bold property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Style;
///
/// let style = Style::new().bold();
/// println!("{}", style.paint("hey"));
/// ```
pub fn bold(&self) -> Style {
Style { is_bold: true, .. *self }
}
/// Returns a `Style` with the dimmed property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Style;
///
/// let style = Style::new().dimmed();
/// println!("{}", style.paint("sup"));
/// ```
pub fn dimmed(&self) -> Style {
Style { is_dimmed: true, .. *self }
}
/// Returns a `Style` with the italic property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Style;
///
/// let style = Style::new().italic();
/// println!("{}", style.paint("greetings"));
/// ```
pub fn italic(&self) -> Style {
Style { is_italic: true, .. *self }
}
/// Returns a `Style` with the underline property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Style;
///
/// let style = Style::new().underline();
/// println!("{}", style.paint("salutations"));
/// ```
pub fn underline(&self) -> Style {
Style { is_underline: true, .. *self }
}
/// Returns a `Style` with the blink property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Style;
///
/// let style = Style::new().blink();
/// println!("{}", style.paint("wazzup"));
/// ```
pub fn blink(&self) -> Style {
Style { is_blink: true, .. *self }
}
/// Returns a `Style` with the reverse property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Style;
///
/// let style = Style::new().reverse();
/// println!("{}", style.paint("aloha"));
/// ```
pub fn reverse(&self) -> Style {
Style { is_reverse: true, .. *self }
}
/// Returns a `Style` with the hidden property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Style;
///
/// let style = Style::new().hidden();
/// println!("{}", style.paint("ahoy"));
/// ```
pub fn hidden(&self) -> Style {
Style { is_hidden: true, .. *self }
}
/// Returns a `Style` with the strikethrough property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Style;
///
/// let style = Style::new().strikethrough();
/// println!("{}", style.paint("yo"));
/// ```
pub fn strikethrough(&self) -> Style {
Style { is_strikethrough: true, .. *self }
}
/// Returns a `Style` with the foreground colour property set.
///
/// # Examples
///
/// ```
/// use ansi_term::{Style, Colour};
///
/// let style = Style::new().fg(Colour::Yellow);
/// println!("{}", style.paint("hi"));
/// ```
pub fn fg(&self, foreground: Colour) -> Style {
Style { foreground: Some(foreground), .. *self }
}
/// Returns a `Style` with the background colour property set.
///
/// # Examples
///
/// ```
/// use ansi_term::{Style, Colour};
///
/// let style = Style::new().on(Colour::Blue);
/// println!("{}", style.paint("eyyyy"));
/// ```
pub fn on(&self, background: Colour) -> Style {
Style { background: Some(background), .. *self }
}
/// Return true if this `Style` has no actual styles, and can be written
/// without any control characters.
///
/// # Examples
///
/// ```
/// use ansi_term::Style;
///
/// assert_eq!(true, Style::default().is_plain());
/// assert_eq!(false, Style::default().bold().is_plain());
/// ```
pub fn is_plain(self) -> bool {
self == Style::default()
}
}
impl Default for Style {
/// Returns a style with *no* properties set. Formatting text using this
/// style returns the exact same text.
///
/// ```
/// use ansi_term::Style;
/// assert_eq!(None, Style::default().foreground);
/// assert_eq!(None, Style::default().background);
/// assert_eq!(false, Style::default().is_bold);
/// assert_eq!("txt", Style::default().paint("txt").to_string());
/// ```
fn default() -> Style {
Style {
foreground: None,
background: None,
is_bold: false,
is_dimmed: false,
is_italic: false,
is_underline: false,
is_blink: false,
is_reverse: false,
is_hidden: false,
is_strikethrough: false,
}
}
}
// ---- colours ----
/// A colour is one specific type of ANSI escape code, and can refer
/// to either the foreground or background colour.
///
/// These use the standard numeric sequences.
/// See <http://invisible-island.net/xterm/ctlseqs/ctlseqs.html>
#[derive(PartialEq, Clone, Copy, Debug)]
#[cfg_attr(feature = "derive_serde_style", derive(serde::Deserialize, serde::Serialize))]
pub enum Colour {
/// Colour #0 (foreground code `30`, background code `40`).
///
/// This is not necessarily the terminal's background colour, and using it as
/// a foreground may render the text hard to read on terminals with dark backgrounds.
Black,
/// Colour #1 (foreground code `31`, background code `41`).
Red,
/// Colour #2 (foreground code `32`, background code `42`).
Green,
/// Colour #3 (foreground code `33`, background code `43`).
Yellow,
/// Colour #4 (foreground code `34`, background code `44`).
Blue,
/// Colour #5 (foreground code `35`, background code `45`).
Purple,
/// Colour #6 (foreground code `36`, background code `46`).
Cyan,
/// Colour #7 (foreground code `37`, background code `47`).
///
/// As above, this is not necessarily the terminal's foreground colour, and
/// white text may be hard to read on terminals with light backgrounds.
White,
/// A colour number from 0 to 255, for use in 256-colour terminal
/// environments.
///
/// - Colours 0 to 7 are the `Black` to `White` variants respectively.
/// These colours can usually be changed in the terminal emulator.
/// - Colours 8 to 15 are brighter versions of the eight colours above.
/// These can also usually be changed in the terminal emulator, or it
/// could be configured to use the original colours and show the text in
/// bold instead. It varies depending on the program.
/// - Colours 16 to 231 contain several palettes of bright colours,
/// arranged in six squares measuring six by six each.
/// - Colours 232 to 255 are shades of grey from black to white.
///
/// It might make more sense to look at a [colour chart][cc].
///
/// [cc]: https://upload.wikimedia.org/wikipedia/commons/1/15/Xterm_256color_chart.svg
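///
/// As an illustrative sketch (not from the upstream docs), an index into the
/// 6×6×6 colour cube for red/green/blue levels in `0..6` can be computed as
/// `16 + 36*r + 6*g + b`:
///
/// ```
/// use ansi_term::Colour::Fixed;
///
/// let (r, g, b) = (5, 0, 0); // the brightest pure red in the cube
/// let cube_red = Fixed(16 + 36 * r + 6 * g + b); // Fixed(196)
/// println!("{}", cube_red.paint("cube red"));
/// ```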
Fixed(u8),
/// A 24-bit RGB color, as specified by ISO-8613-3.
RGB(u8, u8, u8),
}
impl Colour {
/// Returns a `Style` with the foreground colour set to this colour.
///
/// # Examples
///
/// ```
/// use ansi_term::Colour;
///
/// let style = Colour::Red.normal();
/// println!("{}", style.paint("hi"));
/// ```
pub fn normal(self) -> Style {
Style { foreground: Some(self), .. Style::default() }
}
/// Returns a `Style` with the foreground colour set to this colour and the
/// bold property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Colour;
///
/// let style = Colour::Green.bold();
/// println!("{}", style.paint("hey"));
/// ```
pub fn bold(self) -> Style {
Style { foreground: Some(self), is_bold: true, .. Style::default() }
}
/// Returns a `Style` with the foreground colour set to this colour and the
/// dimmed property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Colour;
///
/// let style = Colour::Yellow.dimmed();
/// println!("{}", style.paint("sup"));
/// ```
pub fn dimmed(self) -> Style {
Style { foreground: Some(self), is_dimmed: true, .. Style::default() }
}
/// Returns a `Style` with the foreground colour set to this colour and the
/// italic property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Colour;
///
/// let style = Colour::Blue.italic();
/// println!("{}", style.paint("greetings"));
/// ```
pub fn italic(self) -> Style {
Style { foreground: Some(self), is_italic: true, .. Style::default() }
}
/// Returns a `Style` with the foreground colour set to this colour and the
/// underline property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Colour;
///
/// let style = Colour::Purple.underline();
/// println!("{}", style.paint("salutations"));
/// ```
pub fn underline(self) -> Style {
Style { foreground: Some(self), is_underline: true, .. Style::default() }
}
/// Returns a `Style` with the foreground colour set to this colour and the
/// blink property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Colour;
///
/// let style = Colour::Cyan.blink();
/// println!("{}", style.paint("wazzup"));
/// ```
pub fn blink(self) -> Style {
Style { foreground: Some(self), is_blink: true, .. Style::default() }
}
/// Returns a `Style` with the foreground colour set to this colour and the
/// reverse property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Colour;
///
/// let style = Colour::Black.reverse();
/// println!("{}", style.paint("aloha"));
/// ```
pub fn reverse(self) -> Style {
Style { foreground: Some(self), is_reverse: true, .. Style::default() }
}
/// Returns a `Style` with the foreground colour set to this colour and the
/// hidden property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Colour;
///
/// let style = Colour::White.hidden();
/// println!("{}", style.paint("ahoy"));
/// ```
pub fn hidden(self) -> Style {
Style { foreground: Some(self), is_hidden: true, .. Style::default() }
}
/// Returns a `Style` with the foreground colour set to this colour and the
/// strikethrough property set.
///
/// # Examples
///
/// ```
/// use ansi_term::Colour;
///
/// let style = Colour::Fixed(244).strikethrough();
/// println!("{}", style.paint("yo"));
/// ```
pub fn strikethrough(self) -> Style {
Style { foreground: Some(self), is_strikethrough: true, .. Style::default() }
}
/// Returns a `Style` with the foreground colour set to this colour and the
/// background colour property set to the given colour.
///
/// # Examples
///
/// ```
/// use ansi_term::Colour;
///
/// let style = Colour::RGB(31, 31, 31).on(Colour::White);
/// println!("{}", style.paint("eyyyy"));
/// ```
pub fn on(self, background: Colour) -> Style {
Style { foreground: Some(self), background: Some(background), .. Style::default() }
}
}
impl From<Colour> for Style {
/// You can turn a `Colour` into a `Style` with the foreground colour set
/// with the `From` trait.
///
/// ```
/// use ansi_term::{Style, Colour};
/// let green_foreground = Style::default().fg(Colour::Green);
/// assert_eq!(green_foreground, Colour::Green.normal());
/// assert_eq!(green_foreground, Colour::Green.into());
/// assert_eq!(green_foreground, Style::from(Colour::Green));
/// ```
fn from(colour: Colour) -> Style {
colour.normal()
}
}
#[cfg(test)]
#[cfg(feature = "derive_serde_style")]
mod serde_json_tests {
use super::{Style, Colour};
#[test]
fn colour_serialization() {
let colours = &[
Colour::Red,
Colour::Blue,
Colour::RGB(123, 123, 123),
Colour::Fixed(255),
];
assert_eq!(serde_json::to_string(&colours).unwrap(), String::from("[\"Red\",\"Blue\",{\"RGB\":[123,123,123]},{\"Fixed\":255}]"));
}
#[test]
fn colour_deserialization() {
let colours = &[
Colour::Red,
Colour::Blue,
Colour::RGB(123, 123, 123),
Colour::Fixed(255),
];
for colour in colours.into_iter() {
let serialized = serde_json::to_string(&colour).unwrap();
let deserialized: Colour = serde_json::from_str(&serialized).unwrap();
assert_eq!(colour, &deserialized);
}
}
#[test]
fn style_serialization() {
let style = Style::default();
assert_eq!(serde_json::to_string(&style).unwrap(), "{\"foreground\":null,\"background\":null,\"is_bold\":false,\"is_dimmed\":false,\"is_italic\":false,\"is_underline\":false,\"is_blink\":false,\"is_reverse\":false,\"is_hidden\":false,\"is_strikethrough\":false}".to_string());
}
}

81
zeroidc/vendor/ansi_term/src/util.rs vendored Normal file
View File

@ -0,0 +1,81 @@
use display::*;
use std::ops::Deref;
/// Return a substring of the given ANSIStrings sequence, while keeping the formatting.
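///
/// A brief illustrative sketch (not from the upstream docs); it mirrors the
/// unit test at the bottom of this file:
///
/// ```
/// use ansi_term::Colour::{Black, Red, White};
/// use ansi_term::{ANSIStrings, sub_string};
///
/// let parts = [Black.paint("first"), Red.paint("-second"), White.paint("-third")];
/// // Take 11 characters starting at offset 3; each piece keeps its own style.
/// let sliced = sub_string(3, 11, &ANSIStrings(&parts));
/// assert_eq!(sliced.len(), 3); // "st", "-second", "-t"
/// ```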
pub fn sub_string<'a>(start: usize, len: usize, strs: &ANSIStrings<'a>) -> Vec<ANSIString<'static>> {
let mut vec = Vec::new();
let mut pos = start;
let mut len_rem = len;
for i in strs.0.iter() {
let fragment = i.deref();
let frag_len = fragment.len();
if pos >= frag_len {
pos -= frag_len;
continue;
}
if len_rem <= 0 {
break;
}
let end = pos + len_rem;
let pos_end = if end >= frag_len { frag_len } else { end };
vec.push(i.style_ref().paint(String::from(&fragment[pos..pos_end])));
if end <= frag_len {
break;
}
len_rem -= pos_end - pos;
pos = 0;
}
vec
}
/// Return a concatenated copy of `strs` without the formatting, as an allocated `String`.
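///
/// A short sketch (not from the upstream docs):
///
/// ```
/// use ansi_term::Colour::Red;
/// use ansi_term::{ANSIStrings, unstyle};
///
/// let parts = [Red.paint("red"), Red.bold().paint(" and bold")];
/// assert_eq!(unstyle(&ANSIStrings(&parts)), "red and bold");
/// ```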
pub fn unstyle(strs: &ANSIStrings) -> String {
let mut s = String::new();
for i in strs.0.iter() {
s += &i.deref();
}
s
}
/// Return the unstyled length of `ANSIStrings`. This is equivalent to `unstyle(strs).len()`.
pub fn unstyled_len(strs: &ANSIStrings) -> usize {
let mut l = 0;
for i in strs.0.iter() {
l += i.deref().len();
}
l
}
#[cfg(test)]
mod test {
use Colour::*;
use display::*;
use super::*;
#[test]
fn test() {
let l = [
Black.paint("first"),
Red.paint("-second"),
White.paint("-third"),
];
let a = ANSIStrings(&l);
assert_eq!(unstyle(&a), "first-second-third");
assert_eq!(unstyled_len(&a), 18);
let l2 = [
Black.paint("st"),
Red.paint("-second"),
White.paint("-t"),
];
assert_eq!(sub_string(3, 11, &a).as_slice(), &l2);
}
}

61
zeroidc/vendor/ansi_term/src/windows.rs vendored Normal file
View File

@ -0,0 +1,61 @@
/// Enables ANSI code support on Windows 10.
///
/// This uses Windows API calls to alter the properties of the console that
/// the program is running in.
///
/// https://msdn.microsoft.com/en-us/library/windows/desktop/mt638032(v=vs.85).aspx
///
/// Returns a `Result` with the Windows error code if unsuccessful.
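///
/// A minimal usage sketch (not from the upstream docs): call it once at
/// start-up and fall back to plain output if it fails.
///
/// ```no_run
/// # #[cfg(windows)]
/// # fn main() {
/// if let Err(code) = ansi_term::enable_ansi_support() {
///     eprintln!("could not enable ANSI support (Windows error {})", code);
/// }
/// # }
/// # #[cfg(not(windows))]
/// # fn main() {}
/// ```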
#[cfg(windows)]
pub fn enable_ansi_support() -> Result<(), u32> {
// ref: https://docs.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences#EXAMPLE_OF_ENABLING_VIRTUAL_TERMINAL_PROCESSING @@ https://archive.is/L7wRJ#76%
use std::ffi::OsStr;
use std::iter::once;
use std::os::windows::ffi::OsStrExt;
use std::ptr::null_mut;
use winapi::um::consoleapi::{GetConsoleMode, SetConsoleMode};
use winapi::um::errhandlingapi::GetLastError;
use winapi::um::fileapi::{CreateFileW, OPEN_EXISTING};
use winapi::um::handleapi::INVALID_HANDLE_VALUE;
use winapi::um::winnt::{FILE_SHARE_WRITE, GENERIC_READ, GENERIC_WRITE};
const ENABLE_VIRTUAL_TERMINAL_PROCESSING: u32 = 0x0004;
unsafe {
// ref: https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-createfilew
// Using `CreateFileW("CONOUT$", ...)` to retrieve the console handle works correctly even if STDOUT and/or STDERR are redirected
let console_out_name: Vec<u16> = OsStr::new("CONOUT$").encode_wide().chain(once(0)).collect();
let console_handle = CreateFileW(
console_out_name.as_ptr(),
GENERIC_READ | GENERIC_WRITE,
FILE_SHARE_WRITE,
null_mut(),
OPEN_EXISTING,
0,
null_mut(),
);
if console_handle == INVALID_HANDLE_VALUE
{
return Err(GetLastError());
}
// ref: https://docs.microsoft.com/en-us/windows/console/getconsolemode
let mut console_mode: u32 = 0;
if 0 == GetConsoleMode(console_handle, &mut console_mode)
{
return Err(GetLastError());
}
// VT processing not already enabled?
if console_mode & ENABLE_VIRTUAL_TERMINAL_PROCESSING == 0 {
// https://docs.microsoft.com/en-us/windows/console/setconsolemode
if 0 == SetConsoleMode(console_handle, console_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
{
return Err(GetLastError());
}
}
}
return Ok(());
}

40
zeroidc/vendor/ansi_term/src/write.rs vendored Normal file
View File

@ -0,0 +1,40 @@
use std::fmt;
use std::io;
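// `AnyWrite` abstracts over `fmt::Write` (text sinks taking `str`) and
// `io::Write` (byte sinks taking `[u8]`) so the same painting code can target
// either; the impls below forward to the corresponding standard-library traits.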
pub trait AnyWrite {
type wstr: ?Sized;
type Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error>;
fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error>;
}
impl<'a> AnyWrite for fmt::Write + 'a {
type wstr = str;
type Error = fmt::Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error> {
fmt::Write::write_fmt(self, fmt)
}
fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error> {
fmt::Write::write_str(self, s)
}
}
impl<'a> AnyWrite for io::Write + 'a {
type wstr = [u8];
type Error = io::Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error> {
io::Write::write_fmt(self, fmt)
}
fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error> {
io::Write::write_all(self, s)
}
}

View File

@ -0,0 +1 @@
{"files":{"CHANGELOG.md":"70db121262d72acc472ad1a90b78c42de570820e65b566c6b9339b62e636d572","Cargo.lock":"6868f02a96413bcba37a06f01c6bf87e6331dea9461681a47a561cec6acd2546","Cargo.toml":"3af88a07af6a4adb84373fc3cd4920884b0b12b338cdb55ef598fd512ee1a790","LICENSE":"99fa95ba4e4cdaf71c27d73260ea069fc4515b3d02fde3020c5b562280006cbc","README.md":"e559a69c0b2bd20bffcede64fd548df6c671b0d1504613c5e3e5d884d759caea","examples/atty.rs":"1551387a71474d9ac1b5153231f884e9e05213badcfaa3494ad2cb7ea958374a","rustfmt.toml":"8e6ea1bcb79c505490034020c98e9b472f4ac4113f245bae90f5e1217b1ec65a","src/lib.rs":"d5abf6a54e8c496c486572bdc91eef10480f6ad126c4287f039df5feff7a9bbb"},"package":"d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"}

73
zeroidc/vendor/atty/CHANGELOG.md vendored Normal file
View File

@ -0,0 +1,73 @@
# 0.2.14
* add support for [RustyHermit](https://github.com/hermitcore/libhermit-rs), a Rust-based unikernel [#41](https://github.com/softprops/atty/pull/41)
# 0.2.13
* support older versions of rust that do not support the 2018 edition
# 0.2.12
* Redox is now in the unix family so redox cfg is no longer needed [#35](https://github.com/softprops/atty/pull/35)
# 0.2.11
* fix msys detection with `winapi@0.3.5` [#28](https://github.com/softprops/atty/pull/28)
# 0.2.10
* fix wasm regression [#27](https://github.com/softprops/atty/pull/27)
# 0.2.9
* Fix fix pty detection [#25](https://github.com/softprops/atty/pull/25)
# 0.2.8
* Fix an inverted condition on MinGW [#22](https://github.com/softprops/atty/pull/22)
# 0.2.7
* Change `||` to `&&` for whether MSYS is a tty [#24](https://github.com/softprops/atty/pull/24/)
# 0.2.6
* updated winapi dependency to [0.3](https://retep998.github.io/blog/winapi-0.3/) [#18](https://github.com/softprops/atty/pull/18)
# 0.2.5
* added support for Wasm compile targets [#17](https://github.com/softprops/atty/pull/17)
# 0.2.4
* added support for Wasm compile targets [#17](https://github.com/softprops/atty/pull/17)
# 0.2.3
* added support for Redox OS [#14](https://github.com/softprops/atty/pull/14)
# 0.2.2
* use target specific dependencies [#11](https://github.com/softprops/atty/pull/11)
* Add tty detection for MSYS terminals [#12](https://github.com/softprops/atty/pull/12)
# 0.2.1
* fix windows bug
# 0.2.0
* support for various stream types
# 0.1.2
* windows support (with automated testing)
* automated code coverage
# 0.1.1
* bumped libc dep from `0.1` to `0.2`
# 0.1.0
* initial release

49
zeroidc/vendor/atty/Cargo.lock generated vendored Normal file
View File

@ -0,0 +1,49 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
name = "atty"
version = "0.2.14"
dependencies = [
"hermit-abi 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "hermit-abi"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "libc"
version = "0.2.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
"checksum hermit-abi 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eff2656d88f158ce120947499e971d743c05dbcbed62e5bd2f38f1698bbc3772"
"checksum libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)" = "d515b1f41455adea1313a4a2ac8a8a477634fbae63cc6100e3aebb207ce61558"
"checksum winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "04e3bd221fcbe8a271359c04f21a76db7d0c6028862d1bb5512d85e1e2eb5bb3"
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

34
zeroidc/vendor/atty/Cargo.toml vendored Normal file
View File

@ -0,0 +1,34 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "atty"
version = "0.2.14"
authors = ["softprops <d.tangren@gmail.com>"]
exclude = ["/.travis.yml", "/appveyor.yml"]
description = "A simple interface for querying atty"
homepage = "https://github.com/softprops/atty"
documentation = "http://softprops.github.io/atty"
readme = "README.md"
keywords = ["terminal", "tty", "isatty"]
license = "MIT"
repository = "https://github.com/softprops/atty"
[target."cfg(target_os = \"hermit\")".dependencies.hermit-abi]
version = "0.1.6"
[target."cfg(unix)".dependencies.libc]
version = "0.2"
default-features = false
[target."cfg(windows)".dependencies.winapi]
version = "0.3"
features = ["consoleapi", "processenv", "minwinbase", "minwindef", "winbase"]
[badges.travis-ci]
repository = "softprops/atty"

20
zeroidc/vendor/atty/LICENSE vendored Normal file
View File

@ -0,0 +1,20 @@
Copyright (c) 2015-2019 Doug Tangren
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

74
zeroidc/vendor/atty/README.md vendored Normal file
View File

@ -0,0 +1,74 @@
# atty
[![Build Status](https://travis-ci.org/softprops/atty.svg?branch=master)](https://travis-ci.org/softprops/atty) [![Build status](https://ci.appveyor.com/api/projects/status/geggrsnsjsuse8cv?svg=true)](https://ci.appveyor.com/project/softprops/atty) [![Coverage Status](https://coveralls.io/repos/softprops/atty/badge.svg?branch=master&service=github)](https://coveralls.io/github/softprops/atty?branch=master) [![crates.io](https://img.shields.io/crates/v/atty.svg)](https://crates.io/crates/atty) [![Released API docs](https://docs.rs/atty/badge.svg)](http://docs.rs/atty) [![Master API docs](https://img.shields.io/badge/docs-master-green.svg)](https://softprops.github.io/atty)
> are you or are you not a tty?
## install
Add the following to your `Cargo.toml`
```toml
[dependencies]
atty = "0.2"
```
## usage
```rust
use atty::Stream;
fn main() {
if atty::is(Stream::Stdout) {
println!("I'm a terminal");
} else {
println!("I'm not");
}
}
```
## testing
This library has been unit tested on both unix and windows platforms (via appveyor).
A simple example program is provided in this repo to test various ttys. By default
it prints
```bash
$ cargo run --example atty
stdout? true
stderr? true
stdin? true
```
To test std in, pipe some text to the program
```bash
$ echo "test" | cargo run --example atty
stdout? true
stderr? true
stdin? false
```
To test std out, pipe the program to something
```bash
$ cargo run --example atty | grep std
stdout? false
stderr? true
stdin? true
```
To test std err, pipe the program to something redirecting std err
```bash
$ cargo run --example atty 2>&1 | grep std
stdout? false
stderr? false
stdin? true
```
Doug Tangren (softprops) 2015-2019

9
zeroidc/vendor/atty/examples/atty.rs vendored Normal file
View File

@ -0,0 +1,9 @@
extern crate atty;
use atty::{is, Stream};
fn main() {
println!("stdout? {}", is(Stream::Stdout));
println!("stderr? {}", is(Stream::Stderr));
println!("stdin? {}", is(Stream::Stdin));
}

4
zeroidc/vendor/atty/rustfmt.toml vendored Normal file
View File

@ -0,0 +1,4 @@
# https://github.com/rust-lang/rustfmt/blob/master/Configurations.md#fn_args_layout
fn_args_layout = "Vertical"
# https://github.com/rust-lang/rustfmt/blob/master/Configurations.md#merge_imports
merge_imports = true

210
zeroidc/vendor/atty/src/lib.rs vendored Normal file
View File

@ -0,0 +1,210 @@
//! atty is a simple utility that answers one question
//! > is this a tty?
//!
//! usage is just as simple
//!
//! ```
//! if atty::is(atty::Stream::Stdout) {
//! println!("i'm a tty")
//! }
//! ```
//!
//! ```
//! if atty::isnt(atty::Stream::Stdout) {
//! println!("i'm not a tty")
//! }
//! ```
#![cfg_attr(unix, no_std)]
#[cfg(unix)]
extern crate libc;
#[cfg(windows)]
extern crate winapi;
#[cfg(windows)]
use winapi::shared::minwindef::DWORD;
#[cfg(windows)]
use winapi::shared::ntdef::WCHAR;
/// possible stream sources
#[derive(Clone, Copy, Debug)]
pub enum Stream {
Stdout,
Stderr,
Stdin,
}
/// returns true if this is a tty
#[cfg(all(unix, not(target_arch = "wasm32")))]
pub fn is(stream: Stream) -> bool {
extern crate libc;
let fd = match stream {
Stream::Stdout => libc::STDOUT_FILENO,
Stream::Stderr => libc::STDERR_FILENO,
Stream::Stdin => libc::STDIN_FILENO,
};
unsafe { libc::isatty(fd) != 0 }
}
/// returns true if this is a tty
#[cfg(target_os = "hermit")]
pub fn is(stream: Stream) -> bool {
extern crate hermit_abi;
let fd = match stream {
Stream::Stdout => hermit_abi::STDOUT_FILENO,
Stream::Stderr => hermit_abi::STDERR_FILENO,
Stream::Stdin => hermit_abi::STDIN_FILENO,
};
hermit_abi::isatty(fd)
}
/// returns true if this is a tty
#[cfg(windows)]
pub fn is(stream: Stream) -> bool {
use winapi::um::winbase::{
STD_ERROR_HANDLE as STD_ERROR, STD_INPUT_HANDLE as STD_INPUT,
STD_OUTPUT_HANDLE as STD_OUTPUT,
};
let (fd, others) = match stream {
Stream::Stdin => (STD_INPUT, [STD_ERROR, STD_OUTPUT]),
Stream::Stderr => (STD_ERROR, [STD_INPUT, STD_OUTPUT]),
Stream::Stdout => (STD_OUTPUT, [STD_INPUT, STD_ERROR]),
};
if unsafe { console_on_any(&[fd]) } {
// False positives aren't possible. If we got a console then
// we definitely have a tty on the stream in question.
return true;
}
// At this point, we *could* have a false negative. We can determine that
// this is a true negative if we can detect the presence of a console on
// any of the other streams. If another stream has a console, then we know
// we're in a Windows console and can therefore trust the negative.
if unsafe { console_on_any(&others) } {
return false;
}
// Otherwise, we fall back to a very strange msys hack to see if we can
// sneakily detect the presence of a tty.
unsafe { msys_tty_on(fd) }
}
/// returns true if this is _not_ a tty
pub fn isnt(stream: Stream) -> bool {
!is(stream)
}
/// Returns true if any of the given fds are on a console.
#[cfg(windows)]
unsafe fn console_on_any(fds: &[DWORD]) -> bool {
use winapi::um::{consoleapi::GetConsoleMode, processenv::GetStdHandle};
for &fd in fds {
let mut out = 0;
let handle = GetStdHandle(fd);
if GetConsoleMode(handle, &mut out) != 0 {
return true;
}
}
false
}
/// Returns true if there is an MSYS tty on the given handle.
#[cfg(windows)]
unsafe fn msys_tty_on(fd: DWORD) -> bool {
use std::{mem, slice};
use winapi::{
ctypes::c_void,
shared::minwindef::MAX_PATH,
um::{
fileapi::FILE_NAME_INFO, minwinbase::FileNameInfo, processenv::GetStdHandle,
winbase::GetFileInformationByHandleEx,
},
};
let size = mem::size_of::<FILE_NAME_INFO>();
let mut name_info_bytes = vec![0u8; size + MAX_PATH * mem::size_of::<WCHAR>()];
let res = GetFileInformationByHandleEx(
GetStdHandle(fd),
FileNameInfo,
&mut *name_info_bytes as *mut _ as *mut c_void,
name_info_bytes.len() as u32,
);
if res == 0 {
return false;
}
let name_info: &FILE_NAME_INFO = &*(name_info_bytes.as_ptr() as *const FILE_NAME_INFO);
let s = slice::from_raw_parts(
name_info.FileName.as_ptr(),
name_info.FileNameLength as usize / 2,
);
let name = String::from_utf16_lossy(s);
// This checks whether 'pty' exists in the file name, which indicates that
// a pseudo-terminal is attached. To mitigate against false positives
// (e.g., an actual file name that contains 'pty'), we also require that
// either the strings 'msys-' or 'cygwin-' are in the file name as well.
let is_msys = name.contains("msys-") || name.contains("cygwin-");
let is_pty = name.contains("-pty");
is_msys && is_pty
}
/// returns true if this is a tty
#[cfg(target_arch = "wasm32")]
pub fn is(_stream: Stream) -> bool {
false
}
#[cfg(test)]
mod tests {
use super::{is, Stream};
#[test]
#[cfg(windows)]
fn is_err() {
// appveyor pipes its output
assert!(!is(Stream::Stderr))
}
#[test]
#[cfg(windows)]
fn is_out() {
// appveyor pipes its output
assert!(!is(Stream::Stdout))
}
#[test]
#[cfg(windows)]
fn is_in() {
assert!(is(Stream::Stdin))
}
#[test]
#[cfg(unix)]
fn is_err() {
assert!(is(Stream::Stderr))
}
#[test]
#[cfg(unix)]
fn is_out() {
assert!(is(Stream::Stdout))
}
#[test]
#[cfg(target_os = "macos")]
fn is_in() {
// macos on travis seems to pipe its input
assert!(is(Stream::Stdin))
}
#[test]
#[cfg(all(not(target_os = "macos"), unix))]
fn is_in() {
assert!(is(Stream::Stdin))
}
}

View File

@ -0,0 +1 @@
{"files":{"Cargo.lock":"3d91565ed13de572a9ebde408a0c98e33f931d6ab52f212b0830a60b4ab26b77","Cargo.toml":"39f627122dceaad42146634719fde802fca3baa1b3908753af723074ae2a6d69","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"27995d58ad5c1145c1a8cd86244ce844886958a35eb2b78c6b772748669999ac","README.md":"4c8f9b5016f2a0c3dbeca5bc41241f57db5568f803e58c1fa480ae2b3638d0a9","examples/integers.rs":"589ff4271566dfa322becddf3e2c7b592e6e0bc97b02892ce75619b7e452e930","examples/paths.rs":"1b30e466b824ce8df7ad0a55334424131d9d2573d6cf9f7d5d50c09c8901d526","examples/traits.rs":"cbee6a3e1f7db60b02ae25b714926517144a77cb492021f492774cf0e1865a9e","examples/versions.rs":"38535e6d9f5bfae0de474a3db79a40e8f5da8ba9334c5ff4c363de9bc99d4d12","src/error.rs":"12de7dafea4a35d1dc2f0fa79bfa038386bbbea72bf083979f4ddf227999eeda","src/lib.rs":"6fa01458e8f9258d84f83ead24fdb0cdf9aec10838b0262f1dfbdf79c530c537","src/tests.rs":"f0e6dc1ad9223c0336c02e215ea3940acb2af6c3bc8fd791e16cd4e786e6a608","src/version.rs":"175727d5f02f2fe2271ddc9b041db2a5b9c6fe0f95afd17c73a4d982612764a3","tests/rustflags.rs":"5c8169b88216055019db61b5d7baf4abdf675e3b14b54f5037bb1e3acd0a5d3f"},"package":"d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"}

7
zeroidc/vendor/autocfg/Cargo.lock generated vendored Normal file
View File

@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "autocfg"
version = "1.1.0"

24
zeroidc/vendor/autocfg/Cargo.toml vendored Normal file
View File

@ -0,0 +1,24 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
name = "autocfg"
version = "1.1.0"
authors = ["Josh Stone <cuviper@gmail.com>"]
exclude = ["/.github/**", "/bors.toml"]
description = "Automatic cfg for Rust compiler features"
readme = "README.md"
keywords = ["rustc", "build", "autoconf"]
categories = ["development-tools::build-utils"]
license = "Apache-2.0 OR MIT"
repository = "https://github.com/cuviper/autocfg"
[dependencies]

201
zeroidc/vendor/autocfg/LICENSE-APACHE vendored Normal file
View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
zeroidc/vendor/autocfg/LICENSE-MIT vendored Normal file
View File

@ -0,0 +1,25 @@
Copyright (c) 2018 Josh Stone
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

95
zeroidc/vendor/autocfg/README.md vendored Normal file
View File

@ -0,0 +1,95 @@
autocfg
=======
[![autocfg crate](https://img.shields.io/crates/v/autocfg.svg)](https://crates.io/crates/autocfg)
[![autocfg documentation](https://docs.rs/autocfg/badge.svg)](https://docs.rs/autocfg)
![minimum rustc 1.0](https://img.shields.io/badge/rustc-1.0+-red.svg)
![build status](https://github.com/cuviper/autocfg/workflows/master/badge.svg)
A Rust library for build scripts to automatically configure code based on
compiler support. Code snippets are dynamically tested to see if the `rustc`
will accept them, rather than hard-coding specific version support.
## Usage
Add this to your `Cargo.toml`:
```toml
[build-dependencies]
autocfg = "1"
```
Then use it in your `build.rs` script to detect compiler features. For
example, to test for 128-bit integer support, it might look like:
```rust
extern crate autocfg;
fn main() {
let ac = autocfg::new();
ac.emit_has_type("i128");
// (optional) We don't need to rerun for anything external.
autocfg::rerun_path("build.rs");
}
```
If the type test succeeds, this will write a `cargo:rustc-cfg=has_i128` line
for Cargo, which translates to Rust arguments `--cfg has_i128`. Then in the
rest of your Rust code, you can add `#[cfg(has_i128)]` conditions on code that
should only be used when the compiler supports it.
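As a brief downstream sketch (not from the upstream README), the emitted cfg is
then used like any other `--cfg` flag; `has_i128` is simply whatever name the
build script above chose to emit:
```rust
// Compiled only when the build script emitted `cargo:rustc-cfg=has_i128`.
#[cfg(has_i128)]
pub type Wide = i128;
// Fallback for compilers without 128-bit integer support.
#[cfg(not(has_i128))]
pub type Wide = i64;
```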
## Release Notes
- 1.1.0 (2022-02-07)
- Use `CARGO_ENCODED_RUSTFLAGS` when it is set.
- 1.0.1 (2020-08-20)
- Apply `RUSTFLAGS` for more `--target` scenarios, by @adamreichold.
- 1.0.0 (2020-01-08)
- 🎉 Release 1.0! 🎉 (no breaking changes)
- Add `probe_expression` and `emit_expression_cfg` to test arbitrary expressions.
- Add `probe_constant` and `emit_constant_cfg` to test arbitrary constant expressions.
- 0.1.7 (2019-10-20)
- Apply `RUSTFLAGS` when probing `$TARGET != $HOST`, mainly for sysroot, by @roblabla.
- 0.1.6 (2019-08-19)
- Add `probe`/`emit_sysroot_crate`, by @leo60228.
- 0.1.5 (2019-07-16)
- Mask some warnings from newer rustc.
- 0.1.4 (2019-05-22)
- Relax `std`/`no_std` probing to a warning instead of an error.
- Improve `rustc` bootstrap compatibility.
- 0.1.3 (2019-05-21)
- Auto-detects if `#![no_std]` is needed for the `$TARGET`.
- 0.1.2 (2019-01-16)
- Add `rerun_env(ENV)` to print `cargo:rerun-if-env-changed=ENV`.
- Add `rerun_path(PATH)` to print `cargo:rerun-if-changed=PATH`.
## Minimum Rust version policy
This crate's minimum supported `rustc` version is `1.0.0`. Compatibility is
its entire reason for existence, so this crate will be extremely conservative
about raising this requirement. If this is ever deemed necessary, it will be
treated as a major breaking change for semver purposes.
## License
This project is licensed under either of
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or
http://opensource.org/licenses/MIT)
at your option.

View File

@ -0,0 +1,9 @@
extern crate autocfg;
fn main() {
// Normally, cargo will set `OUT_DIR` for build scripts.
let ac = autocfg::AutoCfg::with_dir("target").unwrap();
for i in 3..8 {
ac.emit_has_type(&format!("i{}", 1 << i));
}
}

View File

@ -0,0 +1,22 @@
extern crate autocfg;
fn main() {
// Normally, cargo will set `OUT_DIR` for build scripts.
let ac = autocfg::AutoCfg::with_dir("target").unwrap();
// since ancient times...
ac.emit_has_path("std::vec::Vec");
ac.emit_path_cfg("std::vec::Vec", "has_vec");
// rustc 1.10.0
ac.emit_has_path("std::panic::PanicInfo");
ac.emit_path_cfg("std::panic::PanicInfo", "has_panic_info");
// rustc 1.20.0
ac.emit_has_path("std::mem::ManuallyDrop");
ac.emit_path_cfg("std::mem::ManuallyDrop", "has_manually_drop");
// rustc 1.25.0
ac.emit_has_path("std::ptr::NonNull");
ac.emit_path_cfg("std::ptr::NonNull", "has_non_null");
}

View File

@ -0,0 +1,26 @@
extern crate autocfg;
fn main() {
// Normally, cargo will set `OUT_DIR` for build scripts.
let ac = autocfg::AutoCfg::with_dir("target").unwrap();
// since ancient times...
ac.emit_has_trait("std::ops::Add");
ac.emit_trait_cfg("std::ops::Add", "has_ops");
// trait parameters have to be provided
ac.emit_has_trait("std::borrow::Borrow<str>");
ac.emit_trait_cfg("std::borrow::Borrow<str>", "has_borrow");
// rustc 1.8.0
ac.emit_has_trait("std::ops::AddAssign");
ac.emit_trait_cfg("std::ops::AddAssign", "has_assign_ops");
// rustc 1.12.0
ac.emit_has_trait("std::iter::Sum");
ac.emit_trait_cfg("std::iter::Sum", "has_sum");
// rustc 1.28.0
ac.emit_has_trait("std::alloc::GlobalAlloc");
ac.emit_trait_cfg("std::alloc::GlobalAlloc", "has_global_alloc");
}

View File

@ -0,0 +1,9 @@
extern crate autocfg;
fn main() {
// Normally, cargo will set `OUT_DIR` for build scripts.
let ac = autocfg::AutoCfg::with_dir("target").unwrap();
for i in 0..100 {
ac.emit_rustc_version(1, i);
}
}

69
zeroidc/vendor/autocfg/src/error.rs vendored Normal file
View File

@ -0,0 +1,69 @@
use std::error;
use std::fmt;
use std::io;
use std::num;
use std::str;
/// A common error type for the `autocfg` crate.
#[derive(Debug)]
pub struct Error {
kind: ErrorKind,
}
impl error::Error for Error {
fn description(&self) -> &str {
"AutoCfg error"
}
fn cause(&self) -> Option<&error::Error> {
match self.kind {
ErrorKind::Io(ref e) => Some(e),
ErrorKind::Num(ref e) => Some(e),
ErrorKind::Utf8(ref e) => Some(e),
ErrorKind::Other(_) => None,
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match self.kind {
ErrorKind::Io(ref e) => e.fmt(f),
ErrorKind::Num(ref e) => e.fmt(f),
ErrorKind::Utf8(ref e) => e.fmt(f),
ErrorKind::Other(s) => s.fmt(f),
}
}
}
#[derive(Debug)]
enum ErrorKind {
Io(io::Error),
Num(num::ParseIntError),
Utf8(str::Utf8Error),
Other(&'static str),
}
pub fn from_io(e: io::Error) -> Error {
Error {
kind: ErrorKind::Io(e),
}
}
pub fn from_num(e: num::ParseIntError) -> Error {
Error {
kind: ErrorKind::Num(e),
}
}
pub fn from_utf8(e: str::Utf8Error) -> Error {
Error {
kind: ErrorKind::Utf8(e),
}
}
pub fn from_str(s: &'static str) -> Error {
Error {
kind: ErrorKind::Other(s),
}
}

453
zeroidc/vendor/autocfg/src/lib.rs vendored Normal file
View File

@ -0,0 +1,453 @@
//! A Rust library for build scripts to automatically configure code based on
//! compiler support. Code snippets are dynamically tested to see if the `rustc`
//! will accept them, rather than hard-coding specific version support.
//!
//!
//! ## Usage
//!
//! Add this to your `Cargo.toml`:
//!
//! ```toml
//! [build-dependencies]
//! autocfg = "1"
//! ```
//!
//! Then use it in your `build.rs` script to detect compiler features. For
//! example, to test for 128-bit integer support, it might look like:
//!
//! ```rust
//! extern crate autocfg;
//!
//! fn main() {
//! # // Normally, cargo will set `OUT_DIR` for build scripts.
//! # std::env::set_var("OUT_DIR", "target");
//! let ac = autocfg::new();
//! ac.emit_has_type("i128");
//!
//! // (optional) We don't need to rerun for anything external.
//! autocfg::rerun_path("build.rs");
//! }
//! ```
//!
//! If the type test succeeds, this will write a `cargo:rustc-cfg=has_i128` line
//! for Cargo, which translates to Rust arguments `--cfg has_i128`. Then in the
//! rest of your Rust code, you can add `#[cfg(has_i128)]` conditions on code that
//! should only be used when the compiler supports it.
//!
//! ## Caution
//!
//! Many of the probing methods of `AutoCfg` document the particular template they
//! use, **subject to change**. The inputs are not validated to make sure they are
//! semantically correct for their expected use, so it's _possible_ to escape and
//! inject something unintended. However, such abuse is unsupported and will not
//! be considered when making changes to the templates.
#![deny(missing_debug_implementations)]
#![deny(missing_docs)]
// allow future warnings that can't be fixed while keeping 1.0 compatibility
#![allow(unknown_lints)]
#![allow(bare_trait_objects)]
#![allow(ellipsis_inclusive_range_patterns)]
/// Local macro to avoid `std::try!`, deprecated in Rust 1.39.
macro_rules! try {
($result:expr) => {
match $result {
Ok(value) => value,
Err(error) => return Err(error),
}
};
}
use std::env;
use std::ffi::OsString;
use std::fs;
use std::io::{stderr, Write};
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
#[allow(deprecated)]
use std::sync::atomic::ATOMIC_USIZE_INIT;
use std::sync::atomic::{AtomicUsize, Ordering};
mod error;
pub use error::Error;
mod version;
use version::Version;
#[cfg(test)]
mod tests;
/// Helper to detect compiler features for `cfg` output in build scripts.
#[derive(Clone, Debug)]
pub struct AutoCfg {
out_dir: PathBuf,
rustc: PathBuf,
rustc_version: Version,
target: Option<OsString>,
no_std: bool,
rustflags: Vec<String>,
}
/// Writes a config flag for rustc on standard out.
///
/// This looks like: `cargo:rustc-cfg=CFG`
///
/// Cargo will use this in arguments to rustc, like `--cfg CFG`.
pub fn emit(cfg: &str) {
println!("cargo:rustc-cfg={}", cfg);
}
/// Writes a line telling Cargo to rerun the build script if `path` changes.
///
/// This looks like: `cargo:rerun-if-changed=PATH`
///
/// This requires at least cargo 0.7.0, corresponding to rustc 1.6.0. Earlier
/// versions of cargo will simply ignore the directive.
pub fn rerun_path(path: &str) {
println!("cargo:rerun-if-changed={}", path);
}
/// Writes a line telling Cargo to rerun the build script if the environment
/// variable `var` changes.
///
/// This looks like: `cargo:rerun-if-env-changed=VAR`
///
/// This requires at least cargo 0.21.0, corresponding to rustc 1.20.0. Earlier
/// versions of cargo will simply ignore the directive.
pub fn rerun_env(var: &str) {
println!("cargo:rerun-if-env-changed={}", var);
}
/// Create a new `AutoCfg` instance.
///
/// # Panics
///
/// Panics if `AutoCfg::new()` returns an error.
pub fn new() -> AutoCfg {
AutoCfg::new().unwrap()
}
impl AutoCfg {
/// Create a new `AutoCfg` instance.
///
/// # Common errors
///
/// - `rustc` can't be executed, from `RUSTC` or in the `PATH`.
/// - The version output from `rustc` can't be parsed.
/// - `OUT_DIR` is not set in the environment, or is not a writable directory.
///
pub fn new() -> Result<Self, Error> {
match env::var_os("OUT_DIR") {
Some(d) => Self::with_dir(d),
None => Err(error::from_str("no OUT_DIR specified!")),
}
}
/// Create a new `AutoCfg` instance with the specified output directory.
///
/// # Common errors
///
/// - `rustc` can't be executed, from `RUSTC` or in the `PATH`.
/// - The version output from `rustc` can't be parsed.
/// - `dir` is not a writable directory.
///
pub fn with_dir<T: Into<PathBuf>>(dir: T) -> Result<Self, Error> {
let rustc = env::var_os("RUSTC").unwrap_or_else(|| "rustc".into());
let rustc: PathBuf = rustc.into();
let rustc_version = try!(Version::from_rustc(&rustc));
let target = env::var_os("TARGET");
// Sanity check the output directory
let dir = dir.into();
let meta = try!(fs::metadata(&dir).map_err(error::from_io));
if !meta.is_dir() || meta.permissions().readonly() {
return Err(error::from_str("output path is not a writable directory"));
}
let mut ac = AutoCfg {
rustflags: rustflags(&target, &dir),
out_dir: dir,
rustc: rustc,
rustc_version: rustc_version,
target: target,
no_std: false,
};
// Sanity check with and without `std`.
if !ac.probe("").unwrap_or(false) {
ac.no_std = true;
if !ac.probe("").unwrap_or(false) {
// Neither worked, so assume nothing...
ac.no_std = false;
let warning = b"warning: autocfg could not probe for `std`\n";
stderr().write_all(warning).ok();
}
}
Ok(ac)
}
/// Test whether the current `rustc` reports a version greater than
/// or equal to "`major`.`minor`".
pub fn probe_rustc_version(&self, major: usize, minor: usize) -> bool {
self.rustc_version >= Version::new(major, minor, 0)
}
/// Sets a `cfg` value of the form `rustc_major_minor`, like `rustc_1_29`,
/// if the current `rustc` is at least that version.
pub fn emit_rustc_version(&self, major: usize, minor: usize) {
if self.probe_rustc_version(major, minor) {
emit(&format!("rustc_{}_{}", major, minor));
}
}
fn probe<T: AsRef<[u8]>>(&self, code: T) -> Result<bool, Error> {
#[allow(deprecated)]
static ID: AtomicUsize = ATOMIC_USIZE_INIT;
let id = ID.fetch_add(1, Ordering::Relaxed);
let mut command = Command::new(&self.rustc);
command
.arg("--crate-name")
.arg(format!("probe{}", id))
.arg("--crate-type=lib")
.arg("--out-dir")
.arg(&self.out_dir)
.arg("--emit=llvm-ir");
if let Some(target) = self.target.as_ref() {
command.arg("--target").arg(target);
}
command.args(&self.rustflags);
command.arg("-").stdin(Stdio::piped());
let mut child = try!(command.spawn().map_err(error::from_io));
let mut stdin = child.stdin.take().expect("rustc stdin");
if self.no_std {
try!(stdin.write_all(b"#![no_std]\n").map_err(error::from_io));
}
try!(stdin.write_all(code.as_ref()).map_err(error::from_io));
drop(stdin);
let status = try!(child.wait().map_err(error::from_io));
Ok(status.success())
}
/// Tests whether the given sysroot crate can be used.
///
/// The test code is subject to change, but currently looks like:
///
/// ```ignore
/// extern crate CRATE as probe;
/// ```
pub fn probe_sysroot_crate(&self, name: &str) -> bool {
self.probe(format!("extern crate {} as probe;", name)) // `as _` wasn't stabilized until Rust 1.33
.unwrap_or(false)
}
/// Emits a config value `has_CRATE` if `probe_sysroot_crate` returns true.
pub fn emit_sysroot_crate(&self, name: &str) {
if self.probe_sysroot_crate(name) {
emit(&format!("has_{}", mangle(name)));
}
}
/// Tests whether the given path can be used.
///
/// The test code is subject to change, but currently looks like:
///
/// ```ignore
/// pub use PATH;
/// ```
pub fn probe_path(&self, path: &str) -> bool {
self.probe(format!("pub use {};", path)).unwrap_or(false)
}
/// Emits a config value `has_PATH` if `probe_path` returns true.
///
/// Any non-identifier characters in the `path` will be replaced with
/// `_` in the generated config value.
pub fn emit_has_path(&self, path: &str) {
if self.probe_path(path) {
emit(&format!("has_{}", mangle(path)));
}
}
/// Emits the given `cfg` value if `probe_path` returns true.
pub fn emit_path_cfg(&self, path: &str, cfg: &str) {
if self.probe_path(path) {
emit(cfg);
}
}
/// Tests whether the given trait can be used.
///
/// The test code is subject to change, but currently looks like:
///
/// ```ignore
/// pub trait Probe: TRAIT + Sized {}
/// ```
pub fn probe_trait(&self, name: &str) -> bool {
self.probe(format!("pub trait Probe: {} + Sized {{}}", name))
.unwrap_or(false)
}
/// Emits a config value `has_TRAIT` if `probe_trait` returns true.
///
/// Any non-identifier characters in the trait `name` will be replaced with
/// `_` in the generated config value.
pub fn emit_has_trait(&self, name: &str) {
if self.probe_trait(name) {
emit(&format!("has_{}", mangle(name)));
}
}
/// Emits the given `cfg` value if `probe_trait` returns true.
pub fn emit_trait_cfg(&self, name: &str, cfg: &str) {
if self.probe_trait(name) {
emit(cfg);
}
}
/// Tests whether the given type can be used.
///
/// The test code is subject to change, but currently looks like:
///
/// ```ignore
/// pub type Probe = TYPE;
/// ```
pub fn probe_type(&self, name: &str) -> bool {
self.probe(format!("pub type Probe = {};", name))
.unwrap_or(false)
}
/// Emits a config value `has_TYPE` if `probe_type` returns true.
///
/// Any non-identifier characters in the type `name` will be replaced with
/// `_` in the generated config value.
pub fn emit_has_type(&self, name: &str) {
if self.probe_type(name) {
emit(&format!("has_{}", mangle(name)));
}
}
/// Emits the given `cfg` value if `probe_type` returns true.
pub fn emit_type_cfg(&self, name: &str, cfg: &str) {
if self.probe_type(name) {
emit(cfg);
}
}
/// Tests whether the given expression can be used.
///
/// The test code is subject to change, but currently looks like:
///
/// ```ignore
/// pub fn probe() { let _ = EXPR; }
/// ```
pub fn probe_expression(&self, expr: &str) -> bool {
self.probe(format!("pub fn probe() {{ let _ = {}; }}", expr))
.unwrap_or(false)
}
/// Emits the given `cfg` value if `probe_expression` returns true.
pub fn emit_expression_cfg(&self, expr: &str, cfg: &str) {
if self.probe_expression(expr) {
emit(cfg);
}
}
/// Tests whether the given constant expression can be used.
///
/// The test code is subject to change, but currently looks like:
///
/// ```ignore
/// pub const PROBE: () = ((), EXPR).0;
/// ```
pub fn probe_constant(&self, expr: &str) -> bool {
self.probe(format!("pub const PROBE: () = ((), {}).0;", expr))
.unwrap_or(false)
}
/// Emits the given `cfg` value if `probe_constant` returns true.
pub fn emit_constant_cfg(&self, expr: &str, cfg: &str) {
if self.probe_constant(expr) {
emit(cfg);
}
}
}
fn mangle(s: &str) -> String {
s.chars()
.map(|c| match c {
'A'...'Z' | 'a'...'z' | '0'...'9' => c,
_ => '_',
})
.collect()
}
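// Returns true when the build-script output directory lies under
// `<CARGO_TARGET_DIR>/<target-triple>/...` (with `target` as the default when
// CARGO_TARGET_DIR is unset); `rustflags` below uses this as a hint that Cargo
// is building a TARGET artifact rather than a HOST artifact.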
fn dir_contains_target(
target: &Option<OsString>,
dir: &Path,
cargo_target_dir: Option<OsString>,
) -> bool {
target
.as_ref()
.and_then(|target| {
dir.to_str().and_then(|dir| {
let mut cargo_target_dir = cargo_target_dir
.map(PathBuf::from)
.unwrap_or_else(|| PathBuf::from("target"));
cargo_target_dir.push(target);
cargo_target_dir
.to_str()
.map(|cargo_target_dir| dir.contains(&cargo_target_dir))
})
})
.unwrap_or(false)
}
fn rustflags(target: &Option<OsString>, dir: &Path) -> Vec<String> {
// Starting with rust-lang/cargo#9601, shipped in Rust 1.55, Cargo always sets
// CARGO_ENCODED_RUSTFLAGS for any host/target build script invocation. This
// includes any source of flags, whether from the environment, toml config, or
// whatever may come in the future. The value is either an empty string, or a
// list of arguments separated by the ASCII unit separator (US), 0x1f.
if let Ok(a) = env::var("CARGO_ENCODED_RUSTFLAGS") {
return if a.is_empty() {
Vec::new()
} else {
a.split('\x1f').map(str::to_string).collect()
};
}
// Otherwise, we have to take a more heuristic approach, and we don't
// support values from toml config at all.
//
// Cargo only applies RUSTFLAGS for building TARGET artifact in
// cross-compilation environment. Sadly, we don't have a way to detect
// when we're building HOST artifact in a cross-compilation environment,
// so for now we only apply RUSTFLAGS when cross-compiling an artifact.
//
// See https://github.com/cuviper/autocfg/pull/10#issuecomment-527575030.
if *target != env::var_os("HOST")
|| dir_contains_target(target, dir, env::var_os("CARGO_TARGET_DIR"))
{
if let Ok(rustflags) = env::var("RUSTFLAGS") {
// This is meant to match how cargo handles the RUSTFLAGS environment variable.
// See https://github.com/rust-lang/cargo/blob/69aea5b6f69add7c51cca939a79644080c0b0ba0/src/cargo/core/compiler/build_context/target_info.rs#L434-L441
return rustflags
.split(' ')
.map(str::trim)
.filter(|s| !s.is_empty())
.map(str::to_string)
.collect();
}
}
Vec::new()
}
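Taken together, the probe and emit methods above are normally driven from a consuming crate's `build.rs`. A minimal sketch, assuming `autocfg = "1"` is declared under `[build-dependencies]`; the cfg name passed to `emit_constant_cfg` is arbitrary and chosen here only for illustration:

```rust
// build.rs of a hypothetical downstream crate.
extern crate autocfg;

fn main() {
    let ac = autocfg::new();

    // Prints `cargo:rustc-cfg=rustc_1_36` if the compiler is at least 1.36.
    ac.emit_rustc_version(1, 36);

    // Prints the caller-chosen cfg only if the constant expression compiles
    // (`str::len` in const context stabilized in 1.39, as the tests note).
    ac.emit_constant_cfg(r#""probe".len()"#, "has_const_str_len");

    // Re-run the build script only when it changes.
    autocfg::rerun_path("build.rs");
}
```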

174
zeroidc/vendor/autocfg/src/tests.rs vendored Normal file
View File

@ -0,0 +1,174 @@
use super::AutoCfg;
use std::env;
use std::path::Path;
impl AutoCfg {
fn core_std(&self, path: &str) -> String {
let krate = if self.no_std { "core" } else { "std" };
format!("{}::{}", krate, path)
}
fn assert_std(&self, probe_result: bool) {
assert_eq!(!self.no_std, probe_result);
}
fn assert_min(&self, major: usize, minor: usize, probe_result: bool) {
assert_eq!(self.probe_rustc_version(major, minor), probe_result);
}
fn for_test() -> Result<Self, super::error::Error> {
match env::var_os("TESTS_TARGET_DIR") {
Some(d) => Self::with_dir(d),
None => Self::with_dir("target"),
}
}
}
#[test]
fn autocfg_version() {
let ac = AutoCfg::for_test().unwrap();
println!("version: {:?}", ac.rustc_version);
assert!(ac.probe_rustc_version(1, 0));
}
#[test]
fn version_cmp() {
use super::version::Version;
let v123 = Version::new(1, 2, 3);
assert!(Version::new(1, 0, 0) < v123);
assert!(Version::new(1, 2, 2) < v123);
assert!(Version::new(1, 2, 3) == v123);
assert!(Version::new(1, 2, 4) > v123);
assert!(Version::new(1, 10, 0) > v123);
assert!(Version::new(2, 0, 0) > v123);
}
#[test]
fn probe_add() {
let ac = AutoCfg::for_test().unwrap();
let add = ac.core_std("ops::Add");
let add_rhs = add.clone() + "<i32>";
let add_rhs_output = add.clone() + "<i32, Output = i32>";
let dyn_add_rhs_output = "dyn ".to_string() + &*add_rhs_output;
assert!(ac.probe_path(&add));
assert!(ac.probe_trait(&add));
assert!(ac.probe_trait(&add_rhs));
assert!(ac.probe_trait(&add_rhs_output));
ac.assert_min(1, 27, ac.probe_type(&dyn_add_rhs_output));
}
#[test]
fn probe_as_ref() {
let ac = AutoCfg::for_test().unwrap();
let as_ref = ac.core_std("convert::AsRef");
let as_ref_str = as_ref.clone() + "<str>";
let dyn_as_ref_str = "dyn ".to_string() + &*as_ref_str;
assert!(ac.probe_path(&as_ref));
assert!(ac.probe_trait(&as_ref_str));
assert!(ac.probe_type(&as_ref_str));
ac.assert_min(1, 27, ac.probe_type(&dyn_as_ref_str));
}
#[test]
fn probe_i128() {
let ac = AutoCfg::for_test().unwrap();
let i128_path = ac.core_std("i128");
ac.assert_min(1, 26, ac.probe_path(&i128_path));
ac.assert_min(1, 26, ac.probe_type("i128"));
}
#[test]
fn probe_sum() {
let ac = AutoCfg::for_test().unwrap();
let sum = ac.core_std("iter::Sum");
let sum_i32 = sum.clone() + "<i32>";
let dyn_sum_i32 = "dyn ".to_string() + &*sum_i32;
ac.assert_min(1, 12, ac.probe_path(&sum));
ac.assert_min(1, 12, ac.probe_trait(&sum));
ac.assert_min(1, 12, ac.probe_trait(&sum_i32));
ac.assert_min(1, 12, ac.probe_type(&sum_i32));
ac.assert_min(1, 27, ac.probe_type(&dyn_sum_i32));
}
#[test]
fn probe_std() {
let ac = AutoCfg::for_test().unwrap();
ac.assert_std(ac.probe_sysroot_crate("std"));
}
#[test]
fn probe_alloc() {
let ac = AutoCfg::for_test().unwrap();
ac.assert_min(1, 36, ac.probe_sysroot_crate("alloc"));
}
#[test]
fn probe_bad_sysroot_crate() {
let ac = AutoCfg::for_test().unwrap();
assert!(!ac.probe_sysroot_crate("doesnt_exist"));
}
#[test]
fn probe_no_std() {
let ac = AutoCfg::for_test().unwrap();
assert!(ac.probe_type("i32"));
assert!(ac.probe_type("[i32]"));
ac.assert_std(ac.probe_type("Vec<i32>"));
}
#[test]
fn probe_expression() {
let ac = AutoCfg::for_test().unwrap();
assert!(ac.probe_expression(r#""test".trim_left()"#));
ac.assert_min(1, 30, ac.probe_expression(r#""test".trim_start()"#));
ac.assert_std(ac.probe_expression("[1, 2, 3].to_vec()"));
}
#[test]
fn probe_constant() {
let ac = AutoCfg::for_test().unwrap();
assert!(ac.probe_constant("1 + 2 + 3"));
ac.assert_min(1, 33, ac.probe_constant("{ let x = 1 + 2 + 3; x * x }"));
ac.assert_min(1, 39, ac.probe_constant(r#""test".len()"#));
}
#[test]
fn dir_does_not_contain_target() {
assert!(!super::dir_contains_target(
&Some("x86_64-unknown-linux-gnu".into()),
Path::new("/project/target/debug/build/project-ea75983148559682/out"),
None,
));
}
#[test]
fn dir_does_contain_target() {
assert!(super::dir_contains_target(
&Some("x86_64-unknown-linux-gnu".into()),
Path::new(
"/project/target/x86_64-unknown-linux-gnu/debug/build/project-0147aca016480b9d/out"
),
None,
));
}
#[test]
fn dir_does_not_contain_target_with_custom_target_dir() {
assert!(!super::dir_contains_target(
&Some("x86_64-unknown-linux-gnu".into()),
Path::new("/project/custom/debug/build/project-ea75983148559682/out"),
Some("custom".into()),
));
}
#[test]
fn dir_does_contain_target_with_custom_target_dir() {
assert!(super::dir_contains_target(
&Some("x86_64-unknown-linux-gnu".into()),
Path::new(
"/project/custom/x86_64-unknown-linux-gnu/debug/build/project-0147aca016480b9d/out"
),
Some("custom".into()),
));
}

60
zeroidc/vendor/autocfg/src/version.rs vendored Normal file
View File

@ -0,0 +1,60 @@
use std::path::Path;
use std::process::Command;
use std::str;
use super::{error, Error};
/// A version structure for making relative comparisons.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Version {
major: usize,
minor: usize,
patch: usize,
}
impl Version {
/// Creates a `Version` instance for a specific `major.minor.patch` version.
pub fn new(major: usize, minor: usize, patch: usize) -> Self {
Version {
major: major,
minor: minor,
patch: patch,
}
}
pub fn from_rustc(rustc: &Path) -> Result<Self, Error> {
// Get rustc's verbose version
let output = try!(Command::new(rustc)
.args(&["--version", "--verbose"])
.output()
.map_err(error::from_io));
if !output.status.success() {
return Err(error::from_str("could not execute rustc"));
}
let output = try!(str::from_utf8(&output.stdout).map_err(error::from_utf8));
// Find the release line in the verbose version output.
let release = match output.lines().find(|line| line.starts_with("release: ")) {
Some(line) => &line["release: ".len()..],
None => return Err(error::from_str("could not find rustc release")),
};
// Strip off any extra channel info, e.g. "-beta.N", "-nightly"
let version = match release.find('-') {
Some(i) => &release[..i],
None => release,
};
// Split the version into semver components.
let mut iter = version.splitn(3, '.');
let major = try!(iter.next().ok_or(error::from_str("missing major version")));
let minor = try!(iter.next().ok_or(error::from_str("missing minor version")));
let patch = try!(iter.next().ok_or(error::from_str("missing patch version")));
Ok(Version::new(
try!(major.parse().map_err(error::from_num)),
try!(minor.parse().map_err(error::from_num)),
try!(patch.parse().map_err(error::from_num)),
))
}
}
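For reference, `from_rustc` parses a verbose version report of the shape sketched below; only the `release:` line is consulted, and a channel suffix such as `-beta.3` or `-nightly` is stripped before the `major.minor.patch` split. The concrete hash, date, and host are placeholders, not taken from this repository:

```rust
// Illustrative shape of `rustc --version --verbose` output; only the
// `release:` line matters to the parser above.
const SAMPLE: &str = "\
rustc 1.60.0 (abcdefgh 2022-04-07)
binary: rustc
host: x86_64-unknown-linux-gnu
release: 1.60.0
";

fn main() {
    let release = SAMPLE
        .lines()
        .find(|line| line.starts_with("release: "))
        .map(|line| &line["release: ".len()..]);
    assert_eq!(release, Some("1.60.0"));
}
```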

View File

@ -0,0 +1,33 @@
extern crate autocfg;
use std::env;
/// Tests that autocfg uses the RUSTFLAGS or CARGO_ENCODED_RUSTFLAGS
/// environment variables when running rustc.
#[test]
fn test_with_sysroot() {
// Use the same path as this test binary.
let dir = env::current_exe().unwrap().parent().unwrap().to_path_buf();
env::set_var("OUT_DIR", &format!("{}", dir.display()));
// If we have encoded rustflags, they take precedence, even if empty.
env::set_var("CARGO_ENCODED_RUSTFLAGS", "");
env::set_var("RUSTFLAGS", &format!("-L {}", dir.display()));
let ac = autocfg::AutoCfg::new().unwrap();
assert!(ac.probe_sysroot_crate("std"));
assert!(!ac.probe_sysroot_crate("autocfg"));
// Now try again with useful encoded args.
env::set_var(
"CARGO_ENCODED_RUSTFLAGS",
&format!("-L\x1f{}", dir.display()),
);
let ac = autocfg::AutoCfg::new().unwrap();
assert!(ac.probe_sysroot_crate("autocfg"));
// Try the old-style RUSTFLAGS, ensuring HOST != TARGET.
env::remove_var("CARGO_ENCODED_RUSTFLAGS");
env::set_var("HOST", "lol");
let ac = autocfg::AutoCfg::new().unwrap();
assert!(ac.probe_sysroot_crate("autocfg"));
}
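The `\x1f` used in the test above is the ASCII unit separator that Cargo places between flags in `CARGO_ENCODED_RUSTFLAGS`. A small sketch of producing such a value in the form `rustflags()` expects to decode; the flag values themselves are arbitrary examples:

```rust
// Join flags with the ASCII unit separator (0x1f), mirroring how
// `rustflags()` splits CARGO_ENCODED_RUSTFLAGS back apart.
fn encode_rustflags(flags: &[&str]) -> String {
    flags.join("\x1f")
}

fn main() {
    let encoded = encode_rustflags(&["-L", "/tmp/deps", "--cfg", "demo"]);
    assert_eq!(encoded.split('\x1f').count(), 4);
    println!("{:?}", encoded);
}
```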

View File

@ -0,0 +1 @@
{"files":{"Cargo.lock":"ba3f757ac955932ea298d293ce44dfb25e064ffac93e429c13bd684a686d2ec1","Cargo.toml":"3402755896ee085ef905b9ff4d2ed25040e4fd53e254353f1bb044252404072b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0dd882e53de11566d50f8e8e2d5a651bcf3fabee4987d70f306233cf39094ba7","README.md":"2810098d290f3df719e6f41ffca38bb954d0fe62d4e56905a9a2436c4784bebf","RELEASE-NOTES.md":"5aaccee22ad349c72531bdd6c68b233bccc8c1eb23dce2cf5e05ae4498a15a1a","benches/benchmarks.rs":"bc1f603c5aa87627a93eee71eaed64fbd767d013051bac00ea265c16fecb30b9","examples/base64.rs":"f397b8726df41fce0793a8c6ebe95d4651aa37ed746da305032f1e99d9c37235","examples/make_tables.rs":"392f51b3edb1b5a2c62b823337c7785a6b8535f39f09283b1913a5c68fb186bf","icon_CLion.svg":"cffa044ba75cb998ee3306991dc4a3755ec2f39ab95ddd4b74bc21988389020f","src/chunked_encoder.rs":"fba5ea5f1204b9bf11291ec3483bcb23d330101eb2f6635e33cd63e4de13b653","src/decode.rs":"fa18535da4c549bdb29e4c4f074387645467384f88a1ecdb72c31828bd930ee3","src/display.rs":"55f9cd05ba037b77689e2558fa174055fad5273959d1edce3a2289f94244fd5d","src/encode.rs":"8a0a6b71581b4c52c2844111a3611cf73522533452a27f5ef8d09eaa73594e2e","src/lib.rs":"c7b904fac8706bc4758c2763e7a43dc1edd99ed5641ac2355957f6aeff91eece","src/read/decoder.rs":"9a7b65e165f7aed6b007bf7436ac9ba9b03d3b03e9d5a1e16691874e21509ced","src/read/decoder_tests.rs":"aacb7624c33ed6b90e068ff9af6095c839b4088060b4c406c08dce25ce837f6d","src/read/mod.rs":"e0b714eda02d16b1ffa6f78fd09b2f963e01c881b1f7c17b39db4e904be5e746","src/tables.rs":"73ce100fd3f4917ec1e8d9277ff0b956cc2636b33145f101a7cf1a5a8b7bacc1","src/tests.rs":"202ddced9cf52205182c6202e583c4c4f929b9d225fd9d1ebdbfd389cb2df0ba","src/write/encoder.rs":"afabacf7fa54f2ec9b1fe4987de818d368d7470ade0396649743a11c81dba28e","src/write/encoder_string_writer.rs":"3f9109585babe048230659f64973cb1633bbb2ed9de255177336260226127b81","src/write/encoder_tests.rs":"381d7c2871407157c36e909c928307ac0389b3d4504fb80607134e94ac59e68f","src/write/mod.rs":"1503b9457e4f5d2895b24136c3af893f0b7ce18dfe4de1096fc5d17f8d78e99b","tests/decode.rs":"da2cbd49b84e0d8b1d8a52136ba3d97cfb248920a45f9955db1e5bc5367218ce","tests/encode.rs":"5efb6904c36c6f899a05078e5c9be756fc58af1ee9940edfa8dea1ee53675364","tests/helpers.rs":"a76015e4a4e8f98213bdbaa592cd9574ccdc95a28e1b1f835a2753e09fa6037f","tests/tests.rs":"05753e5f1d4a6c75015a5342f9b5dc3073c00bdfe0a829a962f8723321c75549"},"package":"904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"}

826
zeroidc/vendor/base64/Cargo.lock generated vendored Normal file
View File

@ -0,0 +1,826 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
name = "ansi_term"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"hermit-abi 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.74 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "autocfg"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "autocfg"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "base64"
version = "0.13.0"
dependencies = [
"criterion 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
"structopt 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "bitflags"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "bstr"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-automata 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "bumpalo"
version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "byteorder"
version = "1.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cast"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cfg-if"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "clap"
version = "2.33.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-width 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"vec_map 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cloudabi"
version = "0.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "criterion"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
"cast 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"clap 2.33.1 (registry+https://github.com/rust-lang/crates.io-index)",
"criterion-plot 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"csv 1.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
"oorandom 11.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"plotters 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
"tinytemplate 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "criterion-plot"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cast 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-deque"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-epoch 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-epoch"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memoffset 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-queue"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam-utils"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "csv"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bstr 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
"csv-core 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"ryu 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "csv-core"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "either"
version = "1.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "fuchsia-cprng"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "heck"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-segmentation 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "hermit-abi"
version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.74 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "itertools"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "itoa"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "js-sys"
version = "0.3.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"wasm-bindgen 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "libc"
version = "0.2.74"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "log"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "maybe-uninit"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "memchr"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "memoffset"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-traits"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num_cpus"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"hermit-abi 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.74 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "oorandom"
version = "11.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "plotters"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"js-sys 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
"wasm-bindgen 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
"web-sys 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro-error-attr 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)",
"version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "proc-macro2"
version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "quote"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.74 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_chacha"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_core"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_core"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "rand_hc"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_isaac"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_jitter"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.74 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_os"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.74 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_pcg"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_xorshift"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rayon"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
"either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon-core 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rayon-core"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-queue 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rdrand"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex"
version = "1.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"regex-syntax 0.6.18 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex-automata"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex-syntax"
version = "0.6.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "rustc_version"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ryu"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "scopeguard"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "semver"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "semver-parser"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "serde"
version = "1.0.114"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "serde_derive"
version = "1.0.114"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "serde_json"
version = "1.0.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"ryu 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "strsim"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "structopt"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"clap 2.33.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"structopt-derive 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "structopt-derive"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro-error 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "syn"
version = "1.0.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "textwrap"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-width 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tinytemplate"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "unicode-segmentation"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "unicode-width"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "unicode-xid"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "vec_map"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "version_check"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "walkdir"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "wasm-bindgen"
version = "0.2.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"wasm-bindgen-macro 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bumpalo 3.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)",
"wasm-bindgen-shared 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"wasm-bindgen-macro-support 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)",
"wasm-bindgen-backend 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
"wasm-bindgen-shared 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "web-sys"
version = "0.3.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"js-sys 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
"wasm-bindgen 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi-util"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
"checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
"checksum atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
"checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2"
"checksum autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d"
"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
"checksum bstr 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "31accafdb70df7871592c058eca3985b71104e15ac32f64706022c58867da931"
"checksum bumpalo 3.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2e8c087f005730276d1096a652e92a8bacee2e2472bcc9715a74d2bec38b5820"
"checksum byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
"checksum cast 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4b9434b9a5aa1450faa3f9cb14ea0e8c53bb5d2b3c1bfd1ab4fc03e9f33fbfb0"
"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
"checksum clap 2.33.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bdfa80d47f954d53a35a64987ca1422f495b8d6483c0fe9f7117b36c2a792129"
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
"checksum criterion 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "63f696897c88b57f4ffe3c69d8e1a0613c7d0e6c4833363c8560fbde9c47b966"
"checksum criterion-plot 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e022feadec601fba1649cfa83586381a4ad31c6bf3a9ab7d408118b05dd9889d"
"checksum crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9f02af974daeee82218205558e51ec8768b48cf524bd01d550abe5573a608285"
"checksum crossbeam-epoch 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace"
"checksum crossbeam-queue 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570"
"checksum crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
"checksum csv 1.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "00affe7f6ab566df61b4be3ce8cf16bc2576bca0963ceb0955e45d514bf9a279"
"checksum csv-core 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90"
"checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3"
"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
"checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205"
"checksum hermit-abi 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "3deed196b6e7f9e44a2ae8d94225d80302d81208b1bb673fd21fe634645c85a9"
"checksum itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
"checksum itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6"
"checksum js-sys 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)" = "85a7e2c92a4804dd459b86c339278d0fe87cf93757fae222c3fa3ae75458bc73"
"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
"checksum libc 0.2.74 (registry+https://github.com/rust-lang/crates.io-index)" = "a2f02823cf78b754822df5f7f268fb59822e7296276d3e069d8e8cb26a14bd10"
"checksum log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
"checksum maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
"checksum memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400"
"checksum memoffset 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "c198b026e1bbf08a937e94c6c60f9ec4a2267f5b0d2eec9c1b21b061ce2be55f"
"checksum num-traits 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "ac267bcc07f48ee5f8935ab0d24f316fb722d7a1292e2913f0cc196b29ffd611"
"checksum num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
"checksum oorandom 11.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a170cebd8021a008ea92e4db85a72f80b35df514ec664b296fdcbb654eac0b2c"
"checksum plotters 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)" = "0d1685fbe7beba33de0330629da9d955ac75bd54f33d7b79f9a895590124f6bb"
"checksum proc-macro-error 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
"checksum proc-macro-error-attr 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
"checksum proc-macro2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)" = "04f5f085b5d71e2188cb8271e5da0161ad52c3f227a661a3c135fdf28e258b12"
"checksum quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
"checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4"
"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08"
"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b"
"checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071"
"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44"
"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c"
"checksum rayon 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "62f02856753d04e03e26929f820d0a0a337ebe71f849801eea335d464b349080"
"checksum rayon-core 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e92e15d89083484e11353891f1af602cc661426deb9564c298b270c726973280"
"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
"checksum regex 1.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3780fcf44b193bc4d09f36d2a3c87b251da4a046c87795a0d35f4f927ad8e6"
"checksum regex-automata 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "ae1ded71d66a4a97f5e961fd0cb25a5f366a42a41570d16a763a69c092c26ae4"
"checksum regex-syntax 0.6.18 (registry+https://github.com/rust-lang/crates.io-index)" = "26412eb97c6b088a6997e05f69403a802a92d520de2f8e63c2b65f9e0f47c4e8"
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
"checksum ryu 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
"checksum same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
"checksum scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
"checksum serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)" = "5317f7588f0a5078ee60ef675ef96735a1442132dc645eb1d12c018620ed8cd3"
"checksum serde_derive 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)" = "2a0be94b04690fbaed37cddffc5c134bf537c8e3329d53e982fe04c374978f8e"
"checksum serde_json 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)" = "164eacbdb13512ec2745fb09d51fd5b22b0d65ed294a1dcf7285a360c80a675c"
"checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
"checksum structopt 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)" = "6cc388d94ffabf39b5ed5fadddc40147cb21e605f53db6f8f36a625d27489ac5"
"checksum structopt-derive 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5e2513111825077552a6751dfad9e11ce0fba07d7276a3943a037d7e93e64c5f"
"checksum syn 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)" = "4cdb98bcb1f9d81d07b536179c269ea15999b5d14ea958196413869445bb5250"
"checksum textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
"checksum tinytemplate 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d3dc76004a03cec1c5932bca4cdc2e39aaa798e3f82363dd94f9adf6098c12f"
"checksum unicode-segmentation 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e83e153d1053cbb5a118eeff7fd5be06ed99153f00dbcd8ae310c5fb2b22edc0"
"checksum unicode-width 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
"checksum unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
"checksum vec_map 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
"checksum version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
"checksum walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d"
"checksum wasm-bindgen 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)" = "f0563a9a4b071746dd5aedbc3a28c6fe9be4586fb3fbadb67c400d4f53c6b16c"
"checksum wasm-bindgen-backend 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)" = "bc71e4c5efa60fb9e74160e89b93353bc24059999c0ae0fb03affc39770310b0"
"checksum wasm-bindgen-macro 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)" = "97c57cefa5fa80e2ba15641578b44d36e7a64279bc5ed43c6dbaf329457a2ed2"
"checksum wasm-bindgen-macro-support 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)" = "841a6d1c35c6f596ccea1f82504a192a60378f64b3bb0261904ad8f2f5657556"
"checksum wasm-bindgen-shared 0.2.67 (registry+https://github.com/rust-lang/crates.io-index)" = "93b162580e34310e5931c4b792560108b10fd14d64915d7fff8ff00180e70092"
"checksum web-sys 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)" = "dda38f4e5ca63eda02c059d243aa25b5f35ab98451e518c51612cd0f1bd19a47"
"checksum winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
"checksum winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

43
zeroidc/vendor/base64/Cargo.toml vendored Normal file
View File

@ -0,0 +1,43 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
edition = "2018"
name = "base64"
version = "0.13.0"
authors = ["Alice Maz <alice@alicemaz.com>", "Marshall Pierce <marshall@mpierce.org>"]
description = "encodes and decodes base64 as bytes or utf8"
documentation = "https://docs.rs/base64"
readme = "README.md"
keywords = ["base64", "utf8", "encode", "decode", "no_std"]
categories = ["encoding"]
license = "MIT/Apache-2.0"
repository = "https://github.com/marshallpierce/rust-base64"
[profile.bench]
debug = true
[[bench]]
name = "benchmarks"
harness = false
[dev-dependencies.criterion]
version = "=0.3.2"
[dev-dependencies.rand]
version = "0.6.1"
[dev-dependencies.structopt]
version = "0.3"
[features]
alloc = []
default = ["std"]
std = []

201
zeroidc/vendor/base64/LICENSE-APACHE vendored Normal file
View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

21
zeroidc/vendor/base64/LICENSE-MIT vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2015 Alice Maz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

114
zeroidc/vendor/base64/README.md vendored Normal file
View File

@ -0,0 +1,114 @@
[base64](https://crates.io/crates/base64)
===
[![](https://img.shields.io/crates/v/base64.svg)](https://crates.io/crates/base64) [![Docs](https://docs.rs/base64/badge.svg)](https://docs.rs/base64) [![Build](https://travis-ci.org/marshallpierce/rust-base64.svg?branch=master)](https://travis-ci.org/marshallpierce/rust-base64) [![codecov](https://codecov.io/gh/marshallpierce/rust-base64/branch/master/graph/badge.svg)](https://codecov.io/gh/marshallpierce/rust-base64) [![unsafe forbidden](https://img.shields.io/badge/unsafe-forbidden-success.svg)](https://github.com/rust-secure-code/safety-dance/)
<a href="https://www.jetbrains.com/?from=rust-base64"><img src="/icon_CLion.svg" height="40px"/></a>
Made with CLion. Thanks to JetBrains for supporting open source!
It's base64. What more could anyone want?
This library's goals are to be *correct* and *fast*. It's thoroughly tested and widely used. It exposes functionality at multiple levels of abstraction so you can choose the level of convenience vs performance that you want, e.g. `decode_config_slice` decodes into an existing `&mut [u8]` and is pretty fast (2.6 GiB/s for a 3 KiB input), whereas `decode_config` allocates a new `Vec<u8>` and returns it, which might be more convenient in some cases, but is slower (although still fast enough for almost any purpose) at 2.1 GiB/s.
Example
---
```rust
extern crate base64;
use base64::{encode, decode};
fn main() {
let a = b"hello world";
let b = "aGVsbG8gd29ybGQ=";
assert_eq!(encode(a), b);
assert_eq!(a, &decode(b).unwrap()[..]);
}
```
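A minimal sketch of the slice-based path mentioned above; the conservative buffer sizing rule `(input_len + 3) / 4 * 3` is taken from the `decode_config_slice` docs in this crate:
```rust
use base64::{decode_config_slice, STANDARD};

fn main() {
    let input = "aGVsbG8gd29ybGQ=";
    // Size the output conservatively: at most 3 decoded bytes per 4 input bytes.
    let mut buf = vec![0u8; (input.len() + 3) / 4 * 3];
    let n = decode_config_slice(input, STANDARD, &mut buf).unwrap();
    assert_eq!(&buf[..n], b"hello world");
}
```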
See the [docs](https://docs.rs/base64) for all the details.
Rust version compatibility
---
The minimum required Rust version is 1.34.0.
Developing
---
Benchmarks are in `benches/`. Running them requires nightly rust, but `rustup` makes it easy:
```bash
rustup run nightly cargo bench
```
Decoding is aided by some pre-calculated tables, which are generated by:
```bash
cargo run --example make_tables > src/tables.rs.tmp && mv src/tables.rs.tmp src/tables.rs
```
no_std
---
This crate supports no_std. By default the crate targets std via the `std` feature. You can deactivate the `default-features` to target core instead. In that case you lose out on all the functionality revolving around `std::io`, `std::error::Error` and heap allocations. There is an additional `alloc` feature that you can activate to bring back the support for heap allocations.
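For example, the slice-based encode/decode functions do not appear to be gated on either feature and can work entirely with fixed buffers; a minimal sketch (compiled here with default features for brevity):
```rust
use base64::{encode_config_slice, STANDARD};

fn main() {
    // 4 output bytes are needed for every 3 input bytes, rounded up (padding included).
    let mut out = [0u8; 4];
    let n = encode_config_slice(b"hi", STANDARD, &mut out);
    assert_eq!(&out[..n], b"aGk=");
}
```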
Profiling
---
On Linux, you can use [perf](https://perf.wiki.kernel.org/index.php/Main_Page) for profiling. Then compile the benchmarks with `rustup run nightly cargo bench --no-run`.
Run the benchmark binary with `perf` (shown here filtering to one particular benchmark, which will make the results easier to read). `perf` is only available to the root user on most systems as it fiddles with event counters in your CPU, so use `sudo`. We need to run the actual benchmark binary, hence the path into `target`. You can see the actual full path with `rustup run nightly cargo bench -v`; it will print out the commands it runs. If you use the exact path that `bench` outputs, make sure you get the one that's for the benchmarks, not the tests. You may also want to `cargo clean` so you have only one `benchmarks-` binary (they tend to accumulate).
```bash
sudo perf record target/release/deps/benchmarks-* --bench decode_10mib_reuse
```
Then analyze the results, again with perf:
```bash
sudo perf annotate -l
```
You'll see a bunch of interleaved rust source and assembly like this. The section with `lib.rs:327` is telling us that 4.02% of samples saw the `movzbl` (the table-lookup load) as the active instruction. However, this percentage is not as exact as it seems due to a phenomenon called *skid*. Basically, a consequence of how fancy modern CPUs are is that this sort of instruction profiling is inherently inaccurate, especially in branch-heavy code.
```text
lib.rs:322 0.70 : 10698: mov %rdi,%rax
2.82 : 1069b: shr $0x38,%rax
: if morsel == decode_tables::INVALID_VALUE {
: bad_byte_index = input_index;
: break;
: };
: accum = (morsel as u64) << 58;
lib.rs:327 4.02 : 1069f: movzbl (%r9,%rax,1),%r15d
: // fast loop of 8 bytes at a time
: while input_index < length_of_full_chunks {
: let mut accum: u64;
:
: let input_chunk = BigEndian::read_u64(&input_bytes[input_index..(input_index + 8)]);
: morsel = decode_table[(input_chunk >> 56) as usize];
lib.rs:322 3.68 : 106a4: cmp $0xff,%r15
: if morsel == decode_tables::INVALID_VALUE {
0.00 : 106ab: je 1090e <base64::decode_config_buf::hbf68a45fefa299c1+0x46e>
```
Fuzzing
---
This uses [cargo-fuzz](https://github.com/rust-fuzz/cargo-fuzz). See `fuzz/fuzzers` for the available fuzzing scripts. To run, use an invocation like these:
```bash
cargo +nightly fuzz run roundtrip
cargo +nightly fuzz run roundtrip_no_pad
cargo +nightly fuzz run roundtrip_random_config -- -max_len=10240
cargo +nightly fuzz run decode_random
```
License
---
This project is dual-licensed under MIT and Apache 2.0.

105
zeroidc/vendor/base64/RELEASE-NOTES.md vendored Normal file
View File

@ -0,0 +1,105 @@
# 0.13.0
- Config methods are const
- Added `EncoderStringWriter` to allow encoding directly to a String (see the sketch after this list)
- `EncoderWriter` now owns its delegate writer rather than keeping a reference to it (though refs still work)
- As a consequence, it is now possible to extract the delegate writer from an `EncoderWriter` via `finish()`, which returns `Result<W>` instead of `Result<()>`. If you were calling `finish()` explicitly, you will now need to use `let _ = foo.finish()` instead of just `foo.finish()` to avoid a warning about the unused value.
- When decoding input that has both an invalid length and an invalid symbol as the last byte, `InvalidByte` will be emitted instead of `InvalidLength` to make the problem more obvious.
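A minimal sketch of the two streaming writers described above, following the usage in this crate's benchmarks (`into_inner()` and `finish()` are assumed to finalize any trailing partial chunk):
```rust
use std::io::Write;
use base64::write::{EncoderStringWriter, EncoderWriter};

fn main() -> std::io::Result<()> {
    // Encode directly into a String.
    let mut enc = EncoderStringWriter::new(base64::STANDARD);
    enc.write_all(b"hello world")?;
    assert_eq!(enc.into_inner(), "aGVsbG8gd29ybGQ=");

    // EncoderWriter owns its delegate writer; finish() hands it back.
    let mut enc = EncoderWriter::new(Vec::<u8>::new(), base64::STANDARD);
    enc.write_all(b"hello world")?;
    let delegate = enc.finish()?;
    assert_eq!(&delegate[..], &b"aGVsbG8gd29ybGQ="[..]);
    Ok(())
}
```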
# 0.12.2
- Add `BinHex` alphabet
# 0.12.1
- Add `Bcrypt` alphabet
# 0.12.0
- A `Read` implementation (`DecoderReader`) to let users transparently decode data from a b64 input source (see the sketch after this list)
- IMAP's modified b64 alphabet
- Relaxed type restrictions to just `AsRef<[u8]>` for main `encode*`/`decode*` functions
- A minor performance improvement in encoding
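A minimal sketch of `DecoderReader`, mirroring how the benchmarks in this crate drive it:
```rust
use std::io::{Cursor, Read};
use base64::read::DecoderReader;

fn main() -> std::io::Result<()> {
    let mut encoded = Cursor::new(b"aGVsbG8gd29ybGQ=".to_vec());
    let mut decoder = DecoderReader::new(&mut encoded, base64::STANDARD);
    let mut decoded = Vec::new();
    decoder.read_to_end(&mut decoded)?;
    assert_eq!(&decoded[..], &b"hello world"[..]);
    Ok(())
}
```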
# 0.11.0
- Minimum rust version 1.34.0
- `no_std` is now supported via the two new features `alloc` and `std`.
# 0.10.1
- Minimum rust version 1.27.2
- Fix bug in streaming encoding ([#90](https://github.com/marshallpierce/rust-base64/pull/90)): if the underlying writer didn't write all the bytes given to it, the remaining bytes would not be retried later. See the docs on `EncoderWriter::write`.
- Make it configurable whether or not to return an error when decoding detects excess trailing bits.
# 0.10.0
- Remove line wrapping. Line wrapping was never a great conceptual fit in this library, and other features (streaming encoding, etc) either couldn't support it or could support only special cases of it with a great increase in complexity. Line wrapping has been pulled out into a [line-wrap](https://crates.io/crates/line-wrap) crate, so it's still available if you need it.
- `Base64Display` creation no longer uses a `Result` because it can't fail, which means its helper methods for common
configs that `unwrap()` for you are no longer needed
- Add a streaming encoder `Write` impl to transparently base64 as you write.
- Remove the remaining `unsafe` code.
- Remove whitespace stripping to simplify `no_std` support. No out of the box configs use it, and it's trivial to do yourself if needed: `filter(|b| !b" \n\t\r\x0b\x0c".contains(b))`.
- Detect invalid trailing symbols when decoding and return an error rather than silently ignoring them.
# 0.9.3
- Update safemem
# 0.9.2
- Derive `Clone` for `DecodeError`.
# 0.9.1
- Add support for `crypt(3)`'s base64 variant.
# 0.9.0
- `decode_config_slice` function for no-allocation decoding, analogous to `encode_config_slice`
- Decode performance optimization
# 0.8.0
- `encode_config_slice` function for no-allocation encoding
# 0.7.0
- `STANDARD_NO_PAD` config
- `Base64Display` heap-free wrapper for use in format strings, etc
# 0.6.0
- Decode performance improvements
- Use `unsafe` in fewer places
- Added fuzzers
# 0.5.2
- Avoid usize overflow when calculating length
- Better line wrapping performance
# 0.5.1
- Temporarily disable line wrapping
- Add Apache 2.0 license
# 0.5.0
- MIME support, including configurable line endings and line wrapping
- Removed `decode_ws`
- Renamed `Base64Error` to `DecodeError`
# 0.4.1
- Allow decoding an `AsRef<[u8]>` instead of just a `&str`
# 0.4.0
- Configurable padding
- Encode performance improvements
# 0.3.0
- Added encode/decode functions that do not allocate their own storage
- Decode performance improvements
- Extraneous padding bytes are no longer ignored. Now, an error will be returned.

View File

@ -0,0 +1,210 @@
extern crate base64;
#[macro_use]
extern crate criterion;
extern crate rand;
use base64::display;
use base64::{
decode, decode_config_buf, decode_config_slice, encode, encode_config_buf, encode_config_slice,
write, Config,
};
use criterion::{black_box, Bencher, Criterion, ParameterizedBenchmark, Throughput};
use rand::{FromEntropy, Rng};
use std::io::{self, Read, Write};
const TEST_CONFIG: Config = base64::STANDARD;
fn do_decode_bench(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
fill(&mut v);
let encoded = encode(&v);
b.iter(|| {
let orig = decode(&encoded);
black_box(&orig);
});
}
fn do_decode_bench_reuse_buf(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
fill(&mut v);
let encoded = encode(&v);
let mut buf = Vec::new();
b.iter(|| {
decode_config_buf(&encoded, TEST_CONFIG, &mut buf).unwrap();
black_box(&buf);
buf.clear();
});
}
fn do_decode_bench_slice(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
fill(&mut v);
let encoded = encode(&v);
let mut buf = Vec::new();
buf.resize(size, 0);
b.iter(|| {
decode_config_slice(&encoded, TEST_CONFIG, &mut buf).unwrap();
black_box(&buf);
});
}
fn do_decode_bench_stream(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
fill(&mut v);
let encoded = encode(&v);
let mut buf = Vec::new();
buf.resize(size, 0);
buf.truncate(0);
b.iter(|| {
let mut cursor = io::Cursor::new(&encoded[..]);
let mut decoder = base64::read::DecoderReader::new(&mut cursor, TEST_CONFIG);
decoder.read_to_end(&mut buf).unwrap();
buf.clear();
black_box(&buf);
});
}
fn do_encode_bench(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
b.iter(|| {
let e = encode(&v);
black_box(&e);
});
}
fn do_encode_bench_display(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
b.iter(|| {
let e = format!("{}", display::Base64Display::with_config(&v, TEST_CONFIG));
black_box(&e);
});
}
fn do_encode_bench_reuse_buf(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
let mut buf = String::new();
b.iter(|| {
encode_config_buf(&v, TEST_CONFIG, &mut buf);
buf.clear();
});
}
fn do_encode_bench_slice(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
let mut buf = Vec::new();
// conservative estimate of encoded size
buf.resize(v.len() * 2, 0);
b.iter(|| {
encode_config_slice(&v, TEST_CONFIG, &mut buf);
});
}
fn do_encode_bench_stream(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
let mut buf = Vec::new();
buf.reserve(size * 2);
b.iter(|| {
buf.clear();
let mut stream_enc = write::EncoderWriter::new(&mut buf, TEST_CONFIG);
stream_enc.write_all(&v).unwrap();
stream_enc.flush().unwrap();
});
}
fn do_encode_bench_string_stream(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
b.iter(|| {
let mut stream_enc = write::EncoderStringWriter::new(TEST_CONFIG);
stream_enc.write_all(&v).unwrap();
stream_enc.flush().unwrap();
let _ = stream_enc.into_inner();
});
}
fn do_encode_bench_string_reuse_buf_stream(b: &mut Bencher, &size: &usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
let mut buf = String::new();
b.iter(|| {
buf.clear();
let mut stream_enc = write::EncoderStringWriter::from(&mut buf, TEST_CONFIG);
stream_enc.write_all(&v).unwrap();
stream_enc.flush().unwrap();
let _ = stream_enc.into_inner();
});
}
fn fill(v: &mut Vec<u8>) {
let cap = v.capacity();
// weak randomness is plenty; we just want to not be completely friendly to the branch predictor
let mut r = rand::rngs::SmallRng::from_entropy();
while v.len() < cap {
v.push(r.gen::<u8>());
}
}
const BYTE_SIZES: [usize; 5] = [3, 50, 100, 500, 3 * 1024];
// Benchmarks over these byte sizes take longer so we will run fewer samples to
// keep the benchmark runtime reasonable.
const LARGE_BYTE_SIZES: [usize; 3] = [3 * 1024 * 1024, 10 * 1024 * 1024, 30 * 1024 * 1024];
fn encode_benchmarks(byte_sizes: &[usize]) -> ParameterizedBenchmark<usize> {
ParameterizedBenchmark::new("encode", do_encode_bench, byte_sizes.iter().cloned())
.warm_up_time(std::time::Duration::from_millis(500))
.measurement_time(std::time::Duration::from_secs(3))
.throughput(|s| Throughput::Bytes(*s as u64))
.with_function("encode_display", do_encode_bench_display)
.with_function("encode_reuse_buf", do_encode_bench_reuse_buf)
.with_function("encode_slice", do_encode_bench_slice)
.with_function("encode_reuse_buf_stream", do_encode_bench_stream)
.with_function("encode_string_stream", do_encode_bench_string_stream)
.with_function(
"encode_string_reuse_buf_stream",
do_encode_bench_string_reuse_buf_stream,
)
}
fn decode_benchmarks(byte_sizes: &[usize]) -> ParameterizedBenchmark<usize> {
ParameterizedBenchmark::new("decode", do_decode_bench, byte_sizes.iter().cloned())
.warm_up_time(std::time::Duration::from_millis(500))
.measurement_time(std::time::Duration::from_secs(3))
.throughput(|s| Throughput::Bytes(*s as u64))
.with_function("decode_reuse_buf", do_decode_bench_reuse_buf)
.with_function("decode_slice", do_decode_bench_slice)
.with_function("decode_stream", do_decode_bench_stream)
}
fn bench(c: &mut Criterion) {
c.bench("bench_small_input", encode_benchmarks(&BYTE_SIZES[..]));
c.bench(
"bench_large_input",
encode_benchmarks(&LARGE_BYTE_SIZES[..]).sample_size(10),
);
c.bench("bench_small_input", decode_benchmarks(&BYTE_SIZES[..]));
c.bench(
"bench_large_input",
decode_benchmarks(&LARGE_BYTE_SIZES[..]).sample_size(10),
);
}
criterion_group!(benches, bench);
criterion_main!(benches);

View File

@ -0,0 +1,89 @@
use std::fs::File;
use std::io::{self, Read};
use std::path::PathBuf;
use std::process;
use std::str::FromStr;
use base64::{read, write};
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
enum CharacterSet {
Standard,
UrlSafe,
}
impl Default for CharacterSet {
fn default() -> Self {
CharacterSet::Standard
}
}
impl Into<base64::Config> for CharacterSet {
fn into(self) -> base64::Config {
match self {
CharacterSet::Standard => base64::STANDARD,
CharacterSet::UrlSafe => base64::URL_SAFE,
}
}
}
impl FromStr for CharacterSet {
type Err = String;
fn from_str(s: &str) -> Result<CharacterSet, String> {
match s {
"standard" => Ok(CharacterSet::Standard),
"urlsafe" => Ok(CharacterSet::UrlSafe),
_ => Err(format!("charset '{}' unrecognized", s)),
}
}
}
/// Base64 encode or decode FILE (or standard input), to standard output.
#[derive(Debug, StructOpt)]
struct Opt {
/// decode data
#[structopt(short = "d", long = "decode")]
decode: bool,
/// The character set to choose. Defaults to the standard base64 character set.
/// Supported character sets include "standard" and "urlsafe".
#[structopt(long = "charset")]
charset: Option<CharacterSet>,
/// The file to encode/decode.
#[structopt(parse(from_os_str))]
file: Option<PathBuf>,
}
fn main() {
let opt = Opt::from_args();
let stdin;
let mut input: Box<dyn Read> = match opt.file {
None => {
stdin = io::stdin();
Box::new(stdin.lock())
}
Some(ref f) if f.as_os_str() == "-" => {
stdin = io::stdin();
Box::new(stdin.lock())
}
Some(f) => Box::new(File::open(f).unwrap()),
};
let config = opt.charset.unwrap_or_default().into();
let stdout = io::stdout();
let mut stdout = stdout.lock();
let r = if opt.decode {
let mut decoder = read::DecoderReader::new(&mut input, config);
io::copy(&mut decoder, &mut stdout)
} else {
let mut encoder = write::EncoderWriter::new(&mut stdout, config);
io::copy(&mut input, &mut encoder)
};
if let Err(e) = r {
eprintln!(
"Base64 {} failed with {}",
if opt.decode { "decode" } else { "encode" },
e
);
process::exit(1);
}
}

View File

@ -0,0 +1,179 @@
use std::collections::{HashMap, HashSet};
use std::iter::Iterator;
fn main() {
println!("pub const INVALID_VALUE: u8 = 255;");
// A-Z
let standard_alphabet: Vec<u8> = (0x41..0x5B)
// a-z
.chain(0x61..0x7B)
// 0-9
.chain(0x30..0x3A)
// +
.chain(0x2B..0x2C)
// /
.chain(0x2F..0x30)
.collect();
print_encode_table(&standard_alphabet, "STANDARD_ENCODE", 0);
print_decode_table(&standard_alphabet, "STANDARD_DECODE", 0);
// A-Z
let url_alphabet: Vec<u8> = (0x41..0x5B)
// a-z
.chain(0x61..0x7B)
// 0-9
.chain(0x30..0x3A)
// -
.chain(0x2D..0x2E)
// _
.chain(0x5F..0x60)
.collect();
print_encode_table(&url_alphabet, "URL_SAFE_ENCODE", 0);
print_decode_table(&url_alphabet, "URL_SAFE_DECODE", 0);
// ./0123456789
let crypt_alphabet: Vec<u8> = (b'.'..(b'9' + 1))
// A-Z
.chain(b'A'..(b'Z' + 1))
// a-z
.chain(b'a'..(b'z' + 1))
.collect();
print_encode_table(&crypt_alphabet, "CRYPT_ENCODE", 0);
print_decode_table(&crypt_alphabet, "CRYPT_DECODE", 0);
// ./
let bcrypt_alphabet: Vec<u8> = (b'.'..(b'/' + 1))
// A-Z
.chain(b'A'..(b'Z' + 1))
// a-z
.chain(b'a'..(b'z' + 1))
// 0-9
.chain(b'0'..(b'9' + 1))
.collect();
print_encode_table(&bcrypt_alphabet, "BCRYPT_ENCODE", 0);
print_decode_table(&bcrypt_alphabet, "BCRYPT_DECODE", 0);
// A-Z
let imap_alphabet: Vec<u8> = (0x41..0x5B)
// a-z
.chain(0x61..0x7B)
// 0-9
.chain(0x30..0x3A)
// +
.chain(0x2B..0x2C)
// ,
.chain(0x2C..0x2D)
.collect();
print_encode_table(&imap_alphabet, "IMAP_MUTF7_ENCODE", 0);
print_decode_table(&imap_alphabet, "IMAP_MUTF7_DECODE", 0);
// '!' - '-'
let binhex_alphabet: Vec<u8> = (0x21..0x2E)
// 0-9
.chain(0x30..0x3A)
// @-N
.chain(0x40..0x4F)
// P-V
.chain(0x50..0x57)
// X-[
.chain(0x58..0x5C)
// `-f
.chain(0x60..0x66)
// h-m
.chain(0x68..0x6E)
// p-r
.chain(0x70..0x73)
.collect();
print_encode_table(&binhex_alphabet, "BINHEX_ENCODE", 0);
print_decode_table(&binhex_alphabet, "BINHEX_DECODE", 0);
}
fn print_encode_table(alphabet: &[u8], const_name: &str, indent_depth: usize) {
check_alphabet(alphabet);
println!("#[rustfmt::skip]");
println!(
"{:width$}pub const {}: &[u8; 64] = &[",
"",
const_name,
width = indent_depth
);
for (i, b) in alphabet.iter().enumerate() {
println!(
"{:width$}{}, // input {} (0x{:X}) => '{}' (0x{:X})",
"",
b,
i,
i,
String::from_utf8(vec![*b as u8]).unwrap(),
b,
width = indent_depth + 4
);
}
println!("{:width$}];", "", width = indent_depth);
}
fn print_decode_table(alphabet: &[u8], const_name: &str, indent_depth: usize) {
check_alphabet(alphabet);
// map of alphabet bytes to 6-bit morsels
let mut input_to_morsel = HashMap::<u8, u8>::new();
// standard base64 alphabet bytes, in order
for (morsel, ascii_byte) in alphabet.iter().enumerate() {
// truncation cast is fine here
let _ = input_to_morsel.insert(*ascii_byte, morsel as u8);
}
println!("#[rustfmt::skip]");
println!(
"{:width$}pub const {}: &[u8; 256] = &[",
"",
const_name,
width = indent_depth
);
for ascii_byte in 0..256 {
let (value, comment) = match input_to_morsel.get(&(ascii_byte as u8)) {
None => (
"INVALID_VALUE".to_string(),
format!("input {} (0x{:X})", ascii_byte, ascii_byte),
),
Some(v) => (
format!("{}", *v),
format!(
"input {} (0x{:X} char '{}') => {} (0x{:X})",
ascii_byte,
ascii_byte,
String::from_utf8(vec![ascii_byte as u8]).unwrap(),
*v,
*v
),
),
};
println!(
"{:width$}{}, // {}",
"",
value,
comment,
width = indent_depth + 4
);
}
println!("{:width$}];", "", width = indent_depth);
}
fn check_alphabet(alphabet: &[u8]) {
// ensure all characters are distinct
assert_eq!(64, alphabet.len());
let mut set: HashSet<u8> = HashSet::new();
set.extend(alphabet);
assert_eq!(64, set.len());
// must be ASCII to be valid as single UTF-8 bytes
for &b in alphabet {
assert!(b <= 0x7F_u8);
// = is assumed to be padding, so cannot be used as a symbol
assert_ne!(b'=', b);
}
}

34
zeroidc/vendor/base64/icon_CLion.svg vendored Normal file
View File

@ -0,0 +1,34 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 128 128">
<defs>
<linearGradient id="linear-gradient" x1="40.69" y1="-676.56" x2="83.48" y2="-676.56" gradientTransform="matrix(1, 0, 0, -1, 0, -648.86)" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#ed358c"/>
<stop offset="0.16" stop-color="#e9388c"/>
<stop offset="0.3" stop-color="#de418c"/>
<stop offset="0.43" stop-color="#cc508c"/>
<stop offset="0.57" stop-color="#b2658d"/>
<stop offset="0.7" stop-color="#90808d"/>
<stop offset="0.83" stop-color="#67a18e"/>
<stop offset="0.95" stop-color="#37c78f"/>
<stop offset="1" stop-color="#22d88f"/>
</linearGradient>
<linearGradient id="linear-gradient-2" x1="32.58" y1="-665.27" x2="13.76" y2="-791.59" gradientTransform="matrix(1, 0, 0, -1, 0, -648.86)" gradientUnits="userSpaceOnUse">
<stop offset="0.09" stop-color="#22d88f"/>
<stop offset="0.9" stop-color="#029de0"/>
</linearGradient>
<linearGradient id="linear-gradient-3" x1="116.68" y1="-660.66" x2="-12.09" y2="-796.66" xlink:href="#linear-gradient-2"/>
<linearGradient id="linear-gradient-4" x1="73.35" y1="-739.1" x2="122.29" y2="-746.06" xlink:href="#linear-gradient-2"/>
</defs>
<title>icon_CLion</title>
<g>
<polygon points="49.2 51.8 40.6 55.4 48.4 0 77.8 16.2 49.2 51.8" fill="url(#linear-gradient)"/>
<polygon points="44.6 76.8 48.8 0 11.8 23.2 0 94 44.6 76.8" fill="url(#linear-gradient-2)"/>
<polygon points="125.4 38.4 109 4.8 77.8 16.2 55 41.4 0 94 41.6 124.4 93.6 77.2 125.4 38.4" fill="url(#linear-gradient-3)"/>
<polygon points="53.8 54.6 46.6 98.4 75.8 121 107.8 128 128 82.4 53.8 54.6" fill="url(#linear-gradient-4)"/>
</g>
<g>
<rect x="24" y="24" width="80" height="80"/>
<rect x="31.6" y="89" width="30" height="5" fill="#fff"/>
<path d="M31,51.2h0A16.83,16.83,0,0,1,48.2,34c6.2,0,10,2,13,5.2l-4.6,5.4c-2.6-2.4-5.2-3.8-8.4-3.8-5.6,0-9.6,4.6-9.6,10.4h0c0,5.6,4,10.4,9.6,10.4,3.8,0,6.2-1.6,8.8-3.8l4.6,4.6c-3.4,3.6-7.2,6-13.6,6A17,17,0,0,1,31,51.2" fill="#fff"/>
<path d="M66.6,34.4H74v27H88.4v6.2H66.6V34.4Z" fill="#fff"/>
</g>
</svg>


View File

@ -0,0 +1,247 @@
use crate::{
encode::{add_padding, encode_to_slice},
Config,
};
#[cfg(any(feature = "alloc", feature = "std", test))]
use alloc::string::String;
use core::cmp;
#[cfg(any(feature = "alloc", feature = "std", test))]
use core::str;
/// The output mechanism for ChunkedEncoder's encoded bytes.
pub trait Sink {
type Error;
/// Handle a chunk of encoded base64 data (as UTF-8 bytes)
fn write_encoded_bytes(&mut self, encoded: &[u8]) -> Result<(), Self::Error>;
}
const BUF_SIZE: usize = 1024;
/// A base64 encoder that emits encoded bytes in chunks without heap allocation.
pub struct ChunkedEncoder {
config: Config,
max_input_chunk_len: usize,
}
impl ChunkedEncoder {
pub fn new(config: Config) -> ChunkedEncoder {
ChunkedEncoder {
config,
max_input_chunk_len: max_input_length(BUF_SIZE, config),
}
}
pub fn encode<S: Sink>(&self, bytes: &[u8], sink: &mut S) -> Result<(), S::Error> {
let mut encode_buf: [u8; BUF_SIZE] = [0; BUF_SIZE];
let encode_table = self.config.char_set.encode_table();
let mut input_index = 0;
while input_index < bytes.len() {
// either the full input chunk size, or it's the last iteration
let input_chunk_len = cmp::min(self.max_input_chunk_len, bytes.len() - input_index);
let chunk = &bytes[input_index..(input_index + input_chunk_len)];
let mut b64_bytes_written = encode_to_slice(chunk, &mut encode_buf, encode_table);
input_index += input_chunk_len;
let more_input_left = input_index < bytes.len();
if self.config.pad && !more_input_left {
// no more input, add padding if needed. Buffer will have room because
// max_input_length leaves room for it.
b64_bytes_written += add_padding(bytes.len(), &mut encode_buf[b64_bytes_written..]);
}
sink.write_encoded_bytes(&encode_buf[0..b64_bytes_written])?;
}
Ok(())
}
}
/// Calculate the longest input that can be encoded for the given output buffer size.
///
/// If the config requires padding, two bytes of buffer space will be set aside so that the last
/// chunk of input can be encoded safely.
///
/// The input length will always be a multiple of 3 so that no encoding state has to be carried over
/// between chunks.
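///
/// For example, with a 1024 byte output buffer and padding enabled, 2 bytes are reserved,
/// leaving 1022 usable, and (1022 / 4) * 3 = 765 input bytes fit; without padding it is
/// (1024 / 4) * 3 = 768 (see the tests below).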
fn max_input_length(encoded_buf_len: usize, config: Config) -> usize {
let effective_buf_len = if config.pad {
// make room for padding
encoded_buf_len
.checked_sub(2)
.expect("Don't use a tiny buffer")
} else {
encoded_buf_len
};
// No padding, so just normal base64 expansion.
(effective_buf_len / 4) * 3
}
// A really simple sink that just appends to a string
#[cfg(any(feature = "alloc", feature = "std", test))]
pub(crate) struct StringSink<'a> {
string: &'a mut String,
}
#[cfg(any(feature = "alloc", feature = "std", test))]
impl<'a> StringSink<'a> {
pub(crate) fn new(s: &mut String) -> StringSink {
StringSink { string: s }
}
}
#[cfg(any(feature = "alloc", feature = "std", test))]
impl<'a> Sink for StringSink<'a> {
type Error = ();
fn write_encoded_bytes(&mut self, s: &[u8]) -> Result<(), Self::Error> {
self.string.push_str(str::from_utf8(s).unwrap());
Ok(())
}
}
#[cfg(test)]
pub mod tests {
use super::*;
use crate::{encode_config_buf, tests::random_config, CharacterSet, STANDARD};
use rand::{
distributions::{Distribution, Uniform},
FromEntropy, Rng,
};
#[test]
fn chunked_encode_empty() {
assert_eq!("", chunked_encode_str(&[], STANDARD));
}
#[test]
fn chunked_encode_intermediate_fast_loop() {
// > 8 bytes input, will enter the pretty fast loop
assert_eq!(
"Zm9vYmFyYmF6cXV4",
chunked_encode_str(b"foobarbazqux", STANDARD)
);
}
#[test]
fn chunked_encode_fast_loop() {
// > 32 bytes input, will enter the uber fast loop
assert_eq!(
"Zm9vYmFyYmF6cXV4cXV1eGNvcmdlZ3JhdWx0Z2FycGx5eg==",
chunked_encode_str(b"foobarbazquxquuxcorgegraultgarplyz", STANDARD)
);
}
#[test]
fn chunked_encode_slow_loop_only() {
// < 8 bytes input, slow loop only
assert_eq!("Zm9vYmFy", chunked_encode_str(b"foobar", STANDARD));
}
#[test]
fn chunked_encode_matches_normal_encode_random_string_sink() {
let helper = StringSinkTestHelper;
chunked_encode_matches_normal_encode_random(&helper);
}
#[test]
fn max_input_length_no_pad() {
let config = config_with_pad(false);
assert_eq!(768, max_input_length(1024, config));
}
#[test]
fn max_input_length_with_pad_decrements_one_triple() {
let config = config_with_pad(true);
assert_eq!(765, max_input_length(1024, config));
}
#[test]
fn max_input_length_with_pad_one_byte_short() {
let config = config_with_pad(true);
assert_eq!(765, max_input_length(1025, config));
}
#[test]
fn max_input_length_with_pad_fits_exactly() {
let config = config_with_pad(true);
assert_eq!(768, max_input_length(1026, config));
}
#[test]
fn max_input_length_cant_use_extra_single_encoded_byte() {
let config = Config::new(crate::CharacterSet::Standard, false);
assert_eq!(300, max_input_length(401, config));
}
pub fn chunked_encode_matches_normal_encode_random<S: SinkTestHelper>(sink_test_helper: &S) {
let mut input_buf: Vec<u8> = Vec::new();
let mut output_buf = String::new();
let mut rng = rand::rngs::SmallRng::from_entropy();
let input_len_range = Uniform::new(1, 10_000);
for _ in 0..5_000 {
input_buf.clear();
output_buf.clear();
let buf_len = input_len_range.sample(&mut rng);
for _ in 0..buf_len {
input_buf.push(rng.gen());
}
let config = random_config(&mut rng);
let chunk_encoded_string = sink_test_helper.encode_to_string(config, &input_buf);
encode_config_buf(&input_buf, config, &mut output_buf);
assert_eq!(
output_buf, chunk_encoded_string,
"input len={}, config: pad={}",
buf_len, config.pad
);
}
}
fn chunked_encode_str(bytes: &[u8], config: Config) -> String {
let mut s = String::new();
{
let mut sink = StringSink::new(&mut s);
let encoder = ChunkedEncoder::new(config);
encoder.encode(bytes, &mut sink).unwrap();
}
return s;
}
fn config_with_pad(pad: bool) -> Config {
Config::new(CharacterSet::Standard, pad)
}
// An abstraction around sinks so that we can have tests that apply easily to any sink implementation
pub trait SinkTestHelper {
fn encode_to_string(&self, config: Config, bytes: &[u8]) -> String;
}
struct StringSinkTestHelper;
impl SinkTestHelper for StringSinkTestHelper {
fn encode_to_string(&self, config: Config, bytes: &[u8]) -> String {
let encoder = ChunkedEncoder::new(config);
let mut s = String::new();
{
let mut sink = StringSink::new(&mut s);
encoder.encode(bytes, &mut sink).unwrap();
}
s
}
}
}

873
zeroidc/vendor/base64/src/decode.rs vendored Normal file
View File

@ -0,0 +1,873 @@
use crate::{tables, Config, PAD_BYTE};
#[cfg(any(feature = "alloc", feature = "std", test))]
use crate::STANDARD;
#[cfg(any(feature = "alloc", feature = "std", test))]
use alloc::vec::Vec;
use core::fmt;
#[cfg(any(feature = "std", test))]
use std::error;
// decode logic operates on chunks of 8 input bytes without padding
const INPUT_CHUNK_LEN: usize = 8;
const DECODED_CHUNK_LEN: usize = 6;
// we read a u64 and write a u64, but a u64 of input only yields 6 bytes of output, so the last
// 2 bytes of any output u64 should not be counted as written to (but must be available in a
// slice).
const DECODED_CHUNK_SUFFIX: usize = 2;
// how many u64's of input to handle at a time
const CHUNKS_PER_FAST_LOOP_BLOCK: usize = 4;
const INPUT_BLOCK_LEN: usize = CHUNKS_PER_FAST_LOOP_BLOCK * INPUT_CHUNK_LEN;
// includes the trailing 2 bytes for the final u64 write
const DECODED_BLOCK_LEN: usize =
CHUNKS_PER_FAST_LOOP_BLOCK * DECODED_CHUNK_LEN + DECODED_CHUNK_SUFFIX;
/// Errors that can occur while decoding.
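///
/// A sketch of how a failed decode surfaces (`%` is not in the standard alphabet, so the
/// offending byte and its offset are reported):
///
/// ```rust
/// assert_eq!(
///     base64::decode("aGVsbG8%"),
///     Err(base64::DecodeError::InvalidByte(7, b'%'))
/// );
/// ```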
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum DecodeError {
/// An invalid byte was found in the input. The offset and offending byte are provided.
InvalidByte(usize, u8),
/// The length of the input is invalid.
/// A typical cause of this is stray trailing whitespace or other separator bytes.
/// In the case where excess trailing bytes have produced an invalid length *and* the last byte
/// is also an invalid base64 symbol (as would be the case for whitespace, etc), `InvalidByte`
/// will be emitted instead of `InvalidLength` to make the issue easier to debug.
InvalidLength,
/// The last non-padding input symbol's encoded 6 bits have nonzero bits that will be discarded.
/// This is indicative of corrupted or truncated Base64.
/// Unlike InvalidByte, which reports symbols that aren't in the alphabet, this error is for
/// symbols that are in the alphabet but represent nonsensical encodings.
InvalidLastSymbol(usize, u8),
}
impl fmt::Display for DecodeError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
DecodeError::InvalidByte(index, byte) => {
write!(f, "Invalid byte {}, offset {}.", byte, index)
}
DecodeError::InvalidLength => write!(f, "Encoded text cannot have a 6-bit remainder."),
DecodeError::InvalidLastSymbol(index, byte) => {
write!(f, "Invalid last symbol {}, offset {}.", byte, index)
}
}
}
}
#[cfg(any(feature = "std", test))]
impl error::Error for DecodeError {
fn description(&self) -> &str {
match *self {
DecodeError::InvalidByte(_, _) => "invalid byte",
DecodeError::InvalidLength => "invalid length",
DecodeError::InvalidLastSymbol(_, _) => "invalid last symbol",
}
}
fn cause(&self) -> Option<&dyn error::Error> {
None
}
}
///Decode from string reference as octets.
///Returns a Result containing a Vec<u8>.
///Convenience `decode_config(input, base64::STANDARD);`.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
/// let bytes = base64::decode("aGVsbG8gd29ybGQ=").unwrap();
/// println!("{:?}", bytes);
///}
///```
#[cfg(any(feature = "alloc", feature = "std", test))]
pub fn decode<T: AsRef<[u8]>>(input: T) -> Result<Vec<u8>, DecodeError> {
decode_config(input, STANDARD)
}
///Decode from string reference as octets.
///Returns a Result containing a Vec<u8>.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
/// let bytes = base64::decode_config("aGVsbG8gd29ybGR+Cg==", base64::STANDARD).unwrap();
/// println!("{:?}", bytes);
///
/// let bytes_url = base64::decode_config("aGVsbG8gaW50ZXJuZXR-Cg==", base64::URL_SAFE).unwrap();
/// println!("{:?}", bytes_url);
///}
///```
#[cfg(any(feature = "alloc", feature = "std", test))]
pub fn decode_config<T: AsRef<[u8]>>(input: T, config: Config) -> Result<Vec<u8>, DecodeError> {
let mut buffer = Vec::<u8>::with_capacity(input.as_ref().len() * 4 / 3);
decode_config_buf(input, config, &mut buffer).map(|_| buffer)
}
///Decode from string reference as octets.
///Writes into the supplied buffer to avoid allocation.
///Returns a Result containing an empty tuple, aka ().
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
/// let mut buffer = Vec::<u8>::new();
/// base64::decode_config_buf("aGVsbG8gd29ybGR+Cg==", base64::STANDARD, &mut buffer).unwrap();
/// println!("{:?}", buffer);
///
/// buffer.clear();
///
/// base64::decode_config_buf("aGVsbG8gaW50ZXJuZXR-Cg==", base64::URL_SAFE, &mut buffer)
/// .unwrap();
/// println!("{:?}", buffer);
///}
///```
#[cfg(any(feature = "alloc", feature = "std", test))]
pub fn decode_config_buf<T: AsRef<[u8]>>(
input: T,
config: Config,
buffer: &mut Vec<u8>,
) -> Result<(), DecodeError> {
let input_bytes = input.as_ref();
let starting_output_len = buffer.len();
let num_chunks = num_chunks(input_bytes);
let decoded_len_estimate = num_chunks
.checked_mul(DECODED_CHUNK_LEN)
.and_then(|p| p.checked_add(starting_output_len))
.expect("Overflow when calculating output buffer length");
buffer.resize(decoded_len_estimate, 0);
let bytes_written;
{
let buffer_slice = &mut buffer.as_mut_slice()[starting_output_len..];
bytes_written = decode_helper(input_bytes, num_chunks, config, buffer_slice)?;
}
buffer.truncate(starting_output_len + bytes_written);
Ok(())
}
/// Decode the input into the provided output slice.
///
/// This will not write any bytes past exactly what is decoded (no stray garbage bytes at the end).
///
/// If you don't know ahead of time what the decoded length should be, size your buffer with a
/// conservative estimate for the decoded length of an input: 3 bytes of output for every 4 bytes of
/// input, rounded up, or in other words `(input_len + 3) / 4 * 3`.
///
/// If the slice is not large enough, this will panic.
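///
/// A usage sketch of the sizing rule above:
///
/// ```rust
/// // 4 input bytes need at most (4 + 3) / 4 * 3 = 3 output bytes.
/// let mut buf = [0_u8; 3];
/// let n = base64::decode_config_slice("aGk=", base64::STANDARD, &mut buf).unwrap();
/// assert_eq!(&buf[..n], b"hi");
/// ```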
pub fn decode_config_slice<T: AsRef<[u8]>>(
input: T,
config: Config,
output: &mut [u8],
) -> Result<usize, DecodeError> {
let input_bytes = input.as_ref();
decode_helper(input_bytes, num_chunks(input_bytes), config, output)
}
/// Return the number of input chunks (including a possibly partial final chunk) in the input
fn num_chunks(input: &[u8]) -> usize {
input
.len()
.checked_add(INPUT_CHUNK_LEN - 1)
.expect("Overflow when calculating number of chunks in input")
/ INPUT_CHUNK_LEN
}
/// Helper to avoid duplicating num_chunks calculation, which is costly on short inputs.
/// Returns the number of bytes written, or an error.
// We're on the fragile edge of compiler heuristics here. If this is not inlined, slow. If this is
// inlined(always), a different slow. plain ol' inline makes the benchmarks happiest at the moment,
// but this is fragile and the best setting changes with only minor code modifications.
#[inline]
fn decode_helper(
input: &[u8],
num_chunks: usize,
config: Config,
output: &mut [u8],
) -> Result<usize, DecodeError> {
let char_set = config.char_set;
let decode_table = char_set.decode_table();
let remainder_len = input.len() % INPUT_CHUNK_LEN;
// Because the fast decode loop writes in groups of 8 bytes (unrolled to
// CHUNKS_PER_FAST_LOOP_BLOCK times 8 bytes, where possible) and outputs 8 bytes at a time (of
// which only 6 are valid data), we need to be sure that we stop using the fast decode loop
// soon enough that there will always be 2 more bytes of valid data written after that loop.
let trailing_bytes_to_skip = match remainder_len {
// if input is a multiple of the chunk size, ignore the last chunk as it may have padding,
// and the fast decode logic cannot handle padding
0 => INPUT_CHUNK_LEN,
// 1 and 5 trailing bytes are illegal: can't decode 6 bits of input into a byte
1 | 5 => {
// trailing whitespace is so common that it's worth it to check the last byte to
// possibly return a better error message
if let Some(b) = input.last() {
if *b != PAD_BYTE && decode_table[*b as usize] == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(input.len() - 1, *b));
}
}
return Err(DecodeError::InvalidLength);
}
// This will decode to one output byte, which isn't enough to overwrite the 2 extra bytes
// written by the fast decode loop. So, we have to ignore both these 2 bytes and the
// previous chunk.
2 => INPUT_CHUNK_LEN + 2,
// If this is 3 unpadded chars, then it would actually decode to 2 bytes. However, if this
// is an erroneous 2 chars + 1 pad char that would decode to 1 byte, then it should fail
// with an error, not panic from going past the bounds of the output slice, so we let it
// use stage 3 + 4.
3 => INPUT_CHUNK_LEN + 3,
// This can also decode to one output byte because it may be 2 input chars + 2 padding
// chars, which would decode to 1 byte.
4 => INPUT_CHUNK_LEN + 4,
// Everything else is a legal decode len (given that we don't require padding), and will
// decode to at least 2 bytes of output.
_ => remainder_len,
};
// rounded up to include partial chunks
let mut remaining_chunks = num_chunks;
let mut input_index = 0;
let mut output_index = 0;
{
let length_of_fast_decode_chunks = input.len().saturating_sub(trailing_bytes_to_skip);
// Fast loop, stage 1
// manual unroll to CHUNKS_PER_FAST_LOOP_BLOCK of u64s to amortize slice bounds checks
if let Some(max_start_index) = length_of_fast_decode_chunks.checked_sub(INPUT_BLOCK_LEN) {
while input_index <= max_start_index {
let input_slice = &input[input_index..(input_index + INPUT_BLOCK_LEN)];
let output_slice = &mut output[output_index..(output_index + DECODED_BLOCK_LEN)];
decode_chunk(
&input_slice[0..],
input_index,
decode_table,
&mut output_slice[0..],
)?;
decode_chunk(
&input_slice[8..],
input_index + 8,
decode_table,
&mut output_slice[6..],
)?;
decode_chunk(
&input_slice[16..],
input_index + 16,
decode_table,
&mut output_slice[12..],
)?;
decode_chunk(
&input_slice[24..],
input_index + 24,
decode_table,
&mut output_slice[18..],
)?;
input_index += INPUT_BLOCK_LEN;
output_index += DECODED_BLOCK_LEN - DECODED_CHUNK_SUFFIX;
remaining_chunks -= CHUNKS_PER_FAST_LOOP_BLOCK;
}
}
// Fast loop, stage 2 (aka still pretty fast loop)
// 8 bytes at a time for whatever we didn't do in stage 1.
if let Some(max_start_index) = length_of_fast_decode_chunks.checked_sub(INPUT_CHUNK_LEN) {
while input_index < max_start_index {
decode_chunk(
&input[input_index..(input_index + INPUT_CHUNK_LEN)],
input_index,
decode_table,
&mut output
[output_index..(output_index + DECODED_CHUNK_LEN + DECODED_CHUNK_SUFFIX)],
)?;
output_index += DECODED_CHUNK_LEN;
input_index += INPUT_CHUNK_LEN;
remaining_chunks -= 1;
}
}
}
// Stage 3
// If input length was such that a chunk had to be deferred until after the fast loop
// because decoding it would have produced 2 trailing bytes that wouldn't then be
// overwritten, we decode that chunk here. This way is slower but doesn't write the 2
// trailing bytes.
// However, we still need to avoid the last chunk (partial or complete) because it could
// have padding, so we always do 1 fewer to avoid the last chunk.
for _ in 1..remaining_chunks {
decode_chunk_precise(
&input[input_index..],
input_index,
decode_table,
&mut output[output_index..(output_index + DECODED_CHUNK_LEN)],
)?;
input_index += INPUT_CHUNK_LEN;
output_index += DECODED_CHUNK_LEN;
}
// always have one more (possibly partial) block of 8 input
debug_assert!(input.len() - input_index > 1 || input.is_empty());
debug_assert!(input.len() - input_index <= 8);
// Stage 4
// Finally, decode any leftovers that aren't a complete input block of 8 bytes.
// Use a u64 as a stack-resident 8 byte buffer.
let mut leftover_bits: u64 = 0;
let mut morsels_in_leftover = 0;
let mut padding_bytes = 0;
let mut first_padding_index: usize = 0;
let mut last_symbol = 0_u8;
let start_of_leftovers = input_index;
for (i, b) in input[start_of_leftovers..].iter().enumerate() {
// '=' padding
if *b == PAD_BYTE {
// There can be bad padding in a few ways:
// 1 - Padding with non-padding characters after it
// 2 - Padding after zero or one non-padding characters before it
// in the current quad.
// 3 - More than two characters of padding. If 3 or 4 padding chars
// are in the same quad, that implies it will be caught by #2.
// If it spreads from one quad to another, it will be caught by
// #2 in the second quad.
if i % 4 < 2 {
// Check for case #2.
let bad_padding_index = start_of_leftovers
+ if padding_bytes > 0 {
// If we've already seen padding, report the first padding index.
// This is to be consistent with the faster logic above: it will report an
// error on the first padding character (since it doesn't expect to see
// anything but actual encoded data).
first_padding_index
} else {
// haven't seen padding before, just use where we are now
i
};
return Err(DecodeError::InvalidByte(bad_padding_index, *b));
}
if padding_bytes == 0 {
first_padding_index = i;
}
padding_bytes += 1;
continue;
}
// Check for case #1.
// To make '=' handling consistent with the main loop, don't allow
// non-suffix '=' in trailing chunk either. Report error as first
// erroneous padding.
if padding_bytes > 0 {
return Err(DecodeError::InvalidByte(
start_of_leftovers + first_padding_index,
PAD_BYTE,
));
}
last_symbol = *b;
// can use up to 8 * 6 = 48 bits of the u64, if last chunk has no padding.
// To minimize shifts, pack the leftovers from left to right.
let shift = 64 - (morsels_in_leftover + 1) * 6;
// tables are all 256 elements, lookup with a u8 index always succeeds
let morsel = decode_table[*b as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(start_of_leftovers + i, *b));
}
leftover_bits |= (morsel as u64) << shift;
morsels_in_leftover += 1;
}
let leftover_bits_ready_to_append = match morsels_in_leftover {
0 => 0,
2 => 8,
3 => 16,
4 => 24,
6 => 32,
7 => 40,
8 => 48,
_ => unreachable!(
"Impossible: must only have 0 to 8 input bytes in last chunk, with no invalid lengths"
),
};
// if there are bits set outside the bits we care about, last symbol encodes trailing bits that
// will not be included in the output
let mask = !0 >> leftover_bits_ready_to_append;
if !config.decode_allow_trailing_bits && (leftover_bits & mask) != 0 {
// last morsel is at `morsels_in_leftover` - 1
return Err(DecodeError::InvalidLastSymbol(
start_of_leftovers + morsels_in_leftover - 1,
last_symbol,
));
}
let mut leftover_bits_appended_to_buf = 0;
while leftover_bits_appended_to_buf < leftover_bits_ready_to_append {
// `as` simply truncates the higher bits, which is what we want here
let selected_bits = (leftover_bits >> (56 - leftover_bits_appended_to_buf)) as u8;
output[output_index] = selected_bits;
output_index += 1;
leftover_bits_appended_to_buf += 8;
}
Ok(output_index)
}
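#[cfg(test)]
mod padding_rules_sketch {
    // Editor's illustrative sketch, not part of the upstream crate: the padding cases described
    // in the stage 4 comments above are observable through the public decode API.
    use super::*;

    #[test]
    fn padding_cases_from_comments() {
        // case #1: padding followed by more encoded data is reported at the padding byte
        assert_eq!(
            Err(DecodeError::InvalidByte(2, b'=')),
            decode_config("AA=A", STANDARD)
        );
        // case #2: padding after fewer than two symbols in the current quad is rejected
        assert_eq!(
            Err(DecodeError::InvalidByte(1, b'=')),
            decode_config("A===", STANDARD)
        );
        // well-formed trailing padding decodes normally
        assert_eq!(Ok(vec![0xff]), decode_config("/w==", STANDARD));
    }
}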
#[inline]
fn write_u64(output: &mut [u8], value: u64) {
output[..8].copy_from_slice(&value.to_be_bytes());
}
/// Decode 8 bytes of input into 6 bytes of output. 8 bytes of output will be written, but only the
/// first 6 of those contain meaningful data.
///
/// `input` is the bytes to decode, of which the first 8 bytes will be processed.
/// `index_at_start_of_input` is the offset in the overall input (used for reporting errors
/// accurately)
/// `decode_table` is the lookup table for the particular base64 alphabet.
/// `output` will have its first 8 bytes overwritten, of which only the first 6 are valid decoded
/// data.
// yes, really inline (worth 30-50% speedup)
#[inline(always)]
fn decode_chunk(
input: &[u8],
index_at_start_of_input: usize,
decode_table: &[u8; 256],
output: &mut [u8],
) -> Result<(), DecodeError> {
let mut accum: u64;
let morsel = decode_table[input[0] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(index_at_start_of_input, input[0]));
}
accum = (morsel as u64) << 58;
let morsel = decode_table[input[1] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 1,
input[1],
));
}
accum |= (morsel as u64) << 52;
let morsel = decode_table[input[2] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 2,
input[2],
));
}
accum |= (morsel as u64) << 46;
let morsel = decode_table[input[3] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 3,
input[3],
));
}
accum |= (morsel as u64) << 40;
let morsel = decode_table[input[4] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 4,
input[4],
));
}
accum |= (morsel as u64) << 34;
let morsel = decode_table[input[5] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 5,
input[5],
));
}
accum |= (morsel as u64) << 28;
let morsel = decode_table[input[6] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 6,
input[6],
));
}
accum |= (morsel as u64) << 22;
let morsel = decode_table[input[7] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 7,
input[7],
));
}
accum |= (morsel as u64) << 16;
write_u64(output, accum);
Ok(())
}
/// Decode an 8-byte chunk, but only write the 6 bytes actually decoded instead of including 2
/// trailing garbage bytes.
#[inline]
fn decode_chunk_precise(
input: &[u8],
index_at_start_of_input: usize,
decode_table: &[u8; 256],
output: &mut [u8],
) -> Result<(), DecodeError> {
let mut tmp_buf = [0_u8; 8];
decode_chunk(
input,
index_at_start_of_input,
decode_table,
&mut tmp_buf[..],
)?;
output[0..6].copy_from_slice(&tmp_buf[0..6]);
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{
encode::encode_config_buf,
encode::encode_config_slice,
tests::{assert_encode_sanity, random_config},
};
use rand::{
distributions::{Distribution, Uniform},
FromEntropy, Rng,
};
#[test]
fn decode_chunk_precise_writes_only_6_bytes() {
let input = b"Zm9vYmFy"; // "foobar"
let mut output = [0_u8, 1, 2, 3, 4, 5, 6, 7];
decode_chunk_precise(&input[..], 0, tables::STANDARD_DECODE, &mut output).unwrap();
assert_eq!(&vec![b'f', b'o', b'o', b'b', b'a', b'r', 6, 7], &output);
}
#[test]
fn decode_chunk_writes_8_bytes() {
let input = b"Zm9vYmFy"; // "foobar"
let mut output = [0_u8, 1, 2, 3, 4, 5, 6, 7];
decode_chunk(&input[..], 0, tables::STANDARD_DECODE, &mut output).unwrap();
assert_eq!(&vec![b'f', b'o', b'o', b'b', b'a', b'r', 0, 0], &output);
}
#[test]
fn decode_into_nonempty_vec_doesnt_clobber_existing_prefix() {
let mut orig_data = Vec::new();
let mut encoded_data = String::new();
let mut decoded_with_prefix = Vec::new();
let mut decoded_without_prefix = Vec::new();
let mut prefix = Vec::new();
let prefix_len_range = Uniform::new(0, 1000);
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
orig_data.clear();
encoded_data.clear();
decoded_with_prefix.clear();
decoded_without_prefix.clear();
prefix.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
let config = random_config(&mut rng);
encode_config_buf(&orig_data, config, &mut encoded_data);
assert_encode_sanity(&encoded_data, config, input_len);
let prefix_len = prefix_len_range.sample(&mut rng);
// fill the buf with a prefix
for _ in 0..prefix_len {
prefix.push(rng.gen());
}
decoded_with_prefix.resize(prefix_len, 0);
decoded_with_prefix.copy_from_slice(&prefix);
// decode into the non-empty buf
decode_config_buf(&encoded_data, config, &mut decoded_with_prefix).unwrap();
// also decode into the empty buf
decode_config_buf(&encoded_data, config, &mut decoded_without_prefix).unwrap();
assert_eq!(
prefix_len + decoded_without_prefix.len(),
decoded_with_prefix.len()
);
assert_eq!(orig_data, decoded_without_prefix);
// append plain decode onto prefix
prefix.append(&mut decoded_without_prefix);
assert_eq!(prefix, decoded_with_prefix);
}
}
#[test]
fn decode_into_slice_doesnt_clobber_existing_prefix_or_suffix() {
let mut orig_data = Vec::new();
let mut encoded_data = String::new();
let mut decode_buf = Vec::new();
let mut decode_buf_copy: Vec<u8> = Vec::new();
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
orig_data.clear();
encoded_data.clear();
decode_buf.clear();
decode_buf_copy.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
let config = random_config(&mut rng);
encode_config_buf(&orig_data, config, &mut encoded_data);
assert_encode_sanity(&encoded_data, config, input_len);
// fill the buffer with random garbage, long enough to have some room before and after
for _ in 0..5000 {
decode_buf.push(rng.gen());
}
// keep a copy for later comparison
decode_buf_copy.extend(decode_buf.iter());
let offset = 1000;
// decode into the non-empty buf
let decode_bytes_written =
decode_config_slice(&encoded_data, config, &mut decode_buf[offset..]).unwrap();
assert_eq!(orig_data.len(), decode_bytes_written);
assert_eq!(
orig_data,
&decode_buf[offset..(offset + decode_bytes_written)]
);
assert_eq!(&decode_buf_copy[0..offset], &decode_buf[0..offset]);
assert_eq!(
&decode_buf_copy[offset + decode_bytes_written..],
&decode_buf[offset + decode_bytes_written..]
);
}
}
#[test]
fn decode_into_slice_fits_in_precisely_sized_slice() {
let mut orig_data = Vec::new();
let mut encoded_data = String::new();
let mut decode_buf = Vec::new();
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
orig_data.clear();
encoded_data.clear();
decode_buf.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
let config = random_config(&mut rng);
encode_config_buf(&orig_data, config, &mut encoded_data);
assert_encode_sanity(&encoded_data, config, input_len);
decode_buf.resize(input_len, 0);
// decode into the non-empty buf
let decode_bytes_written =
decode_config_slice(&encoded_data, config, &mut decode_buf[..]).unwrap();
assert_eq!(orig_data.len(), decode_bytes_written);
assert_eq!(orig_data, decode_buf);
}
}
#[test]
fn detect_invalid_last_symbol_two_bytes() {
let decode =
|input, forgiving| decode_config(input, STANDARD.decode_allow_trailing_bits(forgiving));
// example from https://github.com/marshallpierce/rust-base64/issues/75
assert!(decode("iYU=", false).is_ok());
// trailing 01
assert_eq!(
Err(DecodeError::InvalidLastSymbol(2, b'V')),
decode("iYV=", false)
);
assert_eq!(Ok(vec![137, 133]), decode("iYV=", true));
// trailing 10
assert_eq!(
Err(DecodeError::InvalidLastSymbol(2, b'W')),
decode("iYW=", false)
);
        assert_eq!(Ok(vec![137, 133]), decode("iYW=", true));
// trailing 11
assert_eq!(
Err(DecodeError::InvalidLastSymbol(2, b'X')),
decode("iYX=", false)
);
        assert_eq!(Ok(vec![137, 133]), decode("iYX=", true));
// also works when there are 2 quads in the last block
assert_eq!(
Err(DecodeError::InvalidLastSymbol(6, b'X')),
decode("AAAAiYX=", false)
);
assert_eq!(Ok(vec![0, 0, 0, 137, 133]), decode("AAAAiYX=", true));
}
#[test]
fn detect_invalid_last_symbol_one_byte() {
// 0xFF -> "/w==", so all letters > w, 0-9, and '+', '/' should get InvalidLastSymbol
assert!(decode("/w==").is_ok());
// trailing 01
assert_eq!(Err(DecodeError::InvalidLastSymbol(1, b'x')), decode("/x=="));
assert_eq!(Err(DecodeError::InvalidLastSymbol(1, b'z')), decode("/z=="));
assert_eq!(Err(DecodeError::InvalidLastSymbol(1, b'0')), decode("/0=="));
assert_eq!(Err(DecodeError::InvalidLastSymbol(1, b'9')), decode("/9=="));
assert_eq!(Err(DecodeError::InvalidLastSymbol(1, b'+')), decode("/+=="));
assert_eq!(Err(DecodeError::InvalidLastSymbol(1, b'/')), decode("//=="));
// also works when there are 2 quads in the last block
assert_eq!(
Err(DecodeError::InvalidLastSymbol(5, b'x')),
decode("AAAA/x==")
);
}
#[test]
fn detect_invalid_last_symbol_every_possible_three_symbols() {
let mut base64_to_bytes = ::std::collections::HashMap::new();
let mut bytes = [0_u8; 2];
for b1 in 0_u16..256 {
bytes[0] = b1 as u8;
for b2 in 0_u16..256 {
bytes[1] = b2 as u8;
let mut b64 = vec![0_u8; 4];
assert_eq!(4, encode_config_slice(&bytes, STANDARD, &mut b64[..]));
let mut v = ::std::vec::Vec::with_capacity(2);
v.extend_from_slice(&bytes[..]);
assert!(base64_to_bytes.insert(b64, v).is_none());
}
}
// every possible combination of symbols must either decode to 2 bytes or get InvalidLastSymbol
let mut symbols = [0_u8; 4];
for &s1 in STANDARD.char_set.encode_table().iter() {
symbols[0] = s1;
for &s2 in STANDARD.char_set.encode_table().iter() {
symbols[1] = s2;
for &s3 in STANDARD.char_set.encode_table().iter() {
symbols[2] = s3;
symbols[3] = PAD_BYTE;
match base64_to_bytes.get(&symbols[..]) {
Some(bytes) => {
assert_eq!(Ok(bytes.to_vec()), decode_config(&symbols, STANDARD))
}
None => assert_eq!(
Err(DecodeError::InvalidLastSymbol(2, s3)),
decode_config(&symbols[..], STANDARD)
),
}
}
}
}
}
#[test]
fn detect_invalid_last_symbol_every_possible_two_symbols() {
let mut base64_to_bytes = ::std::collections::HashMap::new();
for b in 0_u16..256 {
let mut b64 = vec![0_u8; 4];
assert_eq!(4, encode_config_slice(&[b as u8], STANDARD, &mut b64[..]));
let mut v = ::std::vec::Vec::with_capacity(1);
v.push(b as u8);
assert!(base64_to_bytes.insert(b64, v).is_none());
}
// every possible combination of symbols must either decode to 1 byte or get InvalidLastSymbol
let mut symbols = [0_u8; 4];
for &s1 in STANDARD.char_set.encode_table().iter() {
symbols[0] = s1;
for &s2 in STANDARD.char_set.encode_table().iter() {
symbols[1] = s2;
symbols[2] = PAD_BYTE;
symbols[3] = PAD_BYTE;
match base64_to_bytes.get(&symbols[..]) {
Some(bytes) => {
assert_eq!(Ok(bytes.to_vec()), decode_config(&symbols, STANDARD))
}
None => assert_eq!(
Err(DecodeError::InvalidLastSymbol(1, s2)),
decode_config(&symbols[..], STANDARD)
),
}
}
}
}
}

zeroidc/vendor/base64/src/display.rs vendored Normal file

@ -0,0 +1,88 @@
//! Enables base64'd output anywhere you might use a `Display` implementation, like a format string.
//!
//! ```
//! use base64::display::Base64Display;
//!
//! let data = vec![0x0, 0x1, 0x2, 0x3];
//! let wrapper = Base64Display::with_config(&data, base64::STANDARD);
//!
//! assert_eq!("base64: AAECAw==", format!("base64: {}", wrapper));
//! ```
use super::chunked_encoder::ChunkedEncoder;
use super::Config;
use core::fmt::{Display, Formatter};
use core::{fmt, str};
/// A convenience wrapper for base64'ing bytes into a format string without heap allocation.
pub struct Base64Display<'a> {
bytes: &'a [u8],
chunked_encoder: ChunkedEncoder,
}
impl<'a> Base64Display<'a> {
/// Create a `Base64Display` with the provided config.
pub fn with_config(bytes: &[u8], config: Config) -> Base64Display {
Base64Display {
bytes,
chunked_encoder: ChunkedEncoder::new(config),
}
}
}
impl<'a> Display for Base64Display<'a> {
fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
let mut sink = FormatterSink { f: formatter };
self.chunked_encoder.encode(self.bytes, &mut sink)
}
}
struct FormatterSink<'a, 'b: 'a> {
f: &'a mut Formatter<'b>,
}
impl<'a, 'b: 'a> super::chunked_encoder::Sink for FormatterSink<'a, 'b> {
type Error = fmt::Error;
fn write_encoded_bytes(&mut self, encoded: &[u8]) -> Result<(), Self::Error> {
// Avoid unsafe. If max performance is needed, write your own display wrapper that uses
// unsafe here to gain about 10-15%.
self.f
.write_str(str::from_utf8(encoded).expect("base64 data was not utf8"))
}
}
#[cfg(test)]
mod tests {
use super::super::chunked_encoder::tests::{
chunked_encode_matches_normal_encode_random, SinkTestHelper,
};
use super::super::*;
use super::*;
#[test]
fn basic_display() {
assert_eq!(
"~$Zm9vYmFy#*",
format!("~${}#*", Base64Display::with_config(b"foobar", STANDARD))
);
assert_eq!(
"~$Zm9vYmFyZg==#*",
format!("~${}#*", Base64Display::with_config(b"foobarf", STANDARD))
);
}
#[test]
fn display_encode_matches_normal_encode() {
let helper = DisplaySinkTestHelper;
chunked_encode_matches_normal_encode_random(&helper);
}
struct DisplaySinkTestHelper;
impl SinkTestHelper for DisplaySinkTestHelper {
fn encode_to_string(&self, config: Config, bytes: &[u8]) -> String {
format!("{}", Base64Display::with_config(bytes, config))
}
}
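    // Editor's illustrative sketch, not an upstream test: any `Config` works with the wrapper;
    // the URL-safe alphabet swaps '+' and '/' for '-' and '_'.
    #[test]
    fn url_safe_config_sketch() {
        assert_eq!(
            "-_8=",
            format!("{}", Base64Display::with_config(&[0xFB_u8, 0xFF], URL_SAFE))
        );
    }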
}

zeroidc/vendor/base64/src/encode.rs vendored Normal file

@ -0,0 +1,675 @@
use crate::{Config, PAD_BYTE};
#[cfg(any(feature = "alloc", feature = "std", test))]
use crate::{chunked_encoder, STANDARD};
#[cfg(any(feature = "alloc", feature = "std", test))]
use alloc::{string::String, vec};
use core::convert::TryInto;
///Encode arbitrary octets as base64.
///Returns a String.
///Convenience for `encode_config(input, base64::STANDARD);`.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
/// let b64 = base64::encode(b"hello world");
/// println!("{}", b64);
///}
///```
#[cfg(any(feature = "alloc", feature = "std", test))]
pub fn encode<T: AsRef<[u8]>>(input: T) -> String {
encode_config(input, STANDARD)
}
///Encode arbitrary octets as base64.
///Returns a String.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
/// let b64 = base64::encode_config(b"hello world~", base64::STANDARD);
/// println!("{}", b64);
///
/// let b64_url = base64::encode_config(b"hello internet~", base64::URL_SAFE);
/// println!("{}", b64_url);
///}
///```
#[cfg(any(feature = "alloc", feature = "std", test))]
pub fn encode_config<T: AsRef<[u8]>>(input: T, config: Config) -> String {
let mut buf = match encoded_size(input.as_ref().len(), config) {
Some(n) => vec![0; n],
None => panic!("integer overflow when calculating buffer size"),
};
encode_with_padding(input.as_ref(), config, buf.len(), &mut buf[..]);
String::from_utf8(buf).expect("Invalid UTF8")
}
///Encode arbitrary octets as base64.
///Writes into the supplied output buffer, growing it if needed.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
/// let mut buf = String::new();
/// base64::encode_config_buf(b"hello world~", base64::STANDARD, &mut buf);
/// println!("{}", buf);
///
/// buf.clear();
/// base64::encode_config_buf(b"hello internet~", base64::URL_SAFE, &mut buf);
/// println!("{}", buf);
///}
///```
#[cfg(any(feature = "alloc", feature = "std", test))]
pub fn encode_config_buf<T: AsRef<[u8]>>(input: T, config: Config, buf: &mut String) {
let input_bytes = input.as_ref();
{
let mut sink = chunked_encoder::StringSink::new(buf);
let encoder = chunked_encoder::ChunkedEncoder::new(config);
encoder
.encode(input_bytes, &mut sink)
.expect("Writing to a String shouldn't fail")
}
}
/// Encode arbitrary octets as base64.
/// Writes into the supplied output buffer.
///
/// This is useful if you wish to avoid allocation entirely (e.g. encoding into a stack-resident
/// or statically-allocated buffer).
///
/// # Panics
///
/// If `output` is too small to hold the encoded version of `input`, a panic will result.
///
/// # Example
///
/// ```rust
/// extern crate base64;
///
/// fn main() {
/// let s = b"hello internet!";
/// let mut buf = Vec::new();
/// // make sure we'll have a slice big enough for base64 + padding
/// buf.resize(s.len() * 4 / 3 + 4, 0);
///
/// let bytes_written = base64::encode_config_slice(s,
/// base64::STANDARD, &mut buf);
///
/// // shorten our vec down to just what was written
/// buf.resize(bytes_written, 0);
///
/// assert_eq!(s, base64::decode(&buf).unwrap().as_slice());
/// }
/// ```
pub fn encode_config_slice<T: AsRef<[u8]>>(input: T, config: Config, output: &mut [u8]) -> usize {
let input_bytes = input.as_ref();
let encoded_size = encoded_size(input_bytes.len(), config)
.expect("usize overflow when calculating buffer size");
let mut b64_output = &mut output[0..encoded_size];
encode_with_padding(&input_bytes, config, encoded_size, &mut b64_output);
encoded_size
}
/// B64-encode and pad (if configured).
///
/// This helper exists to avoid recalculating encoded_size, which is relatively expensive on short
/// inputs.
///
/// `encoded_size` is the encoded size calculated for `input`.
///
/// `output` must be of size `encoded_size`.
///
/// All bytes in `output` will be written to since it is exactly the size of the output.
fn encode_with_padding(input: &[u8], config: Config, encoded_size: usize, output: &mut [u8]) {
debug_assert_eq!(encoded_size, output.len());
let b64_bytes_written = encode_to_slice(input, output, config.char_set.encode_table());
let padding_bytes = if config.pad {
add_padding(input.len(), &mut output[b64_bytes_written..])
} else {
0
};
let encoded_bytes = b64_bytes_written
.checked_add(padding_bytes)
.expect("usize overflow when calculating b64 length");
debug_assert_eq!(encoded_size, encoded_bytes);
}
#[inline]
fn read_u64(s: &[u8]) -> u64 {
u64::from_be_bytes(s[..8].try_into().unwrap())
}
/// Encode input bytes to utf8 base64 bytes. Does not pad.
/// `output` must be long enough to hold the encoded `input` without padding.
/// Returns the number of bytes written.
#[inline]
pub fn encode_to_slice(input: &[u8], output: &mut [u8], encode_table: &[u8; 64]) -> usize {
let mut input_index: usize = 0;
const BLOCKS_PER_FAST_LOOP: usize = 4;
const LOW_SIX_BITS: u64 = 0x3F;
// we read 8 bytes at a time (u64) but only actually consume 6 of those bytes. Thus, we need
    // 2 trailing bytes to be available to read.
let last_fast_index = input.len().saturating_sub(BLOCKS_PER_FAST_LOOP * 6 + 2);
let mut output_index = 0;
if last_fast_index > 0 {
while input_index <= last_fast_index {
// Major performance wins from letting the optimizer do the bounds check once, mostly
// on the output side
let input_chunk = &input[input_index..(input_index + (BLOCKS_PER_FAST_LOOP * 6 + 2))];
let output_chunk = &mut output[output_index..(output_index + BLOCKS_PER_FAST_LOOP * 8)];
            // Hand-unrolling for 32 vs 16 or 8 bytes yields performance about equivalent
// to unsafe pointer code on a Xeon E5-1650v3. 64 byte unrolling was slightly better for
// large inputs but significantly worse for 50-byte input, unsurprisingly. I suspect
// that it's a not uncommon use case to encode smallish chunks of data (e.g. a 64-byte
// SHA-512 digest), so it would be nice if that fit in the unrolled loop at least once.
// Plus, single-digit percentage performance differences might well be quite different
// on different hardware.
let input_u64 = read_u64(&input_chunk[0..]);
output_chunk[0] = encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
output_chunk[1] = encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
output_chunk[2] = encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
output_chunk[3] = encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
output_chunk[4] = encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
output_chunk[5] = encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
output_chunk[6] = encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
output_chunk[7] = encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];
let input_u64 = read_u64(&input_chunk[6..]);
output_chunk[8] = encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
output_chunk[9] = encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
output_chunk[10] = encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
output_chunk[11] = encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
output_chunk[12] = encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
output_chunk[13] = encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
output_chunk[14] = encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
output_chunk[15] = encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];
let input_u64 = read_u64(&input_chunk[12..]);
output_chunk[16] = encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
output_chunk[17] = encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
output_chunk[18] = encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
output_chunk[19] = encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
output_chunk[20] = encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
output_chunk[21] = encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
output_chunk[22] = encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
output_chunk[23] = encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];
let input_u64 = read_u64(&input_chunk[18..]);
output_chunk[24] = encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
output_chunk[25] = encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
output_chunk[26] = encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
output_chunk[27] = encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
output_chunk[28] = encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
output_chunk[29] = encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
output_chunk[30] = encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
output_chunk[31] = encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];
output_index += BLOCKS_PER_FAST_LOOP * 8;
input_index += BLOCKS_PER_FAST_LOOP * 6;
}
}
// Encode what's left after the fast loop.
const LOW_SIX_BITS_U8: u8 = 0x3F;
let rem = input.len() % 3;
let start_of_rem = input.len() - rem;
// start at the first index not handled by fast loop, which may be 0.
while input_index < start_of_rem {
let input_chunk = &input[input_index..(input_index + 3)];
let output_chunk = &mut output[output_index..(output_index + 4)];
output_chunk[0] = encode_table[(input_chunk[0] >> 2) as usize];
output_chunk[1] =
encode_table[((input_chunk[0] << 4 | input_chunk[1] >> 4) & LOW_SIX_BITS_U8) as usize];
output_chunk[2] =
encode_table[((input_chunk[1] << 2 | input_chunk[2] >> 6) & LOW_SIX_BITS_U8) as usize];
output_chunk[3] = encode_table[(input_chunk[2] & LOW_SIX_BITS_U8) as usize];
input_index += 3;
output_index += 4;
}
if rem == 2 {
output[output_index] = encode_table[(input[start_of_rem] >> 2) as usize];
output[output_index + 1] = encode_table[((input[start_of_rem] << 4
| input[start_of_rem + 1] >> 4)
& LOW_SIX_BITS_U8) as usize];
output[output_index + 2] =
encode_table[((input[start_of_rem + 1] << 2) & LOW_SIX_BITS_U8) as usize];
output_index += 3;
} else if rem == 1 {
output[output_index] = encode_table[(input[start_of_rem] >> 2) as usize];
output[output_index + 1] =
encode_table[((input[start_of_rem] << 4) & LOW_SIX_BITS_U8) as usize];
output_index += 2;
}
output_index
}
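// Editor's illustrative sketch, not part of the upstream crate: `encode_to_slice` emits unpadded
// base64 using whichever encode table it is handed.
#[cfg(test)]
mod encode_to_slice_sketch {
    use super::*;

    #[test]
    fn three_bytes_become_four_symbols() {
        let mut out = [0_u8; 4];
        let written = encode_to_slice(b"foo", &mut out, STANDARD.char_set.encode_table());
        assert_eq!(4, written);
        assert_eq!(b"Zm9v", &out);
    }
}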
/// Calculate the base64 encoded string size, including padding if appropriate.
pub fn encoded_size(bytes_len: usize, config: Config) -> Option<usize> {
let rem = bytes_len % 3;
let complete_input_chunks = bytes_len / 3;
let complete_chunk_output = complete_input_chunks.checked_mul(4);
if rem > 0 {
if config.pad {
complete_chunk_output.and_then(|c| c.checked_add(4))
} else {
let encoded_rem = match rem {
1 => 2,
2 => 3,
_ => unreachable!("Impossible remainder"),
};
complete_chunk_output.and_then(|c| c.checked_add(encoded_rem))
}
} else {
complete_chunk_output
}
}
/// Write padding characters.
/// `output` is the slice where padding should be written, of length at least 2.
///
/// Returns the number of padding bytes written.
pub fn add_padding(input_len: usize, output: &mut [u8]) -> usize {
let rem = input_len % 3;
let mut bytes_written = 0;
for _ in 0..((3 - rem) % 3) {
output[bytes_written] = PAD_BYTE;
bytes_written += 1;
}
bytes_written
}
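// Editor's illustrative sketch, not part of the upstream crate: (3 - len % 3) % 3 padding bytes
// are written, so inputs of length 3k need none, 3k + 1 need two, and 3k + 2 need one.
#[cfg(test)]
mod add_padding_sketch {
    use super::*;

    #[test]
    fn padding_count_follows_input_length_mod_3() {
        let mut buf = [0_u8; 2];
        assert_eq!(0, add_padding(3, &mut buf));
        assert_eq!(2, add_padding(4, &mut buf));
        assert_eq!(1, add_padding(5, &mut buf));
        assert_eq!([b'=', b'='], buf);
    }
}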
#[cfg(test)]
mod tests {
use super::*;
use crate::{
decode::decode_config_buf,
tests::{assert_encode_sanity, random_config},
Config, STANDARD, URL_SAFE_NO_PAD,
};
use rand::{
distributions::{Distribution, Uniform},
FromEntropy, Rng,
};
use std;
use std::str;
#[test]
fn encoded_size_correct_standard() {
assert_encoded_length(0, 0, STANDARD);
assert_encoded_length(1, 4, STANDARD);
assert_encoded_length(2, 4, STANDARD);
assert_encoded_length(3, 4, STANDARD);
assert_encoded_length(4, 8, STANDARD);
assert_encoded_length(5, 8, STANDARD);
assert_encoded_length(6, 8, STANDARD);
assert_encoded_length(7, 12, STANDARD);
assert_encoded_length(8, 12, STANDARD);
assert_encoded_length(9, 12, STANDARD);
assert_encoded_length(54, 72, STANDARD);
assert_encoded_length(55, 76, STANDARD);
assert_encoded_length(56, 76, STANDARD);
assert_encoded_length(57, 76, STANDARD);
assert_encoded_length(58, 80, STANDARD);
}
#[test]
fn encoded_size_correct_no_pad() {
assert_encoded_length(0, 0, URL_SAFE_NO_PAD);
assert_encoded_length(1, 2, URL_SAFE_NO_PAD);
assert_encoded_length(2, 3, URL_SAFE_NO_PAD);
assert_encoded_length(3, 4, URL_SAFE_NO_PAD);
assert_encoded_length(4, 6, URL_SAFE_NO_PAD);
assert_encoded_length(5, 7, URL_SAFE_NO_PAD);
assert_encoded_length(6, 8, URL_SAFE_NO_PAD);
assert_encoded_length(7, 10, URL_SAFE_NO_PAD);
assert_encoded_length(8, 11, URL_SAFE_NO_PAD);
assert_encoded_length(9, 12, URL_SAFE_NO_PAD);
assert_encoded_length(54, 72, URL_SAFE_NO_PAD);
assert_encoded_length(55, 74, URL_SAFE_NO_PAD);
assert_encoded_length(56, 75, URL_SAFE_NO_PAD);
assert_encoded_length(57, 76, URL_SAFE_NO_PAD);
assert_encoded_length(58, 78, URL_SAFE_NO_PAD);
}
#[test]
fn encoded_size_overflow() {
assert_eq!(None, encoded_size(std::usize::MAX, STANDARD));
}
#[test]
fn encode_config_buf_into_nonempty_buffer_doesnt_clobber_prefix() {
let mut orig_data = Vec::new();
let mut prefix = String::new();
let mut encoded_data_no_prefix = String::new();
let mut encoded_data_with_prefix = String::new();
let mut decoded = Vec::new();
let prefix_len_range = Uniform::new(0, 1000);
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
orig_data.clear();
prefix.clear();
encoded_data_no_prefix.clear();
encoded_data_with_prefix.clear();
decoded.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
let prefix_len = prefix_len_range.sample(&mut rng);
for _ in 0..prefix_len {
// getting convenient random single-byte printable chars that aren't base64 is
// annoying
prefix.push('#');
}
encoded_data_with_prefix.push_str(&prefix);
let config = random_config(&mut rng);
encode_config_buf(&orig_data, config, &mut encoded_data_no_prefix);
encode_config_buf(&orig_data, config, &mut encoded_data_with_prefix);
assert_eq!(
encoded_data_no_prefix.len() + prefix_len,
encoded_data_with_prefix.len()
);
assert_encode_sanity(&encoded_data_no_prefix, config, input_len);
assert_encode_sanity(&encoded_data_with_prefix[prefix_len..], config, input_len);
// append plain encode onto prefix
prefix.push_str(&mut encoded_data_no_prefix);
assert_eq!(prefix, encoded_data_with_prefix);
decode_config_buf(&encoded_data_no_prefix, config, &mut decoded).unwrap();
assert_eq!(orig_data, decoded);
}
}
#[test]
fn encode_config_slice_into_nonempty_buffer_doesnt_clobber_suffix() {
let mut orig_data = Vec::new();
let mut encoded_data = Vec::new();
let mut encoded_data_original_state = Vec::new();
let mut decoded = Vec::new();
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
orig_data.clear();
encoded_data.clear();
encoded_data_original_state.clear();
decoded.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
// plenty of existing garbage in the encoded buffer
for _ in 0..10 * input_len {
encoded_data.push(rng.gen());
}
encoded_data_original_state.extend_from_slice(&encoded_data);
let config = random_config(&mut rng);
let encoded_size = encoded_size(input_len, config).unwrap();
assert_eq!(
encoded_size,
encode_config_slice(&orig_data, config, &mut encoded_data)
);
assert_encode_sanity(
std::str::from_utf8(&encoded_data[0..encoded_size]).unwrap(),
config,
input_len,
);
assert_eq!(
&encoded_data[encoded_size..],
&encoded_data_original_state[encoded_size..]
);
decode_config_buf(&encoded_data[0..encoded_size], config, &mut decoded).unwrap();
assert_eq!(orig_data, decoded);
}
}
#[test]
fn encode_config_slice_fits_into_precisely_sized_slice() {
let mut orig_data = Vec::new();
let mut encoded_data = Vec::new();
let mut decoded = Vec::new();
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
orig_data.clear();
encoded_data.clear();
decoded.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
let config = random_config(&mut rng);
let encoded_size = encoded_size(input_len, config).unwrap();
encoded_data.resize(encoded_size, 0);
assert_eq!(
encoded_size,
encode_config_slice(&orig_data, config, &mut encoded_data)
);
assert_encode_sanity(
std::str::from_utf8(&encoded_data[0..encoded_size]).unwrap(),
config,
input_len,
);
decode_config_buf(&encoded_data[0..encoded_size], config, &mut decoded).unwrap();
assert_eq!(orig_data, decoded);
}
}
#[test]
fn encode_to_slice_random_valid_utf8() {
let mut input = Vec::new();
let mut output = Vec::new();
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
input.clear();
output.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
input.push(rng.gen());
}
let config = random_config(&mut rng);
// fill up the output buffer with garbage
let encoded_size = encoded_size(input_len, config).unwrap();
for _ in 0..encoded_size {
output.push(rng.gen());
}
let orig_output_buf = output.to_vec();
let bytes_written =
encode_to_slice(&input, &mut output, config.char_set.encode_table());
// make sure the part beyond bytes_written is the same garbage it was before
assert_eq!(orig_output_buf[bytes_written..], output[bytes_written..]);
// make sure the encoded bytes are UTF-8
let _ = str::from_utf8(&output[0..bytes_written]).unwrap();
}
}
#[test]
fn encode_with_padding_random_valid_utf8() {
let mut input = Vec::new();
let mut output = Vec::new();
let input_len_range = Uniform::new(0, 1000);
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..10_000 {
input.clear();
output.clear();
let input_len = input_len_range.sample(&mut rng);
for _ in 0..input_len {
input.push(rng.gen());
}
let config = random_config(&mut rng);
// fill up the output buffer with garbage
let encoded_size = encoded_size(input_len, config).unwrap();
for _ in 0..encoded_size + 1000 {
output.push(rng.gen());
}
let orig_output_buf = output.to_vec();
encode_with_padding(&input, config, encoded_size, &mut output[0..encoded_size]);
// make sure the part beyond b64 is the same garbage it was before
assert_eq!(orig_output_buf[encoded_size..], output[encoded_size..]);
// make sure the encoded bytes are UTF-8
let _ = str::from_utf8(&output[0..encoded_size]).unwrap();
}
}
#[test]
fn add_padding_random_valid_utf8() {
let mut output = Vec::new();
let mut rng = rand::rngs::SmallRng::from_entropy();
// cover our bases for length % 3
for input_len in 0..10 {
output.clear();
// fill output with random
for _ in 0..10 {
output.push(rng.gen());
}
let orig_output_buf = output.to_vec();
let bytes_written = add_padding(input_len, &mut output);
// make sure the part beyond bytes_written is the same garbage it was before
assert_eq!(orig_output_buf[bytes_written..], output[bytes_written..]);
// make sure the encoded bytes are UTF-8
let _ = str::from_utf8(&output[0..bytes_written]).unwrap();
}
}
fn assert_encoded_length(input_len: usize, encoded_len: usize, config: Config) {
assert_eq!(encoded_len, encoded_size(input_len, config).unwrap());
let mut bytes: Vec<u8> = Vec::new();
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..input_len {
bytes.push(rng.gen());
}
let encoded = encode_config(&bytes, config);
assert_encode_sanity(&encoded, config, input_len);
assert_eq!(encoded_len, encoded.len());
}
#[test]
fn encode_imap() {
assert_eq!(
encode_config(b"\xFB\xFF", crate::IMAP_MUTF7),
encode_config(b"\xFB\xFF", crate::STANDARD_NO_PAD).replace("/", ",")
);
}
}

zeroidc/vendor/base64/src/lib.rs vendored Normal file

@ -0,0 +1,245 @@
//! # Configs
//!
//! There isn't just one type of Base64; that would be too simple. You need to choose a character
//! set (standard, URL-safe, etc) and padding suffix (yes/no).
//! The `Config` struct encapsulates this info. There are some common configs included: `STANDARD`,
//! `URL_SAFE`, etc. You can also make your own `Config` if needed.
//!
//! The functions that don't have `config` in the name (e.g. `encode()` and `decode()`) use the
//! `STANDARD` config.
//!
//! The functions that write to a slice (the ones that end in `_slice`) are generally the fastest
//! because they don't need to resize anything. If it fits in your workflow and you care about
//! performance, keep using the same buffer (growing as need be) and use the `_slice` methods for
//! the best performance.
//!
//! # Encoding
//!
//! Several different encoding functions are available to you depending on your desire for
//! convenience vs performance.
//!
//! | Function | Output | Allocates |
//! | ----------------------- | ---------------------------- | ------------------------------ |
//! | `encode` | Returns a new `String` | Always |
//! | `encode_config` | Returns a new `String` | Always |
//! | `encode_config_buf` | Appends to provided `String` | Only if `String` needs to grow |
//! | `encode_config_slice` | Writes to provided `&[u8]` | Never |
//!
//! All of the encoding functions that take a `Config` will pad as per the config.
//!
//! # Decoding
//!
//! Just as for encoding, there are different decoding functions available.
//!
//! | Function | Output | Allocates |
//! | ----------------------- | ----------------------------- | ------------------------------ |
//! | `decode` | Returns a new `Vec<u8>` | Always |
//! | `decode_config` | Returns a new `Vec<u8>` | Always |
//! | `decode_config_buf` | Appends to provided `Vec<u8>` | Only if `Vec` needs to grow |
//! | `decode_config_slice` | Writes to provided `&[u8]` | Never |
//!
//! Unlike encoding, where all possible input is valid, decoding can fail (see `DecodeError`).
//!
//! Input can be invalid because it has invalid characters or invalid padding. (No padding at all is
//! valid, but excess padding is not.) Whitespace in the input is invalid.
//!
//! # `Read` and `Write`
//!
//! To map a `Read` of b64 bytes to the decoded bytes, wrap a reader (file, network socket, etc)
//! with `base64::read::DecoderReader`. To write raw bytes and have them b64 encoded on the fly,
//! wrap a writer with `base64::write::EncoderWriter`. There is some performance overhead (15% or
//! so) because of the necessary buffer shuffling -- still fast enough that almost nobody cares.
//! Also, these implementations do not heap allocate.
//!
//! # Panics
//!
//! If length calculations result in overflowing `usize`, a panic will result.
//!
//! The `_slice` flavors of encode or decode will panic if the provided output slice is too small.
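//!
//! # Example
//!
//! A minimal round-trip sketch using a custom `Config` (an illustrative addition to these docs,
//! not part of the upstream crate documentation):
//!
//! ```rust
//! let config = base64::Config::new(base64::CharacterSet::UrlSafe, false);
//! let b64 = base64::encode_config(b"hello internet~", config);
//! assert_eq!(
//!     b"hello internet~".to_vec(),
//!     base64::decode_config(&b64, config).unwrap()
//! );
//! ```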
#![cfg_attr(feature = "cargo-clippy", allow(clippy::cast_lossless))]
#![deny(
missing_docs,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_results,
variant_size_differences,
warnings
)]
#![forbid(unsafe_code)]
#![cfg_attr(not(any(feature = "std", test)), no_std)]
#[cfg(all(feature = "alloc", not(any(feature = "std", test))))]
extern crate alloc;
#[cfg(any(feature = "std", test))]
extern crate std as alloc;
mod chunked_encoder;
pub mod display;
#[cfg(any(feature = "std", test))]
pub mod read;
mod tables;
#[cfg(any(feature = "std", test))]
pub mod write;
mod encode;
pub use crate::encode::encode_config_slice;
#[cfg(any(feature = "alloc", feature = "std", test))]
pub use crate::encode::{encode, encode_config, encode_config_buf};
mod decode;
#[cfg(any(feature = "alloc", feature = "std", test))]
pub use crate::decode::{decode, decode_config, decode_config_buf};
pub use crate::decode::{decode_config_slice, DecodeError};
#[cfg(test)]
mod tests;
/// Available encoding character sets
#[derive(Clone, Copy, Debug)]
pub enum CharacterSet {
/// The standard character set (uses `+` and `/`).
///
/// See [RFC 3548](https://tools.ietf.org/html/rfc3548#section-3).
Standard,
/// The URL safe character set (uses `-` and `_`).
///
/// See [RFC 3548](https://tools.ietf.org/html/rfc3548#section-4).
UrlSafe,
/// The `crypt(3)` character set (uses `./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz`).
///
/// Not standardized, but folk wisdom on the net asserts that this alphabet is what crypt uses.
Crypt,
/// The bcrypt character set (uses `./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789`).
Bcrypt,
/// The character set used in IMAP-modified UTF-7 (uses `+` and `,`).
///
/// See [RFC 3501](https://tools.ietf.org/html/rfc3501#section-5.1.3)
ImapMutf7,
/// The character set used in BinHex 4.0 files.
///
/// See [BinHex 4.0 Definition](http://files.stairways.com/other/binhex-40-specs-info.txt)
BinHex,
}
impl CharacterSet {
fn encode_table(self) -> &'static [u8; 64] {
match self {
CharacterSet::Standard => tables::STANDARD_ENCODE,
CharacterSet::UrlSafe => tables::URL_SAFE_ENCODE,
CharacterSet::Crypt => tables::CRYPT_ENCODE,
CharacterSet::Bcrypt => tables::BCRYPT_ENCODE,
CharacterSet::ImapMutf7 => tables::IMAP_MUTF7_ENCODE,
CharacterSet::BinHex => tables::BINHEX_ENCODE,
}
}
fn decode_table(self) -> &'static [u8; 256] {
match self {
CharacterSet::Standard => tables::STANDARD_DECODE,
CharacterSet::UrlSafe => tables::URL_SAFE_DECODE,
CharacterSet::Crypt => tables::CRYPT_DECODE,
CharacterSet::Bcrypt => tables::BCRYPT_DECODE,
CharacterSet::ImapMutf7 => tables::IMAP_MUTF7_DECODE,
CharacterSet::BinHex => tables::BINHEX_DECODE,
}
}
}
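// Editor's illustrative sketch, not part of the upstream crate: the same bytes render differently
// per alphabet; 0xFB 0xFF uses symbols 62 and 63, exactly the characters that differ between the
// standard and URL-safe sets.
#[cfg(test)]
mod character_set_sketch {
    use super::*;

    #[test]
    fn standard_vs_url_safe_alphabet() {
        assert_eq!("+/8=", encode_config(&[0xFB_u8, 0xFF], STANDARD));
        assert_eq!("-_8=", encode_config(&[0xFB_u8, 0xFF], URL_SAFE));
    }
}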
/// Contains configuration parameters for base64 encoding
#[derive(Clone, Copy, Debug)]
pub struct Config {
/// Character set to use
char_set: CharacterSet,
/// True to pad output with `=` characters
pad: bool,
/// True to ignore excess nonzero bits in the last few symbols, otherwise an error is returned.
decode_allow_trailing_bits: bool,
}
impl Config {
/// Create a new `Config`.
pub const fn new(char_set: CharacterSet, pad: bool) -> Config {
Config {
char_set,
pad,
decode_allow_trailing_bits: false,
}
}
/// Sets whether to pad output with `=` characters.
pub const fn pad(self, pad: bool) -> Config {
Config { pad, ..self }
}
/// Sets whether to emit errors for nonzero trailing bits.
///
/// This is useful when implementing
/// [forgiving-base64 decode](https://infra.spec.whatwg.org/#forgiving-base64-decode).
pub const fn decode_allow_trailing_bits(self, allow: bool) -> Config {
Config {
decode_allow_trailing_bits: allow,
..self
}
}
}
/// Standard character set with padding.
pub const STANDARD: Config = Config {
char_set: CharacterSet::Standard,
pad: true,
decode_allow_trailing_bits: false,
};
/// Standard character set without padding.
pub const STANDARD_NO_PAD: Config = Config {
char_set: CharacterSet::Standard,
pad: false,
decode_allow_trailing_bits: false,
};
/// URL-safe character set with padding
pub const URL_SAFE: Config = Config {
char_set: CharacterSet::UrlSafe,
pad: true,
decode_allow_trailing_bits: false,
};
/// URL-safe character set without padding
pub const URL_SAFE_NO_PAD: Config = Config {
char_set: CharacterSet::UrlSafe,
pad: false,
decode_allow_trailing_bits: false,
};
/// As per `crypt(3)` requirements
pub const CRYPT: Config = Config {
char_set: CharacterSet::Crypt,
pad: false,
decode_allow_trailing_bits: false,
};
/// Bcrypt character set
pub const BCRYPT: Config = Config {
char_set: CharacterSet::Bcrypt,
pad: false,
decode_allow_trailing_bits: false,
};
/// IMAP modified UTF-7 requirements
pub const IMAP_MUTF7: Config = Config {
char_set: CharacterSet::ImapMutf7,
pad: false,
decode_allow_trailing_bits: false,
};
/// BinHex character set
pub const BINHEX: Config = Config {
char_set: CharacterSet::BinHex,
pad: false,
decode_allow_trailing_bits: false,
};
const PAD_BYTE: u8 = b'=';

zeroidc/vendor/base64/src/read/decoder.rs vendored Normal file

@ -0,0 +1,282 @@
use crate::{decode_config_slice, Config, DecodeError};
use std::io::Read;
use std::{cmp, fmt, io};
// This should be large, but it has to fit on the stack.
pub(crate) const BUF_SIZE: usize = 1024;
// 4 bytes of base64 data encode 3 bytes of raw data (modulo padding).
const BASE64_CHUNK_SIZE: usize = 4;
const DECODED_CHUNK_SIZE: usize = 3;
/// A `Read` implementation that decodes base64 data read from an underlying reader.
///
/// # Examples
///
/// ```
/// use std::io::Read;
/// use std::io::Cursor;
///
/// // use a cursor as the simplest possible `Read` -- in real code this is probably a file, etc.
/// let mut wrapped_reader = Cursor::new(b"YXNkZg==");
/// let mut decoder = base64::read::DecoderReader::new(
/// &mut wrapped_reader, base64::STANDARD);
///
/// // handle errors as you normally would
/// let mut result = Vec::new();
/// decoder.read_to_end(&mut result).unwrap();
///
/// assert_eq!(b"asdf", &result[..]);
///
/// ```
pub struct DecoderReader<'a, R: 'a + io::Read> {
config: Config,
/// Where b64 data is read from
r: &'a mut R,
// Holds b64 data read from the delegate reader.
b64_buffer: [u8; BUF_SIZE],
// The start of the pending buffered data in b64_buffer.
b64_offset: usize,
// The amount of buffered b64 data.
b64_len: usize,
// Since the caller may provide us with a buffer of size 1 or 2 that's too small to copy a
// decoded chunk in to, we have to be able to hang on to a few decoded bytes.
// Technically we only need to hold 2 bytes but then we'd need a separate temporary buffer to
// decode 3 bytes into and then juggle copying one byte into the provided read buf and the rest
// into here, which seems like a lot of complexity for 1 extra byte of storage.
decoded_buffer: [u8; 3],
// index of start of decoded data
decoded_offset: usize,
// length of decoded data
decoded_len: usize,
// used to provide accurate offsets in errors
total_b64_decoded: usize,
}
impl<'a, R: io::Read> fmt::Debug for DecoderReader<'a, R> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("DecoderReader")
.field("config", &self.config)
.field("b64_offset", &self.b64_offset)
.field("b64_len", &self.b64_len)
.field("decoded_buffer", &self.decoded_buffer)
.field("decoded_offset", &self.decoded_offset)
.field("decoded_len", &self.decoded_len)
.field("total_b64_decoded", &self.total_b64_decoded)
.finish()
}
}
impl<'a, R: io::Read> DecoderReader<'a, R> {
/// Create a new decoder that will read from the provided reader `r`.
pub fn new(r: &'a mut R, config: Config) -> Self {
DecoderReader {
config,
r,
b64_buffer: [0; BUF_SIZE],
b64_offset: 0,
b64_len: 0,
decoded_buffer: [0; DECODED_CHUNK_SIZE],
decoded_offset: 0,
decoded_len: 0,
total_b64_decoded: 0,
}
}
/// Write as much as possible of the decoded buffer into the target buffer.
/// Must only be called when there is something to write and space to write into.
/// Returns a Result with the number of (decoded) bytes copied.
fn flush_decoded_buf(&mut self, buf: &mut [u8]) -> io::Result<usize> {
debug_assert!(self.decoded_len > 0);
debug_assert!(buf.len() > 0);
let copy_len = cmp::min(self.decoded_len, buf.len());
debug_assert!(copy_len > 0);
debug_assert!(copy_len <= self.decoded_len);
buf[..copy_len].copy_from_slice(
&self.decoded_buffer[self.decoded_offset..self.decoded_offset + copy_len],
);
self.decoded_offset += copy_len;
self.decoded_len -= copy_len;
debug_assert!(self.decoded_len < DECODED_CHUNK_SIZE);
Ok(copy_len)
}
/// Read into the remaining space in the buffer after the current contents.
/// Must only be called when there is space to read into in the buffer.
/// Returns the number of bytes read.
fn read_from_delegate(&mut self) -> io::Result<usize> {
debug_assert!(self.b64_offset + self.b64_len < BUF_SIZE);
let read = self
.r
.read(&mut self.b64_buffer[self.b64_offset + self.b64_len..])?;
self.b64_len += read;
debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE);
return Ok(read);
}
/// Decode the requested number of bytes from the b64 buffer into the provided buffer. It's the
/// caller's responsibility to choose the number of b64 bytes to decode correctly.
///
/// Returns a Result with the number of decoded bytes written to `buf`.
fn decode_to_buf(&mut self, num_bytes: usize, buf: &mut [u8]) -> io::Result<usize> {
debug_assert!(self.b64_len >= num_bytes);
debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE);
debug_assert!(buf.len() > 0);
let decoded = decode_config_slice(
&self.b64_buffer[self.b64_offset..self.b64_offset + num_bytes],
self.config,
&mut buf[..],
)
.map_err(|e| match e {
DecodeError::InvalidByte(offset, byte) => {
DecodeError::InvalidByte(self.total_b64_decoded + offset, byte)
}
DecodeError::InvalidLength => DecodeError::InvalidLength,
DecodeError::InvalidLastSymbol(offset, byte) => {
DecodeError::InvalidLastSymbol(self.total_b64_decoded + offset, byte)
}
})
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
self.total_b64_decoded += num_bytes;
self.b64_offset += num_bytes;
self.b64_len -= num_bytes;
debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE);
Ok(decoded)
}
}
impl<'a, R: Read> Read for DecoderReader<'a, R> {
/// Decode input from the wrapped reader.
///
/// Under non-error circumstances, this returns `Ok` with the value being the number of bytes
/// written in `buf`.
///
/// Where possible, this function buffers base64 to minimize the number of read() calls to the
/// delegate reader.
///
/// # Errors
///
/// Any errors emitted by the delegate reader are returned. Decoding errors due to invalid
/// base64 are also possible, and will have `io::ErrorKind::InvalidData`.
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
if buf.len() == 0 {
return Ok(0);
}
// offset == BUF_SIZE when we copied it all last time
debug_assert!(self.b64_offset <= BUF_SIZE);
debug_assert!(self.b64_offset + self.b64_len <= BUF_SIZE);
debug_assert!(if self.b64_offset == BUF_SIZE {
self.b64_len == 0
} else {
self.b64_len <= BUF_SIZE
});
debug_assert!(if self.decoded_len == 0 {
// can be = when we were able to copy the complete chunk
self.decoded_offset <= DECODED_CHUNK_SIZE
} else {
self.decoded_offset < DECODED_CHUNK_SIZE
});
// We shouldn't ever decode into here when we can't immediately write at least one byte into
// the provided buf, so the effective length should only be 3 momentarily between when we
// decode and when we copy into the target buffer.
debug_assert!(self.decoded_len < DECODED_CHUNK_SIZE);
debug_assert!(self.decoded_len + self.decoded_offset <= DECODED_CHUNK_SIZE);
if self.decoded_len > 0 {
// we have a few leftover decoded bytes; flush that rather than pull in more b64
self.flush_decoded_buf(buf)
} else {
let mut at_eof = false;
while self.b64_len < BASE64_CHUNK_SIZE {
// Work around lack of copy_within, which is only present in 1.37
// Copy any bytes we have to the start of the buffer.
// We know we have < 1 chunk, so we can use a tiny tmp buffer.
let mut memmove_buf = [0_u8; BASE64_CHUNK_SIZE];
memmove_buf[..self.b64_len].copy_from_slice(
&self.b64_buffer[self.b64_offset..self.b64_offset + self.b64_len],
);
self.b64_buffer[0..self.b64_len].copy_from_slice(&memmove_buf[..self.b64_len]);
self.b64_offset = 0;
// then fill in more data
let read = self.read_from_delegate()?;
if read == 0 {
// we never pass in an empty buf, so 0 => we've hit EOF
at_eof = true;
break;
}
}
if self.b64_len == 0 {
debug_assert!(at_eof);
// we must be at EOF, and we have no data left to decode
return Ok(0);
};
debug_assert!(if at_eof {
// if we are at eof, we may not have a complete chunk
self.b64_len > 0
} else {
// otherwise, we must have at least one chunk
self.b64_len >= BASE64_CHUNK_SIZE
});
debug_assert_eq!(0, self.decoded_len);
if buf.len() < DECODED_CHUNK_SIZE {
// caller requested an annoyingly short read
// have to write to a tmp buf first to avoid double mutable borrow
let mut decoded_chunk = [0_u8; DECODED_CHUNK_SIZE];
// if we are at eof, could have less than BASE64_CHUNK_SIZE, in which case we have
// to assume that these last few tokens are, in fact, valid (i.e. must be 2-4 b64
// tokens, not 1, since 1 token can't decode to 1 byte).
let to_decode = cmp::min(self.b64_len, BASE64_CHUNK_SIZE);
let decoded = self.decode_to_buf(to_decode, &mut decoded_chunk[..])?;
self.decoded_buffer[..decoded].copy_from_slice(&decoded_chunk[..decoded]);
self.decoded_offset = 0;
self.decoded_len = decoded;
// can be less than 3 on last block due to padding
debug_assert!(decoded <= 3);
self.flush_decoded_buf(buf)
} else {
let b64_bytes_that_can_decode_into_buf = (buf.len() / DECODED_CHUNK_SIZE)
.checked_mul(BASE64_CHUNK_SIZE)
.expect("too many chunks");
debug_assert!(b64_bytes_that_can_decode_into_buf >= BASE64_CHUNK_SIZE);
let b64_bytes_available_to_decode = if at_eof {
self.b64_len
} else {
// only use complete chunks
self.b64_len - self.b64_len % 4
};
let actual_decode_len = cmp::min(
b64_bytes_that_can_decode_into_buf,
b64_bytes_available_to_decode,
);
self.decode_to_buf(actual_decode_len, buf)
}
}
}
}

zeroidc/vendor/base64/src/read/decoder_tests.rs vendored Normal file

@ -0,0 +1,335 @@
use std::io::{self, Read};
use rand::{Rng, RngCore};
use std::{cmp, iter};
use super::decoder::{DecoderReader, BUF_SIZE};
use crate::encode::encode_config_buf;
use crate::tests::random_config;
use crate::{decode_config_buf, DecodeError, STANDARD};
#[test]
fn simple() {
let tests: &[(&[u8], &[u8])] = &[
(&b"0"[..], &b"MA=="[..]),
(b"01", b"MDE="),
(b"012", b"MDEy"),
(b"0123", b"MDEyMw=="),
(b"01234", b"MDEyMzQ="),
(b"012345", b"MDEyMzQ1"),
(b"0123456", b"MDEyMzQ1Ng=="),
(b"01234567", b"MDEyMzQ1Njc="),
(b"012345678", b"MDEyMzQ1Njc4"),
(b"0123456789", b"MDEyMzQ1Njc4OQ=="),
][..];
for (text_expected, base64data) in tests.iter() {
// Read n bytes at a time.
for n in 1..base64data.len() + 1 {
let mut wrapped_reader = io::Cursor::new(base64data);
let mut decoder = DecoderReader::new(&mut wrapped_reader, STANDARD);
// handle errors as you normally would
let mut text_got = Vec::new();
let mut buffer = vec![0u8; n];
while let Ok(read) = decoder.read(&mut buffer[..]) {
if read == 0 {
break;
}
text_got.extend_from_slice(&buffer[..read]);
}
assert_eq!(
text_got,
*text_expected,
"\nGot: {}\nExpected: {}",
String::from_utf8_lossy(&text_got[..]),
String::from_utf8_lossy(text_expected)
);
}
}
}
// Make sure we error out on trailing junk.
#[test]
fn trailing_junk() {
let tests: &[&[u8]] = &[&b"MDEyMzQ1Njc4*!@#$%^&"[..], b"MDEyMzQ1Njc4OQ== "][..];
for base64data in tests.iter() {
// Read n bytes at a time.
for n in 1..base64data.len() + 1 {
let mut wrapped_reader = io::Cursor::new(base64data);
let mut decoder = DecoderReader::new(&mut wrapped_reader, STANDARD);
// handle errors as you normally would
let mut buffer = vec![0u8; n];
let mut saw_error = false;
loop {
match decoder.read(&mut buffer[..]) {
Err(_) => {
saw_error = true;
break;
}
Ok(read) if read == 0 => break,
Ok(_) => (),
}
}
assert!(saw_error);
}
}
}
#[test]
fn handles_short_read_from_delegate() {
let mut rng = rand::thread_rng();
let mut bytes = Vec::new();
let mut b64 = String::new();
let mut decoded = Vec::new();
for _ in 0..10_000 {
bytes.clear();
b64.clear();
decoded.clear();
let size = rng.gen_range(0, 10 * BUF_SIZE);
bytes.extend(iter::repeat(0).take(size));
bytes.truncate(size);
rng.fill_bytes(&mut bytes[..size]);
assert_eq!(size, bytes.len());
let config = random_config(&mut rng);
encode_config_buf(&bytes[..], config, &mut b64);
let mut wrapped_reader = io::Cursor::new(b64.as_bytes());
let mut short_reader = RandomShortRead {
delegate: &mut wrapped_reader,
rng: &mut rng,
};
let mut decoder = DecoderReader::new(&mut short_reader, config);
let decoded_len = decoder.read_to_end(&mut decoded).unwrap();
assert_eq!(size, decoded_len);
assert_eq!(&bytes[..], &decoded[..]);
}
}
#[test]
fn read_in_short_increments() {
let mut rng = rand::thread_rng();
let mut bytes = Vec::new();
let mut b64 = String::new();
let mut decoded = Vec::new();
for _ in 0..10_000 {
bytes.clear();
b64.clear();
decoded.clear();
let size = rng.gen_range(0, 10 * BUF_SIZE);
bytes.extend(iter::repeat(0).take(size));
// leave room to play around with larger buffers
decoded.extend(iter::repeat(0).take(size * 3));
rng.fill_bytes(&mut bytes[..]);
assert_eq!(size, bytes.len());
let config = random_config(&mut rng);
encode_config_buf(&bytes[..], config, &mut b64);
let mut wrapped_reader = io::Cursor::new(&b64[..]);
let mut decoder = DecoderReader::new(&mut wrapped_reader, config);
consume_with_short_reads_and_validate(&mut rng, &bytes[..], &mut decoded, &mut decoder);
}
}
#[test]
fn read_in_short_increments_with_short_delegate_reads() {
let mut rng = rand::thread_rng();
let mut bytes = Vec::new();
let mut b64 = String::new();
let mut decoded = Vec::new();
for _ in 0..10_000 {
bytes.clear();
b64.clear();
decoded.clear();
let size = rng.gen_range(0, 10 * BUF_SIZE);
bytes.extend(iter::repeat(0).take(size));
// leave room to play around with larger buffers
decoded.extend(iter::repeat(0).take(size * 3));
rng.fill_bytes(&mut bytes[..]);
assert_eq!(size, bytes.len());
let config = random_config(&mut rng);
encode_config_buf(&bytes[..], config, &mut b64);
let mut base_reader = io::Cursor::new(&b64[..]);
let mut decoder = DecoderReader::new(&mut base_reader, config);
let mut short_reader = RandomShortRead {
delegate: &mut decoder,
rng: &mut rand::thread_rng(),
};
consume_with_short_reads_and_validate(&mut rng, &bytes[..], &mut decoded, &mut short_reader)
}
}
#[test]
fn reports_invalid_last_symbol_correctly() {
let mut rng = rand::thread_rng();
let mut bytes = Vec::new();
let mut b64 = String::new();
let mut b64_bytes = Vec::new();
let mut decoded = Vec::new();
let mut bulk_decoded = Vec::new();
for _ in 0..1_000 {
bytes.clear();
b64.clear();
b64_bytes.clear();
let size = rng.gen_range(1, 10 * BUF_SIZE);
bytes.extend(iter::repeat(0).take(size));
decoded.extend(iter::repeat(0).take(size));
rng.fill_bytes(&mut bytes[..]);
assert_eq!(size, bytes.len());
let mut config = random_config(&mut rng);
// changing padding will cause invalid padding errors when we twiddle the last byte
config.pad = false;
encode_config_buf(&bytes[..], config, &mut b64);
b64_bytes.extend(b64.bytes());
assert_eq!(b64_bytes.len(), b64.len());
// change the last character to every possible symbol. Should behave the same as bulk
// decoding whether invalid or valid.
for &s1 in config.char_set.encode_table().iter() {
decoded.clear();
bulk_decoded.clear();
// replace the last
*b64_bytes.last_mut().unwrap() = s1;
let bulk_res = decode_config_buf(&b64_bytes[..], config, &mut bulk_decoded);
let mut wrapped_reader = io::Cursor::new(&b64_bytes[..]);
let mut decoder = DecoderReader::new(&mut wrapped_reader, config);
let stream_res = decoder.read_to_end(&mut decoded).map(|_| ()).map_err(|e| {
e.into_inner()
.and_then(|e| e.downcast::<DecodeError>().ok())
});
assert_eq!(bulk_res.map_err(|e| Some(Box::new(e))), stream_res);
}
}
}
#[test]
fn reports_invalid_byte_correctly() {
let mut rng = rand::thread_rng();
let mut bytes = Vec::new();
let mut b64 = String::new();
let mut decoded = Vec::new();
for _ in 0..10_000 {
bytes.clear();
b64.clear();
decoded.clear();
let size = rng.gen_range(1, 10 * BUF_SIZE);
bytes.extend(iter::repeat(0).take(size));
rng.fill_bytes(&mut bytes[..size]);
assert_eq!(size, bytes.len());
let config = random_config(&mut rng);
encode_config_buf(&bytes[..], config, &mut b64);
// replace one byte, somewhere, with '*', which is invalid
let bad_byte_pos = rng.gen_range(0, &b64.len());
let mut b64_bytes = b64.bytes().collect::<Vec<u8>>();
b64_bytes[bad_byte_pos] = b'*';
let mut wrapped_reader = io::Cursor::new(b64_bytes.clone());
let mut decoder = DecoderReader::new(&mut wrapped_reader, config);
// some gymnastics to avoid double-moving the io::Error, which is not Copy
let read_decode_err = decoder
.read_to_end(&mut decoded)
.map_err(|e| {
let kind = e.kind();
let inner = e
.into_inner()
.and_then(|e| e.downcast::<DecodeError>().ok());
inner.map(|i| (*i, kind))
})
.err()
.and_then(|o| o);
let mut bulk_buf = Vec::new();
let bulk_decode_err = decode_config_buf(&b64_bytes[..], config, &mut bulk_buf).err();
// it's tricky to predict where the invalid data's offset will be since if it's in the last
// chunk it will be reported at the first padding location because it's treated as invalid
// padding. So, we just check that it's the same as it is for decoding all at once.
assert_eq!(
bulk_decode_err.map(|e| (e, io::ErrorKind::InvalidData)),
read_decode_err
);
}
}
fn consume_with_short_reads_and_validate<R: Read>(
rng: &mut rand::rngs::ThreadRng,
expected_bytes: &[u8],
decoded: &mut Vec<u8>,
short_reader: &mut R,
) -> () {
let mut total_read = 0_usize;
loop {
assert!(
total_read <= expected_bytes.len(),
"tr {} size {}",
total_read,
expected_bytes.len()
);
if total_read == expected_bytes.len() {
assert_eq!(expected_bytes, &decoded[..total_read]);
// should be done
assert_eq!(0, short_reader.read(&mut decoded[..]).unwrap());
// didn't write anything
assert_eq!(expected_bytes, &decoded[..total_read]);
break;
}
let decode_len = rng.gen_range(1, cmp::max(2, expected_bytes.len() * 2));
let read = short_reader
.read(&mut decoded[total_read..total_read + decode_len])
.unwrap();
total_read += read;
}
}
/// Limits how many bytes a reader will provide in each read call.
/// Useful for shaking out code that may work fine only with typical input sources that always fill
/// the buffer.
struct RandomShortRead<'a, 'b, R: io::Read, N: rand::Rng> {
delegate: &'b mut R,
rng: &'a mut N,
}
impl<'a, 'b, R: io::Read, N: rand::Rng> io::Read for RandomShortRead<'a, 'b, R, N> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize, io::Error> {
// avoid 0 since it means EOF for non-empty buffers
let effective_len = cmp::min(self.rng.gen_range(1, 20), buf.len());
self.delegate.read(&mut buf[..effective_len])
}
}

6
zeroidc/vendor/base64/src/read/mod.rs vendored Normal file
View File

@ -0,0 +1,6 @@
//! Implementations of `io::Read` to transparently decode base64.
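//!
//! As a minimal, illustrative sketch (mirroring the tests in this module), wrap any `io::Read`
//! source in a `DecoderReader` and read decoded bytes from it through the normal `Read` API; the
//! `Cursor` here only keeps the example self-contained:
//!
//! ```
//! use std::io::Read;
//!
//! let b64 = base64::encode(b"hello world");
//! let mut wrapped_reader = std::io::Cursor::new(b64.as_bytes());
//! let mut decoder = base64::read::DecoderReader::new(&mut wrapped_reader, base64::STANDARD);
//!
//! // the decoder exposes the plain bytes through the usual `Read` methods
//! let mut decoded = Vec::new();
//! decoder.read_to_end(&mut decoded).unwrap();
//! assert_eq!(b"hello world", &decoded[..]);
//! ```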
mod decoder;
pub use self::decoder::DecoderReader;
#[cfg(test)]
mod decoder_tests;

1957
zeroidc/vendor/base64/src/tables.rs vendored Normal file

File diff suppressed because it is too large Load Diff

81
zeroidc/vendor/base64/src/tests.rs vendored Normal file
View File

@ -0,0 +1,81 @@
use crate::{decode_config, encode::encoded_size, encode_config_buf, CharacterSet, Config};
use std::str;
use rand::{
distributions::{Distribution, Uniform},
seq::SliceRandom,
FromEntropy, Rng,
};
#[test]
fn roundtrip_random_config_short() {
// exercise the slower encode/decode routines that operate on shorter buffers more vigorously
roundtrip_random_config(Uniform::new(0, 50), 10_000);
}
#[test]
fn roundtrip_random_config_long() {
roundtrip_random_config(Uniform::new(0, 1000), 10_000);
}
pub fn assert_encode_sanity(encoded: &str, config: Config, input_len: usize) {
let input_rem = input_len % 3;
let expected_padding_len = if input_rem > 0 {
if config.pad {
3 - input_rem
} else {
0
}
} else {
0
};
let expected_encoded_len = encoded_size(input_len, config).unwrap();
assert_eq!(expected_encoded_len, encoded.len());
let padding_len = encoded.chars().filter(|&c| c == '=').count();
assert_eq!(expected_padding_len, padding_len);
let _ = str::from_utf8(encoded.as_bytes()).expect("Base64 should be valid utf8");
}
fn roundtrip_random_config(input_len_range: Uniform<usize>, iterations: u32) {
let mut input_buf: Vec<u8> = Vec::new();
let mut encoded_buf = String::new();
let mut rng = rand::rngs::SmallRng::from_entropy();
for _ in 0..iterations {
input_buf.clear();
encoded_buf.clear();
let input_len = input_len_range.sample(&mut rng);
let config = random_config(&mut rng);
for _ in 0..input_len {
input_buf.push(rng.gen());
}
encode_config_buf(&input_buf, config, &mut encoded_buf);
assert_encode_sanity(&encoded_buf, config, input_len);
assert_eq!(input_buf, decode_config(&encoded_buf, config).unwrap());
}
}
pub fn random_config<R: Rng>(rng: &mut R) -> Config {
const CHARSETS: &[CharacterSet] = &[
CharacterSet::UrlSafe,
CharacterSet::Standard,
CharacterSet::Crypt,
CharacterSet::ImapMutf7,
CharacterSet::BinHex,
];
let charset = *CHARSETS.choose(rng).unwrap();
Config::new(charset, rng.gen())
}

381
zeroidc/vendor/base64/src/write/encoder.rs vendored Normal file
View File

@ -0,0 +1,381 @@
use crate::encode::encode_to_slice;
use crate::{encode_config_slice, Config};
use std::{
cmp, fmt,
io::{ErrorKind, Result, Write},
};
pub(crate) const BUF_SIZE: usize = 1024;
/// The most bytes whose encoding will fit in `BUF_SIZE`
const MAX_INPUT_LEN: usize = BUF_SIZE / 4 * 3;
// 3 bytes of input = 4 bytes of base64, always (because we don't allow line wrapping)
const MIN_ENCODE_CHUNK_SIZE: usize = 3;
/// A `Write` implementation that base64 encodes data before delegating to the wrapped writer.
///
/// Because base64 has special handling for the end of the input data (padding, etc), there's a
/// `finish()` method on this type that encodes any leftover input bytes and adds padding if
/// appropriate. It's called automatically when deallocated (see the `Drop` implementation), but
/// any error that occurs when invoking the underlying writer will be suppressed. If you want to
/// handle such errors, call `finish()` yourself.
///
/// # Examples
///
/// ```
/// use std::io::Write;
///
/// // use a vec as the simplest possible `Write` -- in real code this is probably a file, etc.
/// let mut enc = base64::write::EncoderWriter::new(Vec::new(), base64::STANDARD);
///
/// // handle errors as you normally would
/// enc.write_all(b"asdf").unwrap();
///
/// // could leave this out to be called by Drop, if you don't care
/// // about handling errors or getting the delegate writer back
/// let delegate = enc.finish().unwrap();
///
/// // base64 was written to the writer
/// assert_eq!(b"YXNkZg==", &delegate[..]);
///
/// ```
///
/// # Panics
///
/// Calling `write()` (or related methods) or `finish()` after `finish()` has completed without
/// error is invalid and will panic.
///
/// # Errors
///
/// Base64 encoding itself does not generate errors, but errors from the wrapped writer will be
/// returned as per the contract of `Write`.
///
/// # Performance
///
/// It has some minor performance loss compared to encoding slices (a couple percent).
/// It does not do any heap allocation.
pub struct EncoderWriter<W: Write> {
config: Config,
/// Where encoded data is written to. It's an Option as it's None immediately before Drop is
/// called so that finish() can return the underlying writer. None implies that finish() has
/// been called successfully.
delegate: Option<W>,
/// Holds a partial chunk, if any, after the last `write()`, so that we may then fill the chunk
/// with the next `write()`, encode it, then proceed with the rest of the input normally.
extra_input: [u8; MIN_ENCODE_CHUNK_SIZE],
/// How much of `extra` is occupied, in `[0, MIN_ENCODE_CHUNK_SIZE]`.
extra_input_occupied_len: usize,
/// Buffer to encode into. May hold leftover encoded bytes from a previous write call that the underlying writer
/// did not write last time.
output: [u8; BUF_SIZE],
/// How much of `output` is occupied with encoded data that couldn't be written last time
output_occupied_len: usize,
/// panic safety: don't write again in destructor if writer panicked while we were writing to it
panicked: bool,
}
impl<W: Write> fmt::Debug for EncoderWriter<W> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"extra_input: {:?} extra_input_occupied_len:{:?} output[..5]: {:?} output_occupied_len: {:?}",
self.extra_input,
self.extra_input_occupied_len,
&self.output[0..5],
self.output_occupied_len
)
}
}
impl<W: Write> EncoderWriter<W> {
/// Create a new encoder that will write to the provided delegate writer `w`.
pub fn new(w: W, config: Config) -> EncoderWriter<W> {
EncoderWriter {
config,
delegate: Some(w),
extra_input: [0u8; MIN_ENCODE_CHUNK_SIZE],
extra_input_occupied_len: 0,
output: [0u8; BUF_SIZE],
output_occupied_len: 0,
panicked: false,
}
}
/// Encode all remaining buffered data and write it, including any trailing incomplete input
/// triples and associated padding.
///
/// Once this succeeds, no further writes or calls to this method are allowed.
///
/// This may write to the delegate writer multiple times if the delegate writer does not accept
/// all input provided to its `write` each invocation.
///
/// If you don't care about error handling, it is not necessary to call this function, as the
/// equivalent finalization is done by the Drop impl.
///
/// Returns the writer that this was constructed around.
///
/// # Errors
///
/// The first error that is not of `ErrorKind::Interrupted` will be returned.
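///
/// # Examples
///
/// A minimal sketch of retrying `finish()`, in the spirit of this crate's own tests; with a
/// plain `Vec` delegate the loop runs only once, but the shape is the same for fallible
/// writers:
///
/// ```
/// use std::io::Write;
///
/// let mut enc = base64::write::EncoderWriter::new(Vec::new(), base64::STANDARD);
/// enc.write_all(b"asdf").unwrap();
/// let delegate = loop {
///     match enc.finish() {
///         Ok(w) => break w,
///         Err(e) if e.kind() == std::io::ErrorKind::Interrupted => continue,
///         Err(e) => panic!("write error: {}", e),
///     }
/// };
/// // base64 was written to the recovered writer
/// assert_eq!(b"YXNkZg==", &delegate[..]);
/// ```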
pub fn finish(&mut self) -> Result<W> {
// If we could consume self in finish(), we wouldn't have to worry about this case, but
// finish() is retryable in the face of I/O errors, so we can't consume here.
if self.delegate.is_none() {
panic!("Encoder has already had finish() called")
};
self.write_final_leftovers()?;
let writer = self.delegate.take().expect("Writer must be present");
Ok(writer)
}
/// Write any remaining buffered data to the delegate writer.
fn write_final_leftovers(&mut self) -> Result<()> {
if self.delegate.is_none() {
// finish() has already successfully called this, and we are now in drop() with a None
// writer, so just no-op
return Ok(());
}
self.write_all_encoded_output()?;
if self.extra_input_occupied_len > 0 {
let encoded_len = encode_config_slice(
&self.extra_input[..self.extra_input_occupied_len],
self.config,
&mut self.output[..],
);
self.output_occupied_len = encoded_len;
self.write_all_encoded_output()?;
// write succeeded, do not write the encoding of extra again if finish() is retried
self.extra_input_occupied_len = 0;
}
Ok(())
}
/// Write as much of the encoded output to the delegate writer as it will accept, and store the
/// leftovers to be attempted at the next write() call. Updates `self.output_occupied_len`.
///
/// # Errors
///
/// Errors from the delegate writer are returned. In the case of an error,
/// `self.output_occupied_len` will not be updated, as errors from `write` are specified to mean
/// that no write took place.
fn write_to_delegate(&mut self, current_output_len: usize) -> Result<()> {
self.panicked = true;
let res = self
.delegate
.as_mut()
.expect("Writer must be present")
.write(&self.output[..current_output_len]);
self.panicked = false;
res.map(|consumed| {
debug_assert!(consumed <= current_output_len);
if consumed < current_output_len {
self.output_occupied_len = current_output_len.checked_sub(consumed).unwrap();
// If we're blocking on I/O, the minor inefficiency of copying bytes to the
// start of the buffer is the least of our concerns...
// Rotate moves more than we need to, but copy_within isn't stabilized yet.
self.output.rotate_left(consumed);
} else {
self.output_occupied_len = 0;
}
})
}
/// Write all buffered encoded output. If this returns `Ok`, `self.output_occupied_len` is `0`.
///
/// This is basically write_all for the remaining buffered data but without the undesirable
/// abort-on-`Ok(0)` behavior.
///
/// # Errors
///
/// Any error emitted by the delegate writer aborts the write loop and is returned, unless it's
/// `Interrupted`, in which case the error is ignored and writes will continue.
fn write_all_encoded_output(&mut self) -> Result<()> {
while self.output_occupied_len > 0 {
let remaining_len = self.output_occupied_len;
match self.write_to_delegate(remaining_len) {
// try again on interrupts ala write_all
Err(ref e) if e.kind() == ErrorKind::Interrupted => {}
// other errors return
Err(e) => return Err(e),
// success no-ops because remaining length is already updated
Ok(_) => {}
};
}
debug_assert_eq!(0, self.output_occupied_len);
Ok(())
}
}
impl<W: Write> Write for EncoderWriter<W> {
/// Encode input and then write to the delegate writer.
///
/// Under non-error circumstances, this returns `Ok` with the value being the number of bytes
/// of `input` consumed. The value may be `0`, which interacts poorly with `write_all`, which
/// interprets `Ok(0)` as an error, despite it being allowed by the contract of `write`. See
/// https://github.com/rust-lang/rust/issues/56889 for more on that.
///
/// If the previous call to `write` provided more (encoded) data than the delegate writer could
/// accept in a single call to its `write`, the remaining data is buffered. As long as buffered
/// data is present, subsequent calls to `write` will try to write the remaining buffered data
/// to the delegate and return either `Ok(0)` -- and therefore not consume any of `input` -- or
/// an error.
///
/// # Errors
///
/// Any errors emitted by the delegate writer are returned.
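///
/// # Examples
///
/// A minimal sketch (names are only for the example) of driving `write` directly: since a call
/// may consume only part of `input`, or none at all while previously buffered output is being
/// flushed, loop until everything has been handed over:
///
/// ```
/// use std::io::Write;
///
/// let data = b"hello world";
/// let mut enc = base64::write::EncoderWriter::new(Vec::new(), base64::STANDARD);
/// let mut consumed = 0;
/// while consumed < data.len() {
///     consumed += enc.write(&data[consumed..]).unwrap();
/// }
/// let delegate = enc.finish().unwrap();
/// assert_eq!(base64::encode(&data[..]).as_bytes(), &delegate[..]);
/// ```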
fn write(&mut self, input: &[u8]) -> Result<usize> {
if self.delegate.is_none() {
panic!("Cannot write more after calling finish()");
}
if input.is_empty() {
return Ok(0);
}
// The contract of `Write::write` places some constraints on this implementation:
// - a call to `write()` represents at most one call to a wrapped `Write`, so we can't
// iterate over the input and encode multiple chunks.
// - Errors mean that "no bytes were written to this writer", so we need to reset the
// internal state to what it was before the error occurred
// before reading any input, write any leftover encoded output from last time
if self.output_occupied_len > 0 {
let current_len = self.output_occupied_len;
return self
.write_to_delegate(current_len)
// did not read any input
.map(|_| 0);
}
debug_assert_eq!(0, self.output_occupied_len);
// how many bytes, if any, were read into `extra` to create a triple to encode
let mut extra_input_read_len = 0;
let mut input = input;
let orig_extra_len = self.extra_input_occupied_len;
let mut encoded_size = 0;
// always a multiple of MIN_ENCODE_CHUNK_SIZE
let mut max_input_len = MAX_INPUT_LEN;
// process leftover un-encoded input from last write
if self.extra_input_occupied_len > 0 {
debug_assert!(self.extra_input_occupied_len < 3);
if input.len() + self.extra_input_occupied_len >= MIN_ENCODE_CHUNK_SIZE {
// Fill up `extra`, encode that into `output`, and consume as much of the rest of
// `input` as possible.
// We could write just the encoding of `extra` by itself but then we'd have to
// return after writing only 4 bytes, which is inefficient if the underlying writer
// would make a syscall.
extra_input_read_len = MIN_ENCODE_CHUNK_SIZE - self.extra_input_occupied_len;
debug_assert!(extra_input_read_len > 0);
// overwrite only bytes that weren't already used. If we need to rollback extra_len
// (when the subsequent write errors), the old leading bytes will still be there.
self.extra_input[self.extra_input_occupied_len..MIN_ENCODE_CHUNK_SIZE]
.copy_from_slice(&input[0..extra_input_read_len]);
let len = encode_to_slice(
&self.extra_input[0..MIN_ENCODE_CHUNK_SIZE],
&mut self.output[..],
self.config.char_set.encode_table(),
);
debug_assert_eq!(4, len);
input = &input[extra_input_read_len..];
// consider extra to be used up, since we encoded it
self.extra_input_occupied_len = 0;
// don't clobber where we just encoded to
encoded_size = 4;
// and don't read more than can be encoded
max_input_len = MAX_INPUT_LEN - MIN_ENCODE_CHUNK_SIZE;
// fall through to normal encoding
} else {
// `extra` and `input` are non empty, but `|extra| + |input| < 3`, so there must be
// 1 byte in each.
debug_assert_eq!(1, input.len());
debug_assert_eq!(1, self.extra_input_occupied_len);
self.extra_input[self.extra_input_occupied_len] = input[0];
self.extra_input_occupied_len += 1;
return Ok(1);
};
} else if input.len() < MIN_ENCODE_CHUNK_SIZE {
// `extra` is empty, and `input` fits inside it
self.extra_input[0..input.len()].copy_from_slice(input);
self.extra_input_occupied_len = input.len();
return Ok(input.len());
};
// either 0 or 1 complete chunks encoded from extra
debug_assert!(encoded_size == 0 || encoded_size == 4);
debug_assert!(
// didn't encode extra input
MAX_INPUT_LEN == max_input_len
// encoded one triple
|| MAX_INPUT_LEN == max_input_len + MIN_ENCODE_CHUNK_SIZE
);
// encode complete triples only
let input_complete_chunks_len = input.len() - (input.len() % MIN_ENCODE_CHUNK_SIZE);
let input_chunks_to_encode_len = cmp::min(input_complete_chunks_len, max_input_len);
debug_assert_eq!(0, max_input_len % MIN_ENCODE_CHUNK_SIZE);
debug_assert_eq!(0, input_chunks_to_encode_len % MIN_ENCODE_CHUNK_SIZE);
encoded_size += encode_to_slice(
&input[..(input_chunks_to_encode_len)],
&mut self.output[encoded_size..],
self.config.char_set.encode_table(),
);
// not updating `self.output_occupied_len` here because if the below write fails, it should
// "never take place" -- the buffer contents we encoded are ignored and perhaps retried
// later, if the consumer chooses.
self.write_to_delegate(encoded_size)
// no matter whether we wrote the full encoded buffer or not, we consumed the same
// input
.map(|_| extra_input_read_len + input_chunks_to_encode_len)
.map_err(|e| {
// in case we filled and encoded `extra`, reset extra_len
self.extra_input_occupied_len = orig_extra_len;
e
})
}
/// Because this is usually treated as OK to call multiple times, it will *not* flush any
/// incomplete chunks of input or write padding.
/// # Errors
///
/// The first error that is not of [`ErrorKind::Interrupted`] will be returned.
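///
/// # Examples
///
/// A small sketch of the difference from `finish()`: after `flush()`, a trailing partial triple
/// is still buffered and only reaches the delegate once `finish()` (or drop) runs:
///
/// ```
/// use std::io::Write;
///
/// let mut enc = base64::write::EncoderWriter::new(Vec::new(), base64::STANDARD);
/// enc.write_all(b"ab").unwrap(); // 2 bytes: an incomplete triple
/// enc.flush().unwrap();          // the partial triple stays buffered
/// let delegate = enc.finish().unwrap();
/// assert_eq!(b"YWI=", &delegate[..]);
/// ```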
fn flush(&mut self) -> Result<()> {
self.write_all_encoded_output()?;
self.delegate
.as_mut()
.expect("Writer must be present")
.flush()
}
}
impl<W: Write> Drop for EncoderWriter<W> {
fn drop(&mut self) {
if !self.panicked {
// like `BufWriter`, ignore errors during drop
let _ = self.write_final_leftovers();
}
}
}

176
zeroidc/vendor/base64/src/write/encoder_string_writer.rs vendored Normal file
View File

@ -0,0 +1,176 @@
use super::encoder::EncoderWriter;
use crate::Config;
use std::io;
use std::io::Write;
/// A `Write` implementation that base64-encodes data using the provided config and accumulates the
/// resulting base64 in memory, which is then exposed as a String via `into_inner()`.
///
/// # Examples
///
/// Buffer base64 in a new String:
///
/// ```
/// use std::io::Write;
///
/// let mut enc = base64::write::EncoderStringWriter::new(base64::STANDARD);
///
/// enc.write_all(b"asdf").unwrap();
///
/// // get the resulting String
/// let b64_string = enc.into_inner();
///
/// assert_eq!("YXNkZg==", &b64_string);
/// ```
///
/// Or, append to an existing String:
///
/// ```
/// use std::io::Write;
///
/// let mut buf = String::from("base64: ");
///
/// let mut enc = base64::write::EncoderStringWriter::from(&mut buf, base64::STANDARD);
///
/// enc.write_all(b"asdf").unwrap();
///
/// // release the &mut reference on buf
/// let _ = enc.into_inner();
///
/// assert_eq!("base64: YXNkZg==", &buf);
/// ```
///
/// # Panics
///
/// Calling `write()` (or related methods) or `finish()` after `finish()` has completed without
/// error is invalid and will panic.
///
/// # Performance
///
/// Because it has to validate that the base64 is UTF-8, it is about 80% as fast as writing plain
/// bytes to an `io::Write`.
pub struct EncoderStringWriter<S: StrConsumer> {
encoder: EncoderWriter<Utf8SingleCodeUnitWriter<S>>,
}
impl<S: StrConsumer> EncoderStringWriter<S> {
/// Create an `EncoderStringWriter` that will append to the provided `StrConsumer`.
pub fn from(str_consumer: S, config: Config) -> Self {
EncoderStringWriter {
encoder: EncoderWriter::new(Utf8SingleCodeUnitWriter { str_consumer }, config),
}
}
/// Encode all remaining buffered data, including any trailing incomplete input triples and
/// associated padding.
///
/// Once this succeeds, no further writes or calls to this method are allowed.
///
/// Returns the base64-encoded form of the accumulated written data.
pub fn into_inner(mut self) -> S {
self.encoder
.finish()
.expect("Writing to a Vec<u8> should never fail")
.str_consumer
}
}
impl EncoderStringWriter<String> {
/// Create an `EncoderStringWriter` that will encode into a new `String` with the provided config.
pub fn new(config: Config) -> Self {
EncoderStringWriter::from(String::new(), config)
}
}
impl<S: StrConsumer> Write for EncoderStringWriter<S> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.encoder.write(buf)
}
fn flush(&mut self) -> io::Result<()> {
self.encoder.flush()
}
}
/// An abstraction around consuming `str`s produced by base64 encoding.
pub trait StrConsumer {
/// Consume the base64 encoded data in `buf`
fn consume(&mut self, buf: &str);
}
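// For illustration only: a hypothetical consumer that merely tracks how much base64 was produced,
// rather than storing it, would just implement the single `consume` method, e.g.
//
//     struct OutputLen(usize);
//     impl StrConsumer for OutputLen {
//         fn consume(&mut self, buf: &str) { self.0 += buf.len(); }
//     }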
/// As for io::Write, `StrConsumer` is implemented automatically for `&mut S`.
impl<S: StrConsumer + ?Sized> StrConsumer for &mut S {
fn consume(&mut self, buf: &str) {
(**self).consume(buf)
}
}
/// Pushes the str onto the end of the String
impl StrConsumer for String {
fn consume(&mut self, buf: &str) {
self.push_str(buf)
}
}
/// A `Write` that can only handle bytes that are valid single-byte UTF-8 code units.
///
/// This is safe because we only use it when writing base64, which is always valid UTF-8.
struct Utf8SingleCodeUnitWriter<S: StrConsumer> {
str_consumer: S,
}
impl<S: StrConsumer> io::Write for Utf8SingleCodeUnitWriter<S> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
// Because every byte we receive is a complete single-byte UTF-8 code unit, any buffer
// length is valid UTF-8 and can be handed to the consumer as a `&str`
let s = std::str::from_utf8(buf).expect("Input must be valid UTF-8");
self.str_consumer.consume(s);
Ok(buf.len())
}
fn flush(&mut self) -> io::Result<()> {
// no op
Ok(())
}
}
#[cfg(test)]
mod tests {
use crate::encode_config_buf;
use crate::tests::random_config;
use crate::write::encoder_string_writer::EncoderStringWriter;
use rand::Rng;
use std::io::Write;
#[test]
fn every_possible_split_of_input() {
let mut rng = rand::thread_rng();
let mut orig_data = Vec::<u8>::new();
let mut normal_encoded = String::new();
let size = 5_000;
for i in 0..size {
orig_data.clear();
normal_encoded.clear();
for _ in 0..size {
orig_data.push(rng.gen());
}
let config = random_config(&mut rng);
encode_config_buf(&orig_data, config, &mut normal_encoded);
let mut stream_encoder = EncoderStringWriter::new(config);
// Write the first i bytes, then the rest
stream_encoder.write_all(&orig_data[0..i]).unwrap();
stream_encoder.write_all(&orig_data[i..]).unwrap();
let stream_encoded = stream_encoder.into_inner();
assert_eq!(normal_encoded, stream_encoded);
}
}
}

568
zeroidc/vendor/base64/src/write/encoder_tests.rs vendored Normal file
View File

@ -0,0 +1,568 @@
use super::EncoderWriter;
use crate::tests::random_config;
use crate::{encode_config, encode_config_buf, STANDARD_NO_PAD, URL_SAFE};
use std::io::{Cursor, Write};
use std::{cmp, io, str};
use rand::Rng;
#[test]
fn encode_three_bytes() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, URL_SAFE);
let sz = enc.write(b"abc").unwrap();
assert_eq!(sz, 3);
}
assert_eq!(&c.get_ref()[..], encode_config("abc", URL_SAFE).as_bytes());
}
#[test]
fn encode_nine_bytes_two_writes() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, URL_SAFE);
let sz = enc.write(b"abcdef").unwrap();
assert_eq!(sz, 6);
let sz = enc.write(b"ghi").unwrap();
assert_eq!(sz, 3);
}
assert_eq!(
&c.get_ref()[..],
encode_config("abcdefghi", URL_SAFE).as_bytes()
);
}
#[test]
fn encode_one_then_two_bytes() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, URL_SAFE);
let sz = enc.write(b"a").unwrap();
assert_eq!(sz, 1);
let sz = enc.write(b"bc").unwrap();
assert_eq!(sz, 2);
}
assert_eq!(&c.get_ref()[..], encode_config("abc", URL_SAFE).as_bytes());
}
#[test]
fn encode_one_then_five_bytes() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, URL_SAFE);
let sz = enc.write(b"a").unwrap();
assert_eq!(sz, 1);
let sz = enc.write(b"bcdef").unwrap();
assert_eq!(sz, 5);
}
assert_eq!(
&c.get_ref()[..],
encode_config("abcdef", URL_SAFE).as_bytes()
);
}
#[test]
fn encode_1_2_3_bytes() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, URL_SAFE);
let sz = enc.write(b"a").unwrap();
assert_eq!(sz, 1);
let sz = enc.write(b"bc").unwrap();
assert_eq!(sz, 2);
let sz = enc.write(b"def").unwrap();
assert_eq!(sz, 3);
}
assert_eq!(
&c.get_ref()[..],
encode_config("abcdef", URL_SAFE).as_bytes()
);
}
#[test]
fn encode_with_padding() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, URL_SAFE);
enc.write_all(b"abcd").unwrap();
enc.flush().unwrap();
}
assert_eq!(&c.get_ref()[..], encode_config("abcd", URL_SAFE).as_bytes());
}
#[test]
fn encode_with_padding_multiple_writes() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, URL_SAFE);
assert_eq!(1, enc.write(b"a").unwrap());
assert_eq!(2, enc.write(b"bc").unwrap());
assert_eq!(3, enc.write(b"def").unwrap());
assert_eq!(1, enc.write(b"g").unwrap());
enc.flush().unwrap();
}
assert_eq!(
&c.get_ref()[..],
encode_config("abcdefg", URL_SAFE).as_bytes()
);
}
#[test]
fn finish_writes_extra_byte() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, URL_SAFE);
assert_eq!(6, enc.write(b"abcdef").unwrap());
// will be in extra
assert_eq!(1, enc.write(b"g").unwrap());
// 1 trailing byte = 2 encoded chars
let _ = enc.finish().unwrap();
}
assert_eq!(
&c.get_ref()[..],
encode_config("abcdefg", URL_SAFE).as_bytes()
);
}
#[test]
fn write_partial_chunk_encodes_partial_chunk() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD);
// nothing encoded yet
assert_eq!(2, enc.write(b"ab").unwrap());
// encoded here
let _ = enc.finish().unwrap();
}
assert_eq!(
&c.get_ref()[..],
encode_config("ab", STANDARD_NO_PAD).as_bytes()
);
assert_eq!(3, c.get_ref().len());
}
#[test]
fn write_1_chunk_encodes_complete_chunk() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD);
assert_eq!(3, enc.write(b"abc").unwrap());
let _ = enc.finish().unwrap();
}
assert_eq!(
&c.get_ref()[..],
encode_config("abc", STANDARD_NO_PAD).as_bytes()
);
assert_eq!(4, c.get_ref().len());
}
#[test]
fn write_1_chunk_and_partial_encodes_only_complete_chunk() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD);
// "d" not written
assert_eq!(3, enc.write(b"abcd").unwrap());
let _ = enc.finish().unwrap();
}
assert_eq!(
&c.get_ref()[..],
encode_config("abc", STANDARD_NO_PAD).as_bytes()
);
assert_eq!(4, c.get_ref().len());
}
#[test]
fn write_2_partials_to_exactly_complete_chunk_encodes_complete_chunk() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD);
assert_eq!(1, enc.write(b"a").unwrap());
assert_eq!(2, enc.write(b"bc").unwrap());
let _ = enc.finish().unwrap();
}
assert_eq!(
&c.get_ref()[..],
encode_config("abc", STANDARD_NO_PAD).as_bytes()
);
assert_eq!(4, c.get_ref().len());
}
#[test]
fn write_partial_then_enough_to_complete_chunk_but_not_complete_another_chunk_encodes_complete_chunk_without_consuming_remaining(
) {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD);
assert_eq!(1, enc.write(b"a").unwrap());
// doesn't consume "d"
assert_eq!(2, enc.write(b"bcd").unwrap());
let _ = enc.finish().unwrap();
}
assert_eq!(
&c.get_ref()[..],
encode_config("abc", STANDARD_NO_PAD).as_bytes()
);
assert_eq!(4, c.get_ref().len());
}
#[test]
fn write_partial_then_enough_to_complete_chunk_and_another_chunk_encodes_complete_chunks() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD);
assert_eq!(1, enc.write(b"a").unwrap());
// completes partial chunk, and another chunk
assert_eq!(5, enc.write(b"bcdef").unwrap());
let _ = enc.finish().unwrap();
}
assert_eq!(
&c.get_ref()[..],
encode_config("abcdef", STANDARD_NO_PAD).as_bytes()
);
assert_eq!(8, c.get_ref().len());
}
#[test]
fn write_partial_then_enough_to_complete_chunk_and_another_chunk_and_another_partial_chunk_encodes_only_complete_chunks(
) {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD);
assert_eq!(1, enc.write(b"a").unwrap());
// completes partial chunk, and another chunk, with one more partial chunk that's not
// consumed
assert_eq!(5, enc.write(b"bcdefe").unwrap());
let _ = enc.finish().unwrap();
}
assert_eq!(
&c.get_ref()[..],
encode_config("abcdef", STANDARD_NO_PAD).as_bytes()
);
assert_eq!(8, c.get_ref().len());
}
#[test]
fn drop_calls_finish_for_you() {
let mut c = Cursor::new(Vec::new());
{
let mut enc = EncoderWriter::new(&mut c, STANDARD_NO_PAD);
assert_eq!(1, enc.write(b"a").unwrap());
}
assert_eq!(
&c.get_ref()[..],
encode_config("a", STANDARD_NO_PAD).as_bytes()
);
assert_eq!(2, c.get_ref().len());
}
#[test]
fn every_possible_split_of_input() {
let mut rng = rand::thread_rng();
let mut orig_data = Vec::<u8>::new();
let mut stream_encoded = Vec::<u8>::new();
let mut normal_encoded = String::new();
let size = 5_000;
for i in 0..size {
orig_data.clear();
stream_encoded.clear();
normal_encoded.clear();
for _ in 0..size {
orig_data.push(rng.gen());
}
let config = random_config(&mut rng);
encode_config_buf(&orig_data, config, &mut normal_encoded);
{
let mut stream_encoder = EncoderWriter::new(&mut stream_encoded, config);
// Write the first i bytes, then the rest
stream_encoder.write_all(&orig_data[0..i]).unwrap();
stream_encoder.write_all(&orig_data[i..]).unwrap();
}
assert_eq!(normal_encoded, str::from_utf8(&stream_encoded).unwrap());
}
}
#[test]
fn encode_random_config_matches_normal_encode_reasonable_input_len() {
// choose up to 2 * buf size, so ~half the time it'll use a full buffer
do_encode_random_config_matches_normal_encode(super::encoder::BUF_SIZE * 2)
}
#[test]
fn encode_random_config_matches_normal_encode_tiny_input_len() {
do_encode_random_config_matches_normal_encode(10)
}
#[test]
fn retrying_writes_that_error_with_interrupted_works() {
let mut rng = rand::thread_rng();
let mut orig_data = Vec::<u8>::new();
let mut stream_encoded = Vec::<u8>::new();
let mut normal_encoded = String::new();
for _ in 0..1_000 {
orig_data.clear();
stream_encoded.clear();
normal_encoded.clear();
let orig_len: usize = rng.gen_range(100, 20_000);
for _ in 0..orig_len {
orig_data.push(rng.gen());
}
// encode the normal way
let config = random_config(&mut rng);
encode_config_buf(&orig_data, config, &mut normal_encoded);
// encode via the stream encoder
{
let mut interrupt_rng = rand::thread_rng();
let mut interrupting_writer = InterruptingWriter {
w: &mut stream_encoded,
rng: &mut interrupt_rng,
fraction: 0.8,
};
let mut stream_encoder = EncoderWriter::new(&mut interrupting_writer, config);
let mut bytes_consumed = 0;
while bytes_consumed < orig_len {
// use short inputs since we want to use `extra` a lot as that's what needs rollback
// when errors occur
let input_len: usize = cmp::min(rng.gen_range(0, 10), orig_len - bytes_consumed);
retry_interrupted_write_all(
&mut stream_encoder,
&orig_data[bytes_consumed..bytes_consumed + input_len],
)
.unwrap();
bytes_consumed += input_len;
}
loop {
let res = stream_encoder.finish();
match res {
Ok(_) => break,
Err(e) => match e.kind() {
io::ErrorKind::Interrupted => continue,
_ => Err(e).unwrap(), // bail
},
}
}
assert_eq!(orig_len, bytes_consumed);
}
assert_eq!(normal_encoded, str::from_utf8(&stream_encoded).unwrap());
}
}
#[test]
fn writes_that_only_write_part_of_input_and_sometimes_interrupt_produce_correct_encoded_data() {
let mut rng = rand::thread_rng();
let mut orig_data = Vec::<u8>::new();
let mut stream_encoded = Vec::<u8>::new();
let mut normal_encoded = String::new();
for _ in 0..1_000 {
orig_data.clear();
stream_encoded.clear();
normal_encoded.clear();
let orig_len: usize = rng.gen_range(100, 20_000);
for _ in 0..orig_len {
orig_data.push(rng.gen());
}
// encode the normal way
let config = random_config(&mut rng);
encode_config_buf(&orig_data, config, &mut normal_encoded);
// encode via the stream encoder
{
let mut partial_rng = rand::thread_rng();
let mut partial_writer = PartialInterruptingWriter {
w: &mut stream_encoded,
rng: &mut partial_rng,
full_input_fraction: 0.1,
no_interrupt_fraction: 0.1,
};
let mut stream_encoder = EncoderWriter::new(&mut partial_writer, config);
let mut bytes_consumed = 0;
while bytes_consumed < orig_len {
// use at most medium-length inputs to exercise retry logic more aggressively
let input_len: usize = cmp::min(rng.gen_range(0, 100), orig_len - bytes_consumed);
let res =
stream_encoder.write(&orig_data[bytes_consumed..bytes_consumed + input_len]);
// retry on interrupt
match res {
Ok(len) => bytes_consumed += len,
Err(e) => match e.kind() {
io::ErrorKind::Interrupted => continue,
_ => {
panic!("should not see other errors");
}
},
}
}
let _ = stream_encoder.finish().unwrap();
assert_eq!(orig_len, bytes_consumed);
}
assert_eq!(normal_encoded, str::from_utf8(&stream_encoded).unwrap());
}
}
/// Retry writes until all the data is written or an error that isn't Interrupted is returned.
fn retry_interrupted_write_all<W: Write>(w: &mut W, buf: &[u8]) -> io::Result<()> {
let mut bytes_consumed = 0;
while bytes_consumed < buf.len() {
let res = w.write(&buf[bytes_consumed..]);
match res {
Ok(len) => bytes_consumed += len,
Err(e) => match e.kind() {
io::ErrorKind::Interrupted => continue,
_ => return Err(e),
},
}
}
Ok(())
}
fn do_encode_random_config_matches_normal_encode(max_input_len: usize) {
let mut rng = rand::thread_rng();
let mut orig_data = Vec::<u8>::new();
let mut stream_encoded = Vec::<u8>::new();
let mut normal_encoded = String::new();
for _ in 0..1_000 {
orig_data.clear();
stream_encoded.clear();
normal_encoded.clear();
let orig_len: usize = rng.gen_range(100, 20_000);
for _ in 0..orig_len {
orig_data.push(rng.gen());
}
// encode the normal way
let config = random_config(&mut rng);
encode_config_buf(&orig_data, config, &mut normal_encoded);
// encode via the stream encoder
{
let mut stream_encoder = EncoderWriter::new(&mut stream_encoded, config);
let mut bytes_consumed = 0;
while bytes_consumed < orig_len {
let input_len: usize =
cmp::min(rng.gen_range(0, max_input_len), orig_len - bytes_consumed);
// write a little bit of the data
stream_encoder
.write_all(&orig_data[bytes_consumed..bytes_consumed + input_len])
.unwrap();
bytes_consumed += input_len;
}
let _ = stream_encoder.finish().unwrap();
assert_eq!(orig_len, bytes_consumed);
}
assert_eq!(normal_encoded, str::from_utf8(&stream_encoded).unwrap());
}
}
/// A `Write` implementation that returns Interrupted some fraction of the time, randomly.
struct InterruptingWriter<'a, W: 'a + Write, R: 'a + Rng> {
w: &'a mut W,
rng: &'a mut R,
/// In [0, 1]. If a random number in [0, 1] is `<= fraction`, `Write` methods will return
/// an `Interrupted` error
fraction: f64,
}
impl<'a, W: Write, R: Rng> Write for InterruptingWriter<'a, W, R> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
if self.rng.gen_range(0.0, 1.0) <= self.fraction {
return Err(io::Error::new(io::ErrorKind::Interrupted, "interrupted"));
}
self.w.write(buf)
}
fn flush(&mut self) -> io::Result<()> {
if self.rng.gen_range(0.0, 1.0) <= self.fraction {
return Err(io::Error::new(io::ErrorKind::Interrupted, "interrupted"));
}
self.w.flush()
}
}
/// A `Write` implementation that sometimes will only write part of its input.
struct PartialInterruptingWriter<'a, W: 'a + Write, R: 'a + Rng> {
w: &'a mut W,
rng: &'a mut R,
/// In [0, 1]. If a random number in [0, 1] is `<= full_input_fraction`, `write()` will write all
/// its input. Otherwise, it will write a random prefix of it.
full_input_fraction: f64,
no_interrupt_fraction: f64,
}
impl<'a, W: Write, R: Rng> Write for PartialInterruptingWriter<'a, W, R> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
if self.rng.gen_range(0.0, 1.0) > self.no_interrupt_fraction {
return Err(io::Error::new(io::ErrorKind::Interrupted, "interrupted"));
}
if self.rng.gen_range(0.0, 1.0) <= self.full_input_fraction || buf.len() == 0 {
// pass through the buf untouched
self.w.write(buf)
} else {
// only use a prefix of it
self.w
.write(&buf[0..(self.rng.gen_range(0, buf.len() - 1))])
}
}
fn flush(&mut self) -> io::Result<()> {
self.w.flush()
}
}

8
zeroidc/vendor/base64/src/write/mod.rs vendored Normal file
View File

@ -0,0 +1,8 @@
//! Implementations of `io::Write` to transparently handle base64.
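//!
//! A minimal sketch of the string-accumulating variant (see the individual types' docs for the
//! full details and more examples):
//!
//! ```
//! use std::io::Write;
//!
//! let mut enc = base64::write::EncoderStringWriter::new(base64::STANDARD);
//! enc.write_all(b"asdf").unwrap();
//! let b64 = enc.into_inner();
//! assert_eq!("YXNkZg==", &b64);
//! ```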
mod encoder;
mod encoder_string_writer;
pub use self::encoder::EncoderWriter;
pub use self::encoder_string_writer::EncoderStringWriter;
#[cfg(test)]
mod encoder_tests;

330
zeroidc/vendor/base64/tests/decode.rs vendored Normal file
View File

@ -0,0 +1,330 @@
extern crate base64;
use base64::*;
mod helpers;
use self::helpers::*;
#[test]
fn decode_rfc4648_0() {
compare_decode("", "");
}
#[test]
fn decode_rfc4648_1() {
compare_decode("f", "Zg==");
}
#[test]
fn decode_rfc4648_1_just_a_bit_of_padding() {
// allows less padding than required
compare_decode("f", "Zg=");
}
#[test]
fn decode_rfc4648_1_no_padding() {
compare_decode("f", "Zg");
}
#[test]
fn decode_rfc4648_2() {
compare_decode("fo", "Zm8=");
}
#[test]
fn decode_rfc4648_2_no_padding() {
compare_decode("fo", "Zm8");
}
#[test]
fn decode_rfc4648_3() {
compare_decode("foo", "Zm9v");
}
#[test]
fn decode_rfc4648_4() {
compare_decode("foob", "Zm9vYg==");
}
#[test]
fn decode_rfc4648_4_no_padding() {
compare_decode("foob", "Zm9vYg");
}
#[test]
fn decode_rfc4648_5() {
compare_decode("fooba", "Zm9vYmE=");
}
#[test]
fn decode_rfc4648_5_no_padding() {
compare_decode("fooba", "Zm9vYmE");
}
#[test]
fn decode_rfc4648_6() {
compare_decode("foobar", "Zm9vYmFy");
}
#[test]
fn decode_reject_null() {
assert_eq!(
DecodeError::InvalidByte(3, 0x0),
decode_config("YWx\0pY2U==", config_std_pad()).unwrap_err()
);
}
#[test]
fn decode_single_pad_byte_after_2_chars_in_trailing_quad_ok() {
for num_quads in 0..25 {
let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
s.push_str("Zg=");
let input_len = num_quads * 3 + 1;
// Since there are 3 bytes in the trailing quad, we want to be sure this allows for the fact
// that it could be bad padding rather than assuming that it will decode to 2 bytes and
// therefore allow 1 extra round of fast decode logic (stage 1 / 2).
let mut decoded = Vec::new();
decoded.resize(input_len, 0);
assert_eq!(
input_len,
decode_config_slice(&s, STANDARD, &mut decoded).unwrap()
);
}
}
// this is a MAY in the rfc: https://tools.ietf.org/html/rfc4648#section-3.3
#[test]
fn decode_1_pad_byte_in_fast_loop_then_extra_padding_chunk_error() {
for num_quads in 0..25 {
let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
s.push_str("YWxpY2U=====");
// since the first 8 bytes are handled in stage 1 or 2, the padding is detected as a
// generic invalid byte, not specifically a padding issue.
// Could argue that the *next* padding byte (in the next quad) is technically the first
// erroneous one, but reporting that accurately is more complex and probably nobody cares
assert_eq!(
DecodeError::InvalidByte(num_quads * 4 + 7, b'='),
decode(&s).unwrap_err()
);
}
}
#[test]
fn decode_2_pad_bytes_in_leftovers_then_extra_padding_chunk_error() {
for num_quads in 0..25 {
let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
s.push_str("YWxpY2UABB====");
// 6 bytes (4 padding) after last 8-byte chunk, so it's decoded by stage 4.
// First padding byte is invalid.
assert_eq!(
DecodeError::InvalidByte(num_quads * 4 + 10, b'='),
decode(&s).unwrap_err()
);
}
}
#[test]
fn decode_valid_bytes_after_padding_in_leftovers_error() {
for num_quads in 0..25 {
let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
s.push_str("YWxpY2UABB=B");
// 4 bytes after last 8-byte chunk, so it's decoded by stage 4.
// First (and only) padding byte is invalid.
assert_eq!(
DecodeError::InvalidByte(num_quads * 4 + 10, b'='),
decode(&s).unwrap_err()
);
}
}
#[test]
fn decode_absurd_pad_error() {
for num_quads in 0..25 {
let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
s.push_str("==Y=Wx===pY=2U=====");
// Plenty of remaining bytes, so handled by stage 1 or 2.
// first padding byte
assert_eq!(
DecodeError::InvalidByte(num_quads * 4, b'='),
decode(&s).unwrap_err()
);
}
}
#[test]
fn decode_extra_padding_after_1_pad_bytes_in_trailing_quad_returns_error() {
for num_quads in 0..25 {
let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
s.push_str("EEE===");
// handled by stage 1, 2, or 4 depending on length
// first padding byte -- which would be legal if it was the only padding
assert_eq!(
DecodeError::InvalidByte(num_quads * 4 + 3, b'='),
decode(&s).unwrap_err()
);
}
}
#[test]
fn decode_extra_padding_after_2_pad_bytes_in_trailing_quad_2_returns_error() {
for num_quads in 0..25 {
let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
s.push_str("EE====");
// handled by stage 1, 2, or 4 depending on length
// first padding byte -- which would be legal if it was by itself
assert_eq!(
DecodeError::InvalidByte(num_quads * 4 + 2, b'='),
decode(&s).unwrap_err()
);
}
}
#[test]
fn decode_start_quad_with_padding_returns_error() {
for num_quads in 0..25 {
// add enough padding to ensure that we'll hit all 4 stages at the different lengths
for pad_bytes in 1..32 {
let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
let padding: String = std::iter::repeat("=").take(pad_bytes).collect();
s.push_str(&padding);
if pad_bytes % 4 == 1 {
// detected in early length check
assert_eq!(DecodeError::InvalidLength, decode(&s).unwrap_err());
} else {
// padding lengths 2 - 8 are handled by stage 4
// padding length >= 8 will hit at least one chunk at stages 1, 2, 3 at different
// prefix lengths
assert_eq!(
DecodeError::InvalidByte(num_quads * 4, b'='),
decode(&s).unwrap_err()
);
}
}
}
}
#[test]
fn decode_padding_followed_by_non_padding_returns_error() {
for num_quads in 0..25 {
for pad_bytes in 0..31 {
let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
let padding: String = std::iter::repeat("=").take(pad_bytes).collect();
s.push_str(&padding);
s.push_str("E");
if pad_bytes % 4 == 0 {
assert_eq!(DecodeError::InvalidLength, decode(&s).unwrap_err());
} else {
// pad len 1 - 8 will be handled by stage 4
// pad len 9 (suffix len 10) will have 8 bytes of padding handled by stage 3
// first padding byte
assert_eq!(
DecodeError::InvalidByte(num_quads * 4, b'='),
decode(&s).unwrap_err()
);
}
}
}
}
#[test]
fn decode_one_char_in_quad_with_padding_error() {
for num_quads in 0..25 {
let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
s.push_str("E=");
assert_eq!(
DecodeError::InvalidByte(num_quads * 4 + 1, b'='),
decode(&s).unwrap_err()
);
// more padding doesn't change the error
s.push_str("=");
assert_eq!(
DecodeError::InvalidByte(num_quads * 4 + 1, b'='),
decode(&s).unwrap_err()
);
s.push_str("=");
assert_eq!(
DecodeError::InvalidByte(num_quads * 4 + 1, b'='),
decode(&s).unwrap_err()
);
}
}
#[test]
fn decode_one_char_in_quad_without_padding_error() {
for num_quads in 0..25 {
let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
s.push('E');
assert_eq!(DecodeError::InvalidLength, decode(&s).unwrap_err());
}
}
#[test]
fn decode_reject_invalid_bytes_with_correct_error() {
for length in 1..100 {
for index in 0_usize..length {
for invalid_byte in " \t\n\r\x0C\x0B\x00%*.".bytes() {
let prefix: String = std::iter::repeat("A").take(index).collect();
let suffix: String = std::iter::repeat("B").take(length - index - 1).collect();
let input = prefix + &String::from_utf8(vec![invalid_byte]).unwrap() + &suffix;
assert_eq!(
length,
input.len(),
"length {} error position {}",
length,
index
);
if length % 4 == 1 && !suffix.is_empty() {
assert_eq!(DecodeError::InvalidLength, decode(&input).unwrap_err());
} else {
assert_eq!(
DecodeError::InvalidByte(index, invalid_byte),
decode(&input).unwrap_err()
);
}
}
}
}
}
#[test]
fn decode_imap() {
assert_eq!(
decode_config(b"+,,+", crate::IMAP_MUTF7),
decode_config(b"+//+", crate::STANDARD_NO_PAD)
);
}
#[test]
fn decode_invalid_trailing_bytes() {
// The case of trailing newlines is common enough to warrant a test for a good error
// message.
assert_eq!(
Err(DecodeError::InvalidByte(8, b'\n')),
decode(b"Zm9vCg==\n")
);
// extra padding, however, is still InvalidLength
assert_eq!(Err(DecodeError::InvalidLength), decode(b"Zm9vCg==="));
}
fn config_std_pad() -> Config {
Config::new(CharacterSet::Standard, true)
}

105
zeroidc/vendor/base64/tests/encode.rs vendored Normal file
View File

@ -0,0 +1,105 @@
extern crate base64;
use base64::*;
fn compare_encode(expected: &str, target: &[u8]) {
assert_eq!(expected, encode(target));
}
#[test]
fn encode_rfc4648_0() {
compare_encode("", b"");
}
#[test]
fn encode_rfc4648_1() {
compare_encode("Zg==", b"f");
}
#[test]
fn encode_rfc4648_2() {
compare_encode("Zm8=", b"fo");
}
#[test]
fn encode_rfc4648_3() {
compare_encode("Zm9v", b"foo");
}
#[test]
fn encode_rfc4648_4() {
compare_encode("Zm9vYg==", b"foob");
}
#[test]
fn encode_rfc4648_5() {
compare_encode("Zm9vYmE=", b"fooba");
}
#[test]
fn encode_rfc4648_6() {
compare_encode("Zm9vYmFy", b"foobar");
}
#[test]
fn encode_all_ascii() {
let mut ascii = Vec::<u8>::with_capacity(128);
for i in 0..128 {
ascii.push(i);
}
compare_encode(
"AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7P\
D0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8\
=",
&ascii,
);
}
#[test]
fn encode_all_bytes() {
let mut bytes = Vec::<u8>::with_capacity(256);
for i in 0..255 {
bytes.push(i);
}
bytes.push(255); //bug with "overflowing" ranges?
compare_encode(
"AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7P\
D0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn\
+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6\
/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w==",
&bytes,
);
}
#[test]
fn encode_all_bytes_url() {
let mut bytes = Vec::<u8>::with_capacity(256);
for i in 0..255 {
bytes.push(i);
}
bytes.push(255); //bug with "overflowing" ranges?
assert_eq!(
"AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0\
-P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn\
-AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq\
-wsbKztLW2t7i5uru8vb6_wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t_g4eLj5OXm5-jp6uvs7e7v8PHy\
8_T19vf4-fr7_P3-_w==",
encode_config(&bytes, URL_SAFE)
);
}
#[test]
fn encode_url_safe_without_padding() {
let encoded = encode_config(b"alice", URL_SAFE_NO_PAD);
assert_eq!(&encoded, "YWxpY2U");
assert_eq!(
String::from_utf8(decode(&encoded).unwrap()).unwrap(),
"alice"
);
}

14
zeroidc/vendor/base64/tests/helpers.rs vendored Normal file
View File

@ -0,0 +1,14 @@
extern crate base64;
use base64::*;
pub fn compare_decode(expected: &str, target: &str) {
assert_eq!(
expected,
String::from_utf8(decode(target).unwrap()).unwrap()
);
assert_eq!(
expected,
String::from_utf8(decode(target.as_bytes()).unwrap()).unwrap()
);
}

194
zeroidc/vendor/base64/tests/tests.rs vendored Normal file
View File

@ -0,0 +1,194 @@
extern crate base64;
extern crate rand;
use rand::{FromEntropy, Rng};
use base64::*;
mod helpers;
use self::helpers::*;
// generate random contents of the specified length and test encode/decode roundtrip
fn roundtrip_random(
byte_buf: &mut Vec<u8>,
str_buf: &mut String,
config: Config,
byte_len: usize,
approx_values_per_byte: u8,
max_rounds: u64,
) {
// let the short ones be short but don't let it get too crazy large
let num_rounds = calculate_number_of_rounds(byte_len, approx_values_per_byte, max_rounds);
let mut r = rand::rngs::SmallRng::from_entropy();
let mut decode_buf = Vec::new();
for _ in 0..num_rounds {
byte_buf.clear();
str_buf.clear();
decode_buf.clear();
while byte_buf.len() < byte_len {
byte_buf.push(r.gen::<u8>());
}
encode_config_buf(&byte_buf, config, str_buf);
decode_config_buf(&str_buf, config, &mut decode_buf).unwrap();
assert_eq!(byte_buf, &decode_buf);
}
}
fn calculate_number_of_rounds(byte_len: usize, approx_values_per_byte: u8, max: u64) -> u64 {
// don't overflow
let mut prod = approx_values_per_byte as u64;
for _ in 0..byte_len {
if prod > max {
return max;
}
prod = prod.saturating_mul(prod);
}
prod
}
fn no_pad_config() -> Config {
Config::new(CharacterSet::Standard, false)
}
#[test]
fn roundtrip_random_short_standard() {
let mut byte_buf: Vec<u8> = Vec::new();
let mut str_buf = String::new();
for input_len in 0..40 {
roundtrip_random(&mut byte_buf, &mut str_buf, STANDARD, input_len, 4, 10000);
}
}
#[test]
fn roundtrip_random_with_fast_loop_standard() {
let mut byte_buf: Vec<u8> = Vec::new();
let mut str_buf = String::new();
for input_len in 40..100 {
roundtrip_random(&mut byte_buf, &mut str_buf, STANDARD, input_len, 4, 1000);
}
}
#[test]
fn roundtrip_random_short_no_padding() {
let mut byte_buf: Vec<u8> = Vec::new();
let mut str_buf = String::new();
for input_len in 0..40 {
roundtrip_random(
&mut byte_buf,
&mut str_buf,
no_pad_config(),
input_len,
4,
10000,
);
}
}
#[test]
fn roundtrip_random_no_padding() {
let mut byte_buf: Vec<u8> = Vec::new();
let mut str_buf = String::new();
for input_len in 40..100 {
roundtrip_random(
&mut byte_buf,
&mut str_buf,
no_pad_config(),
input_len,
4,
1000,
);
}
}
#[test]
fn roundtrip_decode_trailing_10_bytes() {
// This is a special case because we decode 8 byte blocks of input at a time as much as we can,
// ideally unrolled to 32 bytes at a time, in stages 1 and 2. Since we also write a u64's worth
// of bytes (8) to the output, we always write 2 garbage bytes that then will be overwritten by
// the NEXT block. However, if the next block only contains 2 bytes, it will decode to 1 byte,
// and therefore be too short to cover up the trailing 2 garbage bytes. Thus, we have stage 3
// to handle that case.
for num_quads in 0..25 {
let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
s.push_str("EFGHIJKLZg");
let decoded = decode(&s).unwrap();
assert_eq!(num_quads * 3 + 7, decoded.len());
assert_eq!(s, encode_config(&decoded, STANDARD_NO_PAD));
}
}
#[test]
fn display_wrapper_matches_normal_encode() {
let mut bytes = Vec::<u8>::with_capacity(256);
for i in 0..255 {
bytes.push(i);
}
bytes.push(255);
assert_eq!(
encode(&bytes),
format!(
"{}",
base64::display::Base64Display::with_config(&bytes, STANDARD)
)
);
}
#[test]
fn because_we_can() {
compare_decode("alice", "YWxpY2U=");
compare_decode("alice", &encode(b"alice"));
compare_decode("alice", &encode(&decode(&encode(b"alice")).unwrap()));
}
#[test]
fn encode_config_slice_can_use_inline_buffer() {
let mut buf: [u8; 22] = [0; 22];
let mut larger_buf: [u8; 24] = [0; 24];
let mut input: [u8; 16] = [0; 16];
let mut rng = rand::rngs::SmallRng::from_entropy();
for elt in &mut input {
*elt = rng.gen();
}
assert_eq!(22, encode_config_slice(&input, STANDARD_NO_PAD, &mut buf));
let decoded = decode_config(&buf, STANDARD_NO_PAD).unwrap();
assert_eq!(decoded, input);
// let's try it again with padding
assert_eq!(24, encode_config_slice(&input, STANDARD, &mut larger_buf));
let decoded = decode_config(&buf, STANDARD).unwrap();
assert_eq!(decoded, input);
}
#[test]
#[should_panic(expected = "index 24 out of range for slice of length 22")]
fn encode_config_slice_panics_when_buffer_too_small() {
let mut buf: [u8; 22] = [0; 22];
let mut input: [u8; 16] = [0; 16];
let mut rng = rand::rngs::SmallRng::from_entropy();
for elt in &mut input {
*elt = rng.gen();
}
encode_config_slice(&input, STANDARD, &mut buf);
}

1
zeroidc/vendor/bitflags/.cargo-checksum.json vendored Normal file
View File

@ -0,0 +1 @@
{"files":{"CHANGELOG.md":"d362fc1fccaaf4d421bcf0fe8b80ddb4f625dade0c1ee52d08bd0b95509a49d1","CODE_OF_CONDUCT.md":"42634d0f6d922f49857175af991802822f7f920487aefa2ee250a50d12251a66","Cargo.toml":"87aced7532a7974eb37ab5fe6037f0abafc36d6b2d74891ecd2bf2f14f50d11e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"baa8604f8afb34fd93b9c79729daafb884dedcaf34023e4af8ad037d916061fd","src/example_generated.rs":"e43eb59e90f317f38d436670a6067d2fd9eb35fb319fe716184e4a04e24ed1b2","src/lib.rs":"e6477688535ee326d27238aeedc9cb4320ac35b9d17a4deda09e0587b0ccdbd4","tests/basic.rs":"146f1cbf6279bc609242cd3349f29cb21b41294f5e4921875f5ec95bd83529a2","tests/compile-fail/impls/copy.rs":"b791371237ddc75a7c04d2130e03b462c9c00a80dca08bd45aa97433d9c0d13a","tests/compile-fail/impls/copy.stderr.beta":"77d83484ce221d4b6ff2f7de843929a452d779fcfff428122710dd8218c298e3","tests/compile-fail/impls/eq.rs":"0cee8b9e07d537890e0189710293b53972d0fab63c09366f33c391065afafa99","tests/compile-fail/impls/eq.stderr.beta":"381fc6143d45ce76d7cecc47aa59cb69fe5e79c0b60a4a85d5c6163b400b3cc7","tests/compile-fail/non_integer_base/all_defined.rs":"95e14cad9e94560262f2862c3c01865ac30369b69da1001b0e7285cb55e6cb75","tests/compile-fail/non_integer_base/all_defined.stderr.beta":"1760739a276690903bb03844025587d37939f5dfcbfab309db3c86f32bdbf748","tests/compile-fail/non_integer_base/all_missing.rs":"b3d9da619d23213731ba2581aa7999c796c3c79aaf4f0ee6b11ceec08a11537f","tests/compile-fail/non_integer_base/all_missing.stderr.beta":"37e102290d3867e175b21976be798939f294efb17580d5b51e7b17b590d55132","tests/compile-fail/visibility/private_field.rs":"38e4d3fe6471829360d12c8d09b097f6a21aa93fb51eac3b215d96bdae23316b","tests/compile-fail/visibility/private_field.stderr.beta":"5aa24a3ebb39326f31927721c5017b8beb66c3e501fb865a3fa814c9763bfa0f","tests/compile-fail/visibility/private_flags.rs":"2ce4235802aa4e9c96c4e77d9e31d8401ef58dcda4741325184f0764ab1fe393","tests/compile-fail/visibility/private_flags.stderr.beta":"f3eb9f7baf2689258f3519ff7ee5c6ec3c237264ebcfe63f40c40f2023e5022f","tests/compile-fail/visibility/pub_const.rs":"8f813a97ac518c5ea8ac65b184101912452384afaf7b8d6c5e62f8370eca3c0a","tests/compile-fail/visibility/pub_const.stderr.beta":"823976ae1794d7f5372e2ec9aabba497e7bb88004722904c38da342ed98e8962","tests/compile-pass/impls/convert.rs":"88fe80bfb9cd5779f0e1d92c9ec02a8b6bb67e334c07f2309e9c0ba5ef776eb0","tests/compile-pass/impls/default.rs":"c508f9a461691f44b45142fa5ad599f02326e1de4c0cbca6c0593f4652eba109","tests/compile-pass/impls/inherent_methods.rs":"ecc26388e9a394bfa7a5bb69a5d621ab3d4d1e53f28f657bb8e78fe79f437913","tests/compile-pass/redefinition/core.rs":"ff5b6e72f87acc6ebb12405d3c0f6e3fa62e669933656a454bb63b30ea44179c","tests/compile-pass/redefinition/stringify.rs":"1edbce42b900c14425d7ffa14e83e165ebe452d7dccd8c0a8a821bdec64f5c93","tests/compile-pass/repr/c.rs":"6fda17f7c2edfcd155314579e83d0fc8a16209e400f1f9a5ca77bd9a799041f2","tests/compile-pass/repr/transparent.rs":"6cdc87a2137d8a4e0c8ce9b6cba83c82255f8ea125951bf614418685600489ce","tests/compile-pass/visibility/bits_field.rs":"1f3e5ba5a047440066a9f6bf7b7af33f5b06f6b1da3dd9af6886168199a7ea0a","tests/compile-pass/visibility/pub_in.rs":"e95312ff60966d42ec4bc00225507895a9b8ec24056ce6a9edd9145be35d730f","tests/compile.rs":"f27c67a7dd183ca30efea1b6e0880e3469a6dd63b92b1fd711c082df182c9eec"},"package":"bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"}

206
zeroidc/vendor/bitflags/CHANGELOG.md vendored Normal file
View File

@ -0,0 +1,206 @@
# 1.3.2
- Allow `non_snake_case` in generated flags types ([#256])
[#256]: https://github.com/bitflags/bitflags/pull/256
# 1.3.1
- Revert unconditional `#[repr(transparent)]` ([#252])
[#252]: https://github.com/bitflags/bitflags/pull/252
# 1.3.0 (yanked)
- Add `#[repr(transparent)]` ([#187])
- End `empty` doc comment with full stop ([#202])
- Fix typo in crate root docs ([#206])
- Document from_bits_unchecked unsafety ([#207])
- Let `is_all` ignore extra bits ([#211])
- Allows empty flag definition ([#225])
- Making crate accessible from std ([#227])
- Make `from_bits` a const fn ([#229])
- Allow multiple bitflags structs in one macro invocation ([#235])
- Add named functions to perform set operations ([#244])
- Fix typos in method docs ([#245])
- Modernization of the `bitflags` macro to take advantage of newer features and 2018 idioms ([#246])
- Fix regression (in an unreleased feature) and simplify tests ([#247])
- Use `Self` and fix bug when overriding `stringify!` ([#249])
[#187]: https://github.com/bitflags/bitflags/pull/187
[#202]: https://github.com/bitflags/bitflags/pull/202
[#206]: https://github.com/bitflags/bitflags/pull/206
[#207]: https://github.com/bitflags/bitflags/pull/207
[#211]: https://github.com/bitflags/bitflags/pull/211
[#225]: https://github.com/bitflags/bitflags/pull/225
[#227]: https://github.com/bitflags/bitflags/pull/227
[#229]: https://github.com/bitflags/bitflags/pull/229
[#235]: https://github.com/bitflags/bitflags/pull/235
[#244]: https://github.com/bitflags/bitflags/pull/244
[#245]: https://github.com/bitflags/bitflags/pull/245
[#246]: https://github.com/bitflags/bitflags/pull/246
[#247]: https://github.com/bitflags/bitflags/pull/247
[#249]: https://github.com/bitflags/bitflags/pull/249
# 1.2.1
- Remove extraneous `#[inline]` attributes ([#194])
[#194]: https://github.com/bitflags/bitflags/pull/194
# 1.2.0
- Fix typo: {Lower, Upper}Exp → {Lower, Upper}Hex ([#183])
- Add support for "unknown" bits ([#188])
[#183]: https://github.com/rust-lang-nursery/bitflags/pull/183
[#188]: https://github.com/rust-lang-nursery/bitflags/pull/188
# 1.1.0
This is a re-release of `1.0.5`, which was yanked due to a bug in the RLS.
# 1.0.5
- Use compiletest_rs flags supported by stable toolchain ([#171])
- Put the user provided attributes first ([#173])
- Make bitflags methods `const` on newer compilers ([#175])
[#171]: https://github.com/rust-lang-nursery/bitflags/pull/171
[#173]: https://github.com/rust-lang-nursery/bitflags/pull/173
[#175]: https://github.com/rust-lang-nursery/bitflags/pull/175
# 1.0.4
- Support Rust 2018 style macro imports ([#165])
```rust
use bitflags::bitflags;
```
[#165]: https://github.com/rust-lang-nursery/bitflags/pull/165
# 1.0.3
- Improve zero value flag handling and documentation ([#157])
[#157]: https://github.com/rust-lang-nursery/bitflags/pull/157
# 1.0.2
- 30% improvement in compile time of bitflags crate ([#156])
- Documentation improvements ([#153])
- Implementation cleanup ([#149])
[#156]: https://github.com/rust-lang-nursery/bitflags/pull/156
[#153]: https://github.com/rust-lang-nursery/bitflags/pull/153
[#149]: https://github.com/rust-lang-nursery/bitflags/pull/149
# 1.0.1
- Add support for `pub(restricted)` specifier on the bitflags struct ([#135])
- Optimize performance of `all()` when called from a separate crate ([#136])
[#135]: https://github.com/rust-lang-nursery/bitflags/pull/135
[#136]: https://github.com/rust-lang-nursery/bitflags/pull/136
# 1.0.0
- **[breaking change]** Macro now generates [associated constants](https://doc.rust-lang.org/reference/items.html#associated-constants) ([#24])
- **[breaking change]** Minimum supported version is Rust **1.20**, due to usage of associated constants
- After being broken in 0.9, the `#[deprecated]` attribute is now supported again ([#112])
- Other improvements to unit tests and documentation ([#106] and [#115])
[#24]: https://github.com/rust-lang-nursery/bitflags/pull/24
[#106]: https://github.com/rust-lang-nursery/bitflags/pull/106
[#112]: https://github.com/rust-lang-nursery/bitflags/pull/112
[#115]: https://github.com/rust-lang-nursery/bitflags/pull/115
## How to update your code to use associated constants
Assuming the following structure definition:
```rust
bitflags! {
struct Something: u8 {
const FOO = 0b01,
const BAR = 0b10
}
}
```
In 0.9 and older you could do:
```rust
let x = FOO.bits | BAR.bits;
```
Now you must use:
```rust
let x = Something::FOO.bits | Something::BAR.bits;
```
# 0.9.1
- Fix the implementation of `Formatting` traits when other formatting traits were present in scope ([#105])
[#105]: https://github.com/rust-lang-nursery/bitflags/pull/105
# 0.9.0
- **[breaking change]** Use struct keyword instead of flags to define bitflag types ([#84])
- **[breaking change]** Terminate const items with semicolons instead of commas ([#87])
- Implement the `Hex`, `Octal`, and `Binary` formatting traits ([#86])
- Printing an empty flag value with the `Debug` trait now prints "(empty)" instead of nothing ([#85])
- The `bitflags!` macro can now be used inside of a fn body, to define a type local to that function ([#74])
[#74]: https://github.com/rust-lang-nursery/bitflags/pull/74
[#84]: https://github.com/rust-lang-nursery/bitflags/pull/84
[#85]: https://github.com/rust-lang-nursery/bitflags/pull/85
[#86]: https://github.com/rust-lang-nursery/bitflags/pull/86
[#87]: https://github.com/rust-lang-nursery/bitflags/pull/87
# 0.8.2
- Update feature flag used when building bitflags as a dependency of the Rust toolchain
# 0.8.1
- Allow bitflags to be used as a dependency of the Rust toolchain
# 0.8.0
- Add support for the experimental `i128` and `u128` integer types ([#57])
- Add set method: `flags.set(SOME_FLAG, true)` or `flags.set(SOME_FLAG, false)` ([#55])
This may break code that defines its own set method
[#55]: https://github.com/rust-lang-nursery/bitflags/pull/55
[#57]: https://github.com/rust-lang-nursery/bitflags/pull/57
# 0.7.1
*(yanked)*
# 0.7.0
- Implement the Extend trait ([#49])
- Allow definitions inside the `bitflags!` macro to refer to items imported from other modules ([#51])
[#49]: https://github.com/rust-lang-nursery/bitflags/pull/49
[#51]: https://github.com/rust-lang-nursery/bitflags/pull/51
# 0.6.0
- The `no_std` feature was removed as it is now the default
- The `assignment_operators` feature was removed as it is now enabled by default
- Some clippy suggestions have been applied

73
zeroidc/vendor/bitflags/CODE_OF_CONDUCT.md vendored Normal file
View File

@ -0,0 +1,73 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
education, socio-economic status, nationality, personal appearance, race,
religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at coc@senaite.org. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org

58
zeroidc/vendor/bitflags/Cargo.toml vendored Normal file
View File

@ -0,0 +1,58 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
edition = "2018"
name = "bitflags"
version = "1.3.2"
authors = ["The Rust Project Developers"]
exclude = ["bors.toml"]
description = "A macro to generate structures which behave like bitflags.\n"
homepage = "https://github.com/bitflags/bitflags"
documentation = "https://docs.rs/bitflags"
readme = "README.md"
keywords = ["bit", "bitmask", "bitflags", "flags"]
categories = ["no-std"]
license = "MIT/Apache-2.0"
repository = "https://github.com/bitflags/bitflags"
[package.metadata.docs.rs]
features = ["example_generated"]
[dependencies.compiler_builtins]
version = "0.1.2"
optional = true
[dependencies.core]
version = "1.0.0"
optional = true
package = "rustc-std-workspace-core"
[dev-dependencies.rustversion]
version = "1.0"
[dev-dependencies.serde]
version = "1.0"
[dev-dependencies.serde_derive]
version = "1.0"
[dev-dependencies.serde_json]
version = "1.0"
[dev-dependencies.trybuild]
version = "1.0"
[dev-dependencies.walkdir]
version = "2.3"
[features]
default = []
example_generated = []
rustc-dep-of-std = ["core", "compiler_builtins"]

201
zeroidc/vendor/bitflags/LICENSE-APACHE vendored Normal file
View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
zeroidc/vendor/bitflags/LICENSE-MIT vendored Normal file
View File

@ -0,0 +1,25 @@
Copyright (c) 2014 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

32
zeroidc/vendor/bitflags/README.md vendored Normal file
View File

@ -0,0 +1,32 @@
bitflags
========
[![Rust](https://github.com/bitflags/bitflags/workflows/Rust/badge.svg)](https://github.com/bitflags/bitflags/actions)
[![Join the chat at https://gitter.im/bitflags/Lobby](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/bitflags/Lobby?utm_source=badge&utm_medium=badge&utm_content=badge)
[![Latest version](https://img.shields.io/crates/v/bitflags.svg)](https://crates.io/crates/bitflags)
[![Documentation](https://docs.rs/bitflags/badge.svg)](https://docs.rs/bitflags)
![License](https://img.shields.io/crates/l/bitflags.svg)
A Rust macro to generate structures which behave like a set of bitflags
- [Documentation](https://docs.rs/bitflags)
- [Release notes](https://github.com/bitflags/bitflags/releases)
## Usage
Add this to your `Cargo.toml`:
```toml
[dependencies]
bitflags = "1.3"
```
and this to your source code:
```rust
use bitflags::bitflags;
```
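For context only, here is a minimal sketch of what the macro enables once imported. The `Permissions` type and its flag names are hypothetical, chosen purely for illustration; the pattern mirrors the vendored `src/example_generated.rs` and `tests/basic.rs`:
```rust
use bitflags::bitflags;

bitflags! {
    // Hypothetical flag set for illustration; any unsigned integer type may back it.
    struct Permissions: u32 {
        const READ    = 0b0001;
        const WRITE   = 0b0010;
        const EXECUTE = 0b0100;
        const ALL     = Self::READ.bits | Self::WRITE.bits | Self::EXECUTE.bits;
    }
}

fn main() {
    // Flags combine with the usual bit operators and support set-style queries.
    let rw = Permissions::READ | Permissions::WRITE;
    assert!(rw.contains(Permissions::READ));
    assert!(!rw.contains(Permissions::EXECUTE));
    assert_eq!(rw | Permissions::EXECUTE, Permissions::ALL);
}
```
The generated type derives `Copy`, `Clone`, `PartialEq`, `Eq`, `Hash`, and the formatting traits, which is why the compile-fail tests below reject user-supplied duplicate derives.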
## Rust Version Support
The minimum supported Rust version is 1.46 due to use of associated constants and const functions.

14
zeroidc/vendor/bitflags/src/example_generated.rs vendored Normal file
View File

@ -0,0 +1,14 @@
//! This module shows an example of code generated by the macro. **IT MUST NOT BE USED OUTSIDE THIS
//! CRATE**.
bitflags! {
/// This is the same `Flags` struct defined in the [crate level example](../index.html#example).
/// Note that this struct is just for documentation purposes only, it must not be used outside
/// this crate.
pub struct Flags: u32 {
const A = 0b00000001;
const B = 0b00000010;
const C = 0b00000100;
const ABC = Self::A.bits | Self::B.bits | Self::C.bits;
}
}

1729
zeroidc/vendor/bitflags/src/lib.rs vendored Normal file

File diff suppressed because it is too large Load Diff

20
zeroidc/vendor/bitflags/tests/basic.rs vendored Normal file
View File

@ -0,0 +1,20 @@
#![no_std]
use bitflags::bitflags;
bitflags! {
/// baz
struct Flags: u32 {
const A = 0b00000001;
#[doc = "bar"]
const B = 0b00000010;
const C = 0b00000100;
#[doc = "foo"]
const ABC = Flags::A.bits | Flags::B.bits | Flags::C.bits;
}
}
#[test]
fn basic() {
assert_eq!(Flags::ABC, Flags::A | Flags::B | Flags::C);
}

10
zeroidc/vendor/bitflags/tests/compile-fail/impls/copy.rs vendored Normal file
View File

@ -0,0 +1,10 @@
use bitflags::bitflags;
bitflags! {
#[derive(Clone, Copy)]
struct Flags: u32 {
const A = 0b00000001;
}
}
fn main() {}

27
zeroidc/vendor/bitflags/tests/compile-fail/impls/copy.stderr.beta vendored Normal file
View File

@ -0,0 +1,27 @@
error[E0119]: conflicting implementations of trait `std::clone::Clone` for type `Flags`
--> $DIR/copy.rs:3:1
|
3 | / bitflags! {
4 | | #[derive(Clone, Copy)]
| | ----- first implementation here
5 | | struct Flags: u32 {
6 | | const A = 0b00000001;
7 | | }
8 | | }
| |_^ conflicting implementation for `Flags`
|
= note: this error originates in the derive macro `Clone` (in Nightly builds, run with -Z macro-backtrace for more info)
error[E0119]: conflicting implementations of trait `std::marker::Copy` for type `Flags`
--> $DIR/copy.rs:3:1
|
3 | / bitflags! {
4 | | #[derive(Clone, Copy)]
| | ---- first implementation here
5 | | struct Flags: u32 {
6 | | const A = 0b00000001;
7 | | }
8 | | }
| |_^ conflicting implementation for `Flags`
|
= note: this error originates in the derive macro `Copy` (in Nightly builds, run with -Z macro-backtrace for more info)

10
zeroidc/vendor/bitflags/tests/compile-fail/impls/eq.rs vendored Normal file
View File

@ -0,0 +1,10 @@
use bitflags::bitflags;
bitflags! {
#[derive(PartialEq, Eq)]
struct Flags: u32 {
const A = 0b00000001;
}
}
fn main() {}

55
zeroidc/vendor/bitflags/tests/compile-fail/impls/eq.stderr.beta vendored Normal file
View File

@ -0,0 +1,55 @@
error[E0119]: conflicting implementations of trait `std::cmp::PartialEq` for type `Flags`
--> $DIR/eq.rs:3:1
|
3 | / bitflags! {
4 | | #[derive(PartialEq, Eq)]
| | --------- first implementation here
5 | | struct Flags: u32 {
6 | | const A = 0b00000001;
7 | | }
8 | | }
| |_^ conflicting implementation for `Flags`
|
= note: this error originates in the derive macro `PartialEq` (in Nightly builds, run with -Z macro-backtrace for more info)
error[E0119]: conflicting implementations of trait `std::cmp::Eq` for type `Flags`
--> $DIR/eq.rs:3:1
|
3 | / bitflags! {
4 | | #[derive(PartialEq, Eq)]
| | -- first implementation here
5 | | struct Flags: u32 {
6 | | const A = 0b00000001;
7 | | }
8 | | }
| |_^ conflicting implementation for `Flags`
|
= note: this error originates in the derive macro `Eq` (in Nightly builds, run with -Z macro-backtrace for more info)
error[E0119]: conflicting implementations of trait `std::marker::StructuralPartialEq` for type `Flags`
--> $DIR/eq.rs:3:1
|
3 | / bitflags! {
4 | | #[derive(PartialEq, Eq)]
| | --------- first implementation here
5 | | struct Flags: u32 {
6 | | const A = 0b00000001;
7 | | }
8 | | }
| |_^ conflicting implementation for `Flags`
|
= note: this error originates in the derive macro `PartialEq` (in Nightly builds, run with -Z macro-backtrace for more info)
error[E0119]: conflicting implementations of trait `std::marker::StructuralEq` for type `Flags`
--> $DIR/eq.rs:3:1
|
3 | / bitflags! {
4 | | #[derive(PartialEq, Eq)]
| | -- first implementation here
5 | | struct Flags: u32 {
6 | | const A = 0b00000001;
7 | | }
8 | | }
| |_^ conflicting implementation for `Flags`
|
= note: this error originates in the derive macro `Eq` (in Nightly builds, run with -Z macro-backtrace for more info)

123
zeroidc/vendor/bitflags/tests/compile-fail/non_integer_base/all_defined.rs vendored Normal file
View File

@ -0,0 +1,123 @@
use std::{
fmt::{
self,
Debug,
Display,
LowerHex,
UpperHex,
Octal,
Binary,
},
ops::{
BitAnd,
BitOr,
BitXor,
BitAndAssign,
BitOrAssign,
BitXorAssign,
Not,
},
};
use bitflags::bitflags;
// Ideally we'd actually want this to work, but currently need something like `num`'s `Zero`
// With some design work it could be made possible
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
struct MyInt(u8);
impl BitAnd for MyInt {
type Output = Self;
fn bitand(self, other: Self) -> Self {
MyInt(self.0 & other.0)
}
}
impl BitOr for MyInt {
type Output = Self;
fn bitor(self, other: Self) -> Self {
MyInt(self.0 | other.0)
}
}
impl BitXor for MyInt {
type Output = Self;
fn bitxor(self, other: Self) -> Self {
MyInt(self.0 ^ other.0)
}
}
impl BitAndAssign for MyInt {
fn bitand_assign(&mut self, other: Self) {
self.0 &= other.0
}
}
impl BitOrAssign for MyInt {
fn bitor_assign(&mut self, other: Self) {
self.0 |= other.0
}
}
impl BitXorAssign for MyInt {
fn bitxor_assign(&mut self, other: Self) {
self.0 ^= other.0
}
}
impl Debug for MyInt {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(&self.0, f)
}
}
impl Display for MyInt {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(&self.0, f)
}
}
impl LowerHex for MyInt {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
LowerHex::fmt(&self.0, f)
}
}
impl UpperHex for MyInt {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
UpperHex::fmt(&self.0, f)
}
}
impl Octal for MyInt {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Octal::fmt(&self.0, f)
}
}
impl Binary for MyInt {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Binary::fmt(&self.0, f)
}
}
impl Not for MyInt {
type Output = MyInt;
fn not(self) -> Self {
MyInt(!self.0)
}
}
bitflags! {
struct Flags128: MyInt {
const A = MyInt(0b0000_0001u8);
const B = MyInt(0b0000_0010u8);
const C = MyInt(0b0000_0100u8);
}
}
fn main() {}

27
zeroidc/vendor/bitflags/tests/compile-fail/non_integer_base/all_defined.stderr.beta vendored Normal file
View File

@ -0,0 +1,27 @@
error[E0308]: mismatched types
--> $DIR/all_defined.rs:115:1
|
115 | / bitflags! {
116 | | struct Flags128: MyInt {
117 | | const A = MyInt(0b0000_0001u8);
118 | | const B = MyInt(0b0000_0010u8);
119 | | const C = MyInt(0b0000_0100u8);
120 | | }
121 | | }
| |_^ expected struct `MyInt`, found integer
|
= note: this error originates in the macro `__impl_all_bitflags` (in Nightly builds, run with -Z macro-backtrace for more info)
error[E0308]: mismatched types
--> $DIR/all_defined.rs:115:1
|
115 | / bitflags! {
116 | | struct Flags128: MyInt {
117 | | const A = MyInt(0b0000_0001u8);
118 | | const B = MyInt(0b0000_0010u8);
119 | | const C = MyInt(0b0000_0100u8);
120 | | }
121 | | }
| |_^ expected struct `MyInt`, found integer
|
= note: this error originates in the macro `__impl_bitflags` (in Nightly builds, run with -Z macro-backtrace for more info)

13
zeroidc/vendor/bitflags/tests/compile-fail/non_integer_base/all_missing.rs vendored Normal file
View File

@ -0,0 +1,13 @@
use bitflags::bitflags;
struct MyInt(u8);
bitflags! {
struct Flags128: MyInt {
const A = MyInt(0b0000_0001);
const B = MyInt(0b0000_0010);
const C = MyInt(0b0000_0100);
}
}
fn main() {}

13
zeroidc/vendor/bitflags/tests/compile-fail/non_integer_base/all_missing.stderr.beta vendored Normal file
View File

@ -0,0 +1,13 @@
error[E0204]: the trait `Copy` may not be implemented for this type
--> $DIR/all_missing.rs:5:1
|
5 | / bitflags! {
6 | | struct Flags128: MyInt {
7 | | const A = MyInt(0b0000_0001);
8 | | const B = MyInt(0b0000_0010);
9 | | const C = MyInt(0b0000_0100);
10 | | }
11 | | }
| |_^ this field does not implement `Copy`
|
= note: this error originates in the derive macro `Copy` (in Nightly builds, run with -Z macro-backtrace for more info)

13
zeroidc/vendor/bitflags/tests/compile-fail/visibility/private_field.rs vendored Normal file
View File

@ -0,0 +1,13 @@
mod example {
use bitflags::bitflags;
bitflags! {
pub struct Flags1: u32 {
const FLAG_A = 0b00000001;
}
}
}
fn main() {
let flag1 = example::Flags1::FLAG_A.bits;
}

10
zeroidc/vendor/bitflags/tests/compile-fail/visibility/private_field.stderr.beta vendored Normal file
View File

@ -0,0 +1,10 @@
error[E0616]: field `bits` of struct `Flags1` is private
--> $DIR/private_field.rs:12:41
|
12 | let flag1 = example::Flags1::FLAG_A.bits;
| ^^^^ private field
|
help: a method `bits` also exists, call it with parentheses
|
12 | let flag1 = example::Flags1::FLAG_A.bits();
| ^^

18
zeroidc/vendor/bitflags/tests/compile-fail/visibility/private_flags.rs vendored Normal file
View File

@ -0,0 +1,18 @@
mod example {
use bitflags::bitflags;
bitflags! {
pub struct Flags1: u32 {
const FLAG_A = 0b00000001;
}
struct Flags2: u32 {
const FLAG_B = 0b00000010;
}
}
}
fn main() {
let flag1 = example::Flags1::FLAG_A;
let flag2 = example::Flags2::FLAG_B;
}

18
zeroidc/vendor/bitflags/tests/compile-fail/visibility/private_flags.stderr.beta vendored Normal file
View File

@ -0,0 +1,18 @@
error[E0603]: struct `Flags2` is private
--> $DIR/private_flags.rs:17:26
|
17 | let flag2 = example::Flags2::FLAG_B;
| ^^^^^^ private struct
|
note: the struct `Flags2` is defined here
--> $DIR/private_flags.rs:4:5
|
4 | / bitflags! {
5 | | pub struct Flags1: u32 {
6 | | const FLAG_A = 0b00000001;
7 | | }
... |
11 | | }
12 | | }
| |_____^
= note: this error originates in the macro `bitflags` (in Nightly builds, run with -Z macro-backtrace for more info)

9
zeroidc/vendor/bitflags/tests/compile-fail/visibility/pub_const.rs vendored Normal file
View File

@ -0,0 +1,9 @@
use bitflags::bitflags;
bitflags! {
pub struct Flags1: u32 {
pub const FLAG_A = 0b00000001;
}
}
fn main() {}

5
zeroidc/vendor/bitflags/tests/compile-fail/visibility/pub_const.stderr.beta vendored Normal file
View File

@ -0,0 +1,5 @@
error: no rules expected the token `pub`
--> $DIR/pub_const.rs:5:9
|
5 | pub const FLAG_A = 0b00000001;
| ^^^ no rules expected this token in macro call

17
zeroidc/vendor/bitflags/tests/compile-pass/impls/convert.rs vendored Normal file
View File

@ -0,0 +1,17 @@
use bitflags::bitflags;
bitflags! {
struct Flags: u32 {
const A = 0b00000001;
}
}
impl From<u32> for Flags {
fn from(v: u32) -> Flags {
Flags::from_bits_truncate(v)
}
}
fn main() {
}

10
zeroidc/vendor/bitflags/tests/compile-pass/impls/default.rs vendored Normal file
View File

@ -0,0 +1,10 @@
use bitflags::bitflags;
bitflags! {
#[derive(Default)]
struct Flags: u32 {
const A = 0b00000001;
}
}
fn main() {}

15
zeroidc/vendor/bitflags/tests/compile-pass/impls/inherent_methods.rs vendored Normal file
View File

@ -0,0 +1,15 @@
use bitflags::bitflags;
bitflags! {
struct Flags: u32 {
const A = 0b00000001;
}
}
impl Flags {
pub fn new() -> Flags {
Flags::A
}
}
fn main() {}

14
zeroidc/vendor/bitflags/tests/compile-pass/redefinition/core.rs vendored Normal file
View File

@ -0,0 +1,14 @@
use bitflags::bitflags;
// Checks for possible errors caused by overriding names used by `bitflags!` internally.
mod core {}
mod _core {}
bitflags! {
struct Test: u8 {
const A = 1;
}
}
fn main() {}

Some files were not shown because too many files have changed in this diff.