Constant-time Base64 (#8)
* Add ct-codecs to dependencies

* Remove base64 from dependencies

* NIT

* NIT

* errors: impl From ct-codecs Error

* Update CHANGELOG

* encode/decode_b64

* b64: Use alloc

* NIT

* Remove unused lifetime
brycx authored Mar 21, 2021
1 parent fc9b615 commit 6189599
Showing 6 changed files with 57 additions and 37 deletions.
9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,12 @@
+### 0.1.1
+
+__Date:__ March 21, 2021.
+
+__Changelog:__
+
+- Switch from `base64` to `ct-codecs` to provide constant-time Base64 encoding/decoding
+
+
### 0.1.0 - Initial release

__Date:__ October 12, 2020.
13 changes: 6 additions & 7 deletions Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "pasetors"
version = "0.1.0" # Update html_root_url in lib.rs along with this.
version = "0.1.1" # Update html_root_url in lib.rs along with this.
authors = ["brycx <[email protected]>"]
edition = "2018"
description = "PASETO: Platform-Agnostic Security Tokens (in Rust)"
@@ -25,12 +25,11 @@ features = ["u64_backend"]
version = "0.15.4"
default-features = false

-[dependencies.base64]
-version = "0.13.0"
-default-features = false
-features = ["alloc"]
-
[dependencies.rand_core]
version = "0.5.1"
default-features = false
features = ["alloc"]
features = ["alloc"]

[dependencies.ct-codecs]
version = "1.1.1"
default-features = false
2 changes: 1 addition & 1 deletion README.md
@@ -40,7 +40,7 @@ assert!(LocalToken::decrypt(&secret, &local_token, Some(b"footer")).is_ok());
This library has **not undergone any third-party security audit**. Usage is at **own risk**.


-The [ed25519-dalek](https://github.com/dalek-cryptography/ed25519-dalek) library, used for public tokens, was [included in an audit](https://blog.quarkslab.com/security-audit-of-dalek-libraries.html). The [orion](https://github.com/brycx/orion) library, used for local tokens, has **not** been audited. Note also, that the reference implementation uses constant-time Base64 encoding, while this library does not.
+The [ed25519-dalek](https://github.com/dalek-cryptography/ed25519-dalek) library, used for public tokens, was [included in an audit](https://blog.quarkslab.com/security-audit-of-dalek-libraries.html). The [orion](https://github.com/brycx/orion) library, used for local tokens, has **not** been audited.

### Minimum Supported Rust Version
Rust 1.41 or later is supported however, the majority of testing happens with latest stable Rust.
4 changes: 2 additions & 2 deletions src/errors.rs
@@ -17,8 +17,8 @@ pub enum Errors {
LossyConversionError,
}

-impl From<base64::DecodeError> for Errors {
-fn from(_: base64::DecodeError) -> Self {
+impl From<ct_codecs::Error> for Errors {
+fn from(_: ct_codecs::Error) -> Self {
Errors::Base64DecodingError
}
}
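Aside: this `From<ct_codecs::Error>` impl is what lets the `?` operator in the new Base64 helpers (see `src/version2.rs` below) map any ct-codecs failure to `Errors::Base64DecodingError`. A minimal, self-contained sketch of that conversion flow; the local `Errors` enum and the `decode` helper here are illustrative stand-ins, not the crate's actual code:

```rust
use ct_codecs::{Base64UrlSafeNoPadding, Decoder};

// Illustrative stand-in for the crate's `Errors` enum; only the variant used here is shown.
#[derive(Debug)]
pub enum Errors {
    Base64DecodingError,
}

impl From<ct_codecs::Error> for Errors {
    fn from(_: ct_codecs::Error) -> Self {
        Errors::Base64DecodingError
    }
}

// `?` converts a ct_codecs::Error into Errors through the impl above.
fn decode(encoded: &str) -> Result<Vec<u8>, Errors> {
    // The decoded output is always shorter than the Base64 input,
    // and ct-codecs accepts an oversized output buffer.
    let mut buf = vec![0u8; encoded.len()];
    let decoded = Base64UrlSafeNoPadding::decode(&mut buf, encoded, None)?;
    Ok(decoded.to_vec())
}

fn main() {
    assert!(decode("bm90IHNlY3JldA").is_ok());
    assert!(decode("not base64!!").is_err());
}
```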
2 changes: 1 addition & 1 deletion src/lib.rs
@@ -31,7 +31,7 @@
unused_qualifications,
overflowing_literals
)]
-#![doc(html_root_url = "https://docs.rs/pasetors/0.1.0")]
+#![doc(html_root_url = "https://docs.rs/pasetors/0.1.1")]

#[macro_use]
extern crate alloc;
64 changes: 38 additions & 26 deletions src/version2.rs
@@ -5,9 +5,29 @@ use core::convert::TryFrom;
use crate::errors::Errors;
use crate::pae;

-use base64::{decode_config, encode_config, URL_SAFE_NO_PAD};
+use ct_codecs::{Base64UrlSafeNoPadding, Decoder, Encoder};
use rand_core::{CryptoRng, RngCore};

+fn encode_b64<T: AsRef<[u8]>>(encoded: T) -> Result<String, Errors> {
+let inlen = encoded.as_ref().len();
+let mut buf = vec![0u8; Base64UrlSafeNoPadding::encoded_len(inlen)?];
+
+let ret: String = Base64UrlSafeNoPadding::encode_to_str(&mut buf, encoded)?.into();
+
+Ok(ret)
+}
+
+fn decode_b64<T: AsRef<[u8]>>(encoded: T) -> Result<Vec<u8>, Errors> {
+let inlen = encoded.as_ref().len();
+// We can use encoded len here, even if it returns more than needed,
+// because ct-codecs allows this.
+let mut buf = vec![0u8; Base64UrlSafeNoPadding::encoded_len(inlen)?];
+
+let ret: Vec<u8> = Base64UrlSafeNoPadding::decode(&mut buf, encoded, None)?.into();
+
+Ok(ret)
+}

/// Validate that a token begins with a given header.purpose and does not contain more than:
/// header.purpose.payload.footer
/// If a footer is present, this is validated against the supplied.
@@ -36,7 +36,7 @@ fn validate_format_footer<'a>(
return Err(Errors::TokenValidationError);
}

-let token_footer = decode_config(parts_split[3], URL_SAFE_NO_PAD)?;
+let token_footer = decode_b64(parts_split[3])?;
if secure_cmp(footer, token_footer.as_ref()).is_err() {
return Err(Errors::TokenValidationError);
}
@@ -82,16 +102,12 @@ impl PublicToken {
let mut m_sig: Vec<u8> = Vec::from(message);
m_sig.extend_from_slice(sig.to_bytes().as_ref());

-let token_no_footer = format!("{}{}", Self::HEADER, encode_config(m_sig, URL_SAFE_NO_PAD));
+let token_no_footer = format!("{}{}", Self::HEADER, encode_b64(m_sig)?);

if f.is_empty() {
Ok(token_no_footer)
} else {
-Ok(format!(
-"{}.{}",
-token_no_footer,
-encode_config(f, URL_SAFE_NO_PAD)
-))
+Ok(format!("{}.{}", token_no_footer, encode_b64(f)?))
}
}

@@ -103,7 +119,7 @@ impl PublicToken {
let f = footer.unwrap_or(&[]);

let parts_split = validate_format_footer(Self::HEADER, token, f)?;
-let sm = decode_config(parts_split[2], URL_SAFE_NO_PAD)?;
+let sm = decode_b64(parts_split[2])?;
if sm.len() < ed25519_dalek::SIGNATURE_LENGTH {
return Err(Errors::TokenFormatError);
}
@@ -181,16 +197,12 @@ impl LocalToken {
}

out[..nonce.len()].copy_from_slice(nonce.as_ref());
-let token_no_footer = format!("{}{}", Self::HEADER, encode_config(out, URL_SAFE_NO_PAD));
+let token_no_footer = format!("{}{}", Self::HEADER, encode_b64(out)?);

if f.is_empty() {
Ok(token_no_footer)
} else {
-Ok(format!(
-"{}.{}",
-token_no_footer,
-encode_config(f, URL_SAFE_NO_PAD)
-))
+Ok(format!("{}.{}", token_no_footer, encode_b64(f)?))
}
}

@@ -224,7 +236,7 @@ impl LocalToken {

let f = footer.unwrap_or(&[]);
let parts_split = validate_format_footer(Self::HEADER, token, f)?;
-let nc = decode_config(parts_split[2], URL_SAFE_NO_PAD)?;
+let nc = decode_b64(parts_split[2])?;
if nc.len() < (XCHACHA_NONCESIZE + POLY1305_OUTSIZE) {
return Err(Errors::TokenFormatError);
}
@@ -1053,9 +1065,9 @@ mod token_validation {
#[test]
fn err_on_modified_signature() {
let mut split_public = VALID_PUBLIC_TOKEN.split('.').collect::<Vec<&str>>();
-let mut bad_sig = Vec::from(decode_config(split_public[2], URL_SAFE_NO_PAD).unwrap());
+let mut bad_sig = Vec::from(decode_b64(split_public[2]).unwrap());
bad_sig.copy_within(0..32, 32);
-let tmp = encode_config(bad_sig, URL_SAFE_NO_PAD);
+let tmp = encode_b64(bad_sig).unwrap();
split_public[2] = &tmp;
let invalid_public: String = format!(
"{}.{}.{}.{}",
@@ -1071,10 +1083,10 @@
#[test]
fn err_on_modified_tag() {
let mut split_local = VALID_LOCAL_TOKEN.split('.').collect::<Vec<&str>>();
-let mut bad_tag = Vec::from(decode_config(split_local[2], URL_SAFE_NO_PAD).unwrap());
+let mut bad_tag = Vec::from(decode_b64(split_local[2]).unwrap());
let tlen = bad_tag.len();
bad_tag.copy_within(0..16, tlen - 16);
-let tmp = encode_config(bad_tag, URL_SAFE_NO_PAD);
+let tmp = encode_b64(bad_tag).unwrap();
split_local[2] = &tmp;
let invalid_local: String = format!(
"{}.{}.{}.{}",
@@ -1090,10 +1102,10 @@
#[test]
fn err_on_modified_ciphertext() {
let mut split_local = VALID_LOCAL_TOKEN.split('.').collect::<Vec<&str>>();
-let mut bad_ct = Vec::from(decode_config(split_local[2], URL_SAFE_NO_PAD).unwrap());
+let mut bad_ct = Vec::from(decode_b64(split_local[2]).unwrap());
let ctlen = bad_ct.len();
bad_ct.copy_within((ctlen - 16)..ctlen, 24);
-let tmp = encode_config(bad_ct, URL_SAFE_NO_PAD);
+let tmp = encode_b64(bad_ct).unwrap();
split_local[2] = &tmp;
let invalid_local: String = format!(
"{}.{}.{}.{}",
@@ -1109,10 +1121,10 @@
#[test]
fn err_on_modified_nonce() {
let mut split_local = VALID_LOCAL_TOKEN.split('.').collect::<Vec<&str>>();
-let mut bad_nonce = Vec::from(decode_config(split_local[2], URL_SAFE_NO_PAD).unwrap());
+let mut bad_nonce = Vec::from(decode_b64(split_local[2]).unwrap());
let nlen = bad_nonce.len();
bad_nonce.copy_within((nlen - 24)..nlen, 0);
-let tmp = encode_config(bad_nonce, URL_SAFE_NO_PAD);
+let tmp = encode_b64(bad_nonce).unwrap();
split_local[2] = &tmp;
let invalid_local: String = format!(
"{}.{}.{}.{}",
@@ -1128,10 +1140,10 @@
#[test]
fn err_on_invalid_base64() {
let mut split_local = VALID_LOCAL_TOKEN.split('.').collect::<Vec<&str>>();
-let mut bad_nonce = Vec::from(decode_config(split_local[2], URL_SAFE_NO_PAD).unwrap());
+let mut bad_nonce = Vec::from(decode_b64(split_local[2]).unwrap());
let nlen = bad_nonce.len();
bad_nonce.copy_within((nlen - 24)..nlen, 0);
-let tmp = encode_config(bad_nonce, URL_SAFE_NO_PAD);
+let tmp = encode_b64(bad_nonce).unwrap();
split_local[2] = &tmp;
let invalid_local: String = format!(
"{}.{}.{}.{}",
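For reference, a standalone sketch of the round trip the new `encode_b64`/`decode_b64` helpers perform, using the same ct-codecs calls as in the diff above. The buffer sizing mirrors the helpers; the example itself is illustrative and assumes `ct-codecs` as the only dependency:

```rust
use ct_codecs::{Base64UrlSafeNoPadding, Decoder, Encoder};

fn main() -> Result<(), ct_codecs::Error> {
    let msg = b"v2 token payload";

    // Encode: buffer sized with encoded_len, as encode_b64 does.
    let mut enc_buf = vec![0u8; Base64UrlSafeNoPadding::encoded_len(msg.len())?];
    let encoded = Base64UrlSafeNoPadding::encode_to_str(&mut enc_buf, msg)?;

    // Decode: encoded_len of the input over-allocates, which ct-codecs allows;
    // the returned slice has the exact decoded length.
    let mut dec_buf = vec![0u8; Base64UrlSafeNoPadding::encoded_len(encoded.len())?];
    let decoded = Base64UrlSafeNoPadding::decode(&mut dec_buf, encoded, None)?;

    assert_eq!(decoded, &msg[..]);
    Ok(())
}
```

Sizing the decode buffer with `encoded_len` slightly over-allocates, but ct-codecs only writes and returns the exact decoded bytes, which is why the helpers can reuse one length calculation for both directions.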
