diff --git a/src/Blake2Fast/Blake2Fast.csproj b/src/Blake2Fast/Blake2Fast.csproj
index 3157898..cef3a44 100644
--- a/src/Blake2Fast/Blake2Fast.csproj
+++ b/src/Blake2Fast/Blake2Fast.csproj
@@ -31,8 +31,8 @@
-
-
+
+
@@ -40,8 +40,8 @@
-
-
+
+
diff --git a/src/Blake2Fast/Blake2Hmac.cs b/src/Blake2Fast/Blake2Hmac.cs
index 74c507c..2a060f1 100644
--- a/src/Blake2Fast/Blake2Hmac.cs
+++ b/src/Blake2Fast/Blake2Hmac.cs
@@ -63,8 +63,8 @@ private IBlake2Incremental createIncrementalInstance()
#endif
return alg == Algorithm.Blake2b ?
- Blake2b.CreateIncrementalHasher(HashSizeValue / 8, key) :
- Blake2s.CreateIncrementalHasher(HashSizeValue / 8, key);
+ (IBlake2Incremental)Blake2b.CreateIncrementalHasher(HashSizeValue / 8, key) :
+ (IBlake2Incremental)Blake2s.CreateIncrementalHasher(HashSizeValue / 8, key);
}
}
}
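
Note on the Blake2Hmac change above: because the CreateIncrementalHasher factories now return two different struct types instead of IBlake2Incremental, the conditional expression no longer has a common type, so each branch must be cast to the interface explicitly (which boxes the struct). A minimal sketch of the same pattern, assuming the post-change API; the method and variable names here are illustrative only:

using System;
using Blake2Fast;

static class HmacCastSketch
{
    static IBlake2Incremental Create(bool useBlake2b, ReadOnlySpan<byte> key)
    {
        // Each branch is a distinct value type (Blake2bHashState / Blake2sHashState),
        // so the ternary needs an explicit conversion to the shared interface.
        // The conversion boxes the state struct onto the heap.
        return useBlake2b
            ? (IBlake2Incremental)Blake2b.CreateIncrementalHasher(Blake2b.DefaultDigestLength, key)
            : (IBlake2Incremental)Blake2s.CreateIncrementalHasher(Blake2s.DefaultDigestLength, key);
    }
}
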
diff --git a/src/Blake2Fast/Blake2b/Blake2b.cs b/src/Blake2Fast/Blake2b/Blake2b.cs
index 43d0b50..557ed6e 100644
--- a/src/Blake2Fast/Blake2b/Blake2b.cs
+++ b/src/Blake2Fast/Blake2b/Blake2b.cs
@@ -12,6 +12,8 @@
using System.Security.Cryptography;
#endif
+using Blake2Fast.Implementation;
+
namespace Blake2Fast
{
/// Static helper methods for BLAKE2b hashing.
@@ -23,7 +25,7 @@ namespace Blake2Fast
static class Blake2b
{
/// The default hash digest length in bytes. For BLAKE2b, this value is 64.
- public const int DefaultDigestLength = Blake2bContext.HashBytes;
+ public const int DefaultDigestLength = Blake2bHashState.HashBytes;
///
public static byte[] ComputeHash(ReadOnlySpan<byte> input) => ComputeHash(DefaultDigestLength, default, input);
@@ -42,10 +44,10 @@ static class Blake2b
/// The computed hash digest from the message bytes in <paramref name="input" />.
public static byte[] ComputeHash(int digestLength, ReadOnlySpan<byte> key, ReadOnlySpan<byte> input)
{
- var ctx = default(Blake2bContext);
- ctx.Init(digestLength, key);
- ctx.Update(input);
- return ctx.Finish();
+ var hs = default(Blake2bHashState);
+ hs.Init(digestLength, key);
+ hs.Update(input);
+ return hs.Finish();
}
///
@@ -69,31 +71,31 @@ public static void ComputeAndWriteHash(int digestLength, ReadOnlySpan key,
if (output.Length < digestLength)
throw new ArgumentException($"Output buffer must have a capacity of at least {digestLength} bytes.", nameof(output));
- var ctx = default(Blake2bContext);
- ctx.Init(digestLength, key);
- ctx.Update(input);
- ctx.TryFinish(output, out int _);
+ var hs = default(Blake2bHashState);
+ hs.Init(digestLength, key);
+ hs.Update(input);
+ hs.Finish(output);
}
///
- public static IBlake2Incremental CreateIncrementalHasher() => CreateIncrementalHasher(DefaultDigestLength, default);
+ public static Blake2bHashState CreateIncrementalHasher() => CreateIncrementalHasher(DefaultDigestLength, default);
///
- public static IBlake2Incremental CreateIncrementalHasher(int digestLength) => CreateIncrementalHasher(digestLength, default);
+ public static Blake2bHashState CreateIncrementalHasher(int digestLength) => CreateIncrementalHasher(digestLength, default);
///
- public static IBlake2Incremental CreateIncrementalHasher(ReadOnlySpan<byte> key) => CreateIncrementalHasher(DefaultDigestLength, key);
+ public static Blake2bHashState CreateIncrementalHasher(ReadOnlySpan<byte> key) => CreateIncrementalHasher(DefaultDigestLength, key);
/// Create and initialize an incremental BLAKE2b hash computation.
/// If you will receive the input in segments rather than all at once, this is the most efficient way to calculate the hash.
/// The hash digest length in bytes. Valid values are 1 to 64.
/// 0 to 64 bytes of input for initializing a keyed hash.
- /// An interface for updating and finalizing the hash.
- public static IBlake2Incremental CreateIncrementalHasher(int digestLength, ReadOnlySpan<byte> key)
+ /// An instance for updating and finalizing the hash.
+ public static Blake2bHashState CreateIncrementalHasher(int digestLength, ReadOnlySpan<byte> key)
{
- var ctx = default(Blake2bContext);
- ctx.Init(digestLength, key);
- return ctx;
+ var hs = default(Blake2bHashState);
+ hs.Init(digestLength, key);
+ return hs;
}
#if BLAKE2_CRYPTOGRAPHY
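
Illustrative sketch (not part of the diff): with CreateIncrementalHasher now returning the concrete Blake2bHashState struct, incremental hashing allocates nothing on the heap until the final digest array. This assumes a build where the public API is exposed (BLAKE2_PUBLIC); the helper name is hypothetical.

using System;
using Blake2Fast;

static class IncrementalSketch
{
    static byte[] HashInTwoParts(byte[] part1, byte[] part2)
    {
        // The returned state is a mutable struct; keep it in a local (not a readonly
        // field) so Update and Finish mutate this copy rather than a defensive copy.
        var hasher = Blake2b.CreateIncrementalHasher();
        hasher.Update(part1);
        hasher.Update(part2);
        return hasher.Finish(); // allocates only the 64-byte digest array
    }
}
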
diff --git a/src/Blake2Fast/Blake2b/Blake2bAvx2.cs b/src/Blake2Fast/Blake2b/Blake2bAvx2.cs
index 02765e1..148aea5 100644
--- a/src/Blake2Fast/Blake2b/Blake2bAvx2.cs
+++ b/src/Blake2Fast/Blake2b/Blake2bAvx2.cs
@@ -11,9 +11,14 @@
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
-namespace Blake2Fast
+namespace Blake2Fast.Implementation
{
- unsafe internal partial struct Blake2bContext
+#if BLAKE2_PUBLIC
+ public
+#else
+ internal
+#endif
+ unsafe partial struct Blake2bHashState
{
// SIMD algorithm described in https://eprint.iacr.org/2012/275.pdf
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
diff --git a/src/Blake2Fast/Blake2b/Blake2bAvx2.tt b/src/Blake2Fast/Blake2b/Blake2bAvx2.tt
index a6dcd32..1fa3202 100644
--- a/src/Blake2Fast/Blake2b/Blake2bAvx2.tt
+++ b/src/Blake2Fast/Blake2b/Blake2bAvx2.tt
@@ -13,9 +13,14 @@ using System.Runtime.Intrinsics.X86;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
-namespace Blake2Fast
+namespace Blake2Fast.Implementation
{
- unsafe internal partial struct Blake2bContext
+#if BLAKE2_PUBLIC
+ public
+#else
+ internal
+#endif
+ unsafe partial struct Blake2bHashState
{
// SIMD algorithm described in https://eprint.iacr.org/2012/275.pdf
[MethodImpl(MethodImplOptions.AggressiveOptimization)]
diff --git a/src/Blake2Fast/Blake2b/Blake2bContext.cs b/src/Blake2Fast/Blake2b/Blake2bHashState.cs
similarity index 79%
rename from src/Blake2Fast/Blake2b/Blake2bContext.cs
rename to src/Blake2Fast/Blake2b/Blake2bHashState.cs
index 548a283..5fca27b 100644
--- a/src/Blake2Fast/Blake2b/Blake2bContext.cs
+++ b/src/Blake2Fast/Blake2b/Blake2bHashState.cs
@@ -13,16 +13,23 @@
using System.Runtime.Intrinsics.X86;
#endif
-namespace Blake2Fast
+namespace Blake2Fast.Implementation
{
- unsafe internal partial struct Blake2bContext : IBlake2Incremental
+ /// Defines the state associated with an incremental BLAKE2b hashing operation.
+ /// Instances of this struct must be created by <see cref="Blake2b.CreateIncrementalHasher()" />. An instance created directly will be unusable.
+#if BLAKE2_PUBLIC
+ public
+#else
+ internal
+#endif
+ unsafe partial struct Blake2bHashState : IBlake2Incremental
{
- public const int WordSize = sizeof(ulong);
- public const int BlockWords = 16;
- public const int BlockBytes = BlockWords * WordSize;
- public const int HashWords = 8;
- public const int HashBytes = HashWords * WordSize;
- public const int MaxKeyBytes = HashBytes;
+ internal const int WordSize = sizeof(ulong);
+ internal const int BlockWords = 16;
+ internal const int BlockBytes = BlockWords * WordSize;
+ internal const int HashWords = 8;
+ internal const int HashBytes = HashWords * WordSize;
+ internal const int MaxKeyBytes = HashBytes;
private fixed byte b[BlockBytes];
private fixed ulong h[HashWords];
@@ -49,6 +56,7 @@ unsafe internal partial struct Blake2bContext : IBlake2Incremental
};
#endif
+ ///
public int DigestLength => (int)outlen;
private void compress(ref byte input, uint offs, uint cb)
@@ -56,7 +64,7 @@ private void compress(ref byte input, uint offs, uint cb)
uint inc = Math.Min(cb, BlockBytes);
fixed (byte* pinput = &input)
- fixed (Blake2bContext* s = &this)
+ fixed (Blake2bHashState* s = &this)
{
ulong* sh = s->h;
byte* pin = pinput + offs;
@@ -86,7 +94,7 @@ private void compress(ref byte input, uint offs, uint cb)
}
}
- public void Init(int digestLength = HashBytes, ReadOnlySpan<byte> key = default)
+ internal void Init(int digestLength = HashBytes, ReadOnlySpan<byte> key = default)
{
uint keylen = (uint)key.Length;
@@ -106,8 +114,10 @@ public void Init(int digestLength = HashBytes, ReadOnlySpan key = default)
}
}
+ ///
public void Update(ReadOnlySpan<byte> input)
{
+ if (outlen == 0) ThrowHelper.HashNotInitialized();
if (f[0] != 0) ThrowHelper.HashFinalized();
uint consumed = 0;
@@ -141,6 +151,7 @@ public void Update(ReadOnlySpan input)
}
}
+ ///
public void Update<T>(ReadOnlySpan<T> input) where T : struct
{
ThrowHelper.ThrowIfIsRefOrContainsRefs<T>();
@@ -148,6 +159,7 @@ public void Update(ReadOnlySpan input) where T : struct
Update(MemoryMarshal.AsBytes(input));
}
+ ///
public void Update<T>(T input) where T : struct
{
ThrowHelper.ThrowIfIsRefOrContainsRefs<T>();
@@ -172,6 +184,7 @@ public void Update(T input) where T : struct
private void finish(Span<byte> hash)
{
+ if (outlen == 0) ThrowHelper.HashNotInitialized();
if (f[0] != 0) ThrowHelper.HashFinalized();
if (c < BlockBytes)
@@ -183,6 +196,7 @@ private void finish(Span hash)
Unsafe.CopyBlockUnaligned(ref hash[0], ref Unsafe.As<ulong, byte>(ref h[0]), outlen);
}
+ ///
public byte[] Finish()
{
byte[] hash = new byte[outlen];
@@ -191,6 +205,15 @@ public byte[] Finish()
return hash;
}
+ ///
+ public void Finish(Span<byte> output)
+ {
+ if ((uint)output.Length < outlen) ThrowHelper.OutputTooSmall(DigestLength);
+
+ finish(output);
+ }
+
+ ///
public bool TryFinish(Span<byte> output, out int bytesWritten)
{
if ((uint)output.Length < outlen)
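
Sketch of the new Finish(Span<byte>) overload added above, contrasted with TryFinish. This is a hedged example, assuming a build where the public API is exposed (BLAKE2_PUBLIC); the method and buffer names are illustrative:

using System;
using Blake2Fast;

static class FinishSketch
{
    static void WriteDigest(ReadOnlySpan<byte> input, Span<byte> output)
    {
        var hasher = Blake2b.CreateIncrementalHasher();
        hasher.Update(input);

        // Finish(Span<byte>) throws ArgumentException when output.Length < DigestLength
        // (via ThrowHelper.OutputTooSmall); TryFinish reports the same case by returning
        // false instead of throwing. Call only one of them: finalizing sets the flag
        // checked by the HashFinalized guard.
        hasher.Finish(output);
    }
}
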
diff --git a/src/Blake2Fast/Blake2b/Blake2bContext.tt b/src/Blake2Fast/Blake2b/Blake2bHashState.tt
similarity index 100%
rename from src/Blake2Fast/Blake2b/Blake2bContext.tt
rename to src/Blake2Fast/Blake2b/Blake2bHashState.tt
diff --git a/src/Blake2Fast/Blake2b/Blake2bScalar.cs b/src/Blake2Fast/Blake2b/Blake2bScalar.cs
index 289a352..89a8217 100644
--- a/src/Blake2Fast/Blake2b/Blake2bScalar.cs
+++ b/src/Blake2Fast/Blake2b/Blake2bScalar.cs
@@ -5,9 +5,14 @@
//
//------------------------------------------------------------------------------
-namespace Blake2Fast
+namespace Blake2Fast.Implementation
{
- unsafe internal partial struct Blake2bContext
+#if BLAKE2_PUBLIC
+ public
+#else
+ internal
+#endif
+ unsafe partial struct Blake2bHashState
{
private static void mixScalar(ulong* sh, ulong* m)
{
diff --git a/src/Blake2Fast/Blake2b/Blake2bSse4.cs b/src/Blake2Fast/Blake2b/Blake2bSse4.cs
index ab119df..4960c2a 100644
--- a/src/Blake2Fast/Blake2b/Blake2bSse4.cs
+++ b/src/Blake2Fast/Blake2b/Blake2bSse4.cs
@@ -11,9 +11,14 @@
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
-namespace Blake2Fast
+namespace Blake2Fast.Implementation
{
- unsafe internal partial struct Blake2bContext
+#if BLAKE2_PUBLIC
+ public
+#else
+ internal
+#endif
+ unsafe partial struct Blake2bHashState
{
// SIMD algorithm described in https://eprint.iacr.org/2012/275.pdf
#if !HWINTRINSICS_EXP
diff --git a/src/Blake2Fast/Blake2b/Blake2bSse4.tt b/src/Blake2Fast/Blake2b/Blake2bSse4.tt
index ee6105a..cafc8c9 100644
--- a/src/Blake2Fast/Blake2b/Blake2bSse4.tt
+++ b/src/Blake2Fast/Blake2b/Blake2bSse4.tt
@@ -13,9 +13,14 @@ using System.Runtime.Intrinsics.X86;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
-namespace Blake2Fast
+namespace Blake2Fast.Implementation
{
- unsafe internal partial struct Blake2bContext
+#if BLAKE2_PUBLIC
+ public
+#else
+ internal
+#endif
+ unsafe partial struct Blake2bHashState
{
// SIMD algorithm described in https://eprint.iacr.org/2012/275.pdf
#if !HWINTRINSICS_EXP
diff --git a/src/Blake2Fast/Blake2s/Blake2s.cs b/src/Blake2Fast/Blake2s/Blake2s.cs
index 787a5c2..16a3fc0 100644
--- a/src/Blake2Fast/Blake2s/Blake2s.cs
+++ b/src/Blake2Fast/Blake2s/Blake2s.cs
@@ -12,6 +12,8 @@
using System.Security.Cryptography;
#endif
+using Blake2Fast.Implementation;
+
namespace Blake2Fast
{
/// Static helper methods for BLAKE2s hashing.
@@ -23,7 +25,7 @@ namespace Blake2Fast
static class Blake2s
{
/// The default hash digest length in bytes. For BLAKE2s, this value is 32.
- public const int DefaultDigestLength = Blake2sContext.HashBytes;
+ public const int DefaultDigestLength = Blake2sHashState.HashBytes;
///
public static byte[] ComputeHash(ReadOnlySpan<byte> input) => ComputeHash(DefaultDigestLength, default, input);
@@ -42,10 +44,10 @@ static class Blake2s
/// The computed hash digest from the message bytes in <paramref name="input" />.
public static byte[] ComputeHash(int digestLength, ReadOnlySpan<byte> key, ReadOnlySpan<byte> input)
{
- var ctx = default(Blake2sContext);
- ctx.Init(digestLength, key);
- ctx.Update(input);
- return ctx.Finish();
+ var hs = default(Blake2sHashState);
+ hs.Init(digestLength, key);
+ hs.Update(input);
+ return hs.Finish();
}
///
@@ -69,31 +71,31 @@ public static void ComputeAndWriteHash(int digestLength, ReadOnlySpan key,
if (output.Length < digestLength)
throw new ArgumentException($"Output buffer must have a capacity of at least {digestLength} bytes.", nameof(output));
- var ctx = default(Blake2sContext);
- ctx.Init(digestLength, key);
- ctx.Update(input);
- ctx.TryFinish(output, out int _);
+ var hs = default(Blake2sHashState);
+ hs.Init(digestLength, key);
+ hs.Update(input);
+ hs.Finish(output);
}
///
- public static IBlake2Incremental CreateIncrementalHasher() => CreateIncrementalHasher(DefaultDigestLength, default);
+ public static Blake2sHashState CreateIncrementalHasher() => CreateIncrementalHasher(DefaultDigestLength, default);
///
- public static IBlake2Incremental CreateIncrementalHasher(int digestLength) => CreateIncrementalHasher(digestLength, default);
+ public static Blake2sHashState CreateIncrementalHasher(int digestLength) => CreateIncrementalHasher(digestLength, default);
///
- public static IBlake2Incremental CreateIncrementalHasher(ReadOnlySpan<byte> key) => CreateIncrementalHasher(DefaultDigestLength, key);
+ public static Blake2sHashState CreateIncrementalHasher(ReadOnlySpan<byte> key) => CreateIncrementalHasher(DefaultDigestLength, key);
/// Create and initialize an incremental BLAKE2s hash computation.
/// If you will receive the input in segments rather than all at once, this is the most efficient way to calculate the hash.
/// The hash digest length in bytes. Valid values are 1 to 32.
/// 0 to 32 bytes of input for initializing a keyed hash.
- /// An interface for updating and finalizing the hash.
- public static IBlake2Incremental CreateIncrementalHasher(int digestLength, ReadOnlySpan<byte> key)
+ /// An instance for updating and finalizing the hash.
+ public static Blake2sHashState CreateIncrementalHasher(int digestLength, ReadOnlySpan<byte> key)
{
- var ctx = default(Blake2sContext);
- ctx.Init(digestLength, key);
- return ctx;
+ var hs = default(Blake2sHashState);
+ hs.Init(digestLength, key);
+ return hs;
}
#if BLAKE2_CRYPTOGRAPHY
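
Keyed (MAC-style) usage sketch for the BLAKE2s API shown above, using the fully shown ComputeHash(int, ReadOnlySpan<byte>, ReadOnlySpan<byte>) overload; key length 0 to 32 bytes and digest length 1 to 32 bytes per the documented limits. Hedged example, names illustrative:

using System;
using System.Text;
using Blake2Fast;

static class KeyedSketch
{
    static byte[] Tag(byte[] key, string message)
    {
        // One-shot keyed hash: digest length, key, then the message bytes.
        return Blake2s.ComputeHash(Blake2s.DefaultDigestLength, key, Encoding.UTF8.GetBytes(message));
    }
}
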
diff --git a/src/Blake2Fast/Blake2s/Blake2sContext.cs b/src/Blake2Fast/Blake2s/Blake2sHashState.cs
similarity index 78%
rename from src/Blake2Fast/Blake2s/Blake2sContext.cs
rename to src/Blake2Fast/Blake2s/Blake2sHashState.cs
index f7eb073..859f4fc 100644
--- a/src/Blake2Fast/Blake2s/Blake2sContext.cs
+++ b/src/Blake2Fast/Blake2s/Blake2sHashState.cs
@@ -13,16 +13,23 @@
using System.Runtime.Intrinsics.X86;
#endif
-namespace Blake2Fast
+namespace Blake2Fast.Implementation
{
- unsafe internal partial struct Blake2sContext : IBlake2Incremental
+ /// Defines the state associated with an incremental BLAKE2s hashing operation.
+ /// Instances of this struct must be created by <see cref="Blake2s.CreateIncrementalHasher()" />. An instance created directly will be unusable.
+#if BLAKE2_PUBLIC
+ public
+#else
+ internal
+#endif
+ unsafe partial struct Blake2sHashState : IBlake2Incremental
{
- public const int WordSize = sizeof(uint);
- public const int BlockWords = 16;
- public const int BlockBytes = BlockWords * WordSize;
- public const int HashWords = 8;
- public const int HashBytes = HashWords * WordSize;
- public const int MaxKeyBytes = HashBytes;
+ internal const int WordSize = sizeof(uint);
+ internal const int BlockWords = 16;
+ internal const int BlockBytes = BlockWords * WordSize;
+ internal const int HashWords = 8;
+ internal const int HashBytes = HashWords * WordSize;
+ internal const int MaxKeyBytes = HashBytes;
private fixed byte b[BlockBytes];
private fixed uint h[HashWords];
@@ -49,6 +56,7 @@ unsafe internal partial struct Blake2sContext : IBlake2Incremental
};
#endif
+ ///
public int DigestLength => (int)outlen;
private void compress(ref byte input, uint offs, uint cb)
@@ -56,7 +64,7 @@ private void compress(ref byte input, uint offs, uint cb)
uint inc = Math.Min(cb, BlockBytes);
fixed (byte* pinput = &input)
- fixed (Blake2sContext* s = &this)
+ fixed (Blake2sHashState* s = &this)
{
uint* sh = s->h;
byte* pin = pinput + offs;
@@ -81,7 +89,7 @@ private void compress(ref byte input, uint offs, uint cb)
}
}
- public void Init(int digestLength = HashBytes, ReadOnlySpan<byte> key = default)
+ internal void Init(int digestLength = HashBytes, ReadOnlySpan<byte> key = default)
{
uint keylen = (uint)key.Length;
@@ -101,8 +109,10 @@ public void Init(int digestLength = HashBytes, ReadOnlySpan key = default)
}
}
+ ///
public void Update(ReadOnlySpan<byte> input)
{
+ if (outlen == 0) ThrowHelper.HashNotInitialized();
if (f[0] != 0) ThrowHelper.HashFinalized();
uint consumed = 0;
@@ -136,6 +146,7 @@ public void Update(ReadOnlySpan input)
}
}
+ ///
public void Update<T>(ReadOnlySpan<T> input) where T : struct
{
ThrowHelper.ThrowIfIsRefOrContainsRefs<T>();
@@ -143,6 +154,7 @@ public void Update(ReadOnlySpan input) where T : struct
Update(MemoryMarshal.AsBytes(input));
}
+ ///
public void Update<T>(T input) where T : struct
{
ThrowHelper.ThrowIfIsRefOrContainsRefs<T>();
@@ -167,6 +179,7 @@ public void Update(T input) where T : struct
private void finish(Span<byte> hash)
{
+ if (outlen == 0) ThrowHelper.HashNotInitialized();
if (f[0] != 0) ThrowHelper.HashFinalized();
if (c < BlockBytes)
@@ -178,6 +191,7 @@ private void finish(Span hash)
Unsafe.CopyBlockUnaligned(ref hash[0], ref Unsafe.As<uint, byte>(ref h[0]), outlen);
}
+ ///
public byte[] Finish()
{
byte[] hash = new byte[outlen];
@@ -186,6 +200,15 @@ public byte[] Finish()
return hash;
}
+ ///
+ public void Finish(Span<byte> output)
+ {
+ if ((uint)output.Length < outlen) ThrowHelper.OutputTooSmall(DigestLength);
+
+ finish(output);
+ }
+
+ ///
public bool TryFinish(Span<byte> output, out int bytesWritten)
{
if ((uint)output.Length < outlen)
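
The generic Update<T> overloads restored above accept any value type without reference fields (enforced by ThrowIfIsRefOrContainsRefs<T>); the value's raw in-memory bytes are hashed, so the result depends on layout and endianness. A hedged sketch, assuming BLAKE2_PUBLIC; names illustrative:

using System;
using Blake2Fast;

static class GenericUpdateSketch
{
    static byte[] HashHeaderAndPayload(ulong sequenceNumber, ReadOnlySpan<byte> payload)
    {
        var hasher = Blake2s.CreateIncrementalHasher();
        hasher.Update(sequenceNumber); // hashes the 8 raw bytes of the ulong (endianness-dependent)
        hasher.Update(payload);        // hashes the payload bytes
        return hasher.Finish();
    }
}
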
diff --git a/src/Blake2Fast/Blake2s/Blake2sContext.tt b/src/Blake2Fast/Blake2s/Blake2sHashState.tt
similarity index 100%
rename from src/Blake2Fast/Blake2s/Blake2sContext.tt
rename to src/Blake2Fast/Blake2s/Blake2sHashState.tt
diff --git a/src/Blake2Fast/Blake2s/Blake2sScalar.cs b/src/Blake2Fast/Blake2s/Blake2sScalar.cs
index d88b653..d0eb7a2 100644
--- a/src/Blake2Fast/Blake2s/Blake2sScalar.cs
+++ b/src/Blake2Fast/Blake2s/Blake2sScalar.cs
@@ -5,9 +5,14 @@
//
//------------------------------------------------------------------------------
-namespace Blake2Fast
+namespace Blake2Fast.Implementation
{
- unsafe internal partial struct Blake2sContext
+#if BLAKE2_PUBLIC
+ public
+#else
+ internal
+#endif
+ unsafe partial struct Blake2sHashState
{
private static void mixScalar(uint* sh, uint* m)
{
diff --git a/src/Blake2Fast/Blake2s/Blake2sSse4.cs b/src/Blake2Fast/Blake2s/Blake2sSse4.cs
index f2df4fd..57e7aa9 100644
--- a/src/Blake2Fast/Blake2s/Blake2sSse4.cs
+++ b/src/Blake2Fast/Blake2s/Blake2sSse4.cs
@@ -11,9 +11,14 @@
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
-namespace Blake2Fast
+namespace Blake2Fast.Implementation
{
- unsafe internal partial struct Blake2sContext
+#if BLAKE2_PUBLIC
+ public
+#else
+ internal
+#endif
+ unsafe partial struct Blake2sHashState
{
// SIMD algorithm described in https://eprint.iacr.org/2012/275.pdf
#if !HWINTRINSICS_EXP
diff --git a/src/Blake2Fast/Blake2s/Blake2sSse4.tt b/src/Blake2Fast/Blake2s/Blake2sSse4.tt
index 8bc2d16..d8d4c93 100644
--- a/src/Blake2Fast/Blake2s/Blake2sSse4.tt
+++ b/src/Blake2Fast/Blake2s/Blake2sSse4.tt
@@ -13,9 +13,14 @@ using System.Runtime.Intrinsics.X86;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
-namespace Blake2Fast
+namespace Blake2Fast.Implementation
{
- unsafe internal partial struct Blake2sContext
+#if BLAKE2_PUBLIC
+ public
+#else
+ internal
+#endif
+ unsafe partial struct Blake2sHashState
{
// SIMD algorithm described in https://eprint.iacr.org/2012/275.pdf
#if !HWINTRINSICS_EXP
diff --git a/src/Blake2Fast/IBlake2Incremental.cs b/src/Blake2Fast/IBlake2Incremental.cs
index dfe3a28..b5f3afc 100644
--- a/src/Blake2Fast/IBlake2Incremental.cs
+++ b/src/Blake2Fast/IBlake2Incremental.cs
@@ -1,7 +1,19 @@
using System;
+using System.Runtime.CompilerServices;
namespace Blake2Fast
{
+#if BLAKE2_PUBLIC
+ namespace Implementation
+ {
+ ///
+ /// Contains BLAKE2 hash state implementation structs. Use the CreateIncrementalHasher factory methods
+ /// to create and initialize the state structs. The state structs will not be usable if created directly.
+ ///
+ [CompilerGenerated] internal class NamespaceDoc { }
+ }
+#endif
+
/// Defines an incremental BLAKE2 hashing operation.
/// Allows the hash to be computed as portions of the message become available, rather than all at once.
#if BLAKE2_PUBLIC
@@ -35,6 +47,9 @@ interface IBlake2Incremental
/// The computed hash digest.
byte[] Finish();
+ ///
+ void Finish(Span<byte> output);
+
/// Finalize the hash, and copy the computed digest to <paramref name="output" />.
/// The buffer into which the hash digest should be written. The buffer must have a capacity of at least <see cref="DigestLength" /> bytes for the method to succeed.
/// On return, contains the number of bytes written to <paramref name="output" />.
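
Sketch of algorithm-agnostic code written against IBlake2Incremental; passing the state struct through the interface boxes it, which is the trade-off the concrete-return-type change avoids on the direct path. Hedged example assuming a BLAKE2_PUBLIC build; names illustrative:

using System;
using Blake2Fast;

static class InterfaceSketch
{
    static byte[] HashSegments(IBlake2Incremental hasher, params byte[][] segments)
    {
        // The boxed state is mutated in place on the heap, so Update/Finish behave
        // the same as on the unboxed struct, just with one allocation and virtual calls.
        foreach (byte[] segment in segments)
            hasher.Update(segment);
        return hasher.Finish();
    }

    static byte[] Example(byte[] a, byte[] b) =>
        HashSegments(Blake2b.CreateIncrementalHasher(), a, b);
}
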
diff --git a/src/Blake2Fast/ThrowHelper.cs b/src/Blake2Fast/ThrowHelper.cs
index 4cd585c..470202e 100644
--- a/src/Blake2Fast/ThrowHelper.cs
+++ b/src/Blake2Fast/ThrowHelper.cs
@@ -34,16 +34,19 @@ public static void ThrowIfIsRefOrContainsRefs()
#else
TypeCache<T>.IsReferenceOrContainsReferences
#endif
- )
- throw new NotSupportedException("This method may only be used with value types that do not contain reference type fields.");
+ ) throw new NotSupportedException("This method may only be used with value types that do not contain reference type fields.");
}
public static void HashFinalized() => throw new InvalidOperationException("Hash has already been finalized.");
+ public static void HashNotInitialized() => throw new InvalidOperationException("Hash not initialized. Do not create the state struct instance directly; use CreateIncrementalHasher.");
+
public static void NoBigEndian() => throw new PlatformNotSupportedException("Big-endian platforms not supported");
public static void DigestInvalidLength(int max) => throw new ArgumentOutOfRangeException("digestLength", $"Value must be between 1 and {max}");
public static void KeyTooLong(int max) => throw new ArgumentException($"Key must be between 0 and {max} bytes in length", "key");
+
+ public static void OutputTooSmall(int min) => throw new ArgumentException($"Output must be at least {min} bytes in length", "output");
}
}
\ No newline at end of file
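
The new HashNotInitialized guard means a default-constructed state struct fails fast instead of producing a bogus digest. A hedged sketch of that failure mode, assuming the Implementation namespace is visible (BLAKE2_PUBLIC):

using System;
using Blake2Fast.Implementation;

static class GuardSketch
{
    static void Demo()
    {
        // Deliberately wrong: the struct was never initialized by a factory method,
        // so outlen == 0 and Update throws before touching any state.
        var uninitialized = default(Blake2bHashState);
        try
        {
            uninitialized.Update(new byte[] { 1, 2, 3 });
        }
        catch (InvalidOperationException)
        {
            // "Hash not initialized. Do not create the state struct instance directly; use CreateIncrementalHasher."
        }
    }
}
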
diff --git a/src/Blake2Fast/_Blake2Api.ttinclude b/src/Blake2Fast/_Blake2Api.ttinclude
index 493583e..f5922a4 100644
--- a/src/Blake2Fast/_Blake2Api.ttinclude
+++ b/src/Blake2Fast/_Blake2Api.ttinclude
@@ -12,6 +12,8 @@ using System;
using System.Security.Cryptography;
#endif
+using Blake2Fast.Implementation;
+
namespace Blake2Fast
{
/// Static helper methods for BLAKE2<#= alg.suffix #> hashing.
@@ -23,7 +25,7 @@ namespace Blake2Fast
static class Blake2<#= alg.suffix #>
{
/// The default hash digest length in bytes. For BLAKE2<#= alg.suffix #>, this value is <#= alg.bits #>.
- public const int DefaultDigestLength = Blake2<#= alg.suffix #>Context.HashBytes;
+ public const int DefaultDigestLength = Blake2<#= alg.suffix #>HashState.HashBytes;
///
public static byte[] ComputeHash(ReadOnlySpan<byte> input) => ComputeHash(DefaultDigestLength, default, input);
@@ -42,10 +44,10 @@ namespace Blake2Fast
/// The computed hash digest from the message bytes in <paramref name="input" />.
public static byte[] ComputeHash(int digestLength, ReadOnlySpan<byte> key, ReadOnlySpan<byte> input)
{
- var ctx = default(Blake2<#= alg.suffix #>Context);
- ctx.Init(digestLength, key);
- ctx.Update(input);
- return ctx.Finish();
+ var hs = default(Blake2<#= alg.suffix #>HashState);
+ hs.Init(digestLength, key);
+ hs.Update(input);
+ return hs.Finish();
}
///
@@ -69,31 +71,31 @@ namespace Blake2Fast
if (output.Length < digestLength)
throw new ArgumentException($"Output buffer must have a capacity of at least {digestLength} bytes.", nameof(output));
- var ctx = default(Blake2<#= alg.suffix #>Context);
- ctx.Init(digestLength, key);
- ctx.Update(input);
- ctx.TryFinish(output, out int _);
+ var hs = default(Blake2<#= alg.suffix #>HashState);
+ hs.Init(digestLength, key);
+ hs.Update(input);
+ hs.Finish(output);
}
///
- public static IBlake2Incremental CreateIncrementalHasher() => CreateIncrementalHasher(DefaultDigestLength, default);
+ public static Blake2<#= alg.suffix #>HashState CreateIncrementalHasher() => CreateIncrementalHasher(DefaultDigestLength, default);
///
- public static IBlake2Incremental CreateIncrementalHasher(int digestLength) => CreateIncrementalHasher(digestLength, default);
+ public static Blake2<#= alg.suffix #>HashState CreateIncrementalHasher(int digestLength) => CreateIncrementalHasher(digestLength, default);
///
- public static IBlake2Incremental CreateIncrementalHasher(ReadOnlySpan<byte> key) => CreateIncrementalHasher(DefaultDigestLength, key);
+ public static Blake2<#= alg.suffix #>HashState CreateIncrementalHasher(ReadOnlySpan<byte> key) => CreateIncrementalHasher(DefaultDigestLength, key);
/// Create and initialize an incremental BLAKE2<#= alg.suffix #> hash computation.
/// If you will receive the input in segments rather than all at once, this is the most efficient way to calculate the hash.
/// The hash digest length in bytes. Valid values are 1 to <#= alg.bits #>.
/// 0 to <#= alg.bits #> bytes of input for initializing a keyed hash.
- /// An interface for updating and finalizing the hash.
- public static IBlake2Incremental CreateIncrementalHasher(int digestLength, ReadOnlySpan<byte> key)
+ /// An instance for updating and finalizing the hash.
+ public static Blake2<#= alg.suffix #>HashState CreateIncrementalHasher(int digestLength, ReadOnlySpan<byte> key)
{
- var ctx = default(Blake2<#= alg.suffix #>Context);
- ctx.Init(digestLength, key);
- return ctx;
+ var hs = default(Blake2<#= alg.suffix #>HashState);
+ hs.Init(digestLength, key);
+ return hs;
}
#if BLAKE2_CRYPTOGRAPHY
diff --git a/src/Blake2Fast/_Blake2Main.ttinclude b/src/Blake2Fast/_Blake2Main.ttinclude
index ea00408..a7cb5c2 100644
--- a/src/Blake2Fast/_Blake2Main.ttinclude
+++ b/src/Blake2Fast/_Blake2Main.ttinclude
@@ -13,19 +13,26 @@ using System.Runtime.CompilerServices;
using System.Runtime.Intrinsics.X86;
#endif
-namespace Blake2Fast
+namespace Blake2Fast.Implementation
{
- unsafe internal partial struct Blake2<#= alg.suffix #>Context : IBlake2Incremental
+ /// Defines the state associated with an incremental BLAKE2<#= alg.suffix #> hashing operation.
+ /// Instances of this struct must be created by <see cref="Blake2<#= alg.suffix #>.CreateIncrementalHasher()" />. An instance created directly will be unusable.
+#if BLAKE2_PUBLIC
+ public
+#else
+ internal
+#endif
+ unsafe partial struct Blake2<#= alg.suffix #>HashState : IBlake2Incremental
{
- public const int WordSize = sizeof(<#= alg.wtype #>);
- public const int BlockWords = 16;
- public const int BlockBytes = BlockWords * WordSize;
- public const int HashWords = 8;
- public const int HashBytes = HashWords * WordSize;
- public const int MaxKeyBytes = HashBytes;
+ internal const int WordSize = sizeof(<#= alg.wtype #>);
+ internal const int BlockWords = 16;
+ internal const int BlockBytes = BlockWords * WordSize;
+ internal const int HashWords = 8;
+ internal const int HashBytes = HashWords * WordSize;
+ internal const int MaxKeyBytes = HashBytes;
<#
- //public const int SaltBytes = 2 * WordSize;
- //public const int PersonalizationBytes = 2 * WordSize;
+ //internal const int SaltBytes = 2 * WordSize;
+ //internal const int PersonalizationBytes = 2 * WordSize;
#>
<#
@@ -76,6 +83,7 @@ if (alg.bits == 64) {
};
#endif
+ ///
public int DigestLength => (int)outlen;
private void compress(ref byte input, uint offs, uint cb)
@@ -83,7 +91,7 @@ if (alg.bits == 64) {
uint inc = Math.Min(cb, BlockBytes);
fixed (byte* pinput = &input)
- fixed (Blake2<#= alg.suffix #>Context* s = &this)
+ fixed (Blake2<#= alg.suffix #>HashState* s = &this)
{
<#= alg.wtype #>* sh = s->h;
byte* pin = pinput + offs;
@@ -132,7 +140,7 @@ if (alg.bits == 64) {
}
}
- public void Init(int digestLength = HashBytes, ReadOnlySpan<byte> key = default)
+ internal void Init(int digestLength = HashBytes, ReadOnlySpan<byte> key = default)
{
uint keylen = (uint)key.Length;
@@ -181,8 +189,10 @@ if (alg.bits == 64) {
}
}
+ ///
public void Update(ReadOnlySpan<byte> input)
{
+ if (outlen == 0) ThrowHelper.HashNotInitialized();
if (f[0] != 0) ThrowHelper.HashFinalized();
uint consumed = 0;
@@ -216,6 +226,7 @@ if (alg.bits == 64) {
}
}
+ ///
public void Update<T>(ReadOnlySpan<T> input) where T : struct
{
ThrowHelper.ThrowIfIsRefOrContainsRefs<T>();
@@ -223,6 +234,7 @@ if (alg.bits == 64) {
Update(MemoryMarshal.AsBytes(input));
}
+ ///
public void Update<T>(T input) where T : struct
{
ThrowHelper.ThrowIfIsRefOrContainsRefs<T>();
@@ -247,6 +259,7 @@ if (alg.bits == 64) {
private void finish(Span<byte> hash)
{
+ if (outlen == 0) ThrowHelper.HashNotInitialized();
if (f[0] != 0) ThrowHelper.HashFinalized();
if (c < BlockBytes)
@@ -270,6 +283,7 @@ if (alg.bits == 64) {
Unsafe.CopyBlockUnaligned(ref hash[0], ref Unsafe.As<<#= alg.wtype #>, byte>(ref h[0]), outlen);
}
+ ///
public byte[] Finish()
{
byte[] hash = new byte[outlen];
@@ -278,6 +292,15 @@ if (alg.bits == 64) {
return hash;
}
+ ///
+ public void Finish(Span<byte> output)
+ {
+ if ((uint)output.Length < outlen) ThrowHelper.OutputTooSmall(DigestLength);
+
+ finish(output);
+ }
+
+ ///
public bool TryFinish(Span<byte> output, out int bytesWritten)
{
if ((uint)output.Length < outlen)
diff --git a/src/Blake2Fast/_Blake2Scalar.ttinclude b/src/Blake2Fast/_Blake2Scalar.ttinclude
index 9b2e7d2..886398a 100644
--- a/src/Blake2Fast/_Blake2Scalar.ttinclude
+++ b/src/Blake2Fast/_Blake2Scalar.ttinclude
@@ -7,9 +7,14 @@
//
//------------------------------------------------------------------------------
-namespace Blake2Fast
+namespace Blake2Fast.Implementation
{
- unsafe internal partial struct Blake2<#= alg.suffix #>Context
+#if BLAKE2_PUBLIC
+ public
+#else
+ internal
+#endif
+ unsafe partial struct Blake2<#= alg.suffix #>HashState
{
private static void mixScalar(<#= alg.wtype #>* sh, <#= alg.wtype #>* m)
{
diff --git a/tests/Blake2.Bench/Blake2.Bench.csproj b/tests/Blake2.Bench/Blake2.Bench.csproj
index 058238d..4928a2c 100644
--- a/tests/Blake2.Bench/Blake2.Bench.csproj
+++ b/tests/Blake2.Bench/Blake2.Bench.csproj
@@ -16,7 +16,7 @@
-
+