aboutsummaryrefslogtreecommitdiffstats
path: root/contrib/tools/python3/src/Modules/_blake2/impl/blake2-impl.h
blob: 57943b868a3f09e0dc65f895085db2038af6a4b0 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
/*
   BLAKE2 reference source code package - optimized C implementations
 
   Written in 2012 by Samuel Neves <sneves@dei.uc.pt> 
 
   To the extent possible under law, the author(s) have dedicated all copyright 
   and related and neighboring rights to this software to the public domain 
   worldwide. This software is distributed without any warranty. 
 
   You should have received a copy of the CC0 Public Domain Dedication along with 
   this software. If not, see <http://creativecommons.org/publicdomain/zero/1.0/>. 
*/
#pragma once
#ifndef __BLAKE2_IMPL_H__
#define __BLAKE2_IMPL_H__

#if defined(_WIN32) || defined(WIN32) 
#include <windows.h> 
#endif 
 
#include <stddef.h> 
#include <stdint.h>
#include <string.h>

#define BLAKE2_IMPL_CAT(x,y) x ## y 
#define BLAKE2_IMPL_EVAL(x,y)  BLAKE2_IMPL_CAT(x,y) 
#define BLAKE2_IMPL_NAME(fun)  BLAKE2_IMPL_EVAL(fun, SUFFIX) 
 
/* Load a 32-bit word from `src` in little-endian byte order.
   `src` may be unaligned; no alignment requirement is imposed. */
static inline uint32_t load32( const void *src )
{
#if defined(NATIVE_LITTLE_ENDIAN)
  /* Host is little-endian: memcpy preserves byte order and avoids the
     unaligned-access UB a pointer cast/dereference would invite. */
  uint32_t w;
  memcpy( &w, src, sizeof( w ) );
  return w;
#else
  /* Portable byte-by-byte assembly, independent of host endianness.
     Fix: keep the const qualifier — the original cast dropped it. */
  const uint8_t *p = ( const uint8_t * )src;
  uint32_t w = *p++;
  w |= ( uint32_t )( *p++ ) <<  8;
  w |= ( uint32_t )( *p++ ) << 16;
  w |= ( uint32_t )( *p++ ) << 24;
  return w;
#endif
}

/* Load a 64-bit word from `src` in little-endian byte order.
   `src` may be unaligned; no alignment requirement is imposed. */
static inline uint64_t load64( const void *src )
{
#if defined(NATIVE_LITTLE_ENDIAN)
  /* Host is little-endian: memcpy preserves byte order and avoids the
     unaligned-access UB a pointer cast/dereference would invite. */
  uint64_t w;
  memcpy( &w, src, sizeof( w ) );
  return w;
#else
  /* Portable byte-by-byte assembly, independent of host endianness.
     Fix: keep the const qualifier — the original cast dropped it. */
  const uint8_t *p = ( const uint8_t * )src;
  uint64_t w = *p++;
  w |= ( uint64_t )( *p++ ) <<  8;
  w |= ( uint64_t )( *p++ ) << 16;
  w |= ( uint64_t )( *p++ ) << 24;
  w |= ( uint64_t )( *p++ ) << 32;
  w |= ( uint64_t )( *p++ ) << 40;
  w |= ( uint64_t )( *p++ ) << 48;
  w |= ( uint64_t )( *p++ ) << 56;
  return w;
#endif
}

/* Store the 32-bit word `w` into `dst` as little-endian bytes.
   `dst` may be unaligned. */
static inline void store32( void *dst, uint32_t w )
{
#if defined(NATIVE_LITTLE_ENDIAN)
  /* Host byte order already matches; memcpy handles any alignment. */
  memcpy( dst, &w, sizeof( w ) );
#else
  /* Emit least-significant byte first, one byte per iteration. */
  uint8_t *out = ( uint8_t * )dst;
  unsigned i;
  for( i = 0; i < sizeof( w ); ++i )
  {
    out[i] = ( uint8_t )( w & 0xFF );
    w >>= 8;
  }
#endif
}

/* Store the 64-bit word `w` into `dst` as little-endian bytes.
   `dst` may be unaligned. */
static inline void store64( void *dst, uint64_t w )
{
#if defined(NATIVE_LITTLE_ENDIAN)
  /* Host byte order already matches; memcpy handles any alignment. */
  memcpy( dst, &w, sizeof( w ) );
#else
  /* Emit least-significant byte first, one byte per iteration. */
  uint8_t *out = ( uint8_t * )dst;
  unsigned i;
  for( i = 0; i < sizeof( w ); ++i )
  {
    out[i] = ( uint8_t )( w & 0xFF );
    w >>= 8;
  }
#endif
}

/* Load a 48-bit little-endian value from `src` into the low six
   bytes of a uint64_t (upper 16 bits are zero). */
static inline uint64_t load48( const void *src )
{
  const uint8_t *bytes = ( const uint8_t * )src;
  uint64_t result = 0;
  int i;
  /* Fold from the most-significant byte (index 5) downward so each
     step shifts the accumulator left by one byte. */
  for( i = 5; i >= 0; --i )
  {
    result = ( result << 8 ) | bytes[i];
  }
  return result;
}

/* Store the low 48 bits of `w` into `dst` as six little-endian
   bytes; the upper 16 bits of `w` are ignored. */
static inline void store48( void *dst, uint64_t w )
{
  uint8_t *out = ( uint8_t * )dst;
  int i;
  for( i = 0; i < 6; ++i )
  {
    out[i] = ( uint8_t )( w & 0xFF );
    w >>= 8;
  }
}

/* Rotate `w` left by `c` bits (c in [0, 31]).
   Fix: mask the complementary shift so c == 0 does not produce a
   right shift by 32, which is undefined behavior in C. */
static inline uint32_t rotl32( const uint32_t w, const unsigned c )
{
  return ( w << c ) | ( w >> ( ( 32 - c ) & 31 ) );
}

/* Rotate `w` left by `c` bits (c in [0, 63]).
   Fix: mask the complementary shift so c == 0 does not produce a
   right shift by 64, which is undefined behavior in C. */
static inline uint64_t rotl64( const uint64_t w, const unsigned c )
{
  return ( w << c ) | ( w >> ( ( 64 - c ) & 63 ) );
}

/* Rotate `w` right by `c` bits (c in [0, 31]).
   Fix: mask the complementary shift so c == 0 does not produce a
   left shift by 32, which is undefined behavior in C. */
static inline uint32_t rotr32( const uint32_t w, const unsigned c )
{
  return ( w >> c ) | ( w << ( ( 32 - c ) & 31 ) );
}

/* Rotate `w` right by `c` bits (c in [0, 63]).
   Fix: mask the complementary shift so c == 0 does not produce a
   left shift by 64, which is undefined behavior in C. */
static inline uint64_t rotr64( const uint64_t w, const unsigned c )
{
  return ( w >> c ) | ( w << ( ( 64 - c ) & 63 ) );
}

/* prevents compiler optimizing out memset() */
/* Zeroize `n` bytes at `v` in a way the compiler cannot optimize away.
   Used to scrub key material and hash state; a plain memset before the
   buffer goes out of scope may be elided as a dead store. The branch
   taken depends on build-time macros not visible in this header. */
static inline void secure_zero_memory(void *v, size_t n)
{
#if defined(_WIN32) || defined(WIN32)
  /* Windows: SecureZeroMemory is documented never to be optimized out. */
  SecureZeroMemory(v, n);
#elif defined(__hpux)
  /* HP-UX: call memset through a volatile function pointer so the
     compiler cannot prove the call is removable. */
  static void *(*const volatile memset_v)(void *, int, size_t) = &memset;
  memset_v(v, 0, n);
#else
// prioritize first the general C11 call
#if defined(HAVE_MEMSET_S)
  /* C11 Annex K: memset_s may not be elided. */
  memset_s(v, n, 0, n);
#elif defined(HAVE_EXPLICIT_BZERO)
  /* BSD/glibc extension with the same non-elision guarantee. */
  explicit_bzero(v, n);
#elif defined(HAVE_EXPLICIT_MEMSET)
  /* NetBSD extension with the same non-elision guarantee. */
  explicit_memset(v, 0, n);
#else
  /* Fallback: plain memset followed by an asm memory barrier that
     makes `v`'s contents observable, so the store cannot be dropped.
     GCC/Clang-specific; assumed available on all remaining targets. */
  memset(v, 0, n);
  __asm__ __volatile__("" :: "r"(v) : "memory");
#endif
#endif
}

#endif