//! Portable HighwayHash implementation (`highway/portable.rs`).

1use crate::internal::{HashPacket, PACKET_SIZE};
2use crate::key::Key;
3use crate::traits::HighwayHash;
4
5/// Hardware agnostic HighwayHash implementation.
6///
7/// "Portable" refers to being able to run on any platform Rust will run on, and
8/// is not referring to the output, as the HighwayHash is already hardware
9/// agnostic across all implementations.
10///
11/// The main reason for directly using `PortableHash` would be if avoiding
12/// `unsafe` code blocks is a top priority.
#[derive(Debug, Default, Clone)]
pub struct PortableHash {
    // Hash state: two vectors of four 64-bit lanes each, mixed every packet.
    pub(crate) v0: [u64; 4],
    pub(crate) v1: [u64; 4],
    // Multiplication state vectors, updated alongside v0/v1 in `update`.
    pub(crate) mul0: [u64; 4],
    pub(crate) mul1: [u64; 4],
    // Holds a partial input packet (fewer than PACKET_SIZE bytes) between
    // `append` calls; flushed via `update_remainder` at finalization.
    pub(crate) buffer: HashPacket,
}
21
22impl HighwayHash for PortableHash {
23    #[inline]
24    fn append(&mut self, data: &[u8]) {
25        self.append(data);
26    }
27
28    #[inline]
29    fn finalize64(mut self) -> u64 {
30        Self::finalize64(&mut self)
31    }
32
33    #[inline]
34    fn finalize128(mut self) -> [u64; 2] {
35        Self::finalize128(&mut self)
36    }
37
38    #[inline]
39    fn finalize256(mut self) -> [u64; 4] {
40        Self::finalize256(&mut self)
41    }
42
43    #[inline]
44    fn checkpoint(&self) -> [u8; 164] {
45        let mut result = [0u8; 164];
46        let mut cursor = &mut result[..];
47
48        // Write out the state in 8 * 4 * 4 bytes = 128 bytes
49        for array in [&self.v0, &self.v1, &self.mul0, &self.mul1] {
50            for &x in array {
51                let (bucket, rest) = cursor.split_at_mut(core::mem::size_of::<u64>());
52                bucket.copy_from_slice(&x.to_le_bytes());
53                cursor = rest;
54            }
55        }
56
57        let (buffered, rest) = cursor.split_at_mut(PACKET_SIZE);
58        buffered.copy_from_slice(&self.buffer.buf);
59        rest.copy_from_slice(&(self.buffer.len() as u32).to_le_bytes());
60        result
61    }
62}
63
impl PortableHash {
    /// Create a new `PortableHash` from a `Key`
    #[must_use]
    pub fn new(key: Key) -> Self {
        // Fixed initialization constants; the key is folded in by XOR below,
        // with `v1` seeing a 32-bit-rotated copy of each key word.
        let mul0 = [
            0xdbe6_d5d5_fe4c_ce2f,
            0xa409_3822_299f_31d0,
            0x1319_8a2e_0370_7344,
            0x243f_6a88_85a3_08d3,
        ];
        let mul1 = [
            0x3bd3_9e10_cb0e_f593,
            0xc0ac_f169_b5f1_8a8c,
            0xbe54_66cf_34e9_0c6c,
            0x4528_21e6_38d0_1377,
        ];

        PortableHash {
            v0: [
                mul0[0] ^ key[0],
                mul0[1] ^ key[1],
                mul0[2] ^ key[2],
                mul0[3] ^ key[3],
            ],
            v1: [
                mul1[0] ^ key[0].rotate_left(32),
                mul1[1] ^ key[1].rotate_left(32),
                mul1[2] ^ key[2].rotate_left(32),
                mul1[3] ^ key[3].rotate_left(32),
            ],
            mul0,
            mul1,
            buffer: HashPacket::default(),
        }
    }

    /// Create hasher from checkpointed state
    ///
    /// Inverse of `HighwayHash::checkpoint`: reads 16 little-endian u64 state
    /// words (128 bytes), then the packet buffer (`PACKET_SIZE` bytes), then
    /// the buffered length as a little-endian u32.
    #[must_use]
    pub fn from_checkpoint(data: [u8; 164]) -> Self {
        let mut cursor = &data[..];
        let mut v0 = [0u64; 4];
        let mut v1 = [0u64; 4];
        let mut mul0 = [0u64; 4];
        let mut mul1 = [0u64; 4];

        // Read the four state vectors in the same order `checkpoint` wrote them.
        for array in [&mut v0, &mut v1, &mut mul0, &mut mul1] {
            for state in array.iter_mut() {
                let (x, rest) = cursor.split_at(core::mem::size_of::<u64>());
                *state = u64::from_le_bytes([x[0], x[1], x[2], x[3], x[4], x[5], x[6], x[7]]);
                cursor = rest;
            }
        }

        let (buffered, rest) = cursor.split_at(PACKET_SIZE);
        let mut buffer = HashPacket::default();

        let (len, _) = rest.split_at(core::mem::size_of::<u32>());
        let len = u32::from_le_bytes([len[0], len[1], len[2], len[3]]);
        // `.min(buffered.len())` clamps a corrupt/oversized length field so the
        // slice below can never go out of bounds.
        buffer.fill(&buffered[..(len as usize).min(buffered.len())]);

        PortableHash {
            v0,
            v1,
            mul0,
            mul1,
            buffer,
        }
    }

    /// Finalizes the hash to 64 bits: flushes any buffered partial packet,
    /// runs 4 permute-and-update rounds, then combines the first lane of each
    /// state vector. Mutates the state; intended to be called once.
    pub(crate) fn finalize64(&mut self) -> u64 {
        if !self.buffer.is_empty() {
            self.update_remainder();
        }

        for _i in 0..4 {
            self.permute_and_update();
        }

        self.v0[0]
            .wrapping_add(self.v1[0])
            .wrapping_add(self.mul0[0])
            .wrapping_add(self.mul1[0])
    }

    /// Finalizes the hash to 128 bits: like `finalize64`, but with 6 rounds
    /// and a cross-lane combination for each output word.
    pub(crate) fn finalize128(&mut self) -> [u64; 2] {
        if !self.buffer.is_empty() {
            self.update_remainder();
        }

        for _i in 0..6 {
            self.permute_and_update();
        }

        let low = self.v0[0]
            .wrapping_add(self.mul0[0])
            .wrapping_add(self.v1[2])
            .wrapping_add(self.mul1[2]);

        let high = self.v0[1]
            .wrapping_add(self.mul0[1])
            .wrapping_add(self.v1[3])
            .wrapping_add(self.mul1[3]);

        [low, high]
    }

    /// Finalizes the hash to 256 bits: 10 rounds, then two modular reductions
    /// fold the 4 + 4 combined state words into four output words.
    pub(crate) fn finalize256(&mut self) -> [u64; 4] {
        if !self.buffer.is_empty() {
            self.update_remainder();
        }

        for _i in 0..10 {
            self.permute_and_update();
        }

        let (lowest, low) = PortableHash::module_reduction(
            self.v1[1].wrapping_add(self.mul1[1]),
            self.v1[0].wrapping_add(self.mul1[0]),
            self.v0[1].wrapping_add(self.mul0[1]),
            self.v0[0].wrapping_add(self.mul0[0]),
        );
        let (high, highest) = PortableHash::module_reduction(
            self.v1[3].wrapping_add(self.mul1[3]),
            self.v1[2].wrapping_add(self.mul1[2]),
            self.v0[3].wrapping_add(self.mul0[3]),
            self.v0[2].wrapping_add(self.mul0[2]),
        );

        [lowest, low, high, highest]
    }

    /// Folds a 256-bit value (`a3` high .. `a0` low) down to 128 bits: the top
    /// two bits of `a3` are cleared, then the 128-bit top half — shifted left
    /// by 1 and by 2 bits — is XORed into the bottom half. Returns
    /// `(low, high)`.
    const fn module_reduction(a3_unmasked: u64, a2: u64, a1: u64, a0: u64) -> (u64, u64) {
        let a3 = a3_unmasked & 0x3FFF_FFFF_FFFF_FFFF;
        let high = a1 ^ ((a3 << 1) | (a2 >> 63)) ^ ((a3 << 2) | (a2 >> 62));
        let low = a0 ^ (a2 << 1) ^ (a2 << 2);
        (low, high)
    }

    /// Swaps the upper and lower 128-bit halves of `v` and rotates every
    /// 64-bit lane by 32 bits (swapping its 32-bit halves).
    const fn permute(v: &[u64; 4]) -> [u64; 4] {
        [
            v[2].rotate_left(32),
            v[3].rotate_left(32),
            v[0].rotate_left(32),
            v[1].rotate_left(32),
        ]
    }

    /// One finalization round: feed a permutation of `v0` back through `update`.
    fn permute_and_update(&mut self) {
        let permuted: [u64; 4] = PortableHash::permute(&self.v0);
        self.update(permuted);
    }

    /// Core compression round over one packet, presented as four u64 lanes:
    /// accumulate into v1, cross-multiply 32-bit halves into mul0/mul1, then
    /// zipper-merge bytes back into v0/v1.
    fn update(&mut self, lanes: [u64; 4]) {
        // v1 += input lanes.
        for (i, lane) in lanes.iter().enumerate() {
            self.v1[i] = self.v1[i].wrapping_add(*lane);
        }

        // v1 += mul0.
        for i in 0..4 {
            self.v1[i] = self.v1[i].wrapping_add(self.mul0[i]);
        }

        // mul0 ^= low32(v1) * high32(v0), per lane.
        for i in 0..4 {
            self.mul0[i] ^= (self.v1[i] & 0xffff_ffff).wrapping_mul(self.v0[i] >> 32);
        }

        // v0 += mul1.
        for i in 0..4 {
            self.v0[i] = self.v0[i].wrapping_add(self.mul1[i]);
        }

        // mul1 ^= low32(v0) * high32(v1), per lane.
        for i in 0..4 {
            self.mul1[i] ^= (self.v0[i] & 0xffff_ffff).wrapping_mul(self.v1[i] >> 32);
        }

        // Byte-shuffle each vector's lane pairs into the other vector.
        PortableHash::zipper_merge_and_add(self.v1[1], self.v1[0], &mut self.v0, 1, 0);
        PortableHash::zipper_merge_and_add(self.v1[3], self.v1[2], &mut self.v0, 3, 2);
        PortableHash::zipper_merge_and_add(self.v0[1], self.v0[0], &mut self.v1, 1, 0);
        PortableHash::zipper_merge_and_add(self.v0[3], self.v0[2], &mut self.v1, 3, 2);
    }

    /// "Zipper merge": interleaves selected bytes of `v0` and `v1` via
    /// mask-and-shift and adds the two merged words into `lane[add0]` and
    /// `lane[add1]`, spreading entropy across byte positions.
    fn zipper_merge_and_add(v1: u64, v0: u64, lane: &mut [u64; 4], add1: usize, add0: usize) {
        lane[add0] = lane[add0].wrapping_add(
            (((v0 & 0xff00_0000) | (v1 & 0x00ff_0000_0000)) >> 24)
                | (((v0 & 0xff00_0000_0000) | (v1 & 0x00ff_0000_0000_0000)) >> 16)
                | (v0 & 0x00ff_0000)
                | ((v0 & 0xff00) << 32)
                | ((v1 & 0xff00_0000_0000_0000) >> 8)
                | (v0 << 56),
        );
        lane[add1] = lane[add1].wrapping_add(
            (((v1 & 0xff00_0000) | (v0 & 0x00ff_0000_0000)) >> 24)
                | (v1 & 0x00ff_0000)
                | ((v1 & 0xff00_0000_0000) >> 16)
                | ((v1 & 0xff00) << 24)
                | ((v0 & 0x00ff_0000_0000_0000) >> 8)
                | ((v1 & 0xff) << 48)
                | (v0 & 0xff00_0000_0000_0000),
        );
    }

    /// Reads up to four little-endian u64 lanes from `d` (callers pass a full
    /// 32-byte packet); any lanes without a complete 8-byte chunk stay zero.
    fn data_to_lanes(d: &[u8]) -> [u64; 4] {
        let mut result = [0u64; 4];
        for (x, dest) in d.chunks_exact(8).zip(result.iter_mut()) {
            *dest = u64::from_le_bytes([x[0], x[1], x[2], x[3], x[4], x[5], x[6], x[7]]);
        }
        result
    }

    /// Rotates each 32-bit half of every lane left by `count` bits.
    ///
    /// NOTE(review): the `32 - count` shift assumes `1 <= count <= 31`;
    /// callers pass the non-empty remainder length, which stays in that range
    /// as long as the buffer never reaches a full `PACKET_SIZE` — TODO confirm
    /// against `HashPacket`'s fill semantics.
    fn rotate_32_by(count: u64, lanes: &mut [u64; 4]) {
        for lane in lanes.iter_mut() {
            let half0: u32 = *lane as u32;
            let half1: u32 = (*lane >> 32) as u32;
            *lane = u64::from((half0 << count) | (half0 >> (32 - count)));
            *lane |= u64::from((half1 << count) | (half1 >> (32 - count))) << 32;
        }
    }

    /// Mixes the remainder length into the state before hashing the padded
    /// final packet: adds `size` to both halves of each v0 lane and rotates
    /// v1's 32-bit halves by `size`.
    fn update_lanes(&mut self, size: u64) {
        for i in 0..4 {
            self.v0[i] = self.v0[i].wrapping_add((size << 32) + size);
        }

        PortableHash::rotate_32_by(size, &mut self.v1);
    }

    /// Pads a partial final block (< 32 bytes) into a full 32-byte packet.
    ///
    /// The 4-byte-aligned prefix is copied verbatim. For lengths with bit 4
    /// set (16..=31) the last four input bytes are duplicated at
    /// `packet[28..]`; otherwise, when a ragged tail exists, three bytes
    /// selected from it are placed at `packet[16..19]`.
    fn remainder(bytes: &[u8]) -> [u8; 32] {
        let mut packet: [u8; 32] = [0u8; 32];
        // Defensive guard: this is only reachable on an internal logic error,
        // so debug builds assert and release builds return a zero packet.
        if bytes.len() > packet.len() {
            debug_assert!(false, "remainder bytes must be less than 32");
            return packet;
        }

        let size_mod4 = bytes.len() & 3;
        let remainder_jump = bytes.len() & !3;
        let remainder = &bytes[remainder_jump..];
        let size = bytes.len() as u64;

        packet[..remainder_jump].clone_from_slice(&bytes[..remainder_jump]);
        if size & 16 != 0 {
            // Length 16..=31: copy the final four input bytes (which may
            // overlap the aligned prefix) into the packet's last four bytes.
            let muxed = packet[28..]
                .iter_mut()
                .zip(&bytes[remainder_jump + size_mod4 - 4..]);

            for (p, b) in muxed {
                *p = *b;
            }
        } else if size_mod4 != 0 {
            // Length < 16 with a ragged tail: place first, middle, and last
            // tail bytes at fixed offsets.
            packet[16] = remainder[0];
            packet[16 + 1] = remainder[size_mod4 >> 1];
            packet[16 + 2] = remainder[size_mod4 - 1];
        }

        packet
    }

    /// Flushes the buffered partial packet: mix in its length, then hash the
    /// padded packet.
    fn update_remainder(&mut self) {
        let size = self.buffer.len() as u64;
        self.update_lanes(size);
        let packet = PortableHash::remainder(self.buffer.as_slice());
        self.update(PortableHash::data_to_lanes(&packet));
    }

    /// Streams input into the hash: whole `PACKET_SIZE` chunks are hashed
    /// immediately; any tail is buffered for the next call or finalization.
    fn append(&mut self, data: &[u8]) {
        if self.buffer.is_empty() {
            // Fast path: no pending bytes, consume full packets directly.
            let mut chunks = data.chunks_exact(PACKET_SIZE);
            for chunk in chunks.by_ref() {
                self.update(Self::data_to_lanes(chunk));
            }
            self.buffer.set_to(chunks.remainder());
        } else if let Some(tail) = self.buffer.fill(data) {
            // The buffer reached a full packet: hash it, then consume the
            // rest of `data` as above. If `fill` returned None, the bytes
            // were absorbed into the buffer and there is nothing else to do.
            self.update(Self::data_to_lanes(self.buffer.inner()));
            let mut chunks = tail.chunks_exact(PACKET_SIZE);
            for chunk in chunks.by_ref() {
                self.update(Self::data_to_lanes(chunk));
            }

            self.buffer.set_to(chunks.remainder());
        }
    }
}
343
// Macro-generated trait impls from elsewhere in the crate — presumably
// `std::io::Write` and `core::hash::Hasher`; confirm against the macro
// definitions.
impl_write!(PortableHash);
impl_hasher!(PortableHash);