diff --git a/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/aead-common.c b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/aead-common.c new file mode 100644 index 0000000..84fc53a --- /dev/null +++ b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/aead-common.c @@ -0,0 +1,69 @@ +/* + * Copyright (C) 2020 Southern Storm Software, Pty Ltd. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. 
/*
 * Constant-time authentication-tag comparison helpers for AEAD decryption.
 *
 * Both functions derive an all-ones byte mask when the two tags are equal
 * and an all-zeroes mask when they differ, with no data-dependent branches
 * so that the comparison does not leak timing information.
 *
 * Fix over the previous version: the mask was computed as the signed
 * expression "(accum - 1) >> 8", which right-shifts a negative value when
 * the tags match.  That is implementation-defined behaviour in C
 * (C11 6.5.7p5).  The mask is now derived with unsigned arithmetic, whose
 * wrap-around is fully defined, while preserving the exact contract:
 * return 0 and leave the plaintext intact on a match, return -1 and zero
 * the plaintext on a mismatch.
 */

/**
 * \brief Checks an authentication tag in constant time.
 *
 * \param plaintext Points to the plaintext data; zeroed on tag mismatch so
 * unauthenticated data is never handed back to the application.
 * \param plaintext_len Length of the plaintext in bytes.
 * \param tag1 First tag to compare.
 * \param tag2 Second tag to compare.
 * \param size Length of the tags in bytes.
 *
 * \return 0 if the tags match, or -1 if they do not.
 */
int aead_check_tag
    (unsigned char *plaintext, unsigned long long plaintext_len,
     const unsigned char *tag1, const unsigned char *tag2,
     unsigned size)
{
    /* OR together the XOR differences of all tag bytes; zero means equal */
    unsigned accum = 0;
    while (size > 0) {
        accum |= (unsigned)(*tag1++ ^ *tag2++);
        --size;
    }

    /* accum is in [0, 255].  (accum - 1U) wraps to UINT_MAX only when
     * accum == 0, so the shift yields an all-ones value on a match and
     * zero on a mismatch.  Unsigned wrap-around is well-defined, unlike
     * the signed right shift used previously. */
    accum = (accum - 1U) >> 8;

    /* Destroy the plaintext if the tag match failed (mask is 0) */
    while (plaintext_len > 0) {
        *plaintext++ &= (unsigned char)accum;
        --plaintext_len;
    }

    /* Low bit of the mask is 1 on a match: 1 - 1 = 0; else 0 - 1 = -1 */
    return (int)(accum & 1U) - 1;
}

/**
 * \brief Checks an authentication tag in constant time, folding in the
 * result of a previous validity check.
 *
 * \param plaintext Points to the plaintext data; zeroed when the overall
 * check fails.
 * \param plaintext_len Length of the plaintext in bytes.
 * \param tag1 First tag to compare.
 * \param tag2 Second tag to compare.
 * \param size Length of the tags in bytes.
 * \param precheck Must be -1 if the previous check succeeded or 0 if it
 * failed; other values are outside the contract.
 *
 * \return 0 if the tags match and \a precheck was -1, or -1 otherwise.
 */
int aead_check_tag_precheck
    (unsigned char *plaintext, unsigned long long plaintext_len,
     const unsigned char *tag1, const unsigned char *tag2,
     unsigned size, int precheck)
{
    /* OR together the XOR differences of all tag bytes; zero means equal */
    unsigned accum = 0;
    while (size > 0) {
        accum |= (unsigned)(*tag1++ ^ *tag2++);
        --size;
    }

    /* All-ones on tag match, zero on mismatch; then AND with the previous
     * check's mask so a failed precheck forces a failure here too */
    accum = ((accum - 1U) >> 8) & (unsigned)precheck;

    /* Destroy the plaintext if the combined check failed */
    while (plaintext_len > 0) {
        *plaintext++ &= (unsigned char)accum;
        --plaintext_len;
    }

    /* 0 on success, -1 on failure */
    return (int)(accum & 1U) - 1;
}
+ * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ + +#ifndef LWCRYPTO_AEAD_COMMON_H +#define LWCRYPTO_AEAD_COMMON_H + +#include + +/** + * \file aead-common.h + * \brief Definitions that are common across AEAD schemes. + * + * AEAD stands for "Authenticated Encryption with Associated Data". + * It is a standard API pattern for securely encrypting and + * authenticating packets of data. + */ + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * \brief Encrypts and authenticates a packet with an AEAD scheme. + * + * \param c Buffer to receive the output. + * \param clen On exit, set to the length of the output which includes + * the ciphertext and the authentication tag. + * \param m Buffer that contains the plaintext message to encrypt. + * \param mlen Length of the plaintext message in bytes. + * \param ad Buffer that contains associated data to authenticate + * along with the packet but which does not need to be encrypted. 
+ * \param adlen Length of the associated data in bytes. + * \param nsec Secret nonce - normally not used by AEAD schemes. + * \param npub Points to the public nonce for the packet. + * \param k Points to the key to use to encrypt the packet. + * + * \return 0 on success, or a negative value if there was an error in + * the parameters. + */ +typedef int (*aead_cipher_encrypt_t) + (unsigned char *c, unsigned long long *clen, + const unsigned char *m, unsigned long long mlen, + const unsigned char *ad, unsigned long long adlen, + const unsigned char *nsec, + const unsigned char *npub, + const unsigned char *k); + +/** + * \brief Decrypts and authenticates a packet with an AEAD scheme. + * + * \param m Buffer to receive the plaintext message on output. + * \param mlen Receives the length of the plaintext message on output. + * \param nsec Secret nonce - normally not used by AEAD schemes. + * \param c Buffer that contains the ciphertext and authentication + * tag to decrypt. + * \param clen Length of the input data in bytes, which includes the + * ciphertext and the authentication tag. + * \param ad Buffer that contains associated data to authenticate + * along with the packet but which does not need to be encrypted. + * \param adlen Length of the associated data in bytes. + * \param npub Points to the public nonce for the packet. + * \param k Points to the key to use to decrypt the packet. + * + * \return 0 on success, -1 if the authentication tag was incorrect, + * or some other negative number if there was an error in the parameters. + */ +typedef int (*aead_cipher_decrypt_t) + (unsigned char *m, unsigned long long *mlen, + unsigned char *nsec, + const unsigned char *c, unsigned long long clen, + const unsigned char *ad, unsigned long long adlen, + const unsigned char *npub, + const unsigned char *k); + +/** + * \brief Hashes a block of input data. + * + * \param out Buffer to receive the hash output. + * \param in Points to the input data to be hashed. 
 * \param inlen Length of the input data in bytes.
 *
 * \return Returns zero on success or -1 if there was an error in the
 * parameters.
 */
typedef int (*aead_hash_t)
    (unsigned char *out, const unsigned char *in, unsigned long long inlen);

/**
 * \brief Initializes the state for a hashing operation.
 *
 * \param state Hash state to be initialized.
 */
typedef void (*aead_hash_init_t)(void *state);

/**
 * \brief Updates a hash state with more input data.
 *
 * \param state Hash state to be updated.
 * \param in Points to the input data to be incorporated into the state.
 * \param inlen Length of the input data to be incorporated into the state.
 */
typedef void (*aead_hash_update_t)
    (void *state, const unsigned char *in, unsigned long long inlen);

/**
 * \brief Returns the final hash value from a hashing operation.
 *
 * \param state Hash state to be finalized.
 * \param out Points to the output buffer to receive the hash value.
 */
typedef void (*aead_hash_finalize_t)(void *state, unsigned char *out);

/**
 * \brief Absorbs more input data into an XOF state.
 *
 * \param state XOF state to be updated.
 * \param in Points to the input data to be absorbed into the state.
 * \param inlen Length of the input data to be absorbed into the state.
 *
 * \sa ascon_xof_init(), ascon_xof_squeeze()
 */
typedef void (*aead_xof_absorb_t)
    (void *state, const unsigned char *in, unsigned long long inlen);

/**
 * \brief Squeezes output data from an XOF state.
 *
 * \param state XOF state to squeeze the output data from.
 * \param out Points to the output buffer to receive the squeezed data.
 * \param outlen Number of bytes of data to squeeze out of the state.
 */
typedef void (*aead_xof_squeeze_t)
    (void *state, unsigned char *out, unsigned long long outlen);

/**
 * \brief No special AEAD features.
 */
#define AEAD_FLAG_NONE 0x0000

/**
 * \brief The natural byte order of the AEAD cipher is little-endian.
 *
 * If this flag is not present, then the natural byte order of the
 * AEAD cipher should be assumed to be big-endian.
 *
 * The natural byte order may be useful when formatting packet sequence
 * numbers as nonces.  The application needs to know whether the sequence
 * number should be packed into the leading or trailing bytes of the nonce.
 */
#define AEAD_FLAG_LITTLE_ENDIAN 0x0001

/**
 * \brief Meta-information about an AEAD cipher.
 */
typedef struct
{
    const char *name;               /**< Name of the cipher */
    unsigned key_len;               /**< Length of the key in bytes */
    unsigned nonce_len;             /**< Length of the nonce in bytes */
    unsigned tag_len;               /**< Length of the tag in bytes */
    unsigned flags;                 /**< Flags for extra features */
    aead_cipher_encrypt_t encrypt;  /**< AEAD encryption function */
    aead_cipher_decrypt_t decrypt;  /**< AEAD decryption function */

} aead_cipher_t;

/**
 * \brief Meta-information about a hash algorithm that is related to an AEAD.
 *
 * Regular hash algorithms should provide the "hash", "init", "update",
 * and "finalize" functions.  Extensible Output Functions (XOF's) should
 * provide the "hash", "init", "absorb", and "squeeze" functions.
 */
typedef struct
{
    const char *name;               /**< Name of the hash algorithm */
    size_t state_size;              /**< Size of the incremental state structure */
    unsigned hash_len;              /**< Length of the hash in bytes */
    unsigned flags;                 /**< Flags for extra features */
    aead_hash_t hash;               /**< All in one hashing function */
    aead_hash_init_t init;          /**< Incremental hash/XOF init function */
    aead_hash_update_t update;      /**< Incremental hash update function */
    aead_hash_finalize_t finalize;  /**< Incremental hash finalize function */
    aead_xof_absorb_t absorb;       /**< Incremental XOF absorb function */
    aead_xof_squeeze_t squeeze;     /**< Incremental XOF squeeze function */

} aead_hash_algorithm_t;

/**
 * \brief Check an authentication tag in constant time.
 *
 * \param plaintext Points to the plaintext data.
 * \param plaintext_len Length of the plaintext in bytes.
 * \param tag1 First tag to compare.
 * \param tag2 Second tag to compare.
 * \param tag_len Length of the tags in bytes.
 *
 * \return Returns -1 if the tag check failed or 0 if the check succeeded.
 *
 * If the tag check fails, then the \a plaintext will also be zeroed to
 * prevent it from being used accidentally by the application when the
 * ciphertext was invalid.
 */
int aead_check_tag
    (unsigned char *plaintext, unsigned long long plaintext_len,
     const unsigned char *tag1, const unsigned char *tag2,
     unsigned tag_len);

/**
 * \brief Check an authentication tag in constant time with a previous check.
 *
 * \param plaintext Points to the plaintext data.
 * \param plaintext_len Length of the plaintext in bytes.
 * \param tag1 First tag to compare.
 * \param tag2 Second tag to compare.
 * \param tag_len Length of the tags in bytes.
 * \param precheck Set to -1 if previous check succeeded or 0 if it failed.
 *
 * \return Returns -1 if the tag check failed or 0 if the check succeeded.
 *
 * If the tag check fails, then the \a plaintext will also be zeroed to
 * prevent it from being used accidentally by the application when the
 * ciphertext was invalid.
 *
 * This version can be used to incorporate other information about the
 * correctness of the plaintext into the final result.
 */
int aead_check_tag_precheck
    (unsigned char *plaintext, unsigned long long plaintext_len,
     const unsigned char *tag1, const unsigned char *tag2,
     unsigned tag_len, int precheck);

#ifdef __cplusplus
}
#endif

#endif
diff --git a/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/api.h b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/api.h
new file mode 100644
index 0000000..fb1dab8
--- /dev/null
+++ b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/api.h
@@ -0,0 +1,5 @@
/* Algorithm parameters advertised to the NIST LWC / SUPERCOP test harness */
#define CRYPTO_KEYBYTES 32      /* 256-bit key */
#define CRYPTO_NSECBYTES 0      /* no secret nonce */
#define CRYPTO_NPUBBYTES 16     /* 128-bit public nonce */
#define CRYPTO_ABYTES 16        /* 128-bit authentication tag */
#define CRYPTO_NOOVERLAP 1      /* per harness convention - presumably in/out
                                 * buffers must not overlap; confirm against
                                 * the harness documentation */
diff --git a/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/encrypt.c b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/encrypt.c
new file mode 100644
index 0000000..53f563e
--- /dev/null
+++ b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/encrypt.c
@@ -0,0 +1,26 @@

#include "gimli24.h"

/* Thin adapter exposing the standard crypto_aead_encrypt() entry point;
 * simply forwards all arguments to the GIMLI-24 implementation. */
int crypto_aead_encrypt
    (unsigned char *c, unsigned long long *clen,
     const unsigned char *m, unsigned long long mlen,
     const unsigned char *ad, unsigned long long adlen,
     const unsigned char *nsec,
     const unsigned char *npub,
     const unsigned char *k)
{
    return gimli24_aead_encrypt
        (c, clen, m, mlen, ad, adlen, nsec, npub, k);
}

/* Thin adapter exposing the standard crypto_aead_decrypt() entry point;
 * simply forwards all arguments to the GIMLI-24 implementation. */
int crypto_aead_decrypt
    (unsigned char *m, unsigned long long *mlen,
     unsigned char *nsec,
     const unsigned char *c, unsigned long long clen,
     const unsigned char *ad, unsigned long long adlen,
     const unsigned char *npub,
     const unsigned char *k)
{
    return gimli24_aead_decrypt
        (m, mlen, nsec, c, clen, ad, adlen, npub, k);
}
diff --git a/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/gimli24.c b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/gimli24.c
new file mode 100644
index 0000000..4bc7d9f
--- /dev/null
+++ b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/gimli24.c
@@ -0,0 +1,330 @@
/*
 * Copyright (C) 2020 Southern Storm Software, Pty Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "gimli24.h"
#include "internal-gimli24.h"
#include <string.h> /* for memcpy()/memset() */

/**
 * \brief Meta-information table describing the GIMLI-24 AEAD cipher
 * (key/nonce/tag sizes and entry points) for generic dispatch code.
 */
aead_cipher_t const gimli24_cipher = {
    "GIMLI-24",
    GIMLI24_KEY_SIZE,
    GIMLI24_NONCE_SIZE,
    GIMLI24_TAG_SIZE,
    AEAD_FLAG_LITTLE_ENDIAN,
    gimli24_aead_encrypt,
    gimli24_aead_decrypt
};

/**
 * \brief Meta-information table for GIMLI-24-HASH; provides both the
 * fixed-output hash entry points and the XOF absorb/squeeze entry points.
 */
aead_hash_algorithm_t const gimli24_hash_algorithm = {
    "GIMLI-24-HASH",
    sizeof(gimli24_hash_state_t),
    GIMLI24_HASH_SIZE,
    AEAD_FLAG_LITTLE_ENDIAN,
    gimli24_hash,
    (aead_hash_init_t)gimli24_hash_init,
    (aead_hash_update_t)gimli24_hash_absorb,
    (aead_hash_finalize_t)gimli24_hash_finalize,
    (aead_xof_absorb_t)gimli24_hash_absorb,
    (aead_xof_squeeze_t)gimli24_hash_squeeze
};

/**
 * \brief Number of bytes of input or output data to process per block.
 */
#define GIMLI24_BLOCK_SIZE 16

/**
 * \brief Structure of the GIMLI-24 state as both an array of words
 * and an array of bytes.
+ */ +typedef union +{ + uint32_t words[12]; /**< Words in the state */ + uint8_t bytes[48]; /**< Bytes in the state */ + +} gimli24_state_t; + +/** + * \brief Absorbs data into a GIMLI-24 state. + * + * \param state The state to absorb the data into. + * \param data Points to the data to be absorbed. + * \param len Length of the data to be absorbed. + */ +static void gimli24_absorb + (gimli24_state_t *state, const unsigned char *data, unsigned long long len) +{ + unsigned temp; + while (len >= GIMLI24_BLOCK_SIZE) { + lw_xor_block(state->bytes, data, GIMLI24_BLOCK_SIZE); + gimli24_permute(state->words); + data += GIMLI24_BLOCK_SIZE; + len -= GIMLI24_BLOCK_SIZE; + } + temp = (unsigned)len; + lw_xor_block(state->bytes, data, temp); + state->bytes[temp] ^= 0x01; /* Padding */ + state->bytes[47] ^= 0x01; + gimli24_permute(state->words); +} + +/** + * \brief Encrypts a block of data with a GIMLI-24 state. + * + * \param state The state to encrypt with. + * \param dest Points to the destination buffer. + * \param src Points to the source buffer. + * \param len Length of the data to encrypt from \a src into \a dest. + */ +static void gimli24_encrypt + (gimli24_state_t *state, unsigned char *dest, + const unsigned char *src, unsigned long long len) +{ + unsigned temp; + while (len >= GIMLI24_BLOCK_SIZE) { + lw_xor_block_2_dest(dest, state->bytes, src, GIMLI24_BLOCK_SIZE); + gimli24_permute(state->words); + dest += GIMLI24_BLOCK_SIZE; + src += GIMLI24_BLOCK_SIZE; + len -= GIMLI24_BLOCK_SIZE; + } + temp = (unsigned)len; + lw_xor_block_2_dest(dest, state->bytes, src, temp); + state->bytes[temp] ^= 0x01; /* Padding */ + state->bytes[47] ^= 0x01; + gimli24_permute(state->words); +} + +/** + * \brief Decrypts a block of data with a GIMLI-24 state. + * + * \param state The state to decrypt with. + * \param dest Points to the destination buffer. + * \param src Points to the source buffer. + * \param len Length of the data to decrypt from \a src into \a dest. 
 */
static void gimli24_decrypt
    (gimli24_state_t *state, unsigned char *dest,
     const unsigned char *src, unsigned long long len)
{
    unsigned temp;
    while (len >= GIMLI24_BLOCK_SIZE) {
        /* NOTE(review): lw_xor_block_swap() appears to write the plaintext
         * to "dest" while replacing the state bytes with the incoming
         * ciphertext - confirm against internal-gimli24.h */
        lw_xor_block_swap(dest, state->bytes, src, GIMLI24_BLOCK_SIZE);
        gimli24_permute(state->words);
        dest += GIMLI24_BLOCK_SIZE;
        src += GIMLI24_BLOCK_SIZE;
        len -= GIMLI24_BLOCK_SIZE;
    }
    temp = (unsigned)len;
    lw_xor_block_swap(dest, state->bytes, src, temp);
    state->bytes[temp] ^= 0x01; /* Padding */
    state->bytes[47] ^= 0x01;
    gimli24_permute(state->words);
}

/**
 * \brief AEAD encryption: writes mlen bytes of ciphertext followed by a
 * 16-byte authentication tag to \a c.  Always returns 0.
 */
int gimli24_aead_encrypt
    (unsigned char *c, unsigned long long *clen,
     const unsigned char *m, unsigned long long mlen,
     const unsigned char *ad, unsigned long long adlen,
     const unsigned char *nsec,
     const unsigned char *npub,
     const unsigned char *k)
{
    gimli24_state_t state;
    (void)nsec;

    /* Set the length of the returned ciphertext */
    *clen = mlen + GIMLI24_TAG_SIZE;

    /* Format the initial GIMLI state from the nonce and the key */
    memcpy(state.words, npub, GIMLI24_NONCE_SIZE);
    memcpy(state.words + 4, k, GIMLI24_KEY_SIZE);

    /* Permute the initial state */
    gimli24_permute(state.words);

    /* Absorb the associated data */
    gimli24_absorb(&state, ad, adlen);

    /* Encrypt the plaintext to produce the ciphertext */
    gimli24_encrypt(&state, c, m, mlen);

    /* Generate the authentication tag at the end of the ciphertext */
    memcpy(c + mlen, state.bytes, GIMLI24_TAG_SIZE);
    return 0;
}

/**
 * \brief AEAD decryption: recovers the plaintext and verifies the trailing
 * 16-byte tag.  Returns 0 on success or -1 on authentication failure
 * (in which case the plaintext buffer is zeroed by aead_check_tag()).
 */
int gimli24_aead_decrypt
    (unsigned char *m, unsigned long long *mlen,
     unsigned char *nsec,
     const unsigned char *c, unsigned long long clen,
     const unsigned char *ad, unsigned long long adlen,
     const unsigned char *npub,
     const unsigned char *k)
{
    gimli24_state_t state;
    (void)nsec;

    /* Validate the ciphertext length and set the return "mlen" value */
    if (clen < GIMLI24_TAG_SIZE)
        return -1;
    *mlen = clen - GIMLI24_TAG_SIZE;

    /* Format the initial GIMLI state from the nonce and the key */
    memcpy(state.words, npub, GIMLI24_NONCE_SIZE);
    memcpy(state.words + 4, k, GIMLI24_KEY_SIZE);

    /* Permute the initial state */
    gimli24_permute(state.words);

    /* Absorb the associated data */
    gimli24_absorb(&state, ad, adlen);

    /* Decrypt the ciphertext to produce the plaintext */
    gimli24_decrypt(&state, m, c, *mlen);

    /* Check the authentication tag at the end of the packet
     * (constant-time; zeroes m on mismatch) */
    return aead_check_tag
        (m, *mlen, state.bytes, c + *mlen, GIMLI24_TAG_SIZE);
}

/**
 * \brief All-in-one hash: absorbs \a in into a zeroed state and emits
 * GIMLI24_HASH_SIZE bytes of output in two half-size squeezes.
 * Always returns 0.
 */
int gimli24_hash
    (unsigned char *out, const unsigned char *in, unsigned long long inlen)
{
    gimli24_state_t state;

    /* Initialize the hash state to all zeroes */
    memset(&state, 0, sizeof(state));

    /* Absorb the input */
    gimli24_absorb(&state, in, inlen);

    /* Generate the output hash */
    memcpy(out, state.bytes, GIMLI24_HASH_SIZE / 2);
    gimli24_permute(state.words);
    memcpy(out + GIMLI24_HASH_SIZE / 2, state.bytes, GIMLI24_HASH_SIZE / 2);
    return 0;
}

/* Reset the incremental hash/XOF state to all zeroes (absorb mode) */
void gimli24_hash_init(gimli24_hash_state_t *state)
{
    memset(state, 0, sizeof(gimli24_hash_state_t));
}

/* Rate of the XOF mode in bytes */
#define GIMLI24_XOF_RATE 16
/* Apply the permutation to the byte-array view of the incremental state */
#define gimli24_xof_permute() \
    gimli24_permute((uint32_t *)(state->s.state))

void gimli24_hash_absorb
    (gimli24_hash_state_t *state, const unsigned char *in,
     unsigned long long inlen)
{
    unsigned temp;

    if (state->s.mode) {
        /* We were squeezing output - go back to the absorb phase */
        state->s.mode = 0;
        state->s.count = 0;
        gimli24_xof_permute();
    }

    /* Handle the partial left-over block from last time */
    if (state->s.count) {
        temp = GIMLI24_XOF_RATE - state->s.count;
        if (temp > inlen) {
            /* Not enough input to fill the block - buffer it and return */
            temp = (unsigned)inlen;
            lw_xor_block(state->s.state + state->s.count, in, temp);
            state->s.count += temp;
            return;
        }
        lw_xor_block(state->s.state + state->s.count, in, temp);
        state->s.count = 0;
        in += temp;
        inlen -= temp;
        gimli24_xof_permute();
    }

    /* Process full blocks that are aligned at state->s.count == 0 */
    while (inlen >= GIMLI24_XOF_RATE) {
        lw_xor_block(state->s.state, in, GIMLI24_XOF_RATE);
        in += GIMLI24_XOF_RATE;
        inlen -= GIMLI24_XOF_RATE;
        gimli24_xof_permute();
    }

    /* Process the left-over block at the end of the input */
    temp = (unsigned)inlen;
    lw_xor_block(state->s.state, in, temp);
    state->s.count = temp;
}

void gimli24_hash_squeeze
    (gimli24_hash_state_t *state, unsigned char *out,
     unsigned long long outlen)
{
    unsigned temp;

    /* Pad the final input block if we were still in the absorb phase */
    if (!state->s.mode) {
        state->s.state[state->s.count] ^= 0x01;
        state->s.state[47] ^= 0x01;
        state->s.count = 0;
        state->s.mode = 1;
    }

    /* Handle left-over partial blocks from last time */
    if (state->s.count) {
        temp = GIMLI24_XOF_RATE - state->s.count;
        if (temp > outlen) {
            /* Request is smaller than what remains buffered */
            temp = (unsigned)outlen;
            memcpy(out, state->s.state + state->s.count, temp);
            state->s.count += temp;
            return;
        }
        memcpy(out, state->s.state + state->s.count, temp);
        out += temp;
        outlen -= temp;
        state->s.count = 0;
    }

    /* Handle full blocks */
    while (outlen >= GIMLI24_XOF_RATE) {
        gimli24_xof_permute();
        memcpy(out, state->s.state, GIMLI24_XOF_RATE);
        out += GIMLI24_XOF_RATE;
        outlen -= GIMLI24_XOF_RATE;
    }

    /* Handle the left-over block */
    if (outlen > 0) {
        temp = (unsigned)outlen;
        gimli24_xof_permute();
        memcpy(out, state->s.state, temp);
        state->s.count = temp;
    }
}

/* Fixed-length wrapper: squeeze exactly GIMLI24_HASH_SIZE bytes */
void gimli24_hash_finalize
    (gimli24_hash_state_t *state, unsigned char *out)
{
    gimli24_hash_squeeze(state, out, GIMLI24_HASH_SIZE);
}
diff --git a/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/gimli24.h b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/gimli24.h
new file mode 100644
index 0000000..f72aec7
--- /dev/null
+++ b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/gimli24.h
@@ -0,0 +1,220 @@
/*
 * Copyright (C) 2020 Southern Storm Software, Pty Ltd.
+ * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ + +#ifndef LWCRYPTO_GIMLI24_H +#define LWCRYPTO_GIMLI24_H + +#include "aead-common.h" + +/** + * \file gimli24.h + * \brief Gimli authenticated encryption algorithm. + * + * GIMLI-24-CIPHER has a 256-bit key, a 128-bit nonce, and a 128-bit tag. + * It is the spiritual successor to the widely used ChaCha20 and has a + * similar design. + * + * This library also includes an implementation of the hash algorithm + * GIMLI-24-HASH in both regular hashing and XOF modes. + * + * References: https://gimli.cr.yp.to/ + */ + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * \brief Size of the key for GIMLI-24. + */ +#define GIMLI24_KEY_SIZE 32 + +/** + * \brief Size of the nonce for GIMLI-24. + */ +#define GIMLI24_NONCE_SIZE 16 + +/** + * \brief Size of the authentication tag for GIMLI-24. + */ +#define GIMLI24_TAG_SIZE 16 + +/** + * \brief Size of the hash output for GIMLI-24. 
+ */ +#define GIMLI24_HASH_SIZE 32 + +/** + * \brief State information for GIMLI-24-HASH incremental modes. + */ +typedef union +{ + struct { + unsigned char state[48]; /**< Current hash state */ + unsigned char count; /**< Number of bytes in the current block */ + unsigned char mode; /**< Hash mode: 0 for absorb, 1 for squeeze */ + } s; /**< State */ + unsigned long long align; /**< For alignment of this structure */ + +} gimli24_hash_state_t; + +/** + * \brief Meta-information block for the GIMLI-24 cipher. + */ +extern aead_cipher_t const gimli24_cipher; + +/** + * \brief Meta-information block for the GIMLI-24-HASH algorithm. + * + * This meta-information block can also be used in XOF mode. + */ +extern aead_hash_algorithm_t const gimli24_hash_algorithm; + +/** + * \brief Encrypts and authenticates a packet with GIMLI-24 using the + * full AEAD mode. + * + * \param c Buffer to receive the output. + * \param clen On exit, set to the length of the output which includes + * the ciphertext and the 16 byte authentication tag. + * \param m Buffer that contains the plaintext message to encrypt. + * \param mlen Length of the plaintext message in bytes. + * \param ad Buffer that contains associated data to authenticate + * along with the packet but which does not need to be encrypted. + * \param adlen Length of the associated data in bytes. + * \param nsec Secret nonce - not used by this algorithm. + * \param npub Points to the public nonce for the packet which must + * be 16 bytes in length. + * \param k Points to the 32 bytes of the key to use to encrypt the packet. + * + * \return 0 on success, or a negative value if there was an error in + * the parameters. 
+ * + * \sa gimli24_aead_decrypt() + */ +int gimli24_aead_encrypt + (unsigned char *c, unsigned long long *clen, + const unsigned char *m, unsigned long long mlen, + const unsigned char *ad, unsigned long long adlen, + const unsigned char *nsec, + const unsigned char *npub, + const unsigned char *k); + +/** + * \brief Decrypts and authenticates a packet with GIMLI-24 using the + * full AEAD mode. + * + * \param m Buffer to receive the plaintext message on output. + * \param mlen Receives the length of the plaintext message on output. + * \param nsec Secret nonce - not used by this algorithm. + * \param c Buffer that contains the ciphertext and authentication + * tag to decrypt. + * \param clen Length of the input data in bytes, which includes the + * ciphertext and the 16 byte authentication tag. + * \param ad Buffer that contains associated data to authenticate + * along with the packet but which does not need to be encrypted. + * \param adlen Length of the associated data in bytes. + * \param npub Points to the public nonce for the packet which must + * be 16 bytes in length. + * \param k Points to the 32 bytes of the key to use to decrypt the packet. + * + * \return 0 on success, -1 if the authentication tag was incorrect, + * or some other negative number if there was an error in the parameters. + * + * \sa gimli24_aead_encrypt() + */ +int gimli24_aead_decrypt + (unsigned char *m, unsigned long long *mlen, + unsigned char *nsec, + const unsigned char *c, unsigned long long clen, + const unsigned char *ad, unsigned long long adlen, + const unsigned char *npub, + const unsigned char *k); + +/** + * \brief Hashes a block of input data with GIMLI-24 to generate a hash value. + * + * \param out Buffer to receive the hash output which must be at least + * GIMLI24_HASH_SIZE bytes in length. + * \param in Points to the input data to be hashed. + * \param inlen Length of the input data in bytes. 
+ * + * \return Returns zero on success or -1 if there was an error in the + * parameters. + */ +int gimli24_hash + (unsigned char *out, const unsigned char *in, unsigned long long inlen); + +/** + * \brief Initializes the state for a GIMLI-24-HASH hashing operation. + * + * \param state Hash state to be initialized. + * + * \sa gimli24_hash_absorb(), gimli24_hash_squeeze(), gimli24_hash() + */ +void gimli24_hash_init(gimli24_hash_state_t *state); + +/** + * \brief Aborbs more input data into a GIMLI-24-HASH state. + * + * \param state Hash state to be updated. + * \param in Points to the input data to be absorbed into the state. + * \param inlen Length of the input data to be absorbed into the state. + * + * \sa gimli24_hash_init(), gimli24_hash_squeeze() + */ +void gimli24_hash_absorb + (gimli24_hash_state_t *state, const unsigned char *in, + unsigned long long inlen); + +/** + * \brief Squeezes output data from an GIMLI-24-HASH state. + * + * \param state Hash state to squeeze the output data from. + * \param out Points to the output buffer to receive the squeezed data. + * \param outlen Number of bytes of data to squeeze out of the state. + * + * \sa gimli24_hash_init(), gimli24_hash_absorb() + */ +void gimli24_hash_squeeze + (gimli24_hash_state_t *state, unsigned char *out, + unsigned long long outlen); + +/** + * \brief Returns the final hash value from a GIMLI-24-HASH hashing operation. + * + * \param state Hash state to be finalized. + * \param out Points to the output buffer to receive the hash value. + * + * \note This is a wrapper around gimli24_hash_squeeze() for a fixed length + * of GIMLI24_HASH_SIZE bytes. 
+ * + * \sa gimli24_hash_init(), gimli24_hash_absorb() + */ +void gimli24_hash_finalize + (gimli24_hash_state_t *state, unsigned char *out); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/internal-gimli24-avr.S b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/internal-gimli24-avr.S new file mode 100644 index 0000000..efcd500 --- /dev/null +++ b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/internal-gimli24-avr.S @@ -0,0 +1,9419 @@ +#if defined(__AVR__) +#include +/* Automatically generated - do not edit */ + + .text +.global gimli24_permute + .type gimli24_permute, @function +gimli24_permute: + push r28 + push r29 + push r2 + push r3 + push r4 + push r5 + push r6 + push r7 + push r8 + push r9 + push r10 + push r11 + push r12 + push r13 + push r14 + push r15 + push r16 + push r17 + movw r30,r24 +.L__stack_usage = 18 + ld r18,Z + ldd r19,Z+1 + ldd r20,Z+2 + ldd r21,Z+3 + ldd r22,Z+16 + ldd r23,Z+17 + ldd r26,Z+18 + ldd r27,Z+19 + ldd r28,Z+32 + ldd r29,Z+33 + ldd r2,Z+34 + ldd r3,Z+35 + ldd r4,Z+4 + ldd r5,Z+5 + ldd r6,Z+6 + ldd r7,Z+7 + ldd r8,Z+20 + ldd r9,Z+21 + ldd r10,Z+22 + ldd r11,Z+23 + ldd r12,Z+36 + ldd r13,Z+37 + ldd r14,Z+38 + ldd r15,Z+39 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor 
r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + ldi r31,24 + eor r4,r31 + ldi r30,121 + eor r5,r30 + ldi r25,55 + eor r6,r25 + ldi r24,158 + eor r7,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor 
r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + 
rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + pop 
r30 + pop r31 + st Z,r4 + std Z+1,r5 + std Z+2,r6 + std Z+3,r7 + std Z+16,r22 + std Z+17,r23 + std Z+18,r26 + std Z+19,r27 + std Z+32,r28 + std Z+33,r29 + std Z+34,r2 + std Z+35,r3 + std Z+4,r18 + std Z+5,r19 + std Z+6,r20 + std Z+7,r21 + std Z+20,r8 + std Z+21,r9 + std Z+22,r10 + std Z+23,r11 + std Z+36,r12 + std Z+37,r13 + std Z+38,r14 + std Z+39,r15 + ldd r4,Z+8 + ldd r5,Z+9 + ldd r6,Z+10 + ldd r7,Z+11 + ldd r22,Z+24 + ldd r23,Z+25 + ldd r26,Z+26 + ldd r27,Z+27 + ldd r28,Z+40 + ldd r29,Z+41 + ldd r2,Z+42 + ldd r3,Z+43 + ldd r18,Z+12 + ldd r19,Z+13 + ldd r20,Z+14 + ldd r21,Z+15 + ldd r8,Z+28 + ldd r9,Z+29 + ldd r10,Z+30 + ldd r11,Z+31 + ldd r12,Z+44 + ldd r13,Z+45 + ldd r14,Z+46 + ldd r15,Z+47 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + 
rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw 
r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 
+ rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + pop r30 + pop r31 + push r21 + push r20 + push r19 + push r18 + push r7 + push r6 + push r5 + push r4 + ld r18,Z + ldd r19,Z+1 + ldd r20,Z+2 + ldd r21,Z+3 + ldd r4,Z+4 + ldd r5,Z+5 + ldd r6,Z+6 + ldd r7,Z+7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + 
eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol 
r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + 
bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 
+ rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor 
r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + std Z+8,r4 + std Z+9,r5 + std Z+10,r6 + std Z+11,r7 + std Z+24,r22 + std Z+25,r23 + std Z+26,r26 + std Z+27,r27 + std Z+40,r28 + std Z+41,r29 + std Z+42,r2 + std Z+43,r3 + std Z+12,r18 + std Z+13,r19 + std Z+14,r20 + std Z+15,r21 + std Z+28,r8 + std Z+29,r9 + std Z+30,r10 + std Z+31,r11 + std Z+44,r12 + std Z+45,r13 + std Z+46,r14 + std Z+47,r15 + ld r4,Z + ldd r5,Z+1 + ldd r6,Z+2 + ldd r7,Z+3 + ldd r22,Z+16 + ldd r23,Z+17 + ldd r26,Z+18 + ldd r27,Z+19 + ldd r28,Z+32 + ldd r29,Z+33 + ldd r2,Z+34 + ldd r3,Z+35 + ldd r18,Z+4 + ldd r19,Z+5 + ldd r20,Z+6 + ldd r21,Z+7 + ldd r8,Z+20 + ldd r9,Z+21 + ldd r10,Z+22 + ldd r11,Z+23 + ldd r12,Z+36 + ldd r13,Z+37 + ldd r14,Z+38 + ldd r15,Z+39 + pop r18 + pop r19 + pop r20 + pop r21 + pop r4 + pop r5 + pop r6 + pop r7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and 
r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + 
or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + ldi r31,20 + eor r18,r31 + ldi r30,121 + eor r19,r30 + ldi r25,55 + eor r20,r25 + ldi r24,158 + eor r21,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and 
r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + 
mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + 
eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + pop r30 + pop r31 + push r21 + push r20 + push r19 + push r18 + push r7 + push r6 + push r5 + push r4 + ldd r18,Z+8 + ldd r19,Z+9 + ldd r20,Z+10 + ldd r21,Z+11 + ldd r4,Z+12 + ldd r5,Z+13 + ldd r6,Z+14 + ldd r7,Z+15 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + 
eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and 
r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + ldi r31,16 + eor r4,r31 + ldi r30,121 + eor r5,r30 + ldi r25,55 + eor r6,r25 + ldi r24,158 + eor r7,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 
+ movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + 
rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + st Z,r4 + std Z+1,r5 + std Z+2,r6 + std Z+3,r7 + std Z+16,r22 + std Z+17,r23 + std Z+18,r26 + std Z+19,r27 + std Z+32,r28 + std Z+33,r29 + std Z+34,r2 + std Z+35,r3 + std Z+4,r18 + std Z+5,r19 + std Z+6,r20 + std Z+7,r21 + std Z+20,r8 + std Z+21,r9 + std Z+22,r10 + std Z+23,r11 + std Z+36,r12 + std Z+37,r13 + std Z+38,r14 + std Z+39,r15 + ldd r4,Z+8 + ldd r5,Z+9 + ldd r6,Z+10 + ldd r7,Z+11 + ldd r22,Z+24 + ldd r23,Z+25 + ldd r26,Z+26 + ldd r27,Z+27 + ldd r28,Z+40 + ldd r29,Z+41 + ldd r2,Z+42 + ldd r3,Z+43 + ldd r18,Z+12 + ldd r19,Z+13 + ldd r20,Z+14 + ldd r21,Z+15 + ldd r8,Z+28 + ldd r9,Z+29 + ldd r10,Z+30 + ldd r11,Z+31 + ldd 
r12,Z+44 + ldd r13,Z+45 + ldd r14,Z+46 + ldd r15,Z+47 + pop r18 + pop r19 + pop r20 + pop r21 + pop r4 + pop r5 + pop r6 + pop r7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + 
and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 
+ movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 
+ rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov 
r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + pop r30 + pop r31 + push r21 + push r20 + push r19 + push r18 + push r7 + push r6 + push r5 + push r4 + ld r18,Z + ldd r19,Z+1 + ldd r20,Z+2 + ldd r21,Z+3 + ldd r4,Z+4 + ldd r5,Z+5 + ldd r6,Z+6 + ldd r7,Z+7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and 
r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw 
r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol 
r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov 
r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw 
r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + std Z+8,r4 + std Z+9,r5 + std Z+10,r6 + std Z+11,r7 + std Z+24,r22 + std Z+25,r23 + std Z+26,r26 + std Z+27,r27 + std Z+40,r28 + std Z+41,r29 + std Z+42,r2 + std Z+43,r3 + std Z+12,r18 + std Z+13,r19 + std Z+14,r20 + std Z+15,r21 + std Z+28,r8 + std Z+29,r9 + std Z+30,r10 + std Z+31,r11 + std Z+44,r12 + std Z+45,r13 + std Z+46,r14 + std Z+47,r15 + ld r4,Z + ldd r5,Z+1 + ldd r6,Z+2 + ldd r7,Z+3 + ldd r22,Z+16 + ldd r23,Z+17 + ldd r26,Z+18 + ldd r27,Z+19 + ldd r28,Z+32 + ldd r29,Z+33 + ldd r2,Z+34 + ldd r3,Z+35 + ldd r18,Z+4 + ldd r19,Z+5 + ldd r20,Z+6 + ldd r21,Z+7 + ldd r8,Z+20 + ldd r9,Z+21 + ldd r10,Z+22 + ldd r11,Z+23 + ldd r12,Z+36 + ldd r13,Z+37 + ldd r14,Z+38 + ldd r15,Z+39 + pop r18 + pop r19 + pop r20 + pop r21 + pop r4 + pop r5 + pop r6 + pop r7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 
+ bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + 
eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + ldi r31,12 + eor r18,r31 + ldi r30,121 + eor r19,r30 + ldi r25,55 + eor r20,r25 + ldi r24,158 + eor r21,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + 
rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor 
r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + pop r30 + pop r31 + push r21 + push r20 + push r19 + push r18 + push r7 + push r6 + push r5 + push r4 + ldd r18,Z+8 + ldd r19,Z+9 + ldd r20,Z+10 + ldd r21,Z+11 + ldd r4,Z+12 + ldd r5,Z+13 + ldd 
r6,Z+14 + ldd r7,Z+15 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and 
r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + 
rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + ldi r31,8 + eor r4,r31 + ldi r30,121 + eor r5,r30 + ldi r25,55 + eor r6,r25 + ldi r24,158 + eor r7,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl 
r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + 
mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + st Z,r4 + std Z+1,r5 + std Z+2,r6 + std Z+3,r7 + std Z+16,r22 + std Z+17,r23 + std Z+18,r26 + std Z+19,r27 + std Z+32,r28 + std Z+33,r29 + std Z+34,r2 + std Z+35,r3 + std Z+4,r18 + std Z+5,r19 + std Z+6,r20 + std Z+7,r21 + std Z+20,r8 + std Z+21,r9 + std Z+22,r10 + std Z+23,r11 + std Z+36,r12 + std Z+37,r13 + std Z+38,r14 + std Z+39,r15 + ldd r4,Z+8 + ldd r5,Z+9 + ldd r6,Z+10 + ldd r7,Z+11 + ldd r22,Z+24 + ldd r23,Z+25 + ldd r26,Z+26 + ldd r27,Z+27 + ldd r28,Z+40 + ldd r29,Z+41 + ldd r2,Z+42 + ldd r3,Z+43 + ldd r18,Z+12 + ldd r19,Z+13 + ldd r20,Z+14 + ldd r21,Z+15 + ldd r8,Z+28 + ldd r9,Z+29 + ldd r10,Z+30 + ldd r11,Z+31 + ldd r12,Z+44 + ldd r13,Z+45 + ldd r14,Z+46 + ldd r15,Z+47 + pop r18 + pop r19 + pop r20 + pop r21 + pop r4 + pop r5 + pop r6 + pop r7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 
+ mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + 
rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + 
lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + 
eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + 
mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + pop r30 + pop r31 + push r21 + push r20 + push r19 + push r18 + push r7 + push r6 + push r5 + push r4 + ld r18,Z + ldd r19,Z+1 + ldd r20,Z+2 + ldd r21,Z+3 + ldd r4,Z+4 + ldd r5,Z+5 + ldd r6,Z+6 + ldd r7,Z+7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + 
rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + 
rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor 
r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov 
r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + std Z+8,r4 + std Z+9,r5 + std Z+10,r6 + std Z+11,r7 + std Z+24,r22 + std Z+25,r23 + std Z+26,r26 + std Z+27,r27 + std Z+40,r28 + std Z+41,r29 + std Z+42,r2 + std Z+43,r3 + std Z+12,r18 + std Z+13,r19 + std Z+14,r20 + std Z+15,r21 + std Z+28,r8 + std Z+29,r9 + std Z+30,r10 + std 
Z+31,r11 + std Z+44,r12 + std Z+45,r13 + std Z+46,r14 + std Z+47,r15 + ld r4,Z + ldd r5,Z+1 + ldd r6,Z+2 + ldd r7,Z+3 + ldd r22,Z+16 + ldd r23,Z+17 + ldd r26,Z+18 + ldd r27,Z+19 + ldd r28,Z+32 + ldd r29,Z+33 + ldd r2,Z+34 + ldd r3,Z+35 + ldd r18,Z+4 + ldd r19,Z+5 + ldd r20,Z+6 + ldd r21,Z+7 + ldd r8,Z+20 + ldd r9,Z+21 + ldd r10,Z+22 + ldd r11,Z+23 + ldd r12,Z+36 + ldd r13,Z+37 + ldd r14,Z+38 + ldd r15,Z+39 + pop r18 + pop r19 + pop r20 + pop r21 + pop r4 + pop r5 + pop r6 + pop r7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor 
r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + 
rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + ldi r31,4 + eor r18,r31 + ldi r30,121 + eor r19,r30 + ldi r25,55 + eor r20,r25 + ldi r24,158 + eor r21,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor 
r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + 
and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + pop r30 + pop r31 + push r21 + push r20 + push r19 + push r18 + push r7 + push r6 + push r5 + push r4 + ldd r18,Z+8 + ldd r19,Z+9 + ldd r20,Z+10 + ldd r21,Z+11 + ldd r4,Z+12 + ldd r5,Z+13 + ldd r6,Z+14 + ldd r7,Z+15 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl 
r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + st Z,r18 + std Z+1,r19 + std Z+2,r20 + std Z+3,r21 + std Z+16,r22 + std Z+17,r23 + 
std Z+18,r26 + std Z+19,r27 + std Z+32,r28 + std Z+33,r29 + std Z+34,r2 + std Z+35,r3 + std Z+4,r4 + std Z+5,r5 + std Z+6,r6 + std Z+7,r7 + std Z+20,r8 + std Z+21,r9 + std Z+22,r10 + std Z+23,r11 + std Z+36,r12 + std Z+37,r13 + std Z+38,r14 + std Z+39,r15 + ldd r18,Z+8 + ldd r19,Z+9 + ldd r20,Z+10 + ldd r21,Z+11 + ldd r22,Z+24 + ldd r23,Z+25 + ldd r26,Z+26 + ldd r27,Z+27 + ldd r28,Z+40 + ldd r29,Z+41 + ldd r2,Z+42 + ldd r3,Z+43 + ldd r4,Z+12 + ldd r5,Z+13 + ldd r6,Z+14 + ldd r7,Z+15 + ldd r8,Z+28 + ldd r9,Z+29 + ldd r10,Z+30 + ldd r11,Z+31 + ldd r12,Z+44 + ldd r13,Z+45 + ldd r14,Z+46 + ldd r15,Z+47 + pop r4 + pop r5 + pop r6 + pop r7 + pop r18 + pop r19 + pop r20 + pop r21 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol 
r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + std Z+8,r18 + std Z+9,r19 + std Z+10,r20 + std Z+11,r21 + std Z+24,r22 + std Z+25,r23 + std Z+26,r26 + std Z+27,r27 + std Z+40,r28 + std Z+41,r29 + std Z+42,r2 + std Z+43,r3 + std Z+12,r4 + std Z+13,r5 + std Z+14,r6 + std Z+15,r7 + std Z+28,r8 + std Z+29,r9 + std Z+30,r10 + std Z+31,r11 + std Z+44,r12 + std Z+45,r13 + std Z+46,r14 + std Z+47,r15 + pop r17 + pop r16 + pop r15 + pop r14 + pop r13 + pop r12 + pop r11 + pop r10 + pop r9 + pop r8 + pop r7 + pop r6 + pop r5 + pop r4 + pop r3 + pop r2 + pop r29 + pop r28 + eor r1,r1 + ret + .size gimli24_permute, .-gimli24_permute + +#endif diff --git a/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/internal-gimli24.h b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/internal-gimli24.h new file mode 100644 index 0000000..c81ead1 --- /dev/null +++ b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/internal-gimli24.h @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2020 Southern Storm Software, Pty Ltd. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ + +#ifndef LW_INTERNAL_GIMLI24_H +#define LW_INTERNAL_GIMLI24_H + +#include "internal-util.h" + +/** + * \file internal-gimli24.h + * \brief Internal implementation of the GIMLI-24 permutation. + * + * References: https://gimli.cr.yp.to/ + */ + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * \brief Permutes the GIMLI-24 state. + * + * \param state The GIMLI-24 state to be permuted. + * + * The input and output \a state will be in little-endian byte order. 
+ */ +void gimli24_permute(uint32_t state[12]); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/internal-util.h b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/internal-util.h new file mode 100644 index 0000000..e30166d --- /dev/null +++ b/gimli/Implementations/crypto_aead/gimli24v1/rhys-avr/internal-util.h @@ -0,0 +1,702 @@ +/* + * Copyright (C) 2020 Southern Storm Software, Pty Ltd. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ + +#ifndef LW_INTERNAL_UTIL_H +#define LW_INTERNAL_UTIL_H + +#include <stdint.h> + +/* Figure out how to inline functions using this C compiler */ +#if defined(__STDC__) && __STDC_VERSION__ >= 199901L +#define STATIC_INLINE static inline +#elif defined(__GNUC__) || defined(__clang__) +#define STATIC_INLINE static __inline__ +#else +#define STATIC_INLINE static +#endif + +/* Try to figure out whether the CPU is little-endian or big-endian. 
+ * May need to modify this to include new compiler-specific defines. + * Alternatively, define __LITTLE_ENDIAN__ or __BIG_ENDIAN__ in your + * compiler flags when you compile this library */ +#if defined(__x86_64) || defined(__x86_64__) || \ + defined(__i386) || defined(__i386__) || \ + defined(__AVR__) || defined(__arm) || defined(__arm__) || \ + defined(_M_AMD64) || defined(_M_X64) || defined(_M_IX86) || \ + defined(_M_IA64) || defined(_M_ARM) || defined(_M_ARM_FP) || \ + (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == 1234) || \ + defined(__LITTLE_ENDIAN__) +#define LW_UTIL_LITTLE_ENDIAN 1 +#elif (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == 4321) || \ + defined(__BIG_ENDIAN__) +/* Big endian */ +#else +#error "Cannot determine the endianess of this platform" +#endif + +/* Helper macros to load and store values while converting endian-ness */ + +/* Load a big-endian 32-bit word from a byte buffer */ +#define be_load_word32(ptr) \ + ((((uint32_t)((ptr)[0])) << 24) | \ + (((uint32_t)((ptr)[1])) << 16) | \ + (((uint32_t)((ptr)[2])) << 8) | \ + ((uint32_t)((ptr)[3]))) + +/* Store a big-endian 32-bit word into a byte buffer */ +#define be_store_word32(ptr, x) \ + do { \ + uint32_t _x = (x); \ + (ptr)[0] = (uint8_t)(_x >> 24); \ + (ptr)[1] = (uint8_t)(_x >> 16); \ + (ptr)[2] = (uint8_t)(_x >> 8); \ + (ptr)[3] = (uint8_t)_x; \ + } while (0) + +/* Load a little-endian 32-bit word from a byte buffer */ +#define le_load_word32(ptr) \ + ((((uint32_t)((ptr)[3])) << 24) | \ + (((uint32_t)((ptr)[2])) << 16) | \ + (((uint32_t)((ptr)[1])) << 8) | \ + ((uint32_t)((ptr)[0]))) + +/* Store a little-endian 32-bit word into a byte buffer */ +#define le_store_word32(ptr, x) \ + do { \ + uint32_t _x = (x); \ + (ptr)[0] = (uint8_t)_x; \ + (ptr)[1] = (uint8_t)(_x >> 8); \ + (ptr)[2] = (uint8_t)(_x >> 16); \ + (ptr)[3] = (uint8_t)(_x >> 24); \ + } while (0) + +/* Load a big-endian 64-bit word from a byte buffer */ +#define be_load_word64(ptr) \ + ((((uint64_t)((ptr)[0])) << 56) | \ + 
(((uint64_t)((ptr)[1])) << 48) | \ + (((uint64_t)((ptr)[2])) << 40) | \ + (((uint64_t)((ptr)[3])) << 32) | \ + (((uint64_t)((ptr)[4])) << 24) | \ + (((uint64_t)((ptr)[5])) << 16) | \ + (((uint64_t)((ptr)[6])) << 8) | \ + ((uint64_t)((ptr)[7]))) + +/* Store a big-endian 64-bit word into a byte buffer */ +#define be_store_word64(ptr, x) \ + do { \ + uint64_t _x = (x); \ + (ptr)[0] = (uint8_t)(_x >> 56); \ + (ptr)[1] = (uint8_t)(_x >> 48); \ + (ptr)[2] = (uint8_t)(_x >> 40); \ + (ptr)[3] = (uint8_t)(_x >> 32); \ + (ptr)[4] = (uint8_t)(_x >> 24); \ + (ptr)[5] = (uint8_t)(_x >> 16); \ + (ptr)[6] = (uint8_t)(_x >> 8); \ + (ptr)[7] = (uint8_t)_x; \ + } while (0) + +/* Load a little-endian 64-bit word from a byte buffer */ +#define le_load_word64(ptr) \ + ((((uint64_t)((ptr)[7])) << 56) | \ + (((uint64_t)((ptr)[6])) << 48) | \ + (((uint64_t)((ptr)[5])) << 40) | \ + (((uint64_t)((ptr)[4])) << 32) | \ + (((uint64_t)((ptr)[3])) << 24) | \ + (((uint64_t)((ptr)[2])) << 16) | \ + (((uint64_t)((ptr)[1])) << 8) | \ + ((uint64_t)((ptr)[0]))) + +/* Store a little-endian 64-bit word into a byte buffer */ +#define le_store_word64(ptr, x) \ + do { \ + uint64_t _x = (x); \ + (ptr)[0] = (uint8_t)_x; \ + (ptr)[1] = (uint8_t)(_x >> 8); \ + (ptr)[2] = (uint8_t)(_x >> 16); \ + (ptr)[3] = (uint8_t)(_x >> 24); \ + (ptr)[4] = (uint8_t)(_x >> 32); \ + (ptr)[5] = (uint8_t)(_x >> 40); \ + (ptr)[6] = (uint8_t)(_x >> 48); \ + (ptr)[7] = (uint8_t)(_x >> 56); \ + } while (0) + +/* Load a big-endian 16-bit word from a byte buffer */ +#define be_load_word16(ptr) \ + ((((uint16_t)((ptr)[0])) << 8) | \ + ((uint16_t)((ptr)[1]))) + +/* Store a big-endian 16-bit word into a byte buffer */ +#define be_store_word16(ptr, x) \ + do { \ + uint16_t _x = (x); \ + (ptr)[0] = (uint8_t)(_x >> 8); \ + (ptr)[1] = (uint8_t)_x; \ + } while (0) + +/* Load a little-endian 16-bit word from a byte buffer */ +#define le_load_word16(ptr) \ + ((((uint16_t)((ptr)[1])) << 8) | \ + ((uint16_t)((ptr)[0]))) + +/* Store a 
little-endian 16-bit word into a byte buffer */ +#define le_store_word16(ptr, x) \ + do { \ + uint16_t _x = (x); \ + (ptr)[0] = (uint8_t)_x; \ + (ptr)[1] = (uint8_t)(_x >> 8); \ + } while (0) + +/* XOR a source byte buffer against a destination */ +#define lw_xor_block(dest, src, len) \ + do { \ + unsigned char *_dest = (dest); \ + const unsigned char *_src = (src); \ + unsigned _len = (len); \ + while (_len > 0) { \ + *_dest++ ^= *_src++; \ + --_len; \ + } \ + } while (0) + +/* XOR two source byte buffers and put the result in a destination buffer */ +#define lw_xor_block_2_src(dest, src1, src2, len) \ + do { \ + unsigned char *_dest = (dest); \ + const unsigned char *_src1 = (src1); \ + const unsigned char *_src2 = (src2); \ + unsigned _len = (len); \ + while (_len > 0) { \ + *_dest++ = *_src1++ ^ *_src2++; \ + --_len; \ + } \ + } while (0) + +/* XOR a source byte buffer against a destination and write to another + * destination at the same time */ +#define lw_xor_block_2_dest(dest2, dest, src, len) \ + do { \ + unsigned char *_dest2 = (dest2); \ + unsigned char *_dest = (dest); \ + const unsigned char *_src = (src); \ + unsigned _len = (len); \ + while (_len > 0) { \ + *_dest2++ = (*_dest++ ^= *_src++); \ + --_len; \ + } \ + } while (0) + +/* XOR two byte buffers and write to a destination which at the same + * time copying the contents of src2 to dest2 */ +#define lw_xor_block_copy_src(dest2, dest, src1, src2, len) \ + do { \ + unsigned char *_dest2 = (dest2); \ + unsigned char *_dest = (dest); \ + const unsigned char *_src1 = (src1); \ + const unsigned char *_src2 = (src2); \ + unsigned _len = (len); \ + while (_len > 0) { \ + unsigned char _temp = *_src2++; \ + *_dest2++ = _temp; \ + *_dest++ = *_src1++ ^ _temp; \ + --_len; \ + } \ + } while (0) + +/* XOR a source byte buffer against a destination and write to another + * destination at the same time. 
This version swaps the source value + * into the "dest" buffer */ +#define lw_xor_block_swap(dest2, dest, src, len) \ + do { \ + unsigned char *_dest2 = (dest2); \ + unsigned char *_dest = (dest); \ + const unsigned char *_src = (src); \ + unsigned _len = (len); \ + while (_len > 0) { \ + unsigned char _temp = *_src++; \ + *_dest2++ = *_dest ^ _temp; \ + *_dest++ = _temp; \ + --_len; \ + } \ + } while (0) + +/* Rotation functions need to be optimised for best performance on AVR. + * The most efficient rotations are where the number of bits is 1 or a + * multiple of 8, so we compose the efficient rotations to produce all + * other rotation counts of interest. */ + +#if defined(__AVR__) +#define LW_CRYPTO_ROTATE32_COMPOSED 1 +#else +#define LW_CRYPTO_ROTATE32_COMPOSED 0 +#endif + +/* Rotation macros for 32-bit arguments */ + +/* Generic left rotate */ +#define leftRotate(a, bits) \ + (__extension__ ({ \ + uint32_t _temp = (a); \ + (_temp << (bits)) | (_temp >> (32 - (bits))); \ + })) + +/* Generic right rotate */ +#define rightRotate(a, bits) \ + (__extension__ ({ \ + uint32_t _temp = (a); \ + (_temp >> (bits)) | (_temp << (32 - (bits))); \ + })) + +#if !LW_CRYPTO_ROTATE32_COMPOSED + +/* Left rotate by a specific number of bits. 
These macros may be replaced + * with more efficient ones on platforms that lack a barrel shifter */ +#define leftRotate1(a) (leftRotate((a), 1)) +#define leftRotate2(a) (leftRotate((a), 2)) +#define leftRotate3(a) (leftRotate((a), 3)) +#define leftRotate4(a) (leftRotate((a), 4)) +#define leftRotate5(a) (leftRotate((a), 5)) +#define leftRotate6(a) (leftRotate((a), 6)) +#define leftRotate7(a) (leftRotate((a), 7)) +#define leftRotate8(a) (leftRotate((a), 8)) +#define leftRotate9(a) (leftRotate((a), 9)) +#define leftRotate10(a) (leftRotate((a), 10)) +#define leftRotate11(a) (leftRotate((a), 11)) +#define leftRotate12(a) (leftRotate((a), 12)) +#define leftRotate13(a) (leftRotate((a), 13)) +#define leftRotate14(a) (leftRotate((a), 14)) +#define leftRotate15(a) (leftRotate((a), 15)) +#define leftRotate16(a) (leftRotate((a), 16)) +#define leftRotate17(a) (leftRotate((a), 17)) +#define leftRotate18(a) (leftRotate((a), 18)) +#define leftRotate19(a) (leftRotate((a), 19)) +#define leftRotate20(a) (leftRotate((a), 20)) +#define leftRotate21(a) (leftRotate((a), 21)) +#define leftRotate22(a) (leftRotate((a), 22)) +#define leftRotate23(a) (leftRotate((a), 23)) +#define leftRotate24(a) (leftRotate((a), 24)) +#define leftRotate25(a) (leftRotate((a), 25)) +#define leftRotate26(a) (leftRotate((a), 26)) +#define leftRotate27(a) (leftRotate((a), 27)) +#define leftRotate28(a) (leftRotate((a), 28)) +#define leftRotate29(a) (leftRotate((a), 29)) +#define leftRotate30(a) (leftRotate((a), 30)) +#define leftRotate31(a) (leftRotate((a), 31)) + +/* Right rotate by a specific number of bits. 
These macros may be replaced + * with more efficient ones on platforms that lack a barrel shifter */ +#define rightRotate1(a) (rightRotate((a), 1)) +#define rightRotate2(a) (rightRotate((a), 2)) +#define rightRotate3(a) (rightRotate((a), 3)) +#define rightRotate4(a) (rightRotate((a), 4)) +#define rightRotate5(a) (rightRotate((a), 5)) +#define rightRotate6(a) (rightRotate((a), 6)) +#define rightRotate7(a) (rightRotate((a), 7)) +#define rightRotate8(a) (rightRotate((a), 8)) +#define rightRotate9(a) (rightRotate((a), 9)) +#define rightRotate10(a) (rightRotate((a), 10)) +#define rightRotate11(a) (rightRotate((a), 11)) +#define rightRotate12(a) (rightRotate((a), 12)) +#define rightRotate13(a) (rightRotate((a), 13)) +#define rightRotate14(a) (rightRotate((a), 14)) +#define rightRotate15(a) (rightRotate((a), 15)) +#define rightRotate16(a) (rightRotate((a), 16)) +#define rightRotate17(a) (rightRotate((a), 17)) +#define rightRotate18(a) (rightRotate((a), 18)) +#define rightRotate19(a) (rightRotate((a), 19)) +#define rightRotate20(a) (rightRotate((a), 20)) +#define rightRotate21(a) (rightRotate((a), 21)) +#define rightRotate22(a) (rightRotate((a), 22)) +#define rightRotate23(a) (rightRotate((a), 23)) +#define rightRotate24(a) (rightRotate((a), 24)) +#define rightRotate25(a) (rightRotate((a), 25)) +#define rightRotate26(a) (rightRotate((a), 26)) +#define rightRotate27(a) (rightRotate((a), 27)) +#define rightRotate28(a) (rightRotate((a), 28)) +#define rightRotate29(a) (rightRotate((a), 29)) +#define rightRotate30(a) (rightRotate((a), 30)) +#define rightRotate31(a) (rightRotate((a), 31)) + +#else /* LW_CRYPTO_ROTATE32_COMPOSED */ + +/* Composed rotation macros where 1 and 8 are fast, but others are slow */ + +/* Left rotate by 1 */ +#define leftRotate1(a) (leftRotate((a), 1)) + +/* Left rotate by 2 */ +#define leftRotate2(a) (leftRotate(leftRotate((a), 1), 1)) + +/* Left rotate by 3 */ +#define leftRotate3(a) (leftRotate(leftRotate(leftRotate((a), 1), 1), 1)) + +/* Left rotate 
by 4 */ +#define leftRotate4(a) (leftRotate(leftRotate(leftRotate(leftRotate((a), 1), 1), 1), 1)) + +/* Left rotate by 5: Rotate left by 8, then right by 3 */ +#define leftRotate5(a) (rightRotate(rightRotate(rightRotate(leftRotate((a), 8), 1), 1), 1)) + +/* Left rotate by 6: Rotate left by 8, then right by 2 */ +#define leftRotate6(a) (rightRotate(rightRotate(leftRotate((a), 8), 1), 1)) + +/* Left rotate by 7: Rotate left by 8, then right by 1 */ +#define leftRotate7(a) (rightRotate(leftRotate((a), 8), 1)) + +/* Left rotate by 8 */ +#define leftRotate8(a) (leftRotate((a), 8)) + +/* Left rotate by 9: Rotate left by 8, then left by 1 */ +#define leftRotate9(a) (leftRotate(leftRotate((a), 8), 1)) + +/* Left rotate by 10: Rotate left by 8, then left by 2 */ +#define leftRotate10(a) (leftRotate(leftRotate(leftRotate((a), 8), 1), 1)) + +/* Left rotate by 11: Rotate left by 8, then left by 3 */ +#define leftRotate11(a) (leftRotate(leftRotate(leftRotate(leftRotate((a), 8), 1), 1), 1)) + +/* Left rotate by 12: Rotate left by 16, then right by 4 */ +#define leftRotate12(a) (rightRotate(rightRotate(rightRotate(rightRotate(leftRotate((a), 16), 1), 1), 1), 1)) + +/* Left rotate by 13: Rotate left by 16, then right by 3 */ +#define leftRotate13(a) (rightRotate(rightRotate(rightRotate(leftRotate((a), 16), 1), 1), 1)) + +/* Left rotate by 14: Rotate left by 16, then right by 2 */ +#define leftRotate14(a) (rightRotate(rightRotate(leftRotate((a), 16), 1), 1)) + +/* Left rotate by 15: Rotate left by 16, then right by 1 */ +#define leftRotate15(a) (rightRotate(leftRotate((a), 16), 1)) + +/* Left rotate by 16 */ +#define leftRotate16(a) (leftRotate((a), 16)) + +/* Left rotate by 17: Rotate left by 16, then left by 1 */ +#define leftRotate17(a) (leftRotate(leftRotate((a), 16), 1)) + +/* Left rotate by 18: Rotate left by 16, then left by 2 */ +#define leftRotate18(a) (leftRotate(leftRotate(leftRotate((a), 16), 1), 1)) + +/* Left rotate by 19: Rotate left by 16, then left by 3 */ +#define 
leftRotate19(a) (leftRotate(leftRotate(leftRotate(leftRotate((a), 16), 1), 1), 1)) + +/* Left rotate by 20: Rotate left by 16, then left by 4 */ +#define leftRotate20(a) (leftRotate(leftRotate(leftRotate(leftRotate(leftRotate((a), 16), 1), 1), 1), 1)) + +/* Left rotate by 21: Rotate left by 24, then right by 3 */ +#define leftRotate21(a) (rightRotate(rightRotate(rightRotate(leftRotate((a), 24), 1), 1), 1)) + +/* Left rotate by 22: Rotate left by 24, then right by 2 */ +#define leftRotate22(a) (rightRotate(rightRotate(leftRotate((a), 24), 1), 1)) + +/* Left rotate by 23: Rotate left by 24, then right by 1 */ +#define leftRotate23(a) (rightRotate(leftRotate((a), 24), 1)) + +/* Left rotate by 24 */ +#define leftRotate24(a) (leftRotate((a), 24)) + +/* Left rotate by 25: Rotate left by 24, then left by 1 */ +#define leftRotate25(a) (leftRotate(leftRotate((a), 24), 1)) + +/* Left rotate by 26: Rotate left by 24, then left by 2 */ +#define leftRotate26(a) (leftRotate(leftRotate(leftRotate((a), 24), 1), 1)) + +/* Left rotate by 27: Rotate left by 24, then left by 3 */ +#define leftRotate27(a) (leftRotate(leftRotate(leftRotate(leftRotate((a), 24), 1), 1), 1)) + +/* Left rotate by 28: Rotate right by 4 */ +#define leftRotate28(a) (rightRotate(rightRotate(rightRotate(rightRotate((a), 1), 1), 1), 1)) + +/* Left rotate by 29: Rotate right by 3 */ +#define leftRotate29(a) (rightRotate(rightRotate(rightRotate((a), 1), 1), 1)) + +/* Left rotate by 30: Rotate right by 2 */ +#define leftRotate30(a) (rightRotate(rightRotate((a), 1), 1)) + +/* Left rotate by 31: Rotate right by 1 */ +#define leftRotate31(a) (rightRotate((a), 1)) + +/* Define the 32-bit right rotations in terms of left rotations */ +#define rightRotate1(a) (leftRotate31((a))) +#define rightRotate2(a) (leftRotate30((a))) +#define rightRotate3(a) (leftRotate29((a))) +#define rightRotate4(a) (leftRotate28((a))) +#define rightRotate5(a) (leftRotate27((a))) +#define rightRotate6(a) (leftRotate26((a))) +#define 
rightRotate7(a) (leftRotate25((a))) +#define rightRotate8(a) (leftRotate24((a))) +#define rightRotate9(a) (leftRotate23((a))) +#define rightRotate10(a) (leftRotate22((a))) +#define rightRotate11(a) (leftRotate21((a))) +#define rightRotate12(a) (leftRotate20((a))) +#define rightRotate13(a) (leftRotate19((a))) +#define rightRotate14(a) (leftRotate18((a))) +#define rightRotate15(a) (leftRotate17((a))) +#define rightRotate16(a) (leftRotate16((a))) +#define rightRotate17(a) (leftRotate15((a))) +#define rightRotate18(a) (leftRotate14((a))) +#define rightRotate19(a) (leftRotate13((a))) +#define rightRotate20(a) (leftRotate12((a))) +#define rightRotate21(a) (leftRotate11((a))) +#define rightRotate22(a) (leftRotate10((a))) +#define rightRotate23(a) (leftRotate9((a))) +#define rightRotate24(a) (leftRotate8((a))) +#define rightRotate25(a) (leftRotate7((a))) +#define rightRotate26(a) (leftRotate6((a))) +#define rightRotate27(a) (leftRotate5((a))) +#define rightRotate28(a) (leftRotate4((a))) +#define rightRotate29(a) (leftRotate3((a))) +#define rightRotate30(a) (leftRotate2((a))) +#define rightRotate31(a) (leftRotate1((a))) + +#endif /* LW_CRYPTO_ROTATE32_COMPOSED */ + +/* Rotation macros for 64-bit arguments */ + +/* Generic left rotate */ +#define leftRotate_64(a, bits) \ + (__extension__ ({ \ + uint64_t _temp = (a); \ + (_temp << (bits)) | (_temp >> (64 - (bits))); \ + })) + +/* Generic right rotate */ +#define rightRotate_64(a, bits) \ + (__extension__ ({ \ + uint64_t _temp = (a); \ + (_temp >> (bits)) | (_temp << (64 - (bits))); \ + })) + +/* Left rotate by a specific number of bits. 
These macros may be replaced
 * with more efficient ones on platforms that lack a barrel shifter.
 * Each fixed-count macro exists so that a platform port can override an
 * individual rotation with an optimized sequence; bit counts are always
 * in the range 1..63 (never 0 or 64, which would shift by the full width) */
#define leftRotate1_64(a) (leftRotate_64((a), 1))
#define leftRotate2_64(a) (leftRotate_64((a), 2))
#define leftRotate3_64(a) (leftRotate_64((a), 3))
#define leftRotate4_64(a) (leftRotate_64((a), 4))
#define leftRotate5_64(a) (leftRotate_64((a), 5))
#define leftRotate6_64(a) (leftRotate_64((a), 6))
#define leftRotate7_64(a) (leftRotate_64((a), 7))
#define leftRotate8_64(a) (leftRotate_64((a), 8))
#define leftRotate9_64(a) (leftRotate_64((a), 9))
#define leftRotate10_64(a) (leftRotate_64((a), 10))
#define leftRotate11_64(a) (leftRotate_64((a), 11))
#define leftRotate12_64(a) (leftRotate_64((a), 12))
#define leftRotate13_64(a) (leftRotate_64((a), 13))
#define leftRotate14_64(a) (leftRotate_64((a), 14))
#define leftRotate15_64(a) (leftRotate_64((a), 15))
#define leftRotate16_64(a) (leftRotate_64((a), 16))
#define leftRotate17_64(a) (leftRotate_64((a), 17))
#define leftRotate18_64(a) (leftRotate_64((a), 18))
#define leftRotate19_64(a) (leftRotate_64((a), 19))
#define leftRotate20_64(a) (leftRotate_64((a), 20))
#define leftRotate21_64(a) (leftRotate_64((a), 21))
#define leftRotate22_64(a) (leftRotate_64((a), 22))
#define leftRotate23_64(a) (leftRotate_64((a), 23))
#define leftRotate24_64(a) (leftRotate_64((a), 24))
#define leftRotate25_64(a) (leftRotate_64((a), 25))
#define leftRotate26_64(a) (leftRotate_64((a), 26))
#define leftRotate27_64(a) (leftRotate_64((a), 27))
#define leftRotate28_64(a) (leftRotate_64((a), 28))
#define leftRotate29_64(a) (leftRotate_64((a), 29))
#define leftRotate30_64(a) (leftRotate_64((a), 30))
#define leftRotate31_64(a) (leftRotate_64((a), 31))
#define leftRotate32_64(a) (leftRotate_64((a), 32))
#define leftRotate33_64(a) (leftRotate_64((a), 33))
#define leftRotate34_64(a) (leftRotate_64((a), 34))
#define leftRotate35_64(a) (leftRotate_64((a), 35))
#define leftRotate36_64(a) (leftRotate_64((a), 36))
#define leftRotate37_64(a) (leftRotate_64((a), 37))
#define leftRotate38_64(a) (leftRotate_64((a), 38))
#define leftRotate39_64(a) (leftRotate_64((a), 39))
#define leftRotate40_64(a) (leftRotate_64((a), 40))
#define leftRotate41_64(a) (leftRotate_64((a), 41))
#define leftRotate42_64(a) (leftRotate_64((a), 42))
#define leftRotate43_64(a) (leftRotate_64((a), 43))
#define leftRotate44_64(a) (leftRotate_64((a), 44))
#define leftRotate45_64(a) (leftRotate_64((a), 45))
#define leftRotate46_64(a) (leftRotate_64((a), 46))
#define leftRotate47_64(a) (leftRotate_64((a), 47))
#define leftRotate48_64(a) (leftRotate_64((a), 48))
#define leftRotate49_64(a) (leftRotate_64((a), 49))
#define leftRotate50_64(a) (leftRotate_64((a), 50))
#define leftRotate51_64(a) (leftRotate_64((a), 51))
#define leftRotate52_64(a) (leftRotate_64((a), 52))
#define leftRotate53_64(a) (leftRotate_64((a), 53))
#define leftRotate54_64(a) (leftRotate_64((a), 54))
#define leftRotate55_64(a) (leftRotate_64((a), 55))
#define leftRotate56_64(a) (leftRotate_64((a), 56))
#define leftRotate57_64(a) (leftRotate_64((a), 57))
#define leftRotate58_64(a) (leftRotate_64((a), 58))
#define leftRotate59_64(a) (leftRotate_64((a), 59))
#define leftRotate60_64(a) (leftRotate_64((a), 60))
#define leftRotate61_64(a) (leftRotate_64((a), 61))
#define leftRotate62_64(a) (leftRotate_64((a), 62))
#define leftRotate63_64(a) (leftRotate_64((a), 63))

/* Right rotate by a specific number of bits.  These macros may be replaced
 * with more efficient ones on platforms that lack a barrel shifter.
 * Valid bit counts are 1..63, as for the left-rotate macros above */
#define rightRotate1_64(a) (rightRotate_64((a), 1))
#define rightRotate2_64(a) (rightRotate_64((a), 2))
#define rightRotate3_64(a) (rightRotate_64((a), 3))
#define rightRotate4_64(a) (rightRotate_64((a), 4))
#define rightRotate5_64(a) (rightRotate_64((a), 5))
#define rightRotate6_64(a) (rightRotate_64((a), 6))
#define rightRotate7_64(a) (rightRotate_64((a), 7))
#define rightRotate8_64(a) (rightRotate_64((a), 8))
#define rightRotate9_64(a) (rightRotate_64((a), 9))
#define rightRotate10_64(a) (rightRotate_64((a), 10))
#define rightRotate11_64(a) (rightRotate_64((a), 11))
#define rightRotate12_64(a) (rightRotate_64((a), 12))
#define rightRotate13_64(a) (rightRotate_64((a), 13))
#define rightRotate14_64(a) (rightRotate_64((a), 14))
#define rightRotate15_64(a) (rightRotate_64((a), 15))
#define rightRotate16_64(a) (rightRotate_64((a), 16))
#define rightRotate17_64(a) (rightRotate_64((a), 17))
#define rightRotate18_64(a) (rightRotate_64((a), 18))
#define rightRotate19_64(a) (rightRotate_64((a), 19))
#define rightRotate20_64(a) (rightRotate_64((a), 20))
#define rightRotate21_64(a) (rightRotate_64((a), 21))
#define rightRotate22_64(a) (rightRotate_64((a), 22))
#define rightRotate23_64(a) (rightRotate_64((a), 23))
#define rightRotate24_64(a) (rightRotate_64((a), 24))
#define rightRotate25_64(a) (rightRotate_64((a), 25))
#define rightRotate26_64(a) (rightRotate_64((a), 26))
#define rightRotate27_64(a) (rightRotate_64((a), 27))
#define rightRotate28_64(a) (rightRotate_64((a), 28))
#define rightRotate29_64(a) (rightRotate_64((a), 29))
#define rightRotate30_64(a) (rightRotate_64((a), 30))
#define rightRotate31_64(a) (rightRotate_64((a), 31))
#define rightRotate32_64(a) (rightRotate_64((a), 32))
#define rightRotate33_64(a) (rightRotate_64((a), 33))
#define rightRotate34_64(a) (rightRotate_64((a), 34))
#define rightRotate35_64(a) (rightRotate_64((a), 35))
#define rightRotate36_64(a) (rightRotate_64((a), 36))
#define rightRotate37_64(a) (rightRotate_64((a), 37))
#define rightRotate38_64(a) (rightRotate_64((a), 38))
#define rightRotate39_64(a) (rightRotate_64((a), 39))
#define rightRotate40_64(a) (rightRotate_64((a), 40))
#define rightRotate41_64(a) (rightRotate_64((a), 41))
#define rightRotate42_64(a) (rightRotate_64((a), 42))
#define rightRotate43_64(a) (rightRotate_64((a), 43))
#define rightRotate44_64(a) (rightRotate_64((a), 44))
#define rightRotate45_64(a) (rightRotate_64((a), 45))
#define rightRotate46_64(a) (rightRotate_64((a), 46))
#define rightRotate47_64(a) (rightRotate_64((a), 47))
#define rightRotate48_64(a) (rightRotate_64((a), 48))
#define rightRotate49_64(a) (rightRotate_64((a), 49))
#define rightRotate50_64(a) (rightRotate_64((a), 50))
#define rightRotate51_64(a) (rightRotate_64((a), 51))
#define rightRotate52_64(a) (rightRotate_64((a), 52))
#define rightRotate53_64(a) (rightRotate_64((a), 53))
#define rightRotate54_64(a) (rightRotate_64((a), 54))
#define rightRotate55_64(a) (rightRotate_64((a), 55))
#define rightRotate56_64(a) (rightRotate_64((a), 56))
#define rightRotate57_64(a) (rightRotate_64((a), 57))
#define rightRotate58_64(a) (rightRotate_64((a), 58))
#define rightRotate59_64(a) (rightRotate_64((a), 59))
#define rightRotate60_64(a) (rightRotate_64((a), 60))
#define rightRotate61_64(a) (rightRotate_64((a), 61))
#define rightRotate62_64(a) (rightRotate_64((a), 62))
#define rightRotate63_64(a) (rightRotate_64((a), 63))

/* Rotate a 16-bit value left by a number of bits.
 * "bits" must be in the range 1..15: a count of 0 or 16 would produce a
 * shift by the full width of the value, which is undefined behaviour.
 * A GCC/Clang statement expression is used so that "a" is evaluated once */
#define leftRotate_16(a, bits) \
    (__extension__ ({ \
        uint16_t _temp = (a); \
        (_temp << (bits)) | (_temp >> (16 - (bits))); \
    }))

/* Rotate a 16-bit value right by a number of bits (bits in 1..15) */
#define rightRotate_16(a, bits) \
    (__extension__ ({ \
        uint16_t _temp = (a); \
        (_temp >> (bits)) | (_temp << (16 - (bits))); \
    }))

/* Left rotate by a specific number of bits.  These macros may be replaced
 * with more efficient ones on platforms that lack a barrel shifter */
#define leftRotate1_16(a) (leftRotate_16((a), 1))
#define leftRotate2_16(a) (leftRotate_16((a), 2))
#define leftRotate3_16(a) (leftRotate_16((a), 3))
#define leftRotate4_16(a) (leftRotate_16((a), 4))
#define leftRotate5_16(a) (leftRotate_16((a), 5))
#define leftRotate6_16(a) (leftRotate_16((a), 6))
#define leftRotate7_16(a) (leftRotate_16((a), 7))
#define leftRotate8_16(a) (leftRotate_16((a), 8))
#define leftRotate9_16(a) (leftRotate_16((a), 9))
#define leftRotate10_16(a) (leftRotate_16((a), 10))
#define leftRotate11_16(a) (leftRotate_16((a), 11))
#define leftRotate12_16(a) (leftRotate_16((a), 12))
#define leftRotate13_16(a) (leftRotate_16((a), 13))
#define leftRotate14_16(a) (leftRotate_16((a), 14))
#define leftRotate15_16(a) (leftRotate_16((a), 15))

/* Right rotate by a specific number of bits.  These macros may be replaced
 * with more efficient ones on platforms that lack a barrel shifter */
#define rightRotate1_16(a) (rightRotate_16((a), 1))
#define rightRotate2_16(a) (rightRotate_16((a), 2))
#define rightRotate3_16(a) (rightRotate_16((a), 3))
#define rightRotate4_16(a) (rightRotate_16((a), 4))
#define rightRotate5_16(a) (rightRotate_16((a), 5))
#define rightRotate6_16(a) (rightRotate_16((a), 6))
#define rightRotate7_16(a) (rightRotate_16((a), 7))
#define rightRotate8_16(a) (rightRotate_16((a), 8))
#define rightRotate9_16(a) (rightRotate_16((a), 9))
#define rightRotate10_16(a) (rightRotate_16((a), 10))
#define rightRotate11_16(a) (rightRotate_16((a), 11))
#define rightRotate12_16(a) (rightRotate_16((a), 12))
#define rightRotate13_16(a) (rightRotate_16((a), 13))
#define rightRotate14_16(a) (rightRotate_16((a), 14))
#define rightRotate15_16(a) (rightRotate_16((a), 15))

/* Rotate an 8-bit value left by a number of bits (bits in 1..7) */
#define leftRotate_8(a, bits) \
    (__extension__ ({ \
        uint8_t _temp = (a); \
        (_temp << (bits)) | (_temp >> (8 - (bits))); \
    }))

/* Rotate an 8-bit value right by a number of bits (bits in 1..7) */
#define rightRotate_8(a, bits) \
    (__extension__ ({ \
        uint8_t _temp = (a); \
        (_temp >> (bits)) | (_temp << (8 - (bits))); \
    }))

/* Left rotate by a specific number of bits.  These macros may be replaced
 * with more efficient ones on platforms that lack a barrel shifter */
#define leftRotate1_8(a) (leftRotate_8((a), 1))
#define leftRotate2_8(a) (leftRotate_8((a), 2))
#define leftRotate3_8(a) (leftRotate_8((a), 3))
#define leftRotate4_8(a) (leftRotate_8((a), 4))
#define leftRotate5_8(a) (leftRotate_8((a), 5))
#define leftRotate6_8(a) (leftRotate_8((a), 6))
#define leftRotate7_8(a) (leftRotate_8((a), 7))

/* Right rotate by a specific number of bits.  These macros may be replaced
 * with more efficient ones on platforms that lack a barrel shifter */
#define rightRotate1_8(a) (rightRotate_8((a), 1))
#define rightRotate2_8(a) (rightRotate_8((a), 2))
#define rightRotate3_8(a) (rightRotate_8((a), 3))
#define rightRotate4_8(a) (rightRotate_8((a), 4))
#define rightRotate5_8(a) (rightRotate_8((a), 5))
#define rightRotate6_8(a) (rightRotate_8((a), 6))
#define rightRotate7_8(a) (rightRotate_8((a), 7))

#endif
+ * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. 
/**
 * \brief Checks an authentication tag against an expected value in
 * constant time.
 *
 * \param plaintext Plaintext buffer that is zeroed on tag mismatch so the
 * caller cannot accidentally use unauthenticated data.
 * \param plaintext_len Length of \a plaintext in bytes.
 * \param tag1 First tag (typically the computed tag).
 * \param tag2 Second tag (typically the received tag).
 * \param size Length of the tags in bytes.
 *
 * \return 0 if the tags match, -1 if they do not.
 */
int aead_check_tag
    (unsigned char *plaintext, unsigned long long plaintext_len,
     const unsigned char *tag1, const unsigned char *tag2,
     unsigned size)
{
    /* OR together the XOR differences of every tag byte so the comparison
     * takes the same time regardless of where (or whether) the tags
     * differ.  "accum" ends up in the range 0..255. */
    int accum = 0;
    while (size > 0) {
        accum |= (*tag1++ ^ *tag2++);
        --size;
    }

    /* Convert "accum" into -1 (all ones) on a match or 0 on a mismatch.
     * The original "(accum - 1) >> 8" right-shifts a negative value on a
     * match, which is implementation-defined in C (C11 6.5.7p5); the
     * unsigned form below is fully portable:
     *   accum == 0      -> (0 + 255) >> 8 = 0 -> mask = -1
     *   accum in 1..255 -> (>= 256) >> 8 = 1 -> mask = 0           */
    accum = (int)(((unsigned)accum + 0xFFU) >> 8) - 1;

    /* Destroy the plaintext if the tag match failed (mask of 0 clears) */
    while (plaintext_len > 0) {
        *plaintext++ &= accum;
        --plaintext_len;
    }

    /* Mask is -1 on match, 0 on mismatch; invert for 0/-1 convention */
    return ~accum;
}

/**
 * \brief Checks an authentication tag in constant time, folding in the
 * result of a previous constant-time check.
 *
 * \param plaintext Plaintext buffer that is zeroed on failure.
 * \param plaintext_len Length of \a plaintext in bytes.
 * \param tag1 First tag to compare.
 * \param tag2 Second tag to compare.
 * \param size Length of the tags in bytes.
 * \param precheck -1 if the previous check succeeded, 0 if it failed.
 *
 * \return 0 if the tags match and \a precheck was -1; -1 otherwise.
 */
int aead_check_tag_precheck
    (unsigned char *plaintext, unsigned long long plaintext_len,
     const unsigned char *tag1, const unsigned char *tag2,
     unsigned size, int precheck)
{
    /* Constant-time accumulation of tag differences, as above */
    int accum = 0;
    while (size > 0) {
        accum |= (*tag1++ ^ *tag2++);
        --size;
    }

    /* Portable match mask (see aead_check_tag), ANDed with the previous
     * check's mask so that either failure wipes the plaintext */
    accum = ((int)(((unsigned)accum + 0xFFU) >> 8) - 1) & precheck;

    /* Destroy the plaintext if the combined check failed */
    while (plaintext_len > 0) {
        *plaintext++ &= accum;
        --plaintext_len;
    }

    return ~accum;
}
#ifndef LWCRYPTO_AEAD_COMMON_H
#define LWCRYPTO_AEAD_COMMON_H

/* Restored: the header name was stripped from this include in the source
 * ("#include" with no argument).  size_t is required by
 * aead_hash_algorithm_t below. */
#include <stddef.h>

/**
 * \file aead-common.h
 * \brief Definitions that are common across AEAD schemes.
 *
 * AEAD stands for "Authenticated Encryption with Associated Data".
 * It is a standard API pattern for securely encrypting and
 * authenticating packets of data.
 */

#ifdef __cplusplus
extern "C" {
#endif

/**
 * \brief Encrypts and authenticates a packet with an AEAD scheme.
 *
 * \param c Buffer to receive the output.
 * \param clen On exit, set to the length of the output which includes
 * the ciphertext and the authentication tag.
 * \param m Buffer that contains the plaintext message to encrypt.
 * \param mlen Length of the plaintext message in bytes.
 * \param ad Buffer that contains associated data to authenticate
 * along with the packet but which does not need to be encrypted.
 * \param adlen Length of the associated data in bytes.
 * \param nsec Secret nonce - normally not used by AEAD schemes.
 * \param npub Points to the public nonce for the packet.
 * \param k Points to the key to use to encrypt the packet.
 *
 * \return 0 on success, or a negative value if there was an error in
 * the parameters.
 */
typedef int (*aead_cipher_encrypt_t)
    (unsigned char *c, unsigned long long *clen,
     const unsigned char *m, unsigned long long mlen,
     const unsigned char *ad, unsigned long long adlen,
     const unsigned char *nsec,
     const unsigned char *npub,
     const unsigned char *k);

/**
 * \brief Decrypts and authenticates a packet with an AEAD scheme.
 *
 * \param m Buffer to receive the plaintext message on output.
 * \param mlen Receives the length of the plaintext message on output.
 * \param nsec Secret nonce - normally not used by AEAD schemes.
 * \param c Buffer that contains the ciphertext and authentication
 * tag to decrypt.
 * \param clen Length of the input data in bytes, which includes the
 * ciphertext and the authentication tag.
 * \param ad Buffer that contains associated data to authenticate
 * along with the packet but which does not need to be encrypted.
 * \param adlen Length of the associated data in bytes.
 * \param npub Points to the public nonce for the packet.
 * \param k Points to the key to use to decrypt the packet.
 *
 * \return 0 on success, -1 if the authentication tag was incorrect,
 * or some other negative number if there was an error in the parameters.
 */
typedef int (*aead_cipher_decrypt_t)
    (unsigned char *m, unsigned long long *mlen,
     unsigned char *nsec,
     const unsigned char *c, unsigned long long clen,
     const unsigned char *ad, unsigned long long adlen,
     const unsigned char *npub,
     const unsigned char *k);

/**
 * \brief Hashes a block of input data.
 *
 * \param out Buffer to receive the hash output.
 * \param in Points to the input data to be hashed.
 * \param inlen Length of the input data in bytes.
 *
 * \return Returns zero on success or -1 if there was an error in the
 * parameters.
 */
typedef int (*aead_hash_t)
    (unsigned char *out, const unsigned char *in, unsigned long long inlen);

/**
 * \brief Initializes the state for a hashing operation.
 *
 * \param state Hash state to be initialized.
 */
typedef void (*aead_hash_init_t)(void *state);

/**
 * \brief Updates a hash state with more input data.
 *
 * \param state Hash state to be updated.
 * \param in Points to the input data to be incorporated into the state.
 * \param inlen Length of the input data to be incorporated into the state.
 */
typedef void (*aead_hash_update_t)
    (void *state, const unsigned char *in, unsigned long long inlen);

/**
 * \brief Returns the final hash value from a hashing operation.
 *
 * \param state Hash state to be finalized.
 * \param out Points to the output buffer to receive the hash value.
 */
typedef void (*aead_hash_finalize_t)(void *state, unsigned char *out);

/**
 * \brief Absorbs more input data into an XOF state.
 *
 * \param state XOF state to be updated.
 * \param in Points to the input data to be absorbed into the state.
 * \param inlen Length of the input data to be absorbed into the state.
 *
 * \sa ascon_xof_init(), ascon_xof_squeeze()
 */
typedef void (*aead_xof_absorb_t)
    (void *state, const unsigned char *in, unsigned long long inlen);

/**
 * \brief Squeezes output data from an XOF state.
 *
 * \param state XOF state to squeeze the output data from.
 * \param out Points to the output buffer to receive the squeezed data.
 * \param outlen Number of bytes of data to squeeze out of the state.
 */
typedef void (*aead_xof_squeeze_t)
    (void *state, unsigned char *out, unsigned long long outlen);

/**
 * \brief No special AEAD features.
 */
#define AEAD_FLAG_NONE 0x0000

/**
 * \brief The natural byte order of the AEAD cipher is little-endian.
 *
 * If this flag is not present, then the natural byte order of the
 * AEAD cipher should be assumed to be big-endian.
 *
 * The natural byte order may be useful when formatting packet sequence
 * numbers as nonces.  The application needs to know whether the sequence
 * number should be packed into the leading or trailing bytes of the nonce.
 */
#define AEAD_FLAG_LITTLE_ENDIAN 0x0001

/**
 * \brief Meta-information about an AEAD cipher.
 */
typedef struct
{
    const char *name;               /**< Name of the cipher */
    unsigned key_len;               /**< Length of the key in bytes */
    unsigned nonce_len;             /**< Length of the nonce in bytes */
    unsigned tag_len;               /**< Length of the tag in bytes */
    unsigned flags;                 /**< Flags for extra features */
    aead_cipher_encrypt_t encrypt;  /**< AEAD encryption function */
    aead_cipher_decrypt_t decrypt;  /**< AEAD decryption function */

} aead_cipher_t;

/**
 * \brief Meta-information about a hash algorithm that is related to an AEAD.
 *
 * Regular hash algorithms should provide the "hash", "init", "update",
 * and "finalize" functions.  Extensible Output Functions (XOF's) should
 * provide the "hash", "init", "absorb", and "squeeze" functions.
 */
typedef struct
{
    const char *name;               /**< Name of the hash algorithm */
    size_t state_size;              /**< Size of the incremental state structure */
    unsigned hash_len;              /**< Length of the hash in bytes */
    unsigned flags;                 /**< Flags for extra features */
    aead_hash_t hash;               /**< All in one hashing function */
    aead_hash_init_t init;          /**< Incremental hash/XOF init function */
    aead_hash_update_t update;      /**< Incremental hash update function */
    aead_hash_finalize_t finalize;  /**< Incremental hash finalize function */
    aead_xof_absorb_t absorb;       /**< Incremental XOF absorb function */
    aead_xof_squeeze_t squeeze;     /**< Incremental XOF squeeze function */

} aead_hash_algorithm_t;

/**
 * \brief Check an authentication tag in constant time.
 *
 * \param plaintext Points to the plaintext data.
 * \param plaintext_len Length of the plaintext in bytes.
 * \param tag1 First tag to compare.
 * \param tag2 Second tag to compare.
 * \param tag_len Length of the tags in bytes.
 *
 * \return Returns -1 if the tag check failed or 0 if the check succeeded.
 *
 * If the tag check fails, then the \a plaintext will also be zeroed to
 * prevent it from being used accidentally by the application when the
 * ciphertext was invalid.
 */
int aead_check_tag
    (unsigned char *plaintext, unsigned long long plaintext_len,
     const unsigned char *tag1, const unsigned char *tag2,
     unsigned tag_len);

/**
 * \brief Check an authentication tag in constant time with a previous check.
 *
 * \param plaintext Points to the plaintext data.
 * \param plaintext_len Length of the plaintext in bytes.
 * \param tag1 First tag to compare.
 * \param tag2 Second tag to compare.
 * \param tag_len Length of the tags in bytes.
 * \param precheck Set to -1 if previous check succeeded or 0 if it failed.
 *
 * \return Returns -1 if the tag check failed or 0 if the check succeeded.
 *
 * If the tag check fails, then the \a plaintext will also be zeroed to
 * prevent it from being used accidentally by the application when the
 * ciphertext was invalid.
 *
 * This version can be used to incorporate other information about the
 * correctness of the plaintext into the final result.
 */
int aead_check_tag_precheck
    (unsigned char *plaintext, unsigned long long plaintext_len,
     const unsigned char *tag1, const unsigned char *tag2,
     unsigned tag_len, int precheck);

#ifdef __cplusplus
}
#endif

#endif
+ */ + +#include "gimli24.h" +#include "internal-gimli24.h" +#include + +aead_cipher_t const gimli24_cipher = { + "GIMLI-24", + GIMLI24_KEY_SIZE, + GIMLI24_NONCE_SIZE, + GIMLI24_TAG_SIZE, + AEAD_FLAG_LITTLE_ENDIAN, + gimli24_aead_encrypt, + gimli24_aead_decrypt +}; + +aead_hash_algorithm_t const gimli24_hash_algorithm = { + "GIMLI-24-HASH", + sizeof(gimli24_hash_state_t), + GIMLI24_HASH_SIZE, + AEAD_FLAG_LITTLE_ENDIAN, + gimli24_hash, + (aead_hash_init_t)gimli24_hash_init, + (aead_hash_update_t)gimli24_hash_absorb, + (aead_hash_finalize_t)gimli24_hash_finalize, + (aead_xof_absorb_t)gimli24_hash_absorb, + (aead_xof_squeeze_t)gimli24_hash_squeeze +}; + +/** + * \brief Number of bytes of input or output data to process per block. + */ +#define GIMLI24_BLOCK_SIZE 16 + +/** + * \brief Structure of the GIMLI-24 state as both an array of words + * and an array of bytes. + */ +typedef union +{ + uint32_t words[12]; /**< Words in the state */ + uint8_t bytes[48]; /**< Bytes in the state */ + +} gimli24_state_t; + +/** + * \brief Absorbs data into a GIMLI-24 state. + * + * \param state The state to absorb the data into. + * \param data Points to the data to be absorbed. + * \param len Length of the data to be absorbed. + */ +static void gimli24_absorb + (gimli24_state_t *state, const unsigned char *data, unsigned long long len) +{ + unsigned temp; + while (len >= GIMLI24_BLOCK_SIZE) { + lw_xor_block(state->bytes, data, GIMLI24_BLOCK_SIZE); + gimli24_permute(state->words); + data += GIMLI24_BLOCK_SIZE; + len -= GIMLI24_BLOCK_SIZE; + } + temp = (unsigned)len; + lw_xor_block(state->bytes, data, temp); + state->bytes[temp] ^= 0x01; /* Padding */ + state->bytes[47] ^= 0x01; + gimli24_permute(state->words); +} + +/** + * \brief Encrypts a block of data with a GIMLI-24 state. + * + * \param state The state to encrypt with. + * \param dest Points to the destination buffer. + * \param src Points to the source buffer. 
+ * \param len Length of the data to encrypt from \a src into \a dest. + */ +static void gimli24_encrypt + (gimli24_state_t *state, unsigned char *dest, + const unsigned char *src, unsigned long long len) +{ + unsigned temp; + while (len >= GIMLI24_BLOCK_SIZE) { + lw_xor_block_2_dest(dest, state->bytes, src, GIMLI24_BLOCK_SIZE); + gimli24_permute(state->words); + dest += GIMLI24_BLOCK_SIZE; + src += GIMLI24_BLOCK_SIZE; + len -= GIMLI24_BLOCK_SIZE; + } + temp = (unsigned)len; + lw_xor_block_2_dest(dest, state->bytes, src, temp); + state->bytes[temp] ^= 0x01; /* Padding */ + state->bytes[47] ^= 0x01; + gimli24_permute(state->words); +} + +/** + * \brief Decrypts a block of data with a GIMLI-24 state. + * + * \param state The state to decrypt with. + * \param dest Points to the destination buffer. + * \param src Points to the source buffer. + * \param len Length of the data to decrypt from \a src into \a dest. + */ +static void gimli24_decrypt + (gimli24_state_t *state, unsigned char *dest, + const unsigned char *src, unsigned long long len) +{ + unsigned temp; + while (len >= GIMLI24_BLOCK_SIZE) { + lw_xor_block_swap(dest, state->bytes, src, GIMLI24_BLOCK_SIZE); + gimli24_permute(state->words); + dest += GIMLI24_BLOCK_SIZE; + src += GIMLI24_BLOCK_SIZE; + len -= GIMLI24_BLOCK_SIZE; + } + temp = (unsigned)len; + lw_xor_block_swap(dest, state->bytes, src, temp); + state->bytes[temp] ^= 0x01; /* Padding */ + state->bytes[47] ^= 0x01; + gimli24_permute(state->words); +} + +int gimli24_aead_encrypt + (unsigned char *c, unsigned long long *clen, + const unsigned char *m, unsigned long long mlen, + const unsigned char *ad, unsigned long long adlen, + const unsigned char *nsec, + const unsigned char *npub, + const unsigned char *k) +{ + gimli24_state_t state; + (void)nsec; + + /* Set the length of the returned ciphertext */ + *clen = mlen + GIMLI24_TAG_SIZE; + + /* Format the initial GIMLI state from the nonce and the key */ + memcpy(state.words, npub, GIMLI24_NONCE_SIZE); + 
memcpy(state.words + 4, k, GIMLI24_KEY_SIZE); + + /* Permute the initial state */ + gimli24_permute(state.words); + + /* Absorb the associated data */ + gimli24_absorb(&state, ad, adlen); + + /* Encrypt the plaintext to produce the ciphertext */ + gimli24_encrypt(&state, c, m, mlen); + + /* Generate the authentication tag at the end of the ciphertext */ + memcpy(c + mlen, state.bytes, GIMLI24_TAG_SIZE); + return 0; +} + +int gimli24_aead_decrypt + (unsigned char *m, unsigned long long *mlen, + unsigned char *nsec, + const unsigned char *c, unsigned long long clen, + const unsigned char *ad, unsigned long long adlen, + const unsigned char *npub, + const unsigned char *k) +{ + gimli24_state_t state; + (void)nsec; + + /* Validate the ciphertext length and set the return "mlen" value */ + if (clen < GIMLI24_TAG_SIZE) + return -1; + *mlen = clen - GIMLI24_TAG_SIZE; + + /* Format the initial GIMLI state from the nonce and the key */ + memcpy(state.words, npub, GIMLI24_NONCE_SIZE); + memcpy(state.words + 4, k, GIMLI24_KEY_SIZE); + + /* Permute the initial state */ + gimli24_permute(state.words); + + /* Absorb the associated data */ + gimli24_absorb(&state, ad, adlen); + + /* Decrypt the ciphertext to produce the plaintext */ + gimli24_decrypt(&state, m, c, *mlen); + + /* Check the authentication tag at the end of the packet */ + return aead_check_tag + (m, *mlen, state.bytes, c + *mlen, GIMLI24_TAG_SIZE); +} + +int gimli24_hash + (unsigned char *out, const unsigned char *in, unsigned long long inlen) +{ + gimli24_state_t state; + + /* Initialize the hash state to all zeroes */ + memset(&state, 0, sizeof(state)); + + /* Absorb the input */ + gimli24_absorb(&state, in, inlen); + + /* Generate the output hash */ + memcpy(out, state.bytes, GIMLI24_HASH_SIZE / 2); + gimli24_permute(state.words); + memcpy(out + GIMLI24_HASH_SIZE / 2, state.bytes, GIMLI24_HASH_SIZE / 2); + return 0; +} + +void gimli24_hash_init(gimli24_hash_state_t *state) +{ + memset(state, 0, 
sizeof(gimli24_hash_state_t)); +} + +#define GIMLI24_XOF_RATE 16 +#define gimli24_xof_permute() \ + gimli24_permute((uint32_t *)(state->s.state)) + +void gimli24_hash_absorb + (gimli24_hash_state_t *state, const unsigned char *in, + unsigned long long inlen) +{ + unsigned temp; + + if (state->s.mode) { + /* We were squeezing output - go back to the absorb phase */ + state->s.mode = 0; + state->s.count = 0; + gimli24_xof_permute(); + } + + /* Handle the partial left-over block from last time */ + if (state->s.count) { + temp = GIMLI24_XOF_RATE - state->s.count; + if (temp > inlen) { + temp = (unsigned)inlen; + lw_xor_block(state->s.state + state->s.count, in, temp); + state->s.count += temp; + return; + } + lw_xor_block(state->s.state + state->s.count, in, temp); + state->s.count = 0; + in += temp; + inlen -= temp; + gimli24_xof_permute(); + } + + /* Process full blocks that are aligned at state->s.count == 0 */ + while (inlen >= GIMLI24_XOF_RATE) { + lw_xor_block(state->s.state, in, GIMLI24_XOF_RATE); + in += GIMLI24_XOF_RATE; + inlen -= GIMLI24_XOF_RATE; + gimli24_xof_permute(); + } + + /* Process the left-over block at the end of the input */ + temp = (unsigned)inlen; + lw_xor_block(state->s.state, in, temp); + state->s.count = temp; +} + +void gimli24_hash_squeeze + (gimli24_hash_state_t *state, unsigned char *out, + unsigned long long outlen) +{ + unsigned temp; + + /* Pad the final input block if we were still in the absorb phase */ + if (!state->s.mode) { + state->s.state[state->s.count] ^= 0x01; + state->s.state[47] ^= 0x01; + state->s.count = 0; + state->s.mode = 1; + } + + /* Handle left-over partial blocks from last time */ + if (state->s.count) { + temp = GIMLI24_XOF_RATE - state->s.count; + if (temp > outlen) { + temp = (unsigned)outlen; + memcpy(out, state->s.state + state->s.count, temp); + state->s.count += temp; + return; + } + memcpy(out, state->s.state + state->s.count, temp); + out += temp; + outlen -= temp; + state->s.count = 0; + } + + /* 
Handle full blocks */ + while (outlen >= GIMLI24_XOF_RATE) { + gimli24_xof_permute(); + memcpy(out, state->s.state, GIMLI24_XOF_RATE); + out += GIMLI24_XOF_RATE; + outlen -= GIMLI24_XOF_RATE; + } + + /* Handle the left-over block */ + if (outlen > 0) { + temp = (unsigned)outlen; + gimli24_xof_permute(); + memcpy(out, state->s.state, temp); + state->s.count = temp; + } +} + +void gimli24_hash_finalize + (gimli24_hash_state_t *state, unsigned char *out) +{ + gimli24_hash_squeeze(state, out, GIMLI24_HASH_SIZE); +} diff --git a/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/gimli24.h b/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/gimli24.h new file mode 100644 index 0000000..f72aec7 --- /dev/null +++ b/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/gimli24.h @@ -0,0 +1,220 @@ +/* + * Copyright (C) 2020 Southern Storm Software, Pty Ltd. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. 
+ */ + +#ifndef LWCRYPTO_GIMLI24_H +#define LWCRYPTO_GIMLI24_H + +#include "aead-common.h" + +/** + * \file gimli24.h + * \brief Gimli authenticated encryption algorithm. + * + * GIMLI-24-CIPHER has a 256-bit key, a 128-bit nonce, and a 128-bit tag. + * It is the spiritual successor to the widely used ChaCha20 and has a + * similar design. + * + * This library also includes an implementation of the hash algorithm + * GIMLI-24-HASH in both regular hashing and XOF modes. + * + * References: https://gimli.cr.yp.to/ + */ + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * \brief Size of the key for GIMLI-24. + */ +#define GIMLI24_KEY_SIZE 32 + +/** + * \brief Size of the nonce for GIMLI-24. + */ +#define GIMLI24_NONCE_SIZE 16 + +/** + * \brief Size of the authentication tag for GIMLI-24. + */ +#define GIMLI24_TAG_SIZE 16 + +/** + * \brief Size of the hash output for GIMLI-24. + */ +#define GIMLI24_HASH_SIZE 32 + +/** + * \brief State information for GIMLI-24-HASH incremental modes. + */ +typedef union +{ + struct { + unsigned char state[48]; /**< Current hash state */ + unsigned char count; /**< Number of bytes in the current block */ + unsigned char mode; /**< Hash mode: 0 for absorb, 1 for squeeze */ + } s; /**< State */ + unsigned long long align; /**< For alignment of this structure */ + +} gimli24_hash_state_t; + +/** + * \brief Meta-information block for the GIMLI-24 cipher. + */ +extern aead_cipher_t const gimli24_cipher; + +/** + * \brief Meta-information block for the GIMLI-24-HASH algorithm. + * + * This meta-information block can also be used in XOF mode. + */ +extern aead_hash_algorithm_t const gimli24_hash_algorithm; + +/** + * \brief Encrypts and authenticates a packet with GIMLI-24 using the + * full AEAD mode. + * + * \param c Buffer to receive the output. + * \param clen On exit, set to the length of the output which includes + * the ciphertext and the 16 byte authentication tag. + * \param m Buffer that contains the plaintext message to encrypt. 
+ * \param mlen Length of the plaintext message in bytes. + * \param ad Buffer that contains associated data to authenticate + * along with the packet but which does not need to be encrypted. + * \param adlen Length of the associated data in bytes. + * \param nsec Secret nonce - not used by this algorithm. + * \param npub Points to the public nonce for the packet which must + * be 16 bytes in length. + * \param k Points to the 32 bytes of the key to use to encrypt the packet. + * + * \return 0 on success, or a negative value if there was an error in + * the parameters. + * + * \sa gimli24_aead_decrypt() + */ +int gimli24_aead_encrypt + (unsigned char *c, unsigned long long *clen, + const unsigned char *m, unsigned long long mlen, + const unsigned char *ad, unsigned long long adlen, + const unsigned char *nsec, + const unsigned char *npub, + const unsigned char *k); + +/** + * \brief Decrypts and authenticates a packet with GIMLI-24 using the + * full AEAD mode. + * + * \param m Buffer to receive the plaintext message on output. + * \param mlen Receives the length of the plaintext message on output. + * \param nsec Secret nonce - not used by this algorithm. + * \param c Buffer that contains the ciphertext and authentication + * tag to decrypt. + * \param clen Length of the input data in bytes, which includes the + * ciphertext and the 16 byte authentication tag. + * \param ad Buffer that contains associated data to authenticate + * along with the packet but which does not need to be encrypted. + * \param adlen Length of the associated data in bytes. + * \param npub Points to the public nonce for the packet which must + * be 16 bytes in length. + * \param k Points to the 32 bytes of the key to use to decrypt the packet. + * + * \return 0 on success, -1 if the authentication tag was incorrect, + * or some other negative number if there was an error in the parameters. 
+ * + * \sa gimli24_aead_encrypt() + */ +int gimli24_aead_decrypt + (unsigned char *m, unsigned long long *mlen, + unsigned char *nsec, + const unsigned char *c, unsigned long long clen, + const unsigned char *ad, unsigned long long adlen, + const unsigned char *npub, + const unsigned char *k); + +/** + * \brief Hashes a block of input data with GIMLI-24 to generate a hash value. + * + * \param out Buffer to receive the hash output which must be at least + * GIMLI24_HASH_SIZE bytes in length. + * \param in Points to the input data to be hashed. + * \param inlen Length of the input data in bytes. + * + * \return Returns zero on success or -1 if there was an error in the + * parameters. + */ +int gimli24_hash + (unsigned char *out, const unsigned char *in, unsigned long long inlen); + +/** + * \brief Initializes the state for a GIMLI-24-HASH hashing operation. + * + * \param state Hash state to be initialized. + * + * \sa gimli24_hash_absorb(), gimli24_hash_squeeze(), gimli24_hash() + */ +void gimli24_hash_init(gimli24_hash_state_t *state); + +/** + * \brief Aborbs more input data into a GIMLI-24-HASH state. + * + * \param state Hash state to be updated. + * \param in Points to the input data to be absorbed into the state. + * \param inlen Length of the input data to be absorbed into the state. + * + * \sa gimli24_hash_init(), gimli24_hash_squeeze() + */ +void gimli24_hash_absorb + (gimli24_hash_state_t *state, const unsigned char *in, + unsigned long long inlen); + +/** + * \brief Squeezes output data from an GIMLI-24-HASH state. + * + * \param state Hash state to squeeze the output data from. + * \param out Points to the output buffer to receive the squeezed data. + * \param outlen Number of bytes of data to squeeze out of the state. 
+ * + * \sa gimli24_hash_init(), gimli24_hash_absorb() + */ +void gimli24_hash_squeeze + (gimli24_hash_state_t *state, unsigned char *out, + unsigned long long outlen); + +/** + * \brief Returns the final hash value from a GIMLI-24-HASH hashing operation. + * + * \param state Hash state to be finalized. + * \param out Points to the output buffer to receive the hash value. + * + * \note This is a wrapper around gimli24_hash_squeeze() for a fixed length + * of GIMLI24_HASH_SIZE bytes. + * + * \sa gimli24_hash_init(), gimli24_hash_absorb() + */ +void gimli24_hash_finalize + (gimli24_hash_state_t *state, unsigned char *out); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/hash.c b/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/hash.c new file mode 100644 index 0000000..93789b1 --- /dev/null +++ b/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/hash.c @@ -0,0 +1,8 @@ + +#include "gimli24.h" + +int crypto_hash + (unsigned char *out, const unsigned char *in, unsigned long long inlen) +{ + return gimli24_hash(out, in, inlen); +} diff --git a/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/internal-gimli24-avr.S b/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/internal-gimli24-avr.S new file mode 100644 index 0000000..efcd500 --- /dev/null +++ b/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/internal-gimli24-avr.S @@ -0,0 +1,9419 @@ +#if defined(__AVR__) +#include +/* Automatically generated - do not edit */ + + .text +.global gimli24_permute + .type gimli24_permute, @function +gimli24_permute: + push r28 + push r29 + push r2 + push r3 + push r4 + push r5 + push r6 + push r7 + push r8 + push r9 + push r10 + push r11 + push r12 + push r13 + push r14 + push r15 + push r16 + push r17 + movw r30,r24 +.L__stack_usage = 18 + ld r18,Z + ldd r19,Z+1 + ldd r20,Z+2 + ldd r21,Z+3 + ldd r22,Z+16 + ldd r23,Z+17 + ldd r26,Z+18 + ldd r27,Z+19 + ldd r28,Z+32 + ldd r29,Z+33 + ldd r2,Z+34 + ldd r3,Z+35 + 
ldd r4,Z+4 + ldd r5,Z+5 + ldd r6,Z+6 + ldd r7,Z+7 + ldd r8,Z+20 + ldd r9,Z+21 + ldd r10,Z+22 + ldd r11,Z+23 + ldd r12,Z+36 + ldd r13,Z+37 + ldd r14,Z+38 + ldd r15,Z+39 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor 
r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + ldi r31,24 + eor r4,r31 + ldi r30,121 + eor r5,r30 + ldi r25,55 + eor r6,r25 + ldi r24,158 + eor r7,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 
+ rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol 
r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + st Z,r4 + std Z+1,r5 + std Z+2,r6 + std Z+3,r7 + std Z+16,r22 + std Z+17,r23 + std Z+18,r26 + std Z+19,r27 + std Z+32,r28 + std Z+33,r29 + std Z+34,r2 + std Z+35,r3 + std Z+4,r18 + std Z+5,r19 + std Z+6,r20 + std Z+7,r21 + std Z+20,r8 + std Z+21,r9 + std Z+22,r10 + std Z+23,r11 + std Z+36,r12 + std Z+37,r13 + std Z+38,r14 + std Z+39,r15 + ldd r4,Z+8 + ldd r5,Z+9 + ldd r6,Z+10 + ldd r7,Z+11 + ldd r22,Z+24 + ldd r23,Z+25 + ldd r26,Z+26 + ldd r27,Z+27 + ldd r28,Z+40 + ldd r29,Z+41 + ldd r2,Z+42 + ldd r3,Z+43 + ldd r18,Z+12 + ldd r19,Z+13 + ldd r20,Z+14 + ldd r21,Z+15 + ldd r8,Z+28 + ldd r9,Z+29 + ldd r10,Z+30 + ldd r11,Z+31 + ldd r12,Z+44 + ldd r13,Z+45 + ldd r14,Z+46 + ldd r15,Z+47 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol 
r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov 
r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw 
r14,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl 
r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + pop r30 + pop r31 + push r21 + push r20 + push r19 + push r18 + push r7 + push r6 + push r5 + push r4 + ld r18,Z + ldd r19,Z+1 + ldd r20,Z+2 + ldd r21,Z+3 + ldd r4,Z+4 + ldd r5,Z+5 + ldd r6,Z+6 + ldd r7,Z+7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol 
r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + 
lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + 
movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + 
rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + std Z+8,r4 + std Z+9,r5 + std Z+10,r6 + std Z+11,r7 + std Z+24,r22 + std Z+25,r23 + std Z+26,r26 + std Z+27,r27 + std Z+40,r28 + std Z+41,r29 + std Z+42,r2 + std Z+43,r3 + std Z+12,r18 + std Z+13,r19 + std Z+14,r20 + std Z+15,r21 + std Z+28,r8 + std Z+29,r9 + std Z+30,r10 + std Z+31,r11 + std Z+44,r12 + std Z+45,r13 + std Z+46,r14 + std Z+47,r15 + ld r4,Z + ldd r5,Z+1 + ldd r6,Z+2 + ldd r7,Z+3 + ldd r22,Z+16 + ldd r23,Z+17 + ldd r26,Z+18 + ldd r27,Z+19 + ldd r28,Z+32 + ldd r29,Z+33 + ldd r2,Z+34 + ldd r3,Z+35 + ldd r18,Z+4 + ldd r19,Z+5 + ldd r20,Z+6 + ldd r21,Z+7 + ldd r8,Z+20 + ldd r9,Z+21 + ldd r10,Z+22 + ldd r11,Z+23 + ldd 
r12,Z+36 + ldd r13,Z+37 + ldd r14,Z+38 + ldd r15,Z+39 + pop r18 + pop r19 + pop r20 + pop r21 + pop r4 + pop r5 + pop r6 + pop r7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + 
and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 
+ movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + ldi r31,20 + eor r18,r31 + ldi r30,121 + eor r19,r30 + ldi r25,55 + eor r20,r25 + ldi r24,158 + eor r21,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov 
r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol 
r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + pop r30 + pop r31 + push r21 + push r20 + push r19 + push r18 + push r7 + push r6 + push r5 + push r4 + ldd r18,Z+8 + ldd r19,Z+9 + ldd r20,Z+10 + ldd r21,Z+11 + ldd r4,Z+12 + ldd r5,Z+13 + ldd r6,Z+14 + ldd r7,Z+15 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + 
rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + 
eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + ldi r31,16 + eor r4,r31 + ldi r30,121 + eor r5,r30 + ldi r25,55 + eor r6,r25 + ldi r24,158 + eor r7,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov 
r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor 
r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl 
r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + st Z,r4 + std Z+1,r5 + std Z+2,r6 + std Z+3,r7 + std Z+16,r22 + std Z+17,r23 + std Z+18,r26 + std Z+19,r27 + std Z+32,r28 + std Z+33,r29 + std Z+34,r2 + std Z+35,r3 + std Z+4,r18 + std Z+5,r19 + std Z+6,r20 + std Z+7,r21 + std Z+20,r8 + std Z+21,r9 + std Z+22,r10 + std Z+23,r11 + std Z+36,r12 + std Z+37,r13 + std Z+38,r14 + std Z+39,r15 + ldd r4,Z+8 + ldd r5,Z+9 + ldd r6,Z+10 + ldd r7,Z+11 + ldd r22,Z+24 + ldd r23,Z+25 + ldd r26,Z+26 + ldd r27,Z+27 + ldd r28,Z+40 + ldd r29,Z+41 + ldd r2,Z+42 + ldd r3,Z+43 + ldd r18,Z+12 + ldd r19,Z+13 + ldd r20,Z+14 + ldd r21,Z+15 + ldd r8,Z+28 + ldd r9,Z+29 + ldd r10,Z+30 + ldd r11,Z+31 + ldd r12,Z+44 + ldd r13,Z+45 + ldd r14,Z+46 + ldd r15,Z+47 + pop r18 + pop r19 + pop r20 + pop r21 + pop r4 + pop r5 + pop r6 + pop r7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol 
r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 
+ eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + 
mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + 
rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + pop r30 + pop r31 + push r21 + push r20 + push r19 + push r18 + push r7 + push r6 
+ push r5 + push r4 + ld r18,Z + ldd r19,Z+1 + ldd r20,Z+2 + ldd r21,Z+3 + ldd r4,Z+4 + ldd r5,Z+5 + ldd r6,Z+6 + ldd r7,Z+7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + 
eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + 
movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol 
r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 
+ movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + std Z+8,r4 + std Z+9,r5 + std Z+10,r6 + std Z+11,r7 + std Z+24,r22 + std Z+25,r23 + std Z+26,r26 + std Z+27,r27 + std Z+40,r28 + std Z+41,r29 + std Z+42,r2 + std Z+43,r3 + std Z+12,r18 + std Z+13,r19 + std Z+14,r20 + std Z+15,r21 + std Z+28,r8 + std Z+29,r9 + std Z+30,r10 + std Z+31,r11 + std Z+44,r12 + std Z+45,r13 + std Z+46,r14 + std Z+47,r15 + ld r4,Z + ldd r5,Z+1 + ldd r6,Z+2 + ldd r7,Z+3 + ldd r22,Z+16 + ldd r23,Z+17 + ldd r26,Z+18 + ldd r27,Z+19 + ldd r28,Z+32 + ldd r29,Z+33 + ldd r2,Z+34 + ldd r3,Z+35 + ldd r18,Z+4 + ldd r19,Z+5 + ldd r20,Z+6 + ldd r21,Z+7 + ldd r8,Z+20 + ldd r9,Z+21 + ldd r10,Z+22 + ldd r11,Z+23 + ldd r12,Z+36 + ldd r13,Z+37 + ldd r14,Z+38 + ldd r15,Z+39 + pop r18 + pop r19 + pop r20 + pop r21 + pop r4 + pop r5 + pop r6 + pop r7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov 
r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 
+ rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl 
r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + ldi r31,12 + eor r18,r31 + ldi r30,121 + eor r19,r30 + ldi r25,55 + eor r20,r25 + ldi r24,158 + eor r21,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + 
lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor 
r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + pop r30 + pop r31 + push r21 + push r20 + push r19 + push r18 + push r7 + push r6 + push r5 + push r4 + ldd r18,Z+8 + ldd r19,Z+9 + ldd r20,Z+10 + ldd r21,Z+11 + ldd r4,Z+12 + ldd r5,Z+13 + ldd r6,Z+14 + ldd r7,Z+15 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor 
r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 
+ and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + ldi r31,8 + eor r4,r31 + ldi r30,121 + eor r5,r30 + ldi r25,55 + eor r6,r25 + ldi r24,158 + eor r7,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 
+ mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and 
r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + st Z,r4 + std Z+1,r5 + std Z+2,r6 + std Z+3,r7 + std Z+16,r22 + std 
Z+17,r23 + std Z+18,r26 + std Z+19,r27 + std Z+32,r28 + std Z+33,r29 + std Z+34,r2 + std Z+35,r3 + std Z+4,r18 + std Z+5,r19 + std Z+6,r20 + std Z+7,r21 + std Z+20,r8 + std Z+21,r9 + std Z+22,r10 + std Z+23,r11 + std Z+36,r12 + std Z+37,r13 + std Z+38,r14 + std Z+39,r15 + ldd r4,Z+8 + ldd r5,Z+9 + ldd r6,Z+10 + ldd r7,Z+11 + ldd r22,Z+24 + ldd r23,Z+25 + ldd r26,Z+26 + ldd r27,Z+27 + ldd r28,Z+40 + ldd r29,Z+41 + ldd r2,Z+42 + ldd r3,Z+43 + ldd r18,Z+12 + ldd r19,Z+13 + ldd r20,Z+14 + ldd r21,Z+15 + ldd r8,Z+28 + ldd r9,Z+29 + ldd r10,Z+30 + ldd r11,Z+31 + ldd r12,Z+44 + ldd r13,Z+45 + ldd r14,Z+46 + ldd r15,Z+47 + pop r18 + pop r19 + pop r20 + pop r21 + pop r4 + pop r5 + pop r6 + pop r7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol 
r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw 
r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + 
rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor 
r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + pop r30 + pop r31 + push r21 + push r20 + push r19 + push r18 + push r7 + push r6 + push r5 + push r4 + ld r18,Z + ldd r19,Z+1 + ldd r20,Z+2 + ldd r21,Z+3 + ldd r4,Z+4 + ldd r5,Z+5 + ldd r6,Z+6 + ldd r7,Z+7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + 
mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw 
r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 
+ lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor 
r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor 
r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + std Z+8,r4 + std Z+9,r5 + std Z+10,r6 + std Z+11,r7 + std Z+24,r22 + std Z+25,r23 + std Z+26,r26 + std Z+27,r27 + std Z+40,r28 + std Z+41,r29 + std Z+42,r2 + std Z+43,r3 + std Z+12,r18 + std Z+13,r19 + std Z+14,r20 + std Z+15,r21 + std Z+28,r8 + std Z+29,r9 + std Z+30,r10 + std Z+31,r11 + std Z+44,r12 + std Z+45,r13 + std Z+46,r14 + std Z+47,r15 + ld r4,Z + ldd r5,Z+1 + ldd r6,Z+2 + ldd r7,Z+3 + ldd r22,Z+16 + ldd r23,Z+17 + ldd r26,Z+18 + ldd r27,Z+19 + ldd r28,Z+32 + ldd r29,Z+33 + ldd r2,Z+34 + ldd r3,Z+35 + ldd r18,Z+4 + ldd r19,Z+5 + ldd r20,Z+6 + ldd r21,Z+7 + ldd r8,Z+20 + ldd r9,Z+21 + ldd r10,Z+22 + ldd r11,Z+23 + ldd r12,Z+36 + ldd r13,Z+37 + ldd r14,Z+38 + ldd r15,Z+39 + pop r18 + pop r19 + pop r20 + pop r21 + pop r4 + pop r5 + pop r6 + pop r7 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or 
r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r27 + mov r5,r22 + mov r6,r23 + mov r7,r26 + and r4,r28 + and r5,r29 + and r6,r2 + and r7,r3 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r4 + movw r2,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 
+ rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r11 + mov r19,r8 + mov r20,r9 + mov r21,r10 + and r18,r12 + and r19,r13 + and r20,r14 + and r21,r15 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r18 + movw r14,r20 + movw r18,r30 + movw r20,r24 + ldi r31,4 + eor r18,r31 + ldi r30,121 + eor r19,r30 + ldi r25,55 + eor r20,r25 + ldi r24,158 + eor r21,r24 + bst r10,7 + lsl r11 + rol r8 + 
rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + 
eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 
+ eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + pop r30 + pop r31 + push r21 + push r20 + push r19 + push r18 + push r7 + push r6 + push r5 + push r4 + ldd r18,Z+8 + ldd r19,Z+9 + ldd r20,Z+10 + ldd r21,Z+11 + ldd r4,Z+12 + ldd r5,Z+13 + ldd r6,Z+14 + ldd r7,Z+15 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 
+ and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + st Z,r18 + std Z+1,r19 + std Z+2,r20 + std Z+3,r21 + std Z+16,r22 + std Z+17,r23 + std Z+18,r26 + std Z+19,r27 + std Z+32,r28 + std Z+33,r29 + std Z+34,r2 + std Z+35,r3 + std Z+4,r4 + std Z+5,r5 + std Z+6,r6 + std Z+7,r7 + std Z+20,r8 + std Z+21,r9 + std Z+22,r10 + std Z+23,r11 + std Z+36,r12 + std Z+37,r13 + std Z+38,r14 + std Z+39,r15 + ldd r18,Z+8 + ldd r19,Z+9 + ldd r20,Z+10 + ldd r21,Z+11 + ldd r22,Z+24 + ldd r23,Z+25 + ldd r26,Z+26 + ldd r27,Z+27 + ldd r28,Z+40 + ldd r29,Z+41 + ldd r2,Z+42 + ldd r3,Z+43 + ldd r4,Z+12 + ldd r5,Z+13 + ldd r6,Z+14 + ldd r7,Z+15 + ldd r8,Z+28 + ldd r9,Z+29 + ldd r10,Z+30 + ldd r11,Z+31 + ldd r12,Z+44 + ldd r13,Z+45 + ldd r14,Z+46 + ldd r15,Z+47 + pop r4 + pop r5 + pop r6 + pop r7 + pop r18 + pop r19 + pop r20 + pop r21 + push r31 + push r30 + bst r26,7 + lsl r27 + rol r22 + rol r23 + rol r26 + bld r27,0 + mov r30,r19 + mov r31,r20 + mov r24,r21 + mov r25,r18 + movw r16,r28 + mov r1,r2 + mov r0,r3 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r18,r27 + mov r19,r22 + mov r20,r23 + mov r21,r26 + and r18,r28 + and r19,r29 + and r20,r2 + and r21,r3 + lsl r18 + rol r19 + rol r20 + rol r21 + lsl r18 + rol 
r19 + rol r20 + rol r21 + eor r18,r16 + eor r19,r17 + eor r20,r1 + eor r21,r0 + eor r18,r30 + eor r19,r31 + eor r20,r24 + eor r21,r25 + mov r16,r27 + mov r17,r22 + mov r1,r23 + mov r0,r26 + movw r22,r30 + movw r26,r24 + or r22,r28 + or r23,r29 + or r26,r2 + or r27,r3 + lsl r22 + rol r23 + rol r26 + rol r27 + eor r22,r30 + eor r23,r31 + eor r26,r24 + eor r27,r25 + eor r22,r16 + eor r23,r17 + eor r26,r1 + eor r27,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r28 + eor r31,r29 + eor r24,r2 + eor r25,r3 + movw r28,r18 + movw r2,r20 + movw r18,r30 + movw r20,r24 + bst r10,7 + lsl r11 + rol r8 + rol r9 + rol r10 + bld r11,0 + mov r30,r5 + mov r31,r6 + mov r24,r7 + mov r25,r4 + movw r16,r12 + mov r1,r14 + mov r0,r15 + lsl r16 + rol r17 + rol r1 + rol r0 + mov r4,r11 + mov r5,r8 + mov r6,r9 + mov r7,r10 + and r4,r12 + and r5,r13 + and r6,r14 + and r7,r15 + lsl r4 + rol r5 + rol r6 + rol r7 + lsl r4 + rol r5 + rol r6 + rol r7 + eor r4,r16 + eor r5,r17 + eor r6,r1 + eor r7,r0 + eor r4,r30 + eor r5,r31 + eor r6,r24 + eor r7,r25 + mov r16,r11 + mov r17,r8 + mov r1,r9 + mov r0,r10 + movw r8,r30 + movw r10,r24 + or r8,r12 + or r9,r13 + or r10,r14 + or r11,r15 + lsl r8 + rol r9 + rol r10 + rol r11 + eor r8,r30 + eor r9,r31 + eor r10,r24 + eor r11,r25 + eor r8,r16 + eor r9,r17 + eor r10,r1 + eor r11,r0 + and r30,r16 + and r31,r17 + and r24,r1 + and r25,r0 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + lsl r30 + rol r31 + rol r24 + rol r25 + eor r30,r16 + eor r31,r17 + eor r24,r1 + eor r25,r0 + eor r30,r12 + eor r31,r13 + eor r24,r14 + eor r25,r15 + movw r12,r4 + movw r14,r6 + movw r4,r30 + movw r6,r24 + pop r30 + pop r31 + std Z+8,r18 + std Z+9,r19 + std Z+10,r20 + std Z+11,r21 + std Z+24,r22 + std Z+25,r23 + std Z+26,r26 + std Z+27,r27 + std Z+40,r28 + 
std Z+41,r29 + std Z+42,r2 + std Z+43,r3 + std Z+12,r4 + std Z+13,r5 + std Z+14,r6 + std Z+15,r7 + std Z+28,r8 + std Z+29,r9 + std Z+30,r10 + std Z+31,r11 + std Z+44,r12 + std Z+45,r13 + std Z+46,r14 + std Z+47,r15 + pop r17 + pop r16 + pop r15 + pop r14 + pop r13 + pop r12 + pop r11 + pop r10 + pop r9 + pop r8 + pop r7 + pop r6 + pop r5 + pop r4 + pop r3 + pop r2 + pop r29 + pop r28 + eor r1,r1 + ret + .size gimli24_permute, .-gimli24_permute + +#endif diff --git a/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/internal-gimli24.c b/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/internal-gimli24.c new file mode 100644 index 0000000..d719988 --- /dev/null +++ b/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/internal-gimli24.c @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2020 Southern Storm Software, Pty Ltd. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. 
+ */ + +#include "internal-gimli24.h" + +#if !defined(__AVR__) + +/* Apply the SP-box to a specific column in the state array */ +#define GIMLI24_SP(s0, s4, s8) \ + do { \ + x = leftRotate24(s0); \ + y = leftRotate9(s4); \ + s4 = y ^ x ^ ((x | s8) << 1); \ + s0 = s8 ^ y ^ ((x & y) << 3); \ + s8 = x ^ (s8 << 1) ^ ((y & s8) << 2); \ + } while (0) + +void gimli24_permute(uint32_t state[12]) +{ + uint32_t s0, s1, s2, s3, s4, s5; + uint32_t s6, s7, s8, s9, s10, s11; + uint32_t x, y; + unsigned round; + + /* Load the state into local variables and convert from little-endian */ +#if defined(LW_UTIL_LITTLE_ENDIAN) + s0 = state[0]; + s1 = state[1]; + s2 = state[2]; + s3 = state[3]; + s4 = state[4]; + s5 = state[5]; + s6 = state[6]; + s7 = state[7]; + s8 = state[8]; + s9 = state[9]; + s10 = state[10]; + s11 = state[11]; +#else + s0 = le_load_word32((const unsigned char *)(&(state[0]))); + s1 = le_load_word32((const unsigned char *)(&(state[1]))); + s2 = le_load_word32((const unsigned char *)(&(state[2]))); + s3 = le_load_word32((const unsigned char *)(&(state[3]))); + s4 = le_load_word32((const unsigned char *)(&(state[4]))); + s5 = le_load_word32((const unsigned char *)(&(state[5]))); + s6 = le_load_word32((const unsigned char *)(&(state[6]))); + s7 = le_load_word32((const unsigned char *)(&(state[7]))); + s8 = le_load_word32((const unsigned char *)(&(state[8]))); + s9 = le_load_word32((const unsigned char *)(&(state[9]))); + s10 = le_load_word32((const unsigned char *)(&(state[10]))); + s11 = le_load_word32((const unsigned char *)(&(state[11]))); +#endif + + /* Unroll and perform the rounds 4 at a time */ + for (round = 24; round > 0; round -= 4) { + /* Round 0: SP-box, small swap, add round constant */ + GIMLI24_SP(s0, s4, s8); + GIMLI24_SP(s1, s5, s9); + GIMLI24_SP(s2, s6, s10); + GIMLI24_SP(s3, s7, s11); + x = s0; + y = s2; + s0 = s1 ^ 0x9e377900U ^ round; + s1 = x; + s2 = s3; + s3 = y; + + /* Round 1: SP-box only */ + GIMLI24_SP(s0, s4, s8); + GIMLI24_SP(s1, s5, s9); + 
GIMLI24_SP(s2, s6, s10); + GIMLI24_SP(s3, s7, s11); + + /* Round 2: SP-box, big swap */ + GIMLI24_SP(s0, s4, s8); + GIMLI24_SP(s1, s5, s9); + GIMLI24_SP(s2, s6, s10); + GIMLI24_SP(s3, s7, s11); + x = s0; + y = s1; + s0 = s2; + s1 = s3; + s2 = x; + s3 = y; + + /* Round 3: SP-box only */ + GIMLI24_SP(s0, s4, s8); + GIMLI24_SP(s1, s5, s9); + GIMLI24_SP(s2, s6, s10); + GIMLI24_SP(s3, s7, s11); + } + + /* Convert state to little-endian if the platform is not little-endian */ +#if defined(LW_UTIL_LITTLE_ENDIAN) + state[0] = s0; + state[1] = s1; + state[2] = s2; + state[3] = s3; + state[4] = s4; + state[5] = s5; + state[6] = s6; + state[7] = s7; + state[8] = s8; + state[9] = s9; + state[10] = s10; + state[11] = s11; +#else + le_store_word32(((unsigned char *)(&(state[0]))), s0); + le_store_word32(((unsigned char *)(&(state[1]))), s1); + le_store_word32(((unsigned char *)(&(state[2]))), s2); + le_store_word32(((unsigned char *)(&(state[3]))), s3); + le_store_word32(((unsigned char *)(&(state[4]))), s4); + le_store_word32(((unsigned char *)(&(state[5]))), s5); + le_store_word32(((unsigned char *)(&(state[6]))), s6); + le_store_word32(((unsigned char *)(&(state[7]))), s7); + le_store_word32(((unsigned char *)(&(state[8]))), s8); + le_store_word32(((unsigned char *)(&(state[9]))), s9); + le_store_word32(((unsigned char *)(&(state[10]))), s10); + le_store_word32(((unsigned char *)(&(state[11]))), s11); +#endif +} + +#endif /* !__AVR__ */ diff --git a/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/internal-gimli24.h b/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/internal-gimli24.h new file mode 100644 index 0000000..c81ead1 --- /dev/null +++ b/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/internal-gimli24.h @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2020 Southern Storm Software, Pty Ltd. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ + +#ifndef LW_INTERNAL_GIMLI24_H +#define LW_INTERNAL_GIMLI24_H + +#include "internal-util.h" + +/** + * \file internal-gimli24.h + * \brief Internal implementation of the GIMLI-24 permutation. + * + * References: https://gimli.cr.yp.to/ + */ + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * \brief Permutes the GIMLI-24 state. + * + * \param state The GIMLI-24 state to be permuted. + * + * The input and output \a state will be in little-endian byte order. 
+ */ +void gimli24_permute(uint32_t state[12]); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/internal-util.h b/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/internal-util.h new file mode 100644 index 0000000..e30166d --- /dev/null +++ b/gimli/Implementations/crypto_hash/gimli24v1/rhys-avr/internal-util.h @@ -0,0 +1,702 @@ +/* + * Copyright (C) 2020 Southern Storm Software, Pty Ltd. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ + +#ifndef LW_INTERNAL_UTIL_H +#define LW_INTERNAL_UTIL_H + +#include <stdint.h> + +/* Figure out how to inline functions using this C compiler */ +#if defined(__STDC__) && __STDC_VERSION__ >= 199901L +#define STATIC_INLINE static inline +#elif defined(__GNUC__) || defined(__clang__) +#define STATIC_INLINE static __inline__ +#else +#define STATIC_INLINE static +#endif + +/* Try to figure out whether the CPU is little-endian or big-endian. 
+ * May need to modify this to include new compiler-specific defines. + * Alternatively, define __LITTLE_ENDIAN__ or __BIG_ENDIAN__ in your + * compiler flags when you compile this library */ +#if defined(__x86_64) || defined(__x86_64__) || \ + defined(__i386) || defined(__i386__) || \ + defined(__AVR__) || defined(__arm) || defined(__arm__) || \ + defined(_M_AMD64) || defined(_M_X64) || defined(_M_IX86) || \ + defined(_M_IA64) || defined(_M_ARM) || defined(_M_ARM_FP) || \ + (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == 1234) || \ + defined(__LITTLE_ENDIAN__) +#define LW_UTIL_LITTLE_ENDIAN 1 +#elif (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == 4321) || \ + defined(__BIG_ENDIAN__) +/* Big endian */ +#else +#error "Cannot determine the endianess of this platform" +#endif + +/* Helper macros to load and store values while converting endian-ness */ + +/* Load a big-endian 32-bit word from a byte buffer */ +#define be_load_word32(ptr) \ + ((((uint32_t)((ptr)[0])) << 24) | \ + (((uint32_t)((ptr)[1])) << 16) | \ + (((uint32_t)((ptr)[2])) << 8) | \ + ((uint32_t)((ptr)[3]))) + +/* Store a big-endian 32-bit word into a byte buffer */ +#define be_store_word32(ptr, x) \ + do { \ + uint32_t _x = (x); \ + (ptr)[0] = (uint8_t)(_x >> 24); \ + (ptr)[1] = (uint8_t)(_x >> 16); \ + (ptr)[2] = (uint8_t)(_x >> 8); \ + (ptr)[3] = (uint8_t)_x; \ + } while (0) + +/* Load a little-endian 32-bit word from a byte buffer */ +#define le_load_word32(ptr) \ + ((((uint32_t)((ptr)[3])) << 24) | \ + (((uint32_t)((ptr)[2])) << 16) | \ + (((uint32_t)((ptr)[1])) << 8) | \ + ((uint32_t)((ptr)[0]))) + +/* Store a little-endian 32-bit word into a byte buffer */ +#define le_store_word32(ptr, x) \ + do { \ + uint32_t _x = (x); \ + (ptr)[0] = (uint8_t)_x; \ + (ptr)[1] = (uint8_t)(_x >> 8); \ + (ptr)[2] = (uint8_t)(_x >> 16); \ + (ptr)[3] = (uint8_t)(_x >> 24); \ + } while (0) + +/* Load a big-endian 64-bit word from a byte buffer */ +#define be_load_word64(ptr) \ + ((((uint64_t)((ptr)[0])) << 56) | \ + 
(((uint64_t)((ptr)[1])) << 48) | \ + (((uint64_t)((ptr)[2])) << 40) | \ + (((uint64_t)((ptr)[3])) << 32) | \ + (((uint64_t)((ptr)[4])) << 24) | \ + (((uint64_t)((ptr)[5])) << 16) | \ + (((uint64_t)((ptr)[6])) << 8) | \ + ((uint64_t)((ptr)[7]))) + +/* Store a big-endian 64-bit word into a byte buffer */ +#define be_store_word64(ptr, x) \ + do { \ + uint64_t _x = (x); \ + (ptr)[0] = (uint8_t)(_x >> 56); \ + (ptr)[1] = (uint8_t)(_x >> 48); \ + (ptr)[2] = (uint8_t)(_x >> 40); \ + (ptr)[3] = (uint8_t)(_x >> 32); \ + (ptr)[4] = (uint8_t)(_x >> 24); \ + (ptr)[5] = (uint8_t)(_x >> 16); \ + (ptr)[6] = (uint8_t)(_x >> 8); \ + (ptr)[7] = (uint8_t)_x; \ + } while (0) + +/* Load a little-endian 64-bit word from a byte buffer */ +#define le_load_word64(ptr) \ + ((((uint64_t)((ptr)[7])) << 56) | \ + (((uint64_t)((ptr)[6])) << 48) | \ + (((uint64_t)((ptr)[5])) << 40) | \ + (((uint64_t)((ptr)[4])) << 32) | \ + (((uint64_t)((ptr)[3])) << 24) | \ + (((uint64_t)((ptr)[2])) << 16) | \ + (((uint64_t)((ptr)[1])) << 8) | \ + ((uint64_t)((ptr)[0]))) + +/* Store a little-endian 64-bit word into a byte buffer */ +#define le_store_word64(ptr, x) \ + do { \ + uint64_t _x = (x); \ + (ptr)[0] = (uint8_t)_x; \ + (ptr)[1] = (uint8_t)(_x >> 8); \ + (ptr)[2] = (uint8_t)(_x >> 16); \ + (ptr)[3] = (uint8_t)(_x >> 24); \ + (ptr)[4] = (uint8_t)(_x >> 32); \ + (ptr)[5] = (uint8_t)(_x >> 40); \ + (ptr)[6] = (uint8_t)(_x >> 48); \ + (ptr)[7] = (uint8_t)(_x >> 56); \ + } while (0) + +/* Load a big-endian 16-bit word from a byte buffer */ +#define be_load_word16(ptr) \ + ((((uint16_t)((ptr)[0])) << 8) | \ + ((uint16_t)((ptr)[1]))) + +/* Store a big-endian 16-bit word into a byte buffer */ +#define be_store_word16(ptr, x) \ + do { \ + uint16_t _x = (x); \ + (ptr)[0] = (uint8_t)(_x >> 8); \ + (ptr)[1] = (uint8_t)_x; \ + } while (0) + +/* Load a little-endian 16-bit word from a byte buffer */ +#define le_load_word16(ptr) \ + ((((uint16_t)((ptr)[1])) << 8) | \ + ((uint16_t)((ptr)[0]))) + +/* Store a 
little-endian 16-bit word into a byte buffer */ +#define le_store_word16(ptr, x) \ + do { \ + uint16_t _x = (x); \ + (ptr)[0] = (uint8_t)_x; \ + (ptr)[1] = (uint8_t)(_x >> 8); \ + } while (0) + +/* XOR a source byte buffer against a destination */ +#define lw_xor_block(dest, src, len) \ + do { \ + unsigned char *_dest = (dest); \ + const unsigned char *_src = (src); \ + unsigned _len = (len); \ + while (_len > 0) { \ + *_dest++ ^= *_src++; \ + --_len; \ + } \ + } while (0) + +/* XOR two source byte buffers and put the result in a destination buffer */ +#define lw_xor_block_2_src(dest, src1, src2, len) \ + do { \ + unsigned char *_dest = (dest); \ + const unsigned char *_src1 = (src1); \ + const unsigned char *_src2 = (src2); \ + unsigned _len = (len); \ + while (_len > 0) { \ + *_dest++ = *_src1++ ^ *_src2++; \ + --_len; \ + } \ + } while (0) + +/* XOR a source byte buffer against a destination and write to another + * destination at the same time */ +#define lw_xor_block_2_dest(dest2, dest, src, len) \ + do { \ + unsigned char *_dest2 = (dest2); \ + unsigned char *_dest = (dest); \ + const unsigned char *_src = (src); \ + unsigned _len = (len); \ + while (_len > 0) { \ + *_dest2++ = (*_dest++ ^= *_src++); \ + --_len; \ + } \ + } while (0) + +/* XOR two byte buffers and write to a destination which at the same + * time copying the contents of src2 to dest2 */ +#define lw_xor_block_copy_src(dest2, dest, src1, src2, len) \ + do { \ + unsigned char *_dest2 = (dest2); \ + unsigned char *_dest = (dest); \ + const unsigned char *_src1 = (src1); \ + const unsigned char *_src2 = (src2); \ + unsigned _len = (len); \ + while (_len > 0) { \ + unsigned char _temp = *_src2++; \ + *_dest2++ = _temp; \ + *_dest++ = *_src1++ ^ _temp; \ + --_len; \ + } \ + } while (0) + +/* XOR a source byte buffer against a destination and write to another + * destination at the same time. 
This version swaps the source value + * into the "dest" buffer */ +#define lw_xor_block_swap(dest2, dest, src, len) \ + do { \ + unsigned char *_dest2 = (dest2); \ + unsigned char *_dest = (dest); \ + const unsigned char *_src = (src); \ + unsigned _len = (len); \ + while (_len > 0) { \ + unsigned char _temp = *_src++; \ + *_dest2++ = *_dest ^ _temp; \ + *_dest++ = _temp; \ + --_len; \ + } \ + } while (0) + +/* Rotation functions need to be optimised for best performance on AVR. + * The most efficient rotations are where the number of bits is 1 or a + * multiple of 8, so we compose the efficient rotations to produce all + * other rotation counts of interest. */ + +#if defined(__AVR__) +#define LW_CRYPTO_ROTATE32_COMPOSED 1 +#else +#define LW_CRYPTO_ROTATE32_COMPOSED 0 +#endif + +/* Rotation macros for 32-bit arguments */ + +/* Generic left rotate */ +#define leftRotate(a, bits) \ + (__extension__ ({ \ + uint32_t _temp = (a); \ + (_temp << (bits)) | (_temp >> (32 - (bits))); \ + })) + +/* Generic right rotate */ +#define rightRotate(a, bits) \ + (__extension__ ({ \ + uint32_t _temp = (a); \ + (_temp >> (bits)) | (_temp << (32 - (bits))); \ + })) + +#if !LW_CRYPTO_ROTATE32_COMPOSED + +/* Left rotate by a specific number of bits. 
These macros may be replaced + * with more efficient ones on platforms that lack a barrel shifter */ +#define leftRotate1(a) (leftRotate((a), 1)) +#define leftRotate2(a) (leftRotate((a), 2)) +#define leftRotate3(a) (leftRotate((a), 3)) +#define leftRotate4(a) (leftRotate((a), 4)) +#define leftRotate5(a) (leftRotate((a), 5)) +#define leftRotate6(a) (leftRotate((a), 6)) +#define leftRotate7(a) (leftRotate((a), 7)) +#define leftRotate8(a) (leftRotate((a), 8)) +#define leftRotate9(a) (leftRotate((a), 9)) +#define leftRotate10(a) (leftRotate((a), 10)) +#define leftRotate11(a) (leftRotate((a), 11)) +#define leftRotate12(a) (leftRotate((a), 12)) +#define leftRotate13(a) (leftRotate((a), 13)) +#define leftRotate14(a) (leftRotate((a), 14)) +#define leftRotate15(a) (leftRotate((a), 15)) +#define leftRotate16(a) (leftRotate((a), 16)) +#define leftRotate17(a) (leftRotate((a), 17)) +#define leftRotate18(a) (leftRotate((a), 18)) +#define leftRotate19(a) (leftRotate((a), 19)) +#define leftRotate20(a) (leftRotate((a), 20)) +#define leftRotate21(a) (leftRotate((a), 21)) +#define leftRotate22(a) (leftRotate((a), 22)) +#define leftRotate23(a) (leftRotate((a), 23)) +#define leftRotate24(a) (leftRotate((a), 24)) +#define leftRotate25(a) (leftRotate((a), 25)) +#define leftRotate26(a) (leftRotate((a), 26)) +#define leftRotate27(a) (leftRotate((a), 27)) +#define leftRotate28(a) (leftRotate((a), 28)) +#define leftRotate29(a) (leftRotate((a), 29)) +#define leftRotate30(a) (leftRotate((a), 30)) +#define leftRotate31(a) (leftRotate((a), 31)) + +/* Right rotate by a specific number of bits. 
These macros may be replaced + * with more efficient ones on platforms that lack a barrel shifter */ +#define rightRotate1(a) (rightRotate((a), 1)) +#define rightRotate2(a) (rightRotate((a), 2)) +#define rightRotate3(a) (rightRotate((a), 3)) +#define rightRotate4(a) (rightRotate((a), 4)) +#define rightRotate5(a) (rightRotate((a), 5)) +#define rightRotate6(a) (rightRotate((a), 6)) +#define rightRotate7(a) (rightRotate((a), 7)) +#define rightRotate8(a) (rightRotate((a), 8)) +#define rightRotate9(a) (rightRotate((a), 9)) +#define rightRotate10(a) (rightRotate((a), 10)) +#define rightRotate11(a) (rightRotate((a), 11)) +#define rightRotate12(a) (rightRotate((a), 12)) +#define rightRotate13(a) (rightRotate((a), 13)) +#define rightRotate14(a) (rightRotate((a), 14)) +#define rightRotate15(a) (rightRotate((a), 15)) +#define rightRotate16(a) (rightRotate((a), 16)) +#define rightRotate17(a) (rightRotate((a), 17)) +#define rightRotate18(a) (rightRotate((a), 18)) +#define rightRotate19(a) (rightRotate((a), 19)) +#define rightRotate20(a) (rightRotate((a), 20)) +#define rightRotate21(a) (rightRotate((a), 21)) +#define rightRotate22(a) (rightRotate((a), 22)) +#define rightRotate23(a) (rightRotate((a), 23)) +#define rightRotate24(a) (rightRotate((a), 24)) +#define rightRotate25(a) (rightRotate((a), 25)) +#define rightRotate26(a) (rightRotate((a), 26)) +#define rightRotate27(a) (rightRotate((a), 27)) +#define rightRotate28(a) (rightRotate((a), 28)) +#define rightRotate29(a) (rightRotate((a), 29)) +#define rightRotate30(a) (rightRotate((a), 30)) +#define rightRotate31(a) (rightRotate((a), 31)) + +#else /* LW_CRYPTO_ROTATE32_COMPOSED */ + +/* Composed rotation macros where 1 and 8 are fast, but others are slow */ + +/* Left rotate by 1 */ +#define leftRotate1(a) (leftRotate((a), 1)) + +/* Left rotate by 2 */ +#define leftRotate2(a) (leftRotate(leftRotate((a), 1), 1)) + +/* Left rotate by 3 */ +#define leftRotate3(a) (leftRotate(leftRotate(leftRotate((a), 1), 1), 1)) + +/* Left rotate 
by 4 */ +#define leftRotate4(a) (leftRotate(leftRotate(leftRotate(leftRotate((a), 1), 1), 1), 1)) + +/* Left rotate by 5: Rotate left by 8, then right by 3 */ +#define leftRotate5(a) (rightRotate(rightRotate(rightRotate(leftRotate((a), 8), 1), 1), 1)) + +/* Left rotate by 6: Rotate left by 8, then right by 2 */ +#define leftRotate6(a) (rightRotate(rightRotate(leftRotate((a), 8), 1), 1)) + +/* Left rotate by 7: Rotate left by 8, then right by 1 */ +#define leftRotate7(a) (rightRotate(leftRotate((a), 8), 1)) + +/* Left rotate by 8 */ +#define leftRotate8(a) (leftRotate((a), 8)) + +/* Left rotate by 9: Rotate left by 8, then left by 1 */ +#define leftRotate9(a) (leftRotate(leftRotate((a), 8), 1)) + +/* Left rotate by 10: Rotate left by 8, then left by 2 */ +#define leftRotate10(a) (leftRotate(leftRotate(leftRotate((a), 8), 1), 1)) + +/* Left rotate by 11: Rotate left by 8, then left by 3 */ +#define leftRotate11(a) (leftRotate(leftRotate(leftRotate(leftRotate((a), 8), 1), 1), 1)) + +/* Left rotate by 12: Rotate left by 16, then right by 4 */ +#define leftRotate12(a) (rightRotate(rightRotate(rightRotate(rightRotate(leftRotate((a), 16), 1), 1), 1), 1)) + +/* Left rotate by 13: Rotate left by 16, then right by 3 */ +#define leftRotate13(a) (rightRotate(rightRotate(rightRotate(leftRotate((a), 16), 1), 1), 1)) + +/* Left rotate by 14: Rotate left by 16, then right by 2 */ +#define leftRotate14(a) (rightRotate(rightRotate(leftRotate((a), 16), 1), 1)) + +/* Left rotate by 15: Rotate left by 16, then right by 1 */ +#define leftRotate15(a) (rightRotate(leftRotate((a), 16), 1)) + +/* Left rotate by 16 */ +#define leftRotate16(a) (leftRotate((a), 16)) + +/* Left rotate by 17: Rotate left by 16, then left by 1 */ +#define leftRotate17(a) (leftRotate(leftRotate((a), 16), 1)) + +/* Left rotate by 18: Rotate left by 16, then left by 2 */ +#define leftRotate18(a) (leftRotate(leftRotate(leftRotate((a), 16), 1), 1)) + +/* Left rotate by 19: Rotate left by 16, then left by 3 */ +#define 
leftRotate19(a) (leftRotate(leftRotate(leftRotate(leftRotate((a), 16), 1), 1), 1)) + +/* Left rotate by 20: Rotate left by 16, then left by 4 */ +#define leftRotate20(a) (leftRotate(leftRotate(leftRotate(leftRotate(leftRotate((a), 16), 1), 1), 1), 1)) + +/* Left rotate by 21: Rotate left by 24, then right by 3 */ +#define leftRotate21(a) (rightRotate(rightRotate(rightRotate(leftRotate((a), 24), 1), 1), 1)) + +/* Left rotate by 22: Rotate left by 24, then right by 2 */ +#define leftRotate22(a) (rightRotate(rightRotate(leftRotate((a), 24), 1), 1)) + +/* Left rotate by 23: Rotate left by 24, then right by 1 */ +#define leftRotate23(a) (rightRotate(leftRotate((a), 24), 1)) + +/* Left rotate by 24 */ +#define leftRotate24(a) (leftRotate((a), 24)) + +/* Left rotate by 25: Rotate left by 24, then left by 1 */ +#define leftRotate25(a) (leftRotate(leftRotate((a), 24), 1)) + +/* Left rotate by 26: Rotate left by 24, then left by 2 */ +#define leftRotate26(a) (leftRotate(leftRotate(leftRotate((a), 24), 1), 1)) + +/* Left rotate by 27: Rotate left by 24, then left by 3 */ +#define leftRotate27(a) (leftRotate(leftRotate(leftRotate(leftRotate((a), 24), 1), 1), 1)) + +/* Left rotate by 28: Rotate right by 4 */ +#define leftRotate28(a) (rightRotate(rightRotate(rightRotate(rightRotate((a), 1), 1), 1), 1)) + +/* Left rotate by 29: Rotate right by 3 */ +#define leftRotate29(a) (rightRotate(rightRotate(rightRotate((a), 1), 1), 1)) + +/* Left rotate by 30: Rotate right by 2 */ +#define leftRotate30(a) (rightRotate(rightRotate((a), 1), 1)) + +/* Left rotate by 31: Rotate right by 1 */ +#define leftRotate31(a) (rightRotate((a), 1)) + +/* Define the 32-bit right rotations in terms of left rotations */ +#define rightRotate1(a) (leftRotate31((a))) +#define rightRotate2(a) (leftRotate30((a))) +#define rightRotate3(a) (leftRotate29((a))) +#define rightRotate4(a) (leftRotate28((a))) +#define rightRotate5(a) (leftRotate27((a))) +#define rightRotate6(a) (leftRotate26((a))) +#define 
rightRotate7(a) (leftRotate25((a))) +#define rightRotate8(a) (leftRotate24((a))) +#define rightRotate9(a) (leftRotate23((a))) +#define rightRotate10(a) (leftRotate22((a))) +#define rightRotate11(a) (leftRotate21((a))) +#define rightRotate12(a) (leftRotate20((a))) +#define rightRotate13(a) (leftRotate19((a))) +#define rightRotate14(a) (leftRotate18((a))) +#define rightRotate15(a) (leftRotate17((a))) +#define rightRotate16(a) (leftRotate16((a))) +#define rightRotate17(a) (leftRotate15((a))) +#define rightRotate18(a) (leftRotate14((a))) +#define rightRotate19(a) (leftRotate13((a))) +#define rightRotate20(a) (leftRotate12((a))) +#define rightRotate21(a) (leftRotate11((a))) +#define rightRotate22(a) (leftRotate10((a))) +#define rightRotate23(a) (leftRotate9((a))) +#define rightRotate24(a) (leftRotate8((a))) +#define rightRotate25(a) (leftRotate7((a))) +#define rightRotate26(a) (leftRotate6((a))) +#define rightRotate27(a) (leftRotate5((a))) +#define rightRotate28(a) (leftRotate4((a))) +#define rightRotate29(a) (leftRotate3((a))) +#define rightRotate30(a) (leftRotate2((a))) +#define rightRotate31(a) (leftRotate1((a))) + +#endif /* LW_CRYPTO_ROTATE32_COMPOSED */ + +/* Rotation macros for 64-bit arguments */ + +/* Generic left rotate */ +#define leftRotate_64(a, bits) \ + (__extension__ ({ \ + uint64_t _temp = (a); \ + (_temp << (bits)) | (_temp >> (64 - (bits))); \ + })) + +/* Generic right rotate */ +#define rightRotate_64(a, bits) \ + (__extension__ ({ \ + uint64_t _temp = (a); \ + (_temp >> (bits)) | (_temp << (64 - (bits))); \ + })) + +/* Left rotate by a specific number of bits. 
These macros may be replaced + * with more efficient ones on platforms that lack a barrel shifter */ +#define leftRotate1_64(a) (leftRotate_64((a), 1)) +#define leftRotate2_64(a) (leftRotate_64((a), 2)) +#define leftRotate3_64(a) (leftRotate_64((a), 3)) +#define leftRotate4_64(a) (leftRotate_64((a), 4)) +#define leftRotate5_64(a) (leftRotate_64((a), 5)) +#define leftRotate6_64(a) (leftRotate_64((a), 6)) +#define leftRotate7_64(a) (leftRotate_64((a), 7)) +#define leftRotate8_64(a) (leftRotate_64((a), 8)) +#define leftRotate9_64(a) (leftRotate_64((a), 9)) +#define leftRotate10_64(a) (leftRotate_64((a), 10)) +#define leftRotate11_64(a) (leftRotate_64((a), 11)) +#define leftRotate12_64(a) (leftRotate_64((a), 12)) +#define leftRotate13_64(a) (leftRotate_64((a), 13)) +#define leftRotate14_64(a) (leftRotate_64((a), 14)) +#define leftRotate15_64(a) (leftRotate_64((a), 15)) +#define leftRotate16_64(a) (leftRotate_64((a), 16)) +#define leftRotate17_64(a) (leftRotate_64((a), 17)) +#define leftRotate18_64(a) (leftRotate_64((a), 18)) +#define leftRotate19_64(a) (leftRotate_64((a), 19)) +#define leftRotate20_64(a) (leftRotate_64((a), 20)) +#define leftRotate21_64(a) (leftRotate_64((a), 21)) +#define leftRotate22_64(a) (leftRotate_64((a), 22)) +#define leftRotate23_64(a) (leftRotate_64((a), 23)) +#define leftRotate24_64(a) (leftRotate_64((a), 24)) +#define leftRotate25_64(a) (leftRotate_64((a), 25)) +#define leftRotate26_64(a) (leftRotate_64((a), 26)) +#define leftRotate27_64(a) (leftRotate_64((a), 27)) +#define leftRotate28_64(a) (leftRotate_64((a), 28)) +#define leftRotate29_64(a) (leftRotate_64((a), 29)) +#define leftRotate30_64(a) (leftRotate_64((a), 30)) +#define leftRotate31_64(a) (leftRotate_64((a), 31)) +#define leftRotate32_64(a) (leftRotate_64((a), 32)) +#define leftRotate33_64(a) (leftRotate_64((a), 33)) +#define leftRotate34_64(a) (leftRotate_64((a), 34)) +#define leftRotate35_64(a) (leftRotate_64((a), 35)) +#define leftRotate36_64(a) (leftRotate_64((a), 36)) +#define 
leftRotate37_64(a) (leftRotate_64((a), 37)) +#define leftRotate38_64(a) (leftRotate_64((a), 38)) +#define leftRotate39_64(a) (leftRotate_64((a), 39)) +#define leftRotate40_64(a) (leftRotate_64((a), 40)) +#define leftRotate41_64(a) (leftRotate_64((a), 41)) +#define leftRotate42_64(a) (leftRotate_64((a), 42)) +#define leftRotate43_64(a) (leftRotate_64((a), 43)) +#define leftRotate44_64(a) (leftRotate_64((a), 44)) +#define leftRotate45_64(a) (leftRotate_64((a), 45)) +#define leftRotate46_64(a) (leftRotate_64((a), 46)) +#define leftRotate47_64(a) (leftRotate_64((a), 47)) +#define leftRotate48_64(a) (leftRotate_64((a), 48)) +#define leftRotate49_64(a) (leftRotate_64((a), 49)) +#define leftRotate50_64(a) (leftRotate_64((a), 50)) +#define leftRotate51_64(a) (leftRotate_64((a), 51)) +#define leftRotate52_64(a) (leftRotate_64((a), 52)) +#define leftRotate53_64(a) (leftRotate_64((a), 53)) +#define leftRotate54_64(a) (leftRotate_64((a), 54)) +#define leftRotate55_64(a) (leftRotate_64((a), 55)) +#define leftRotate56_64(a) (leftRotate_64((a), 56)) +#define leftRotate57_64(a) (leftRotate_64((a), 57)) +#define leftRotate58_64(a) (leftRotate_64((a), 58)) +#define leftRotate59_64(a) (leftRotate_64((a), 59)) +#define leftRotate60_64(a) (leftRotate_64((a), 60)) +#define leftRotate61_64(a) (leftRotate_64((a), 61)) +#define leftRotate62_64(a) (leftRotate_64((a), 62)) +#define leftRotate63_64(a) (leftRotate_64((a), 63)) + +/* Right rotate by a specific number of bits. 
These macros may be replaced + * with more efficient ones on platforms that lack a barrel shifter */ +#define rightRotate1_64(a) (rightRotate_64((a), 1)) +#define rightRotate2_64(a) (rightRotate_64((a), 2)) +#define rightRotate3_64(a) (rightRotate_64((a), 3)) +#define rightRotate4_64(a) (rightRotate_64((a), 4)) +#define rightRotate5_64(a) (rightRotate_64((a), 5)) +#define rightRotate6_64(a) (rightRotate_64((a), 6)) +#define rightRotate7_64(a) (rightRotate_64((a), 7)) +#define rightRotate8_64(a) (rightRotate_64((a), 8)) +#define rightRotate9_64(a) (rightRotate_64((a), 9)) +#define rightRotate10_64(a) (rightRotate_64((a), 10)) +#define rightRotate11_64(a) (rightRotate_64((a), 11)) +#define rightRotate12_64(a) (rightRotate_64((a), 12)) +#define rightRotate13_64(a) (rightRotate_64((a), 13)) +#define rightRotate14_64(a) (rightRotate_64((a), 14)) +#define rightRotate15_64(a) (rightRotate_64((a), 15)) +#define rightRotate16_64(a) (rightRotate_64((a), 16)) +#define rightRotate17_64(a) (rightRotate_64((a), 17)) +#define rightRotate18_64(a) (rightRotate_64((a), 18)) +#define rightRotate19_64(a) (rightRotate_64((a), 19)) +#define rightRotate20_64(a) (rightRotate_64((a), 20)) +#define rightRotate21_64(a) (rightRotate_64((a), 21)) +#define rightRotate22_64(a) (rightRotate_64((a), 22)) +#define rightRotate23_64(a) (rightRotate_64((a), 23)) +#define rightRotate24_64(a) (rightRotate_64((a), 24)) +#define rightRotate25_64(a) (rightRotate_64((a), 25)) +#define rightRotate26_64(a) (rightRotate_64((a), 26)) +#define rightRotate27_64(a) (rightRotate_64((a), 27)) +#define rightRotate28_64(a) (rightRotate_64((a), 28)) +#define rightRotate29_64(a) (rightRotate_64((a), 29)) +#define rightRotate30_64(a) (rightRotate_64((a), 30)) +#define rightRotate31_64(a) (rightRotate_64((a), 31)) +#define rightRotate32_64(a) (rightRotate_64((a), 32)) +#define rightRotate33_64(a) (rightRotate_64((a), 33)) +#define rightRotate34_64(a) (rightRotate_64((a), 34)) +#define rightRotate35_64(a) 
(rightRotate_64((a), 35)) +#define rightRotate36_64(a) (rightRotate_64((a), 36)) +#define rightRotate37_64(a) (rightRotate_64((a), 37)) +#define rightRotate38_64(a) (rightRotate_64((a), 38)) +#define rightRotate39_64(a) (rightRotate_64((a), 39)) +#define rightRotate40_64(a) (rightRotate_64((a), 40)) +#define rightRotate41_64(a) (rightRotate_64((a), 41)) +#define rightRotate42_64(a) (rightRotate_64((a), 42)) +#define rightRotate43_64(a) (rightRotate_64((a), 43)) +#define rightRotate44_64(a) (rightRotate_64((a), 44)) +#define rightRotate45_64(a) (rightRotate_64((a), 45)) +#define rightRotate46_64(a) (rightRotate_64((a), 46)) +#define rightRotate47_64(a) (rightRotate_64((a), 47)) +#define rightRotate48_64(a) (rightRotate_64((a), 48)) +#define rightRotate49_64(a) (rightRotate_64((a), 49)) +#define rightRotate50_64(a) (rightRotate_64((a), 50)) +#define rightRotate51_64(a) (rightRotate_64((a), 51)) +#define rightRotate52_64(a) (rightRotate_64((a), 52)) +#define rightRotate53_64(a) (rightRotate_64((a), 53)) +#define rightRotate54_64(a) (rightRotate_64((a), 54)) +#define rightRotate55_64(a) (rightRotate_64((a), 55)) +#define rightRotate56_64(a) (rightRotate_64((a), 56)) +#define rightRotate57_64(a) (rightRotate_64((a), 57)) +#define rightRotate58_64(a) (rightRotate_64((a), 58)) +#define rightRotate59_64(a) (rightRotate_64((a), 59)) +#define rightRotate60_64(a) (rightRotate_64((a), 60)) +#define rightRotate61_64(a) (rightRotate_64((a), 61)) +#define rightRotate62_64(a) (rightRotate_64((a), 62)) +#define rightRotate63_64(a) (rightRotate_64((a), 63)) + +/* Rotate a 16-bit value left by a number of bits */ +#define leftRotate_16(a, bits) \ + (__extension__ ({ \ + uint16_t _temp = (a); \ + (_temp << (bits)) | (_temp >> (16 - (bits))); \ + })) + +/* Rotate a 16-bit value right by a number of bits */ +#define rightRotate_16(a, bits) \ + (__extension__ ({ \ + uint16_t _temp = (a); \ + (_temp >> (bits)) | (_temp << (16 - (bits))); \ + })) + +/* Left rotate by a specific number of 
bits. These macros may be replaced + * with more efficient ones on platforms that lack a barrel shifter */ +#define leftRotate1_16(a) (leftRotate_16((a), 1)) +#define leftRotate2_16(a) (leftRotate_16((a), 2)) +#define leftRotate3_16(a) (leftRotate_16((a), 3)) +#define leftRotate4_16(a) (leftRotate_16((a), 4)) +#define leftRotate5_16(a) (leftRotate_16((a), 5)) +#define leftRotate6_16(a) (leftRotate_16((a), 6)) +#define leftRotate7_16(a) (leftRotate_16((a), 7)) +#define leftRotate8_16(a) (leftRotate_16((a), 8)) +#define leftRotate9_16(a) (leftRotate_16((a), 9)) +#define leftRotate10_16(a) (leftRotate_16((a), 10)) +#define leftRotate11_16(a) (leftRotate_16((a), 11)) +#define leftRotate12_16(a) (leftRotate_16((a), 12)) +#define leftRotate13_16(a) (leftRotate_16((a), 13)) +#define leftRotate14_16(a) (leftRotate_16((a), 14)) +#define leftRotate15_16(a) (leftRotate_16((a), 15)) + +/* Right rotate by a specific number of bits. These macros may be replaced + * with more efficient ones on platforms that lack a barrel shifter */ +#define rightRotate1_16(a) (rightRotate_16((a), 1)) +#define rightRotate2_16(a) (rightRotate_16((a), 2)) +#define rightRotate3_16(a) (rightRotate_16((a), 3)) +#define rightRotate4_16(a) (rightRotate_16((a), 4)) +#define rightRotate5_16(a) (rightRotate_16((a), 5)) +#define rightRotate6_16(a) (rightRotate_16((a), 6)) +#define rightRotate7_16(a) (rightRotate_16((a), 7)) +#define rightRotate8_16(a) (rightRotate_16((a), 8)) +#define rightRotate9_16(a) (rightRotate_16((a), 9)) +#define rightRotate10_16(a) (rightRotate_16((a), 10)) +#define rightRotate11_16(a) (rightRotate_16((a), 11)) +#define rightRotate12_16(a) (rightRotate_16((a), 12)) +#define rightRotate13_16(a) (rightRotate_16((a), 13)) +#define rightRotate14_16(a) (rightRotate_16((a), 14)) +#define rightRotate15_16(a) (rightRotate_16((a), 15)) + +/* Rotate an 8-bit value left by a number of bits */ +#define leftRotate_8(a, bits) \ + (__extension__ ({ \ + uint8_t _temp = (a); \ + (_temp << (bits)) | 
(_temp >> (8 - (bits))); \ + })) + +/* Rotate an 8-bit value right by a number of bits */ +#define rightRotate_8(a, bits) \ + (__extension__ ({ \ + uint8_t _temp = (a); \ + (_temp >> (bits)) | (_temp << (8 - (bits))); \ + })) + +/* Left rotate by a specific number of bits. These macros may be replaced + * with more efficient ones on platforms that lack a barrel shifter */ +#define leftRotate1_8(a) (leftRotate_8((a), 1)) +#define leftRotate2_8(a) (leftRotate_8((a), 2)) +#define leftRotate3_8(a) (leftRotate_8((a), 3)) +#define leftRotate4_8(a) (leftRotate_8((a), 4)) +#define leftRotate5_8(a) (leftRotate_8((a), 5)) +#define leftRotate6_8(a) (leftRotate_8((a), 6)) +#define leftRotate7_8(a) (leftRotate_8((a), 7)) + +/* Right rotate by a specific number of bits. These macros may be replaced + * with more efficient ones on platforms that lack a barrel shifter */ +#define rightRotate1_8(a) (rightRotate_8((a), 1)) +#define rightRotate2_8(a) (rightRotate_8((a), 2)) +#define rightRotate3_8(a) (rightRotate_8((a), 3)) +#define rightRotate4_8(a) (rightRotate_8((a), 4)) +#define rightRotate5_8(a) (rightRotate_8((a), 5)) +#define rightRotate6_8(a) (rightRotate_8((a), 6)) +#define rightRotate7_8(a) (rightRotate_8((a), 7)) + +#endif