- /*
 
-  * Based on public domain code available at: http://cr.yp.to/snuffle.html
 
-  *
 
-  * Modifications and C-native SSE macro based SSE implementation by
 
-  * Adam Ierymenko <[email protected]>.
 
-  *
 
-  * Since the original was public domain, this is too.
 
-  */
 
- #include "Constants.hpp"
 
- #include "Salsa20.hpp"
 
- #define ROTATE(v,c) (((v) << (c)) | ((v) >> (32 - (c))))
 
- #define XOR(v,w) ((v) ^ (w))
 
- #define PLUS(v,w) ((uint32_t)((v) + (w)))
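- // ROTATE is a 32-bit left rotation; PLUS is addition modulo 2^32. These mirror
- // the helper macros used by the public domain reference implementation.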
 
- // Set up load/store macros with appropriate endianness (we don't use these in SSE mode)
 
- #ifndef ZT_SALSA20_SSE
 
- #if __BYTE_ORDER == __LITTLE_ENDIAN
 
- #ifdef ZT_NO_TYPE_PUNNING
 
- // Slower version that does not use type punning
 
- #define U8TO32_LITTLE(p) ( ((uint32_t)(p)[0]) | ((uint32_t)(p)[1] << 8) | ((uint32_t)(p)[2] << 16) | ((uint32_t)(p)[3] << 24) )
 
- static inline void U32TO8_LITTLE(uint8_t *const c,const uint32_t v) { c[0] = (uint8_t)v; c[1] = (uint8_t)(v >> 8); c[2] = (uint8_t)(v >> 16); c[3] = (uint8_t)(v >> 24); }
 
- #else
 
- // Fast version that just does 32-bit load/store
 
- #define U8TO32_LITTLE(p) (*((const uint32_t *)((const void *)(p))))
 
- #define U32TO8_LITTLE(c,v) *((uint32_t *)((void *)(c))) = (v)
 
- #endif // ZT_NO_TYPE_PUNNING
 
- #else // __BYTE_ORDER == __BIG_ENDIAN (we don't support anything else... does MIDDLE_ENDIAN even still exist?)
 
- #ifdef __GNUC__
 
- // Use GNUC builtin bswap macros on big-endian machines if available
 
- #define U8TO32_LITTLE(p) __builtin_bswap32(*((const uint32_t *)((const void *)(p))))
 
- #define U32TO8_LITTLE(c,v) *((uint32_t *)((void *)(c))) = __builtin_bswap32((v))
 
- #else // no __GNUC__
 
- // Otherwise do it the slow, manual way on BE machines
 
- #define U8TO32_LITTLE(p) ( ((uint32_t)(p)[0]) | ((uint32_t)(p)[1] << 8) | ((uint32_t)(p)[2] << 16) | ((uint32_t)(p)[3] << 24) )
 
- static inline void U32TO8_LITTLE(uint8_t *const c,const uint32_t v) { c[0] = (uint8_t)v; c[1] = (uint8_t)(v >> 8); c[2] = (uint8_t)(v >> 16); c[3] = (uint8_t)(v >> 24); }
 
- #endif // __GNUC__ or not
 
- #endif // __BYTE_ORDER little or big?
 
- #endif // !ZT_SALSA20_SSE
 
- // Statically compute and define SSE constants
 
- #ifdef ZT_SALSA20_SSE
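- // maskLo32 has all ones in the low 32 bits of each 64-bit lane and maskHi32 in
- // the high 32 bits; encrypt12()/encrypt20() use them below to re-interleave the
- // diagonally ordered SSE state words back into canonical order for output.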
 
- class _s20sseconsts
 
- {
 
- public:
 
- 	_s20sseconsts()
 
- 	{
 
- 		maskLo32 = _mm_shuffle_epi32(_mm_cvtsi32_si128(-1), _MM_SHUFFLE(1, 0, 1, 0));
 
- 		maskHi32 = _mm_slli_epi64(maskLo32, 32);
 
- 	}
 
- 	__m128i maskLo32,maskHi32;
 
- };
 
- static const _s20sseconsts _S20SSECONSTANTS;
 
- #endif
 
- namespace ZeroTier {
 
- void Salsa20::init(const void *key,unsigned int kbits,const void *iv)
 
- 	throw()
 
- {
 
- #ifdef ZT_SALSA20_SSE
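- 	// In SSE mode the 16 state words are stored pre-permuted: each 128-bit vector
- 	// holds one diagonal of the canonical 4x4 Salsa20 state, i.e. v[0]=(0,5,10,15),
- 	// v[1]=(4,9,14,3), v[2]=(8,13,2,7), v[3]=(12,1,6,11). That is why the constant,
- 	// key and IV words are scattered across _state.i[] below rather than written
- 	// in order.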
 
- 	const uint32_t *k = (const uint32_t *)key;
 
- 	_state.i[0] = 0x61707865;
 
- 	_state.i[3] = 0x6b206574;
 
- 	_state.i[13] = k[0];
 
- 	_state.i[10] = k[1];
 
- 	_state.i[7] = k[2];
 
- 	_state.i[4] = k[3];
 
- 	if (kbits == 256) {
 
- 		k += 4;
 
- 		_state.i[1] = 0x3320646e;
 
- 		_state.i[2] = 0x79622d32;
 
- 	} else {
 
- 		_state.i[1] = 0x3120646e;
 
- 		_state.i[2] = 0x79622d36;
 
- 	}
 
- 	_state.i[15] = k[0];
 
- 	_state.i[12] = k[1];
 
- 	_state.i[9] = k[2];
 
- 	_state.i[6] = k[3];
 
- 	_state.i[14] = ((const uint32_t *)iv)[0];
 
- 	_state.i[11] = ((const uint32_t *)iv)[1];
 
- 	_state.i[5] = 0;
 
- 	_state.i[8] = 0;
 
- #else
 
- 	const char *constants;
 
- 	const uint8_t *k = (const uint8_t *)key;
 
- 	_state.i[1] = U8TO32_LITTLE(k + 0);
 
- 	_state.i[2] = U8TO32_LITTLE(k + 4);
 
- 	_state.i[3] = U8TO32_LITTLE(k + 8);
 
- 	_state.i[4] = U8TO32_LITTLE(k + 12);
 
- 	if (kbits == 256) { /* recommended */
 
- 		k += 16;
 
- 		constants = "expand 32-byte k";
 
- 	} else { /* kbits == 128 */
 
- 		constants = "expand 16-byte k";
 
- 	}
 
- 	_state.i[5] = U8TO32_LITTLE(constants + 4);
 
- 	_state.i[6] = U8TO32_LITTLE(((const uint8_t *)iv) + 0);
 
- 	_state.i[7] = U8TO32_LITTLE(((const uint8_t *)iv) + 4);
 
- 	_state.i[8] = 0;
 
- 	_state.i[9] = 0;
 
- 	_state.i[10] = U8TO32_LITTLE(constants + 8);
 
- 	_state.i[11] = U8TO32_LITTLE(k + 0);
 
- 	_state.i[12] = U8TO32_LITTLE(k + 4);
 
- 	_state.i[13] = U8TO32_LITTLE(k + 8);
 
- 	_state.i[14] = U8TO32_LITTLE(k + 12);
 
- 	_state.i[15] = U8TO32_LITTLE(constants + 12);
 
- 	_state.i[0] = U8TO32_LITTLE(constants + 0);
 
- #endif
 
- }
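- /*
-  * Usage sketch (illustrative only, not part of the cipher). Assuming the
-  * Salsa20 class declared in Salsa20.hpp is default-constructible and the
-  * caller supplies a 32-byte key and an 8-byte IV/nonce, here hypothetically
-  * named key32 and iv8:
-  *
-  *   ZeroTier::Salsa20 s20;
-  *   s20.init(key32,256,iv8);          // 256-bit key, 64-bit IV
-  *   s20.encrypt12(plain,cipher,len);  // Salsa20/12: XOR input with keystream
-  *
-  * Decryption is the same call with ciphertext as input, since the cipher
-  * simply XORs its keystream with the input.
-  */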
 
- void Salsa20::encrypt12(const void *in,void *out,unsigned int bytes)
 
- 	throw()
 
- {
 
- 	uint8_t tmp[64];
 
- 	const uint8_t *m = (const uint8_t *)in;
 
- 	uint8_t *c = (uint8_t *)out;
 
- 	uint8_t *ctarget = c;
 
- 	unsigned int i;
 
- #ifndef ZT_SALSA20_SSE
 
- 	uint32_t x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15;
 
- 	uint32_t j0, j1, j2, j3, j4, j5, j6, j7, j8, j9, j10, j11, j12, j13, j14, j15;
 
- #endif
 
- 	if (!bytes)
 
- 		return;
 
- #ifndef ZT_SALSA20_SSE
 
- 	j0 = _state.i[0];
 
- 	j1 = _state.i[1];
 
- 	j2 = _state.i[2];
 
- 	j3 = _state.i[3];
 
- 	j4 = _state.i[4];
 
- 	j5 = _state.i[5];
 
- 	j6 = _state.i[6];
 
- 	j7 = _state.i[7];
 
- 	j8 = _state.i[8];
 
- 	j9 = _state.i[9];
 
- 	j10 = _state.i[10];
 
- 	j11 = _state.i[11];
 
- 	j12 = _state.i[12];
 
- 	j13 = _state.i[13];
 
- 	j14 = _state.i[14];
 
- 	j15 = _state.i[15];
 
- #endif
 
- 	for (;;) {
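- 		// If fewer than 64 bytes remain, stage the input in the local tmp buffer,
- 		// run a full block there, and copy only the requested bytes back to the
- 		// real output (ctarget) in the "bytes <= 64" branch at the end of the loop.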
 
- 		if (bytes < 64) {
 
- 			for (i = 0;i < bytes;++i)
 
- 				tmp[i] = m[i];
 
- 			m = tmp;
 
- 			ctarget = c;
 
- 			c = tmp;
 
- 		}
 
- #ifdef ZT_SALSA20_SSE
 
- 		__m128i X0 = _mm_loadu_si128((const __m128i *)&(_state.v[0]));
 
- 		__m128i X1 = _mm_loadu_si128((const __m128i *)&(_state.v[1]));
 
- 		__m128i X2 = _mm_loadu_si128((const __m128i *)&(_state.v[2]));
 
- 		__m128i X3 = _mm_loadu_si128((const __m128i *)&(_state.v[3]));
 
- 		__m128i T;
 
- 		__m128i X0s = X0;
 
- 		__m128i X1s = X1;
 
- 		__m128i X2s = X2;
 
- 		__m128i X3s = X3;
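- 		// Each "2X round" block below is one Salsa20 double round: the first four
- 		// add/rotate/xor steps implement the column half, the 0x93/0x4E/0x39
- 		// shuffles rotate the lanes so the same four steps then implement the row
- 		// half, and the closing 0x39/0x4E/0x93 shuffles restore lane order.
- 		// encrypt12() runs six double rounds (Salsa20/12) per 64-byte block.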
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		X0 = _mm_add_epi32(X0s,X0);
 
- 		X1 = _mm_add_epi32(X1s,X1);
 
- 		X2 = _mm_add_epi32(X2s,X2);
 
- 		X3 = _mm_add_epi32(X3s,X3);
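- 		// The four vectors now hold the finished keystream block in diagonal order;
- 		// the shuffles and mask/or operations below reassemble it into canonical
- 		// word order before it is XORed with the 64-byte message block.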
 
- 		__m128i k02 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X0, 32), _mm_srli_epi64(X3, 32)), _MM_SHUFFLE(0, 1, 2, 3));
 
- 		__m128i k13 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X1, 32), _mm_srli_epi64(X0, 32)), _MM_SHUFFLE(0, 1, 2, 3));
 
- 		__m128i k20 = _mm_or_si128(_mm_and_si128(X2, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X1, _S20SSECONSTANTS.maskHi32));
 
- 		__m128i k31 = _mm_or_si128(_mm_and_si128(X3, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X2, _S20SSECONSTANTS.maskHi32));
 
- 		_mm_storeu_ps(reinterpret_cast<float *>(c),_mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k02,k20),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m))))));
 
- 		_mm_storeu_ps(reinterpret_cast<float *>(c) + 4,_mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k13,k31),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 4)))));
 
- 		_mm_storeu_ps(reinterpret_cast<float *>(c) + 8,_mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k20,k02),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 8)))));
 
- 		_mm_storeu_ps(reinterpret_cast<float *>(c) + 12,_mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k31,k13),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 12)))));
 
- 		if (!(++_state.i[8])) {
 
- 			++_state.i[5]; // state reordered for SSE
 
- 			/* stopping at 2^70 bytes per nonce is user's responsibility */
 
- 		}
 
- #else
 
- 		x0 = j0;
 
- 		x1 = j1;
 
- 		x2 = j2;
 
- 		x3 = j3;
 
- 		x4 = j4;
 
- 		x5 = j5;
 
- 		x6 = j6;
 
- 		x7 = j7;
 
- 		x8 = j8;
 
- 		x9 = j9;
 
- 		x10 = j10;
 
- 		x11 = j11;
 
- 		x12 = j12;
 
- 		x13 = j13;
 
- 		x14 = j14;
 
- 		x15 = j15;
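- 		// Each "2X round" block below is one double round of the Salsa20 core: a
- 		// column round (mixing down each column of the 4x4 state) followed by a
- 		// row round (mixing across each row). Six double rounds give Salsa20/12.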
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		x0 = PLUS(x0,j0);
 
- 		x1 = PLUS(x1,j1);
 
- 		x2 = PLUS(x2,j2);
 
- 		x3 = PLUS(x3,j3);
 
- 		x4 = PLUS(x4,j4);
 
- 		x5 = PLUS(x5,j5);
 
- 		x6 = PLUS(x6,j6);
 
- 		x7 = PLUS(x7,j7);
 
- 		x8 = PLUS(x8,j8);
 
- 		x9 = PLUS(x9,j9);
 
- 		x10 = PLUS(x10,j10);
 
- 		x11 = PLUS(x11,j11);
 
- 		x12 = PLUS(x12,j12);
 
- 		x13 = PLUS(x13,j13);
 
- 		x14 = PLUS(x14,j14);
 
- 		x15 = PLUS(x15,j15);
 
- 		U32TO8_LITTLE(c + 0,XOR(x0,U8TO32_LITTLE(m + 0)));
 
- 		U32TO8_LITTLE(c + 4,XOR(x1,U8TO32_LITTLE(m + 4)));
 
- 		U32TO8_LITTLE(c + 8,XOR(x2,U8TO32_LITTLE(m + 8)));
 
- 		U32TO8_LITTLE(c + 12,XOR(x3,U8TO32_LITTLE(m + 12)));
 
- 		U32TO8_LITTLE(c + 16,XOR(x4,U8TO32_LITTLE(m + 16)));
 
- 		U32TO8_LITTLE(c + 20,XOR(x5,U8TO32_LITTLE(m + 20)));
 
- 		U32TO8_LITTLE(c + 24,XOR(x6,U8TO32_LITTLE(m + 24)));
 
- 		U32TO8_LITTLE(c + 28,XOR(x7,U8TO32_LITTLE(m + 28)));
 
- 		U32TO8_LITTLE(c + 32,XOR(x8,U8TO32_LITTLE(m + 32)));
 
- 		U32TO8_LITTLE(c + 36,XOR(x9,U8TO32_LITTLE(m + 36)));
 
- 		U32TO8_LITTLE(c + 40,XOR(x10,U8TO32_LITTLE(m + 40)));
 
- 		U32TO8_LITTLE(c + 44,XOR(x11,U8TO32_LITTLE(m + 44)));
 
- 		U32TO8_LITTLE(c + 48,XOR(x12,U8TO32_LITTLE(m + 48)));
 
- 		U32TO8_LITTLE(c + 52,XOR(x13,U8TO32_LITTLE(m + 52)));
 
- 		U32TO8_LITTLE(c + 56,XOR(x14,U8TO32_LITTLE(m + 56)));
 
- 		U32TO8_LITTLE(c + 60,XOR(x15,U8TO32_LITTLE(m + 60)));
 
- 		if (!(++j8)) {
 
- 			++j9;
 
- 			/* stopping at 2^70 bytes per nonce is user's responsibility */
 
- 		}
 
- #endif
 
- 		if (bytes <= 64) {
 
- 			if (bytes < 64) {
 
- 				for (i = 0;i < bytes;++i)
 
- 					ctarget[i] = c[i];
 
- 			}
 
- #ifndef ZT_SALSA20_SSE
 
- 			_state.i[8] = j8;
 
- 			_state.i[9] = j9;
 
- #endif
 
- 			return;
 
- 		}
 
- 		bytes -= 64;
 
- 		c += 64;
 
- 		m += 64;
 
- 	}
 
- }
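- // encrypt20() is identical to encrypt12() except that it runs ten double rounds
- // (the full Salsa20/20) per 64-byte block.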
 
- void Salsa20::encrypt20(const void *in,void *out,unsigned int bytes)
 
- 	throw()
 
- {
 
- 	uint8_t tmp[64];
 
- 	const uint8_t *m = (const uint8_t *)in;
 
- 	uint8_t *c = (uint8_t *)out;
 
- 	uint8_t *ctarget = c;
 
- 	unsigned int i;
 
- #ifndef ZT_SALSA20_SSE
 
- 	uint32_t x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15;
 
- 	uint32_t j0, j1, j2, j3, j4, j5, j6, j7, j8, j9, j10, j11, j12, j13, j14, j15;
 
- #endif
 
- 	if (!bytes)
 
- 		return;
 
- #ifndef ZT_SALSA20_SSE
 
- 	j0 = _state.i[0];
 
- 	j1 = _state.i[1];
 
- 	j2 = _state.i[2];
 
- 	j3 = _state.i[3];
 
- 	j4 = _state.i[4];
 
- 	j5 = _state.i[5];
 
- 	j6 = _state.i[6];
 
- 	j7 = _state.i[7];
 
- 	j8 = _state.i[8];
 
- 	j9 = _state.i[9];
 
- 	j10 = _state.i[10];
 
- 	j11 = _state.i[11];
 
- 	j12 = _state.i[12];
 
- 	j13 = _state.i[13];
 
- 	j14 = _state.i[14];
 
- 	j15 = _state.i[15];
 
- #endif
 
- 	for (;;) {
 
- 		if (bytes < 64) {
 
- 			for (i = 0;i < bytes;++i)
 
- 				tmp[i] = m[i];
 
- 			m = tmp;
 
- 			ctarget = c;
 
- 			c = tmp;
 
- 		}
 
- #ifdef ZT_SALSA20_SSE
 
- 		__m128i X0 = _mm_loadu_si128((const __m128i *)&(_state.v[0]));
 
- 		__m128i X1 = _mm_loadu_si128((const __m128i *)&(_state.v[1]));
 
- 		__m128i X2 = _mm_loadu_si128((const __m128i *)&(_state.v[2]));
 
- 		__m128i X3 = _mm_loadu_si128((const __m128i *)&(_state.v[3]));
 
- 		__m128i T;
 
- 		__m128i X0s = X0;
 
- 		__m128i X1s = X1;
 
- 		__m128i X2s = X2;
 
- 		__m128i X3s = X3;
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		// 2X round -------------------------------------------------------------
 
- 		T = _mm_add_epi32(X0, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X1, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X3, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x93);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x39);
 
- 		T = _mm_add_epi32(X0, X1);
 
- 		X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
 
- 		T = _mm_add_epi32(X3, X0);
 
- 		X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
 
- 		T = _mm_add_epi32(X2, X3);
 
- 		X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
 
- 		T = _mm_add_epi32(X1, X2);
 
- 		X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
 
- 		X1 = _mm_shuffle_epi32(X1, 0x39);
 
- 		X2 = _mm_shuffle_epi32(X2, 0x4E);
 
- 		X3 = _mm_shuffle_epi32(X3, 0x93);
 
- 		X0 = _mm_add_epi32(X0s,X0);
 
- 		X1 = _mm_add_epi32(X1s,X1);
 
- 		X2 = _mm_add_epi32(X2s,X2);
 
- 		X3 = _mm_add_epi32(X3s,X3);
 
- 		__m128i k02 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X0, 32), _mm_srli_epi64(X3, 32)), _MM_SHUFFLE(0, 1, 2, 3));
 
- 		__m128i k13 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X1, 32), _mm_srli_epi64(X0, 32)), _MM_SHUFFLE(0, 1, 2, 3));
 
- 		__m128i k20 = _mm_or_si128(_mm_and_si128(X2, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X1, _S20SSECONSTANTS.maskHi32));
 
- 		__m128i k31 = _mm_or_si128(_mm_and_si128(X3, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X2, _S20SSECONSTANTS.maskHi32));
 
- 		_mm_storeu_ps(reinterpret_cast<float *>(c),_mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k02,k20),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m))))));
 
- 		_mm_storeu_ps(reinterpret_cast<float *>(c) + 4,_mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k13,k31),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 4)))));
 
- 		_mm_storeu_ps(reinterpret_cast<float *>(c) + 8,_mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k20,k02),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 8)))));
 
- 		_mm_storeu_ps(reinterpret_cast<float *>(c) + 12,_mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k31,k13),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 12)))));
 
- 		if (!(++_state.i[8])) {
 
- 			++_state.i[5]; // state reordered for SSE
 
- 			/* stopping at 2^70 bytes per nonce is user's responsibility */
 
- 		}
 
- #else
 
- 		x0 = j0;
 
- 		x1 = j1;
 
- 		x2 = j2;
 
- 		x3 = j3;
 
- 		x4 = j4;
 
- 		x5 = j5;
 
- 		x6 = j6;
 
- 		x7 = j7;
 
- 		x8 = j8;
 
- 		x9 = j9;
 
- 		x10 = j10;
 
- 		x11 = j11;
 
- 		x12 = j12;
 
- 		x13 = j13;
 
- 		x14 = j14;
 
- 		x15 = j15;
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		// 2X round -------------------------------------------------------------
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 
- 		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
 
- 		 x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
 
- 		 x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
 
- 		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
 
- 		 x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
 
- 		 x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
 
- 		 x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
 
- 		 x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
 
- 		 x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
 
- 		 x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
 
- 		 x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
 
- 		 x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
 
- 		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
 
- 		 x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
 
- 		 x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
 
- 		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
 
- 		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
 
- 		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
 
- 		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
 
- 		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
 
- 		x0 = PLUS(x0,j0);
 
- 		x1 = PLUS(x1,j1);
 
- 		x2 = PLUS(x2,j2);
 
- 		x3 = PLUS(x3,j3);
 
- 		x4 = PLUS(x4,j4);
 
- 		x5 = PLUS(x5,j5);
 
- 		x6 = PLUS(x6,j6);
 
- 		x7 = PLUS(x7,j7);
 
- 		x8 = PLUS(x8,j8);
 
- 		x9 = PLUS(x9,j9);
 
- 		x10 = PLUS(x10,j10);
 
- 		x11 = PLUS(x11,j11);
 
- 		x12 = PLUS(x12,j12);
 
- 		x13 = PLUS(x13,j13);
 
- 		x14 = PLUS(x14,j14);
 
- 		x15 = PLUS(x15,j15);
 
- 		U32TO8_LITTLE(c + 0,XOR(x0,U8TO32_LITTLE(m + 0)));
 
- 		U32TO8_LITTLE(c + 4,XOR(x1,U8TO32_LITTLE(m + 4)));
 
- 		U32TO8_LITTLE(c + 8,XOR(x2,U8TO32_LITTLE(m + 8)));
 
- 		U32TO8_LITTLE(c + 12,XOR(x3,U8TO32_LITTLE(m + 12)));
 
- 		U32TO8_LITTLE(c + 16,XOR(x4,U8TO32_LITTLE(m + 16)));
 
- 		U32TO8_LITTLE(c + 20,XOR(x5,U8TO32_LITTLE(m + 20)));
 
- 		U32TO8_LITTLE(c + 24,XOR(x6,U8TO32_LITTLE(m + 24)));
 
- 		U32TO8_LITTLE(c + 28,XOR(x7,U8TO32_LITTLE(m + 28)));
 
- 		U32TO8_LITTLE(c + 32,XOR(x8,U8TO32_LITTLE(m + 32)));
 
- 		U32TO8_LITTLE(c + 36,XOR(x9,U8TO32_LITTLE(m + 36)));
 
- 		U32TO8_LITTLE(c + 40,XOR(x10,U8TO32_LITTLE(m + 40)));
 
- 		U32TO8_LITTLE(c + 44,XOR(x11,U8TO32_LITTLE(m + 44)));
 
- 		U32TO8_LITTLE(c + 48,XOR(x12,U8TO32_LITTLE(m + 48)));
 
- 		U32TO8_LITTLE(c + 52,XOR(x13,U8TO32_LITTLE(m + 52)));
 
- 		U32TO8_LITTLE(c + 56,XOR(x14,U8TO32_LITTLE(m + 56)));
 
- 		U32TO8_LITTLE(c + 60,XOR(x15,U8TO32_LITTLE(m + 60)));
 
- 		if (!(++j8)) {
 
- 			++j9;
 
- 			/* stopping at 2^70 bytes per nonce is user's responsibility */
 
- 		}
 
- #endif
 
- 		if (bytes <= 64) {
 
- 			if (bytes < 64) {
 
- 				for (i = 0;i < bytes;++i)
 
- 					ctarget[i] = c[i];
 
- 			}
 
- #ifndef ZT_SALSA20_SSE
 
- 			_state.i[8] = j8;
 
- 			_state.i[9] = j9;
 
- #endif
 
- 			return;
 
- 		}
 
- 		bytes -= 64;
 
- 		c += 64;
 
- 		m += 64;
 
- 	}
 
- }
 
- } // namespace ZeroTier
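Editorial note on the removed block above: it is the fully unrolled scalar form of the Salsa20 core, where each "2X round" is a column round followed by a row round; afterwards the sixteen working words are added back to the input words j0..j15 and XORed with the message to produce ciphertext, the 64-bit block counter in j8/j9 is incremented, and partial final blocks are copied byte-by-byte. As a reading aid only, a compact loop expressing the same mixing is sketched below. The names rotl32 and salsa20_double_rounds and the doubleRounds parameter are illustrative and do not appear in the ZeroTier source; the sketch assumes plain 32-bit unsigned words with addition modulo 2^32.

	#include <cstdint>

	// Illustrative sketch (not part of the diff): left-rotate a 32-bit word by c bits (0 < c < 32).
	static inline uint32_t rotl32(uint32_t v, unsigned c) { return (v << c) | (v >> (32 - c)); }

	// Apply the Salsa20 "2X round" (column round then row round) doubleRounds times
	// to the sixteen working words x[0..15].
	static void salsa20_double_rounds(uint32_t x[16], int doubleRounds)
	{
		for (int i = 0; i < doubleRounds; ++i) {
			// column round
			x[ 4] ^= rotl32(x[ 0] + x[12], 7);  x[ 8] ^= rotl32(x[ 4] + x[ 0], 9);
			x[12] ^= rotl32(x[ 8] + x[ 4],13);  x[ 0] ^= rotl32(x[12] + x[ 8],18);
			x[ 9] ^= rotl32(x[ 5] + x[ 1], 7);  x[13] ^= rotl32(x[ 9] + x[ 5], 9);
			x[ 1] ^= rotl32(x[13] + x[ 9],13);  x[ 5] ^= rotl32(x[ 1] + x[13],18);
			x[14] ^= rotl32(x[10] + x[ 6], 7);  x[ 2] ^= rotl32(x[14] + x[10], 9);
			x[ 6] ^= rotl32(x[ 2] + x[14],13);  x[10] ^= rotl32(x[ 6] + x[ 2],18);
			x[ 3] ^= rotl32(x[15] + x[11], 7);  x[ 7] ^= rotl32(x[ 3] + x[15], 9);
			x[11] ^= rotl32(x[ 7] + x[ 3],13);  x[15] ^= rotl32(x[11] + x[ 7],18);
			// row round
			x[ 1] ^= rotl32(x[ 0] + x[ 3], 7);  x[ 2] ^= rotl32(x[ 1] + x[ 0], 9);
			x[ 3] ^= rotl32(x[ 2] + x[ 1],13);  x[ 0] ^= rotl32(x[ 3] + x[ 2],18);
			x[ 6] ^= rotl32(x[ 5] + x[ 4], 7);  x[ 7] ^= rotl32(x[ 6] + x[ 5], 9);
			x[ 4] ^= rotl32(x[ 7] + x[ 6],13);  x[ 5] ^= rotl32(x[ 4] + x[ 7],18);
			x[11] ^= rotl32(x[10] + x[ 9], 7);  x[ 8] ^= rotl32(x[11] + x[10], 9);
			x[ 9] ^= rotl32(x[ 8] + x[11],13);  x[10] ^= rotl32(x[ 9] + x[ 8],18);
			x[12] ^= rotl32(x[15] + x[14], 7);  x[13] ^= rotl32(x[12] + x[15], 9);
			x[14] ^= rotl32(x[13] + x[12],13);  x[15] ^= rotl32(x[14] + x[13],18);
		}
	}

Running this loop with doubleRounds set to the number of unrolled "2X round" blocks in the original reproduces the same mixing; the counter increment, the tail handling for blocks shorter than 64 bytes, and the SSE path are exactly as shown in the removed lines above.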
 
 