cavium_crypto.c

/*
 * Copyright (c) 2009 David McCullough <[email protected]>
 *
 * Copyright (c) 2003-2007 Cavium Networks ([email protected]). All rights
 * reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    This product includes software developed by Cavium Networks
 * 4. Cavium Networks' name may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * This Software, including technical data, may be subject to U.S. export
 * control laws, including the U.S. Export Administration Act and its
 * associated regulations, and may be subject to export or import regulations
 * in other countries. You warrant that You will comply strictly in all
 * respects with all such regulations and acknowledge that you have the
 * responsibility to obtain licenses to export, re-export or import the
 * Software.
 *
 * TO THE MAXIMUM EXTENT PERMITTED BY LAW, THE SOFTWARE IS PROVIDED "AS IS" AND
 * WITH ALL FAULTS AND CAVIUM MAKES NO PROMISES, REPRESENTATIONS OR WARRANTIES,
 * EITHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, WITH RESPECT TO THE
 * SOFTWARE, INCLUDING ITS CONDITION, ITS CONFORMITY TO ANY REPRESENTATION OR
 * DESCRIPTION, OR THE EXISTENCE OF ANY LATENT OR PATENT DEFECTS, AND CAVIUM
 * SPECIFICALLY DISCLAIMS ALL IMPLIED (IF ANY) WARRANTIES OF TITLE,
 * MERCHANTABILITY, NONINFRINGEMENT, FITNESS FOR A PARTICULAR PURPOSE, LACK OF
 * VIRUSES, ACCURACY OR COMPLETENESS, QUIET ENJOYMENT, QUIET POSSESSION OR
 * CORRESPONDENCE TO DESCRIPTION. THE ENTIRE RISK ARISING OUT OF USE OR
 * PERFORMANCE OF THE SOFTWARE LIES WITH YOU.
 */
/****************************************************************************/

#include <linux/scatterlist.h>
#include <asm/octeon/octeon.h>
#include "octeon-asm.h"

/****************************************************************************/

extern unsigned long octeon_crypto_enable(struct octeon_cop2_state *);
extern void octeon_crypto_disable(struct octeon_cop2_state *, unsigned long);
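
/*
 * Scatterlist walking helpers.  SG_INIT points (p) at the start of the
 * first segment; SG_CONSUME advances (p) one word (sizeof(*(p)) bytes)
 * and, when the current segment is exhausted, steps on to segment (i).
 * Both prefetch the data they expose so it is warm for the crypto units.
 */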
#define SG_INIT(s, p, i, l) \
{ \
    (i) = 0; \
    (l) = (s)[0].length; \
    (p) = (typeof(p)) sg_virt((s)); \
    CVMX_PREFETCH0((p)); \
}

/*
 * Note: the segment-advance case must index segment (i); resetting to
 * sg_virt(s) would loop forever over the first segment.
 */
#define SG_CONSUME(s, p, i, l) \
{ \
    (p)++; \
    (l) -= sizeof(*(p)); \
    if ((l) < 0) { \
        dprintk("%s, %d: l = %d\n", __FILE__, __LINE__, l); \
    } else if ((l) == 0) { \
        (i)++; \
        (l) = (s)[(i)].length; \
        (p) = (typeof(p)) sg_virt((s) + (i)); \
        CVMX_PREFETCH0((p)); \
    } \
}
#define ESP_HEADER_LENGTH 8
#define DES_CBC_IV_LENGTH 8
#define AES_CBC_IV_LENGTH 16
#define ESP_HMAC_LEN 12

/****************************************************************************/
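/*
 * The OCTEON hash unit consumes a 512-bit block as eight 64-bit words.
 * These helpers stream words into slots 0..6 and let the eighth word
 * trigger the round via CVMX_MT_HSH_STARTSHA/STARTMD5, tracking the
 * current slot in (next).  The CVM_LOAD2_* variants feed two words per
 * call with the same running counter.
 */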
#define CVM_LOAD_SHA_UNIT(dat, next) { \
    if (next == 0) { \
        next = 1; \
        CVMX_MT_HSH_DAT(dat, 0); \
    } else if (next == 1) { \
        next = 2; \
        CVMX_MT_HSH_DAT(dat, 1); \
    } else if (next == 2) { \
        next = 3; \
        CVMX_MT_HSH_DAT(dat, 2); \
    } else if (next == 3) { \
        next = 4; \
        CVMX_MT_HSH_DAT(dat, 3); \
    } else if (next == 4) { \
        next = 5; \
        CVMX_MT_HSH_DAT(dat, 4); \
    } else if (next == 5) { \
        next = 6; \
        CVMX_MT_HSH_DAT(dat, 5); \
    } else if (next == 6) { \
        next = 7; \
        CVMX_MT_HSH_DAT(dat, 6); \
    } else { \
        CVMX_MT_HSH_STARTSHA(dat); \
        next = 0; \
    } \
}

#define CVM_LOAD2_SHA_UNIT(dat1, dat2, next) { \
    if (next == 0) { \
        CVMX_MT_HSH_DAT(dat1, 0); \
        CVMX_MT_HSH_DAT(dat2, 1); \
        next = 2; \
    } else if (next == 1) { \
        CVMX_MT_HSH_DAT(dat1, 1); \
        CVMX_MT_HSH_DAT(dat2, 2); \
        next = 3; \
    } else if (next == 2) { \
        CVMX_MT_HSH_DAT(dat1, 2); \
        CVMX_MT_HSH_DAT(dat2, 3); \
        next = 4; \
    } else if (next == 3) { \
        CVMX_MT_HSH_DAT(dat1, 3); \
        CVMX_MT_HSH_DAT(dat2, 4); \
        next = 5; \
    } else if (next == 4) { \
        CVMX_MT_HSH_DAT(dat1, 4); \
        CVMX_MT_HSH_DAT(dat2, 5); \
        next = 6; \
    } else if (next == 5) { \
        CVMX_MT_HSH_DAT(dat1, 5); \
        CVMX_MT_HSH_DAT(dat2, 6); \
        next = 7; \
    } else if (next == 6) { \
        CVMX_MT_HSH_DAT(dat1, 6); \
        CVMX_MT_HSH_STARTSHA(dat2); \
        next = 0; \
    } else { \
        CVMX_MT_HSH_STARTSHA(dat1); \
        CVMX_MT_HSH_DAT(dat2, 0); \
        next = 1; \
    } \
}

/****************************************************************************/

#define CVM_LOAD_MD5_UNIT(dat, next) { \
    if (next == 0) { \
        next = 1; \
        CVMX_MT_HSH_DAT(dat, 0); \
    } else if (next == 1) { \
        next = 2; \
        CVMX_MT_HSH_DAT(dat, 1); \
    } else if (next == 2) { \
        next = 3; \
        CVMX_MT_HSH_DAT(dat, 2); \
    } else if (next == 3) { \
        next = 4; \
        CVMX_MT_HSH_DAT(dat, 3); \
    } else if (next == 4) { \
        next = 5; \
        CVMX_MT_HSH_DAT(dat, 4); \
    } else if (next == 5) { \
        next = 6; \
        CVMX_MT_HSH_DAT(dat, 5); \
    } else if (next == 6) { \
        next = 7; \
        CVMX_MT_HSH_DAT(dat, 6); \
    } else { \
        CVMX_MT_HSH_STARTMD5(dat); \
        next = 0; \
    } \
}

#define CVM_LOAD2_MD5_UNIT(dat1, dat2, next) { \
    if (next == 0) { \
        CVMX_MT_HSH_DAT(dat1, 0); \
        CVMX_MT_HSH_DAT(dat2, 1); \
        next = 2; \
    } else if (next == 1) { \
        CVMX_MT_HSH_DAT(dat1, 1); \
        CVMX_MT_HSH_DAT(dat2, 2); \
        next = 3; \
    } else if (next == 2) { \
        CVMX_MT_HSH_DAT(dat1, 2); \
        CVMX_MT_HSH_DAT(dat2, 3); \
        next = 4; \
    } else if (next == 3) { \
        CVMX_MT_HSH_DAT(dat1, 3); \
        CVMX_MT_HSH_DAT(dat2, 4); \
        next = 5; \
    } else if (next == 4) { \
        CVMX_MT_HSH_DAT(dat1, 4); \
        CVMX_MT_HSH_DAT(dat2, 5); \
        next = 6; \
    } else if (next == 5) { \
        CVMX_MT_HSH_DAT(dat1, 5); \
        CVMX_MT_HSH_DAT(dat2, 6); \
        next = 7; \
    } else if (next == 6) { \
        CVMX_MT_HSH_DAT(dat1, 6); \
        CVMX_MT_HSH_STARTMD5(dat2); \
        next = 0; \
    } else { \
        CVMX_MT_HSH_STARTMD5(dat1); \
        CVMX_MT_HSH_DAT(dat2, 0); \
        next = 1; \
    } \
}

/****************************************************************************/
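/* Reverse the byte order of a 64-bit word. */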
static inline uint64_t
swap64(uint64_t a)
{
    return ((a >> 56) |
            (((a >> 48) & 0xfful) << 8) |
            (((a >> 40) & 0xfful) << 16) |
            (((a >> 32) & 0xfful) << 24) |
            (((a >> 24) & 0xfful) << 32) |
            (((a >> 16) & 0xfful) << 40) |
            (((a >> 8) & 0xfful) << 48) |
            (((a >> 0) & 0xfful) << 56));
}

/****************************************************************************/
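/*
 * Precompute the HMAC inner and outer state: hash one 64-byte block of
 * key XOR 0x36 (ipad) and one of key XOR 0x5c (opad), capturing the hash
 * unit IV registers after each.  auth selects SHA-1 (non-zero) or MD5
 * (zero); the key is taken as 20 or 16 bytes respectively, zero padded
 * to a full block.
 */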
void
octo_calc_hash(__u8 auth, unsigned char *key, uint64_t *inner, uint64_t *outer)
{
    uint8_t hash_key[64];
    uint64_t *key1;
    register uint64_t xor1 = 0x3636363636363636ULL;
    register uint64_t xor2 = 0x5c5c5c5c5c5c5c5cULL;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);
    memset(hash_key, 0, sizeof(hash_key));
    memcpy(hash_key, (uint8_t *) key, (auth ? 20 : 16));
    key1 = (uint64_t *) hash_key;
    flags = octeon_crypto_enable(&state);
    if (auth) {
        CVMX_MT_HSH_IV(0x67452301EFCDAB89ULL, 0);
        CVMX_MT_HSH_IV(0x98BADCFE10325476ULL, 1);
        CVMX_MT_HSH_IV(0xC3D2E1F000000000ULL, 2);
    } else {
        CVMX_MT_HSH_IV(0x0123456789ABCDEFULL, 0);
        CVMX_MT_HSH_IV(0xFEDCBA9876543210ULL, 1);
    }

    CVMX_MT_HSH_DAT((*key1 ^ xor1), 0);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 1);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 2);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 3);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 4);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 5);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 6);
    key1++;
    if (auth)
        CVMX_MT_HSH_STARTSHA((*key1 ^ xor1));
    else
        CVMX_MT_HSH_STARTMD5((*key1 ^ xor1));

    CVMX_MF_HSH_IV(inner[0], 0);
    CVMX_MF_HSH_IV(inner[1], 1);
    if (auth) {
        inner[2] = 0;
        CVMX_MF_HSH_IV(inner[2], 2);
    }

    memset(hash_key, 0, sizeof(hash_key));
    memcpy(hash_key, (uint8_t *) key, (auth ? 20 : 16));
    key1 = (uint64_t *) hash_key;
    if (auth) {
        CVMX_MT_HSH_IV(0x67452301EFCDAB89ULL, 0);
        CVMX_MT_HSH_IV(0x98BADCFE10325476ULL, 1);
        CVMX_MT_HSH_IV(0xC3D2E1F000000000ULL, 2);
    } else {
        CVMX_MT_HSH_IV(0x0123456789ABCDEFULL, 0);
        CVMX_MT_HSH_IV(0xFEDCBA9876543210ULL, 1);
    }

    CVMX_MT_HSH_DAT((*key1 ^ xor2), 0);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor2), 1);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor2), 2);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor2), 3);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor2), 4);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor2), 5);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor2), 6);
    key1++;
    if (auth)
        CVMX_MT_HSH_STARTSHA((*key1 ^ xor2));
    else
        CVMX_MT_HSH_STARTMD5((*key1 ^ xor2));

    CVMX_MF_HSH_IV(outer[0], 0);
    CVMX_MF_HSH_IV(outer[1], 1);
    if (auth) {
        outer[2] = 0;
        CVMX_MF_HSH_IV(outer[2], 2);
    }
    octeon_crypto_disable(&state, flags);
}

/****************************************************************************/
/* DES functions */
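/*
 * 3DES-CBC over a scatterlist, 8 bytes per pass through the 3DES unit.
 * crypt_off must be 8-byte aligned.  An 8-byte key is replicated into
 * all three key registers, which degenerates 3DES to single DES.
 */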
int
octo_des_cbc_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    uint64_t *data;
    int data_i, data_l;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);
    if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
            (crypt_off & 0x7) || (crypt_off + crypt_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data, data_i, data_l);
    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load 3DES Key */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    if (od->octo_encklen == 24) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    } else if (od->octo_encklen == 8) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }

    CVMX_MT_3DES_IV(*(uint64_t *) ivp);

    while (crypt_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_off -= 8;
    }

    while (crypt_len > 0) {
        CVMX_MT_3DES_ENC_CBC(*data);
        CVMX_MF_3DES_RESULT(*data);
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_len -= 8;
    }

    octeon_crypto_disable(&state, flags);
    return 0;
}

int
octo_des_cbc_decrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    uint64_t *data;
    int data_i, data_l;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);
    if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
            (crypt_off & 0x7) || (crypt_off + crypt_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data, data_i, data_l);
    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load 3DES Key */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    if (od->octo_encklen == 24) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    } else if (od->octo_encklen == 8) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }

    CVMX_MT_3DES_IV(*(uint64_t *) ivp);

    while (crypt_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_off -= 8;
    }

    while (crypt_len > 0) {
        CVMX_MT_3DES_DEC_CBC(*data);
        CVMX_MF_3DES_RESULT(*data);
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_len -= 8;
    }

    octeon_crypto_disable(&state, flags);
    return 0;
}

/****************************************************************************/
/* AES functions */
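/*
 * AES-CBC over a scatterlist.  The AES unit works on 16-byte blocks, so
 * each loop iteration feeds two 64-bit words (CBC0/CBC1) and reads the
 * result back into the same positions in place.
 */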
int
octo_aes_cbc_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    uint64_t *data, *pdata;
    int data_i, data_l;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);
    if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
            (crypt_off & 0x7) || (crypt_off + crypt_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data, data_i, data_l);
    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load AES Key */
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
    if (od->octo_encklen == 16) {
        CVMX_MT_AES_KEY(0x0, 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 24) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 32) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }
    CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

    CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
    CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

    while (crypt_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_off -= 8;
    }

    while (crypt_len > 0) {
        pdata = data;
        CVMX_MT_AES_ENC_CBC0(*data);
        SG_CONSUME(sg, data, data_i, data_l);
        CVMX_MT_AES_ENC_CBC1(*data);
        CVMX_MF_AES_RESULT(*pdata, 0);
        CVMX_MF_AES_RESULT(*data, 1);
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_len -= 16;
    }

    octeon_crypto_disable(&state, flags);
    return 0;
}

int
octo_aes_cbc_decrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    uint64_t *data, *pdata;
    int data_i, data_l;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);
    if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
            (crypt_off & 0x7) || (crypt_off + crypt_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data, data_i, data_l);
    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load AES Key */
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
    if (od->octo_encklen == 16) {
        CVMX_MT_AES_KEY(0x0, 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 24) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 32) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }
    CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

    CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
    CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

    while (crypt_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_off -= 8;
    }

    while (crypt_len > 0) {
        pdata = data;
        CVMX_MT_AES_DEC_CBC0(*data);
        SG_CONSUME(sg, data, data_i, data_l);
        CVMX_MT_AES_DEC_CBC1(*data);
        CVMX_MF_AES_RESULT(*pdata, 0);
        CVMX_MF_AES_RESULT(*data, 1);
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_len -= 16;
    }

    octeon_crypto_disable(&state, flags);
    return 0;
}

/****************************************************************************/
/* MD5 */
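/*
 * Authentication-only path: HMAC-MD5 over the auth region with no cipher.
 * The inner hash runs over the data and is finalized with the padded
 * bit-length word; one outer block then produces the HMAC, of which the
 * first 96 bits (ESP_HMAC_LEN) are written back at icv_off.
 */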
int
octo_null_md5_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    uint64_t *data;
    uint64_t tmp1, tmp2;
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);
    if (unlikely(od == NULL || sg == NULL || sg_len == 0 ||
            (auth_off & 0x7) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data, data_i, data_l);

    flags = octeon_crypto_enable(&state);

    /* Load MD5 IV */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

    while (auth_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        auth_off -= 8;
    }

    while (auth_len > 0) {
        CVM_LOAD_MD5_UNIT(*data, next);
        auth_len -= 8;
        SG_CONSUME(sg, data, data_i, data_l);
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *) &tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *) data)[inplen];
        } while (inplen);
        CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
        CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash */
    while (next != 7) {
        CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVMX_ES64(tmp1, ((alen + 64) << 3));
    CVM_LOAD_MD5_UNIT(tmp1, next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);

    /* Initialize hash unit */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_ES64(tmp1, ((64 + 16) << 3));
    CVMX_MT_HSH_STARTMD5(tmp1);

    /* save the HMAC */
    SG_INIT(sg, data, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        icv_off -= 8;
    }
    CVMX_MF_HSH_IV(*data, 0);
    SG_CONSUME(sg, data, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *(uint32_t *)data = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}

/****************************************************************************/
/* SHA1 */
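/* As above, but HMAC-SHA1: three IV registers and a 20-byte inner hash. */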
int
octo_null_sha1_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    uint64_t *data;
    uint64_t tmp1, tmp2, tmp3;
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);
    if (unlikely(od == NULL || sg == NULL || sg_len == 0 ||
            (auth_off & 0x7) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data, data_i, data_l);

    flags = octeon_crypto_enable(&state);

    /* Load SHA1 IV */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
    CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

    while (auth_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        auth_off -= 8;
    }

    while (auth_len > 0) {
        CVM_LOAD_SHA_UNIT(*data, next);
        auth_len -= 8;
        SG_CONSUME(sg, data, data_i, data_l);
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *) &tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *) data)[inplen];
        } while (inplen);
        CVM_LOAD_SHA_UNIT(tmp, next);
    } else {
        CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash */
    while (next != 7) {
        CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);
    tmp3 = 0;
    CVMX_MF_HSH_IV(tmp3, 2);

    /* Initialize hash unit */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
    CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    tmp3 |= 0x0000000080000000;
    CVMX_MT_HSH_DAT(tmp3, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));

    /* save the HMAC */
    SG_INIT(sg, data, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        icv_off -= 8;
    }
    CVMX_MF_HSH_IV(*data, 0);
    SG_CONSUME(sg, data, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *(uint32_t *)data = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}

/****************************************************************************/
/* DES MD5 */
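/*
 * Combined single-pass 3DES-CBC plus HMAC-MD5.  Data is walked in 32-bit
 * units (the offsets only need 4-byte alignment) and assembled into a
 * 64-bit word in the mydata union, which is run through the cipher
 * and/or the hash unit depending on where the crypt and auth windows
 * fall, then scattered back in place.
 */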
int
octo_des_cbc_md5_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    union {
        uint32_t data32[2];
        uint64_t data64[1];
    } mydata;
    uint64_t *data = &mydata.data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2;
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);
    if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
            (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
            (crypt_len & 0x7) ||
            (auth_len & 0x7) ||
            (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);
    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load 3DES Key */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    if (od->octo_encklen == 24) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    } else if (od->octo_encklen == 8) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }

    CVMX_MT_3DES_IV(*(uint64_t *) ivp);

    /* Load MD5 IV */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

    while (crypt_off > 0 && auth_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        crypt_off -= 4;
        auth_off -= 4;
    }

    while (crypt_len > 0 || auth_len > 0) {
        uint32_t *first = data32;
        mydata.data32[0] = *first;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata.data32[1] = *data32;
        if (crypt_off <= 0) {
            if (crypt_len > 0) {
                CVMX_MT_3DES_ENC_CBC(*data);
                CVMX_MF_3DES_RESULT(*data);
                crypt_len -= 8;
            }
        } else
            crypt_off -= 8;
        if (auth_off <= 0) {
            if (auth_len > 0) {
                CVM_LOAD_MD5_UNIT(*data, next);
                auth_len -= 8;
            }
        } else
            auth_off -= 8;
        *first = mydata.data32[0];
        *data32 = mydata.data32[1];
        SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *) &tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *) data)[inplen];
        } while (inplen);
        CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
        CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash */
    while (next != 7) {
        CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVMX_ES64(tmp1, ((alen + 64) << 3));
    CVM_LOAD_MD5_UNIT(tmp1, next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);

    /* Initialize hash unit */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_ES64(tmp1, ((64 + 16) << 3));
    CVMX_MT_HSH_STARTMD5(tmp1);

    /* save the HMAC */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}

int
octo_des_cbc_md5_decrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    union {
        uint32_t data32[2];
        uint64_t data64[1];
    } mydata;
    uint64_t *data = &mydata.data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2;
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);
    if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
            (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
            (crypt_len & 0x7) ||
            (auth_len & 0x7) ||
            (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);
    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load 3DES Key */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    if (od->octo_encklen == 24) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    } else if (od->octo_encklen == 8) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }

    CVMX_MT_3DES_IV(*(uint64_t *) ivp);

    /* Load MD5 IV */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

    while (crypt_off > 0 && auth_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        crypt_off -= 4;
        auth_off -= 4;
    }

    while (crypt_len > 0 || auth_len > 0) {
        uint32_t *first = data32;
        mydata.data32[0] = *first;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata.data32[1] = *data32;
        if (auth_off <= 0) {
            if (auth_len > 0) {
                CVM_LOAD_MD5_UNIT(*data, next);
                auth_len -= 8;
            }
        } else
            auth_off -= 8;
        if (crypt_off <= 0) {
            if (crypt_len > 0) {
                CVMX_MT_3DES_DEC_CBC(*data);
                CVMX_MF_3DES_RESULT(*data);
                crypt_len -= 8;
            }
        } else
            crypt_off -= 8;
        *first = mydata.data32[0];
        *data32 = mydata.data32[1];
        SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *) &tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *) data)[inplen];
        } while (inplen);
        CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
        CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash */
    while (next != 7) {
        CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVMX_ES64(tmp1, ((alen + 64) << 3));
    CVM_LOAD_MD5_UNIT(tmp1, next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);

    /* Initialize hash unit */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_ES64(tmp1, ((64 + 16) << 3));
    CVMX_MT_HSH_STARTMD5(tmp1);

    /* save the HMAC */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}

/****************************************************************************/
/* DES SHA */
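/* Same single-pass walk as the DES/MD5 pair, with HMAC-SHA1 as the
 * authenticator. */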
int
octo_des_cbc_sha1_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    union {
        uint32_t data32[2];
        uint64_t data64[1];
    } mydata;
    uint64_t *data = &mydata.data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2, tmp3;
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);
    if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
            (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
            (crypt_len & 0x7) ||
            (auth_len & 0x7) ||
            (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);
    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load 3DES Key */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    if (od->octo_encklen == 24) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    } else if (od->octo_encklen == 8) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }

    CVMX_MT_3DES_IV(*(uint64_t *) ivp);

    /* Load SHA1 IV */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
    CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

    while (crypt_off > 0 && auth_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        crypt_off -= 4;
        auth_off -= 4;
    }

    while (crypt_len > 0 || auth_len > 0) {
        uint32_t *first = data32;
        mydata.data32[0] = *first;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata.data32[1] = *data32;
        if (crypt_off <= 0) {
            if (crypt_len > 0) {
                CVMX_MT_3DES_ENC_CBC(*data);
                CVMX_MF_3DES_RESULT(*data);
                crypt_len -= 8;
            }
        } else
            crypt_off -= 8;
        if (auth_off <= 0) {
            if (auth_len > 0) {
                CVM_LOAD_SHA_UNIT(*data, next);
                auth_len -= 8;
            }
        } else
            auth_off -= 8;
        *first = mydata.data32[0];
        *data32 = mydata.data32[1];
        SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *) &tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *) data)[inplen];
        } while (inplen);
        CVM_LOAD_SHA_UNIT(tmp, next);
    } else {
        CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash */
    while (next != 7) {
        CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);
    tmp3 = 0;
    CVMX_MF_HSH_IV(tmp3, 2);

    /* Initialize hash unit */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
    CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    tmp3 |= 0x0000000080000000;
    CVMX_MT_HSH_DAT(tmp3, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));

    /* save the HMAC */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}

int
octo_des_cbc_sha1_decrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    union {
        uint32_t data32[2];
        uint64_t data64[1];
    } mydata;
    uint64_t *data = &mydata.data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2, tmp3;
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);
    if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
            (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
            (crypt_len & 0x7) ||
            (auth_len & 0x7) ||
            (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);
    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load 3DES Key */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    if (od->octo_encklen == 24) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    } else if (od->octo_encklen == 8) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }

    CVMX_MT_3DES_IV(*(uint64_t *) ivp);

    /* Load SHA1 IV */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
    CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

    while (crypt_off > 0 && auth_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        crypt_off -= 4;
        auth_off -= 4;
    }

    while (crypt_len > 0 || auth_len > 0) {
        uint32_t *first = data32;
        mydata.data32[0] = *first;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata.data32[1] = *data32;
        if (auth_off <= 0) {
            if (auth_len > 0) {
                CVM_LOAD_SHA_UNIT(*data, next);
                auth_len -= 8;
            }
        } else
            auth_off -= 8;
        if (crypt_off <= 0) {
            if (crypt_len > 0) {
                CVMX_MT_3DES_DEC_CBC(*data);
                CVMX_MF_3DES_RESULT(*data);
                crypt_len -= 8;
            }
        } else
            crypt_off -= 8;
        *first = mydata.data32[0];
        *data32 = mydata.data32[1];
        SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *) &tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *) data)[inplen];
        } while (inplen);
        CVM_LOAD_SHA_UNIT(tmp, next);
    } else {
        CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash */
    while (next != 7) {
        CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);
    tmp3 = 0;
    CVMX_MF_HSH_IV(tmp3, 2);

    /* Initialize hash unit */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
    CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    tmp3 |= 0x0000000080000000;
    CVMX_MT_HSH_DAT(tmp3, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));

    /* save the HMAC */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}

/****************************************************************************/
/* AES MD5 */
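/*
 * Combined AES-CBC plus HMAC-MD5.  AES needs 16 bytes per operation, so
 * the main loop gathers four 32-bit words into two 64-bit staging words,
 * runs the cipher, interleaves the MD5 loads, and scatters the result
 * back; leading and trailing auth-only data is hashed 8 bytes at a time.
 */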
  1268. int
  1269. octo_aes_cbc_md5_encrypt(
  1270. struct octo_sess *od,
  1271. struct scatterlist *sg, int sg_len,
  1272. int auth_off, int auth_len,
  1273. int crypt_off, int crypt_len,
  1274. int icv_off, uint8_t *ivp)
  1275. {
  1276. register int next = 0;
  1277. union {
  1278. uint32_t data32[2];
  1279. uint64_t data64[1];
  1280. } mydata[2];
  1281. uint64_t *pdata = &mydata[0].data64[0];
  1282. uint64_t *data = &mydata[1].data64[0];
  1283. uint32_t *data32;
  1284. uint64_t tmp1, tmp2;
  1285. int data_i, data_l, alen = auth_len;
  1286. struct octeon_cop2_state state;
  1287. unsigned long flags;
  1288. dprintk("%s()\n", __FUNCTION__);
  1289. if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
  1290. (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
  1291. (crypt_len & 0x7) ||
  1292. (auth_len & 0x7) ||
  1293. (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
  1294. dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
  1295. "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
  1296. "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
  1297. auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
  1298. return -EINVAL;
  1299. }
  1300. SG_INIT(sg, data32, data_i, data_l);
  1301. CVMX_PREFETCH0(ivp);
  1302. CVMX_PREFETCH0(od->octo_enckey);
  1303. flags = octeon_crypto_enable(&state);
  1304. /* load AES Key */
  1305. CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
  1306. CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
  1307. if (od->octo_encklen == 16) {
  1308. CVMX_MT_AES_KEY(0x0, 2);
  1309. CVMX_MT_AES_KEY(0x0, 3);
  1310. } else if (od->octo_encklen == 24) {
  1311. CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
  1312. CVMX_MT_AES_KEY(0x0, 3);
  1313. } else if (od->octo_encklen == 32) {
  1314. CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
  1315. CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
  1316. } else {
  1317. octeon_crypto_disable(&state, flags);
  1318. dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
  1319. return -EINVAL;
  1320. }
  1321. CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);
  1322. CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
  1323. CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);
  1324. /* Load MD5 IV */
  1325. CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
  1326. CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
  1327. while (crypt_off > 0 && auth_off > 0) {
  1328. SG_CONSUME(sg, data32, data_i, data_l);
  1329. crypt_off -= 4;
  1330. auth_off -= 4;
  1331. }
  1332. /* align auth and crypt */
  1333. while (crypt_off > 0 && auth_len > 0) {
  1334. mydata[0].data32[0] = *data32;
  1335. SG_CONSUME(sg, data32, data_i, data_l);
  1336. mydata[0].data32[1] = *data32;
  1337. SG_CONSUME(sg, data32, data_i, data_l);
  1338. CVM_LOAD_MD5_UNIT(*pdata, next);
  1339. crypt_off -= 8;
  1340. auth_len -= 8;
  1341. }
    while (crypt_len > 0) {
        uint32_t *pdata32[3];

        pdata32[0] = data32;
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        pdata32[1] = data32;
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        pdata32[2] = data32;
        mydata[1].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[1].data32[1] = *data32;

        CVMX_MT_AES_ENC_CBC0(*pdata);
        CVMX_MT_AES_ENC_CBC1(*data);
        CVMX_MF_AES_RESULT(*pdata, 0);
        CVMX_MF_AES_RESULT(*data, 1);
        crypt_len -= 16;

        if (auth_len > 0) {
            CVM_LOAD_MD5_UNIT(*pdata, next);
            auth_len -= 8;
        }
        if (auth_len > 0) {
            CVM_LOAD_MD5_UNIT(*data, next);
            auth_len -= 8;
        }

        *pdata32[0] = mydata[0].data32[0];
        *pdata32[1] = mydata[0].data32[1];
        *pdata32[2] = mydata[1].data32[0];
        *data32 = mydata[1].data32[1];
        SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish any left over hashing */
    while (auth_len > 0) {
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        CVM_LOAD_MD5_UNIT(*pdata, next);
        auth_len -= 8;
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *)&tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *)data)[inplen];
        } while (inplen);
        CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
        CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif
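    /*
     * MD5 padding: the 0x80 byte was just appended; the loop below
     * zero-fills to 56 bytes (mod 64) and then the message length in
     * bits is appended.  The inner HMAC text is the 64-byte ipad
     * block plus alen bytes of data, hence (alen + 64) << 3.  MD5
     * stores the length little-endian, so CVMX_ES64 byte-swaps it on
     * this big-endian core (the SHA1 routines below skip the swap).
     */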
    /* Finish Inner hash */
    while (next != 7) {
        CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVMX_ES64(tmp1, ((alen + 64) << 3));
    CVM_LOAD_MD5_UNIT(tmp1, next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);
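    /*
     * Outer hash: HMAC = H(K ^ opad || inner_digest).  od->octo_hmouter
     * holds the hash state after the precomputed opad block, so only
     * the 16-byte inner digest plus padding and the (64 + 16)-byte
     * bit length need to be fed in before one final MD5 round.
     */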
    /* Initialize hash unit */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_ES64(tmp1, ((64 + 16) << 3));
    CVMX_MT_HSH_STARTMD5(tmp1);

    /* save the HMAC */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}
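/*
 * Identical to octo_aes_cbc_md5_encrypt() except that the MAC is
 * computed over the ciphertext, so on the decrypt side each block is
 * fed to the hash unit *before* it goes through AES-CBC decryption.
 */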
int
octo_aes_cbc_md5_decrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    union {
        uint32_t data32[2];
        uint64_t data64[1];
    } mydata[2];
    uint64_t *pdata = &mydata[0].data64[0];
    uint64_t *data = &mydata[1].data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2;
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
            (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
            (crypt_len & 0x7) ||
            (auth_len & 0x7) ||
            (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load AES Key */
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

    if (od->octo_encklen == 16) {
        CVMX_MT_AES_KEY(0x0, 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 24) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 32) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }
    CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

    CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
    CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

    /* Load MD5 IV */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

    while (crypt_off > 0 && auth_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        crypt_off -= 4;
        auth_off -= 4;
    }

    /* align auth and crypt */
    while (crypt_off > 0 && auth_len > 0) {
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        CVM_LOAD_MD5_UNIT(*pdata, next);
        crypt_off -= 8;
        auth_len -= 8;
    }

    while (crypt_len > 0) {
        uint32_t *pdata32[3];

        pdata32[0] = data32;
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        pdata32[1] = data32;
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        pdata32[2] = data32;
        mydata[1].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[1].data32[1] = *data32;

        if (auth_len > 0) {
            CVM_LOAD_MD5_UNIT(*pdata, next);
            auth_len -= 8;
        }
        if (auth_len > 0) {
            CVM_LOAD_MD5_UNIT(*data, next);
            auth_len -= 8;
        }

        CVMX_MT_AES_DEC_CBC0(*pdata);
        CVMX_MT_AES_DEC_CBC1(*data);
        CVMX_MF_AES_RESULT(*pdata, 0);
        CVMX_MF_AES_RESULT(*data, 1);
        crypt_len -= 16;

        *pdata32[0] = mydata[0].data32[0];
        *pdata32[1] = mydata[0].data32[1];
        *pdata32[2] = mydata[1].data32[0];
        *data32 = mydata[1].data32[1];
        SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish left over hash if any */
    while (auth_len > 0) {
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        CVM_LOAD_MD5_UNIT(*pdata, next);
        auth_len -= 8;
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *)&tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *)data)[inplen];
        } while (inplen);
        CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
        CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash */
    while (next != 7) {
        CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVMX_ES64(tmp1, ((alen + 64) << 3));
    CVM_LOAD_MD5_UNIT(tmp1, next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);

    /* Initialize hash unit */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_ES64(tmp1, ((64 + 16) << 3));
    CVMX_MT_HSH_STARTMD5(tmp1);

    /* save the HMAC */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}

/****************************************************************************/
/* AES SHA1                                                                 */
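/*
 * The SHA1 routines follow the same structure as the MD5 ones above,
 * with a 160-bit state: three 64-bit IV registers instead of two, a
 * 20-byte inner digest, and no byte-swap of the length word since
 * SHA1 stores it big-endian.  Note that all four routines in this
 * file store only the first 96 bits of the digest as the ICV, which
 * matches the truncated HMAC forms used by IPsec (HMAC-MD5-96 and
 * HMAC-SHA-1-96, RFC 2403/2404).
 */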
int
octo_aes_cbc_sha1_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    union {
        uint32_t data32[2];
        uint64_t data64[1];
    } mydata[2];
    uint64_t *pdata = &mydata[0].data64[0];
    uint64_t *data = &mydata[1].data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2, tmp3;
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s(a_off=%d a_len=%d c_off=%d c_len=%d icv_off=%d)\n",
            __FUNCTION__, auth_off, auth_len, crypt_off, crypt_len, icv_off);

    if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
            (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
            (crypt_len & 0x7) ||
            (auth_len & 0x7) ||
            (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load AES Key */
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

    if (od->octo_encklen == 16) {
        CVMX_MT_AES_KEY(0x0, 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 24) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 32) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }
    CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

    CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
    CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

    /* Load SHA IV */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
    CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

    while (crypt_off > 0 && auth_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        crypt_off -= 4;
        auth_off -= 4;
    }

    /* align auth and crypt */
    while (crypt_off > 0 && auth_len > 0) {
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        CVM_LOAD_SHA_UNIT(*pdata, next);
        crypt_off -= 8;
        auth_len -= 8;
    }

    while (crypt_len > 0) {
        uint32_t *pdata32[3];

        pdata32[0] = data32;
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        pdata32[1] = data32;
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        pdata32[2] = data32;
        mydata[1].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[1].data32[1] = *data32;

        CVMX_MT_AES_ENC_CBC0(*pdata);
        CVMX_MT_AES_ENC_CBC1(*data);
        CVMX_MF_AES_RESULT(*pdata, 0);
        CVMX_MF_AES_RESULT(*data, 1);
        crypt_len -= 16;

        if (auth_len > 0) {
            CVM_LOAD_SHA_UNIT(*pdata, next);
            auth_len -= 8;
        }
        if (auth_len > 0) {
            CVM_LOAD_SHA_UNIT(*data, next);
            auth_len -= 8;
        }

        *pdata32[0] = mydata[0].data32[0];
        *pdata32[1] = mydata[0].data32[1];
        *pdata32[2] = mydata[1].data32[0];
        *data32 = mydata[1].data32[1];
        SG_CONSUME(sg, data32, data_i, data_l);
    }
    /* finish any leftover hashing */
    while (auth_len > 0) {
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        CVM_LOAD_SHA_UNIT(*pdata, next);
        auth_len -= 8;
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *)&tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *)data)[inplen];
        } while (inplen);
        CVM_LOAD_SHA_UNIT(tmp, next);
    } else {
        CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash */
    while (next != 7) {
        CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);
    tmp3 = 0;
    CVMX_MF_HSH_IV(tmp3, 2);
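    /*
     * The 20-byte inner digest occupies registers 0 and 1 and the top
     * half of register 2; OR-ing 0x80 into bits 31:24 of tmp3 below
     * places the pad byte immediately after the digest (byte offset
     * 20) when the word is fed back into the outer hash.
     */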
    /* Initialize hash unit */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
    CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    tmp3 |= 0x0000000080000000;
    CVMX_MT_HSH_DAT(tmp3, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));
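    /*
     * The CVM_LOAD_MD5_UNIT block that follows appears to be a
     * leftover from the MD5 routines: at this point next == 0, and
     * (assuming the CVM_LOAD_*_UNIT macros only trigger a hash round
     * once their word counter reaches 7, as the finalisation loops
     * above rely on) it merely writes one data register and never
     * starts a transform, so the SHA1 result read out below is
     * unaffected.
     */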
    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *)&tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *)data)[inplen];
        } while (inplen);
        CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
        CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

    /* save the HMAC */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}
int
octo_aes_cbc_sha1_decrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    union {
        uint32_t data32[2];
        uint64_t data64[1];
    } mydata[2];
    uint64_t *pdata = &mydata[0].data64[0];
    uint64_t *data = &mydata[1].data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2, tmp3;
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s(a_off=%d a_len=%d c_off=%d c_len=%d icv_off=%d)\n",
            __FUNCTION__, auth_off, auth_len, crypt_off, crypt_len, icv_off);

    if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
            (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
            (crypt_len & 0x7) ||
            (auth_len & 0x7) ||
            (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
                "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
                "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
                auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load AES Key */
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

    if (od->octo_encklen == 16) {
        CVMX_MT_AES_KEY(0x0, 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 24) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 32) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }
    CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

    CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
    CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

    /* Load SHA1 IV */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
    CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

    while (crypt_off > 0 && auth_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        crypt_off -= 4;
        auth_off -= 4;
    }

    /* align auth and crypt */
    while (crypt_off > 0 && auth_len > 0) {
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        CVM_LOAD_SHA_UNIT(*pdata, next);
        crypt_off -= 8;
        auth_len -= 8;
    }

    while (crypt_len > 0) {
        uint32_t *pdata32[3];

        pdata32[0] = data32;
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        pdata32[1] = data32;
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        pdata32[2] = data32;
        mydata[1].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[1].data32[1] = *data32;

        if (auth_len > 0) {
            CVM_LOAD_SHA_UNIT(*pdata, next);
            auth_len -= 8;
        }
        if (auth_len > 0) {
            CVM_LOAD_SHA_UNIT(*data, next);
            auth_len -= 8;
        }

        CVMX_MT_AES_DEC_CBC0(*pdata);
        CVMX_MT_AES_DEC_CBC1(*data);
        CVMX_MF_AES_RESULT(*pdata, 0);
        CVMX_MF_AES_RESULT(*data, 1);
        crypt_len -= 16;

        *pdata32[0] = mydata[0].data32[0];
        *pdata32[1] = mydata[0].data32[1];
        *pdata32[2] = mydata[1].data32[0];
        *data32 = mydata[1].data32[1];
        SG_CONSUME(sg, data32, data_i, data_l);
    }
    /* finish any leftover hashing */
    while (auth_len > 0) {
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        CVM_LOAD_SHA_UNIT(*pdata, next);
        auth_len -= 8;
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *)&tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *)data)[inplen];
        } while (inplen);
        CVM_LOAD_SHA_UNIT(tmp, next);
    } else {
        CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash */
    while (next != 7) {
        CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);
    tmp3 = 0;
    CVMX_MF_HSH_IV(tmp3, 2);

    /* Initialize hash unit */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
    CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    tmp3 |= 0x0000000080000000;
    CVMX_MT_HSH_DAT(tmp3, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));
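    /*
     * As in octo_aes_cbc_sha1_encrypt() above, the stray MD5 load
     * that follows is a harmless leftover and does not start a hash
     * round.
     */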
    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *)&tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *)data)[inplen];
        } while (inplen);
        CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
        CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

    /* save the HMAC */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}

/****************************************************************************/