/* scrypt130511.cl */

/*-
 * Copyright 2009 Colin Percival
 * Copyright 2011 ArtForz
 * Copyright 2011 pooler
 * Copyright 2012 mtrlt
 * Copyright 2012-2013 Con Kolivas
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * This file was originally written by Colin Percival as part of the Tarsnap
 * online backup system.
 */
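/*
 * ES holds the byte masks used by EndianSwap() to byte-reverse 32-bit words.
 * K[0..62] are the SHA-256 round constants for rounds 0..14 and 16..63; the
 * round-15 constant 0xc19bf174 is stored separately at K[76], which is why
 * round 15 below reads K[76] and rounds 16..63 read K[t-1]. The remaining
 * entries appear to be helper constants: words of the SHA-256 initial hash
 * value and values folded into SHA256_fresh()'s precomputed first rounds,
 * the HMAC opad/ipad bytes (K[82], K[83]), the SHA-256 padding bit (K[84]),
 * the scrypt N-1 index mask (K[85] = 1023) and message bit lengths used in
 * padding (K[86..88]).
 */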
__constant uint ES[2] = { 0x00FF00FF, 0xFF00FF00 };
__constant uint K[] = {
    0x428a2f98U,
    0x71374491U,
    0xb5c0fbcfU,
    0xe9b5dba5U,
    0x3956c25bU,
    0x59f111f1U,
    0x923f82a4U,
    0xab1c5ed5U,
    0xd807aa98U,
    0x12835b01U,
    0x243185beU, // 10
    0x550c7dc3U,
    0x72be5d74U,
    0x80deb1feU,
    0x9bdc06a7U,
    0xe49b69c1U,
    0xefbe4786U,
    0x0fc19dc6U,
    0x240ca1ccU,
    0x2de92c6fU,
    0x4a7484aaU, // 20
    0x5cb0a9dcU,
    0x76f988daU,
    0x983e5152U,
    0xa831c66dU,
    0xb00327c8U,
    0xbf597fc7U,
    0xc6e00bf3U,
    0xd5a79147U,
    0x06ca6351U,
    0x14292967U, // 30
    0x27b70a85U,
    0x2e1b2138U,
    0x4d2c6dfcU,
    0x53380d13U,
    0x650a7354U,
    0x766a0abbU,
    0x81c2c92eU,
    0x92722c85U,
    0xa2bfe8a1U,
    0xa81a664bU, // 40
    0xc24b8b70U,
    0xc76c51a3U,
    0xd192e819U,
    0xd6990624U,
    0xf40e3585U,
    0x106aa070U,
    0x19a4c116U,
    0x1e376c08U,
    0x2748774cU,
    0x34b0bcb5U, // 50
    0x391c0cb3U,
    0x4ed8aa4aU,
    0x5b9cca4fU,
    0x682e6ff3U,
    0x748f82eeU,
    0x78a5636fU,
    0x84c87814U,
    0x8cc70208U,
    0x90befffaU,
    0xa4506cebU, // 60
    0xbef9a3f7U,
    0xc67178f2U,
    0x98c7e2a2U,
    0xfc08884dU,
    0xcd2a11aeU,
    0x510e527fU,
    0x9b05688cU,
    0xC3910C8EU,
    0xfb6feee7U,
    0x2a01a605U, // 70
    0x0c2e12e0U,
    0x4498517BU,
    0x6a09e667U,
    0xa4ce148bU,
    0x95F61999U,
    0xc19bf174U,
    0xBB67AE85U,
    0x3C6EF372U,
    0xA54FF53AU,
    0x1F83D9ABU, // 80
    0x5BE0CD19U,
    0x5C5C5C5CU,
    0x36363636U,
    0x80000000U,
    0x000003FFU,
    0x00000280U,
    0x000004a0U,
    0x00000300U
};
#define rotl(x,y) rotate(x,y)
#define Ch(x,y,z) bitselect(z,y,x)
#define Maj(x,y,z) Ch((x^z),y,z)
#define EndianSwap(n) (rotl(n & ES[0], 24U)|rotl(n & ES[1], 8U))
#define Tr2(x) (rotl(x, 30U) ^ rotl(x, 19U) ^ rotl(x, 10U))
#define Tr1(x) (rotl(x, 26U) ^ rotl(x, 21U) ^ rotl(x, 7U))
#define Wr2(x) (rotl(x, 25U) ^ rotl(x, 14U) ^ (x>>3U))
#define Wr1(x) (rotl(x, 15U) ^ rotl(x, 13U) ^ (x>>10U))
#define RND(a, b, c, d, e, f, g, h, k) \
    h += Tr1(e); \
    h += Ch(e, f, g); \
    h += k; \
    d += h; \
    h += Tr2(a); \
    h += Maj(a, b, c);
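/*
 * Tr1/Tr2 are the SHA-256 Sigma1/Sigma0 functions and Wr1/Wr2 the
 * sigma1/sigma0 message-schedule functions, written with left rotates.
 * RND is one SHA-256 round; the usual rotation of the working variables
 * is folded into the argument order at each call site. SHA256() performs
 * one full 64-round compression of the 16-word block passed in
 * block0..block3 (the schedule is expanded in place in W[]) and adds the
 * result into *state0 (a,b,c,d) and *state1 (e,f,g,h).
 */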
void SHA256(uint4*restrict state0,uint4*restrict state1, const uint4 block0, const uint4 block1, const uint4 block2, const uint4 block3)
{
    uint4 S0 = *state0;
    uint4 S1 = *state1;
#define A S0.x
#define B S0.y
#define C S0.z
#define D S0.w
#define E S1.x
#define F S1.y
#define G S1.z
#define H S1.w
    uint4 W[4];
    W[ 0].x = block0.x;
    RND(A,B,C,D,E,F,G,H, W[0].x+ K[0]);
    W[ 0].y = block0.y;
    RND(H,A,B,C,D,E,F,G, W[0].y+ K[1]);
    W[ 0].z = block0.z;
    RND(G,H,A,B,C,D,E,F, W[0].z+ K[2]);
    W[ 0].w = block0.w;
    RND(F,G,H,A,B,C,D,E, W[0].w+ K[3]);
    W[ 1].x = block1.x;
    RND(E,F,G,H,A,B,C,D, W[1].x+ K[4]);
    W[ 1].y = block1.y;
    RND(D,E,F,G,H,A,B,C, W[1].y+ K[5]);
    W[ 1].z = block1.z;
    RND(C,D,E,F,G,H,A,B, W[1].z+ K[6]);
    W[ 1].w = block1.w;
    RND(B,C,D,E,F,G,H,A, W[1].w+ K[7]);
    W[ 2].x = block2.x;
    RND(A,B,C,D,E,F,G,H, W[2].x+ K[8]);
    W[ 2].y = block2.y;
    RND(H,A,B,C,D,E,F,G, W[2].y+ K[9]);
    W[ 2].z = block2.z;
    RND(G,H,A,B,C,D,E,F, W[2].z+ K[10]);
    W[ 2].w = block2.w;
    RND(F,G,H,A,B,C,D,E, W[2].w+ K[11]);
    W[ 3].x = block3.x;
    RND(E,F,G,H,A,B,C,D, W[3].x+ K[12]);
    W[ 3].y = block3.y;
    RND(D,E,F,G,H,A,B,C, W[3].y+ K[13]);
    W[ 3].z = block3.z;
    RND(C,D,E,F,G,H,A,B, W[3].z+ K[14]);
    W[ 3].w = block3.w;
    RND(B,C,D,E,F,G,H,A, W[3].w+ K[76]);
    W[ 0].x += Wr1(W[ 3].z) + W[ 2].y + Wr2(W[ 0].y);
    RND(A,B,C,D,E,F,G,H, W[0].x+ K[15]);
    W[ 0].y += Wr1(W[ 3].w) + W[ 2].z + Wr2(W[ 0].z);
    RND(H,A,B,C,D,E,F,G, W[0].y+ K[16]);
    W[ 0].z += Wr1(W[ 0].x) + W[ 2].w + Wr2(W[ 0].w);
    RND(G,H,A,B,C,D,E,F, W[0].z+ K[17]);
    W[ 0].w += Wr1(W[ 0].y) + W[ 3].x + Wr2(W[ 1].x);
    RND(F,G,H,A,B,C,D,E, W[0].w+ K[18]);
    W[ 1].x += Wr1(W[ 0].z) + W[ 3].y + Wr2(W[ 1].y);
    RND(E,F,G,H,A,B,C,D, W[1].x+ K[19]);
    W[ 1].y += Wr1(W[ 0].w) + W[ 3].z + Wr2(W[ 1].z);
    RND(D,E,F,G,H,A,B,C, W[1].y+ K[20]);
    W[ 1].z += Wr1(W[ 1].x) + W[ 3].w + Wr2(W[ 1].w);
    RND(C,D,E,F,G,H,A,B, W[1].z+ K[21]);
    W[ 1].w += Wr1(W[ 1].y) + W[ 0].x + Wr2(W[ 2].x);
    RND(B,C,D,E,F,G,H,A, W[1].w+ K[22]);
    W[ 2].x += Wr1(W[ 1].z) + W[ 0].y + Wr2(W[ 2].y);
    RND(A,B,C,D,E,F,G,H, W[2].x+ K[23]);
    W[ 2].y += Wr1(W[ 1].w) + W[ 0].z + Wr2(W[ 2].z);
    RND(H,A,B,C,D,E,F,G, W[2].y+ K[24]);
    W[ 2].z += Wr1(W[ 2].x) + W[ 0].w + Wr2(W[ 2].w);
    RND(G,H,A,B,C,D,E,F, W[2].z+ K[25]);
    W[ 2].w += Wr1(W[ 2].y) + W[ 1].x + Wr2(W[ 3].x);
    RND(F,G,H,A,B,C,D,E, W[2].w+ K[26]);
    W[ 3].x += Wr1(W[ 2].z) + W[ 1].y + Wr2(W[ 3].y);
    RND(E,F,G,H,A,B,C,D, W[3].x+ K[27]);
    W[ 3].y += Wr1(W[ 2].w) + W[ 1].z + Wr2(W[ 3].z);
    RND(D,E,F,G,H,A,B,C, W[3].y+ K[28]);
    W[ 3].z += Wr1(W[ 3].x) + W[ 1].w + Wr2(W[ 3].w);
    RND(C,D,E,F,G,H,A,B, W[3].z+ K[29]);
    W[ 3].w += Wr1(W[ 3].y) + W[ 2].x + Wr2(W[ 0].x);
    RND(B,C,D,E,F,G,H,A, W[3].w+ K[30]);
    W[ 0].x += Wr1(W[ 3].z) + W[ 2].y + Wr2(W[ 0].y);
    RND(A,B,C,D,E,F,G,H, W[0].x+ K[31]);
    W[ 0].y += Wr1(W[ 3].w) + W[ 2].z + Wr2(W[ 0].z);
    RND(H,A,B,C,D,E,F,G, W[0].y+ K[32]);
    W[ 0].z += Wr1(W[ 0].x) + W[ 2].w + Wr2(W[ 0].w);
    RND(G,H,A,B,C,D,E,F, W[0].z+ K[33]);
    W[ 0].w += Wr1(W[ 0].y) + W[ 3].x + Wr2(W[ 1].x);
    RND(F,G,H,A,B,C,D,E, W[0].w+ K[34]);
    W[ 1].x += Wr1(W[ 0].z) + W[ 3].y + Wr2(W[ 1].y);
    RND(E,F,G,H,A,B,C,D, W[1].x+ K[35]);
    W[ 1].y += Wr1(W[ 0].w) + W[ 3].z + Wr2(W[ 1].z);
    RND(D,E,F,G,H,A,B,C, W[1].y+ K[36]);
    W[ 1].z += Wr1(W[ 1].x) + W[ 3].w + Wr2(W[ 1].w);
    RND(C,D,E,F,G,H,A,B, W[1].z+ K[37]);
    W[ 1].w += Wr1(W[ 1].y) + W[ 0].x + Wr2(W[ 2].x);
    RND(B,C,D,E,F,G,H,A, W[1].w+ K[38]);
    W[ 2].x += Wr1(W[ 1].z) + W[ 0].y + Wr2(W[ 2].y);
    RND(A,B,C,D,E,F,G,H, W[2].x+ K[39]);
    W[ 2].y += Wr1(W[ 1].w) + W[ 0].z + Wr2(W[ 2].z);
    RND(H,A,B,C,D,E,F,G, W[2].y+ K[40]);
    W[ 2].z += Wr1(W[ 2].x) + W[ 0].w + Wr2(W[ 2].w);
    RND(G,H,A,B,C,D,E,F, W[2].z+ K[41]);
    W[ 2].w += Wr1(W[ 2].y) + W[ 1].x + Wr2(W[ 3].x);
    RND(F,G,H,A,B,C,D,E, W[2].w+ K[42]);
    W[ 3].x += Wr1(W[ 2].z) + W[ 1].y + Wr2(W[ 3].y);
    RND(E,F,G,H,A,B,C,D, W[3].x+ K[43]);
    W[ 3].y += Wr1(W[ 2].w) + W[ 1].z + Wr2(W[ 3].z);
    RND(D,E,F,G,H,A,B,C, W[3].y+ K[44]);
    W[ 3].z += Wr1(W[ 3].x) + W[ 1].w + Wr2(W[ 3].w);
    RND(C,D,E,F,G,H,A,B, W[3].z+ K[45]);
    W[ 3].w += Wr1(W[ 3].y) + W[ 2].x + Wr2(W[ 0].x);
    RND(B,C,D,E,F,G,H,A, W[3].w+ K[46]);
    W[ 0].x += Wr1(W[ 3].z) + W[ 2].y + Wr2(W[ 0].y);
    RND(A,B,C,D,E,F,G,H, W[0].x+ K[47]);
    W[ 0].y += Wr1(W[ 3].w) + W[ 2].z + Wr2(W[ 0].z);
    RND(H,A,B,C,D,E,F,G, W[0].y+ K[48]);
    W[ 0].z += Wr1(W[ 0].x) + W[ 2].w + Wr2(W[ 0].w);
    RND(G,H,A,B,C,D,E,F, W[0].z+ K[49]);
    W[ 0].w += Wr1(W[ 0].y) + W[ 3].x + Wr2(W[ 1].x);
    RND(F,G,H,A,B,C,D,E, W[0].w+ K[50]);
    W[ 1].x += Wr1(W[ 0].z) + W[ 3].y + Wr2(W[ 1].y);
    RND(E,F,G,H,A,B,C,D, W[1].x+ K[51]);
    W[ 1].y += Wr1(W[ 0].w) + W[ 3].z + Wr2(W[ 1].z);
    RND(D,E,F,G,H,A,B,C, W[1].y+ K[52]);
    W[ 1].z += Wr1(W[ 1].x) + W[ 3].w + Wr2(W[ 1].w);
    RND(C,D,E,F,G,H,A,B, W[1].z+ K[53]);
    W[ 1].w += Wr1(W[ 1].y) + W[ 0].x + Wr2(W[ 2].x);
    RND(B,C,D,E,F,G,H,A, W[1].w+ K[54]);
    W[ 2].x += Wr1(W[ 1].z) + W[ 0].y + Wr2(W[ 2].y);
    RND(A,B,C,D,E,F,G,H, W[2].x+ K[55]);
    W[ 2].y += Wr1(W[ 1].w) + W[ 0].z + Wr2(W[ 2].z);
    RND(H,A,B,C,D,E,F,G, W[2].y+ K[56]);
    W[ 2].z += Wr1(W[ 2].x) + W[ 0].w + Wr2(W[ 2].w);
    RND(G,H,A,B,C,D,E,F, W[2].z+ K[57]);
    W[ 2].w += Wr1(W[ 2].y) + W[ 1].x + Wr2(W[ 3].x);
    RND(F,G,H,A,B,C,D,E, W[2].w+ K[58]);
    W[ 3].x += Wr1(W[ 2].z) + W[ 1].y + Wr2(W[ 3].y);
    RND(E,F,G,H,A,B,C,D, W[3].x+ K[59]);
    W[ 3].y += Wr1(W[ 2].w) + W[ 1].z + Wr2(W[ 3].z);
    RND(D,E,F,G,H,A,B,C, W[3].y+ K[60]);
    W[ 3].z += Wr1(W[ 3].x) + W[ 1].w + Wr2(W[ 3].w);
    RND(C,D,E,F,G,H,A,B, W[3].z+ K[61]);
    W[ 3].w += Wr1(W[ 3].y) + W[ 2].x + Wr2(W[ 0].x);
    RND(B,C,D,E,F,G,H,A, W[3].w+ K[62]);
#undef A
#undef B
#undef C
#undef D
#undef E
#undef F
#undef G
#undef H
    *state0 += S0;
    *state1 += S1;
}
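/*
 * SHA256_fresh(): the same 64-round compression, but starting from the
 * standard SHA-256 initial hash value. The first rounds are specialised
 * against the fixed IV using the precomputed constants K[63..75], and the
 * IV itself is added back in at the end, so the state pointers are pure
 * outputs. It is used to absorb the HMAC ipad/opad blocks.
 */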
void SHA256_fresh(uint4*restrict state0,uint4*restrict state1, const uint4 block0, const uint4 block1, const uint4 block2, const uint4 block3)
{
#define A (*state0).x
#define B (*state0).y
#define C (*state0).z
#define D (*state0).w
#define E (*state1).x
#define F (*state1).y
#define G (*state1).z
#define H (*state1).w
    uint4 W[4];
    W[0].x = block0.x;
    D= K[63] +W[0].x;
    H= K[64] +W[0].x;
    W[0].y = block0.y;
    C= K[65] +Tr1(D)+Ch(D, K[66], K[67])+W[0].y;
    G= K[68] +C+Tr2(H)+Ch(H, K[69] ,K[70]);
    W[0].z = block0.z;
    B= K[71] +Tr1(C)+Ch(C,D,K[66])+W[0].z;
    F= K[72] +B+Tr2(G)+Maj(G,H, K[73]);
    W[0].w = block0.w;
    A= K[74] +Tr1(B)+Ch(B,C,D)+W[0].w;
    E= K[75] +A+Tr2(F)+Maj(F,G,H);
    W[1].x = block1.x;
    RND(E,F,G,H,A,B,C,D, W[1].x+ K[4]);
    W[1].y = block1.y;
    RND(D,E,F,G,H,A,B,C, W[1].y+ K[5]);
    W[1].z = block1.z;
    RND(C,D,E,F,G,H,A,B, W[1].z+ K[6]);
    W[1].w = block1.w;
    RND(B,C,D,E,F,G,H,A, W[1].w+ K[7]);
    W[2].x = block2.x;
    RND(A,B,C,D,E,F,G,H, W[2].x+ K[8]);
    W[2].y = block2.y;
    RND(H,A,B,C,D,E,F,G, W[2].y+ K[9]);
    W[2].z = block2.z;
    RND(G,H,A,B,C,D,E,F, W[2].z+ K[10]);
    W[2].w = block2.w;
    RND(F,G,H,A,B,C,D,E, W[2].w+ K[11]);
    W[3].x = block3.x;
    RND(E,F,G,H,A,B,C,D, W[3].x+ K[12]);
    W[3].y = block3.y;
    RND(D,E,F,G,H,A,B,C, W[3].y+ K[13]);
    W[3].z = block3.z;
    RND(C,D,E,F,G,H,A,B, W[3].z+ K[14]);
    W[3].w = block3.w;
    RND(B,C,D,E,F,G,H,A, W[3].w+ K[76]);
    W[0].x += Wr1(W[3].z) + W[2].y + Wr2(W[0].y);
    RND(A,B,C,D,E,F,G,H, W[0].x+ K[15]);
    W[0].y += Wr1(W[3].w) + W[2].z + Wr2(W[0].z);
    RND(H,A,B,C,D,E,F,G, W[0].y+ K[16]);
    W[0].z += Wr1(W[0].x) + W[2].w + Wr2(W[0].w);
    RND(G,H,A,B,C,D,E,F, W[0].z+ K[17]);
    W[0].w += Wr1(W[0].y) + W[3].x + Wr2(W[1].x);
    RND(F,G,H,A,B,C,D,E, W[0].w+ K[18]);
    W[1].x += Wr1(W[0].z) + W[3].y + Wr2(W[1].y);
    RND(E,F,G,H,A,B,C,D, W[1].x+ K[19]);
    W[1].y += Wr1(W[0].w) + W[3].z + Wr2(W[1].z);
    RND(D,E,F,G,H,A,B,C, W[1].y+ K[20]);
    W[1].z += Wr1(W[1].x) + W[3].w + Wr2(W[1].w);
    RND(C,D,E,F,G,H,A,B, W[1].z+ K[21]);
    W[1].w += Wr1(W[1].y) + W[0].x + Wr2(W[2].x);
    RND(B,C,D,E,F,G,H,A, W[1].w+ K[22]);
    W[2].x += Wr1(W[1].z) + W[0].y + Wr2(W[2].y);
    RND(A,B,C,D,E,F,G,H, W[2].x+ K[23]);
    W[2].y += Wr1(W[1].w) + W[0].z + Wr2(W[2].z);
    RND(H,A,B,C,D,E,F,G, W[2].y+ K[24]);
    W[2].z += Wr1(W[2].x) + W[0].w + Wr2(W[2].w);
    RND(G,H,A,B,C,D,E,F, W[2].z+ K[25]);
    W[2].w += Wr1(W[2].y) + W[1].x + Wr2(W[3].x);
    RND(F,G,H,A,B,C,D,E, W[2].w+ K[26]);
    W[3].x += Wr1(W[2].z) + W[1].y + Wr2(W[3].y);
    RND(E,F,G,H,A,B,C,D, W[3].x+ K[27]);
    W[3].y += Wr1(W[2].w) + W[1].z + Wr2(W[3].z);
    RND(D,E,F,G,H,A,B,C, W[3].y+ K[28]);
    W[3].z += Wr1(W[3].x) + W[1].w + Wr2(W[3].w);
    RND(C,D,E,F,G,H,A,B, W[3].z+ K[29]);
    W[3].w += Wr1(W[3].y) + W[2].x + Wr2(W[0].x);
    RND(B,C,D,E,F,G,H,A, W[3].w+ K[30]);
    W[0].x += Wr1(W[3].z) + W[2].y + Wr2(W[0].y);
    RND(A,B,C,D,E,F,G,H, W[0].x+ K[31]);
    W[0].y += Wr1(W[3].w) + W[2].z + Wr2(W[0].z);
    RND(H,A,B,C,D,E,F,G, W[0].y+ K[32]);
    W[0].z += Wr1(W[0].x) + W[2].w + Wr2(W[0].w);
    RND(G,H,A,B,C,D,E,F, W[0].z+ K[33]);
    W[0].w += Wr1(W[0].y) + W[3].x + Wr2(W[1].x);
    RND(F,G,H,A,B,C,D,E, W[0].w+ K[34]);
    W[1].x += Wr1(W[0].z) + W[3].y + Wr2(W[1].y);
    RND(E,F,G,H,A,B,C,D, W[1].x+ K[35]);
    W[1].y += Wr1(W[0].w) + W[3].z + Wr2(W[1].z);
    RND(D,E,F,G,H,A,B,C, W[1].y+ K[36]);
    W[1].z += Wr1(W[1].x) + W[3].w + Wr2(W[1].w);
    RND(C,D,E,F,G,H,A,B, W[1].z+ K[37]);
    W[1].w += Wr1(W[1].y) + W[0].x + Wr2(W[2].x);
    RND(B,C,D,E,F,G,H,A, W[1].w+ K[38]);
    W[2].x += Wr1(W[1].z) + W[0].y + Wr2(W[2].y);
    RND(A,B,C,D,E,F,G,H, W[2].x+ K[39]);
    W[2].y += Wr1(W[1].w) + W[0].z + Wr2(W[2].z);
    RND(H,A,B,C,D,E,F,G, W[2].y+ K[40]);
    W[2].z += Wr1(W[2].x) + W[0].w + Wr2(W[2].w);
    RND(G,H,A,B,C,D,E,F, W[2].z+ K[41]);
    W[2].w += Wr1(W[2].y) + W[1].x + Wr2(W[3].x);
    RND(F,G,H,A,B,C,D,E, W[2].w+ K[42]);
    W[3].x += Wr1(W[2].z) + W[1].y + Wr2(W[3].y);
    RND(E,F,G,H,A,B,C,D, W[3].x+ K[43]);
    W[3].y += Wr1(W[2].w) + W[1].z + Wr2(W[3].z);
    RND(D,E,F,G,H,A,B,C, W[3].y+ K[44]);
    W[3].z += Wr1(W[3].x) + W[1].w + Wr2(W[3].w);
    RND(C,D,E,F,G,H,A,B, W[3].z+ K[45]);
    W[3].w += Wr1(W[3].y) + W[2].x + Wr2(W[0].x);
    RND(B,C,D,E,F,G,H,A, W[3].w+ K[46]);
    W[0].x += Wr1(W[3].z) + W[2].y + Wr2(W[0].y);
    RND(A,B,C,D,E,F,G,H, W[0].x+ K[47]);
    W[0].y += Wr1(W[3].w) + W[2].z + Wr2(W[0].z);
    RND(H,A,B,C,D,E,F,G, W[0].y+ K[48]);
    W[0].z += Wr1(W[0].x) + W[2].w + Wr2(W[0].w);
    RND(G,H,A,B,C,D,E,F, W[0].z+ K[49]);
    W[0].w += Wr1(W[0].y) + W[3].x + Wr2(W[1].x);
    RND(F,G,H,A,B,C,D,E, W[0].w+ K[50]);
    W[1].x += Wr1(W[0].z) + W[3].y + Wr2(W[1].y);
    RND(E,F,G,H,A,B,C,D, W[1].x+ K[51]);
    W[1].y += Wr1(W[0].w) + W[3].z + Wr2(W[1].z);
    RND(D,E,F,G,H,A,B,C, W[1].y+ K[52]);
    W[1].z += Wr1(W[1].x) + W[3].w + Wr2(W[1].w);
    RND(C,D,E,F,G,H,A,B, W[1].z+ K[53]);
    W[1].w += Wr1(W[1].y) + W[0].x + Wr2(W[2].x);
    RND(B,C,D,E,F,G,H,A, W[1].w+ K[54]);
    W[2].x += Wr1(W[1].z) + W[0].y + Wr2(W[2].y);
    RND(A,B,C,D,E,F,G,H, W[2].x+ K[55]);
    W[2].y += Wr1(W[1].w) + W[0].z + Wr2(W[2].z);
    RND(H,A,B,C,D,E,F,G, W[2].y+ K[56]);
    W[2].z += Wr1(W[2].x) + W[0].w + Wr2(W[2].w);
    RND(G,H,A,B,C,D,E,F, W[2].z+ K[57]);
    W[2].w += Wr1(W[2].y) + W[1].x + Wr2(W[3].x);
    RND(F,G,H,A,B,C,D,E, W[2].w+ K[58]);
    W[3].x += Wr1(W[2].z) + W[1].y + Wr2(W[3].y);
    RND(E,F,G,H,A,B,C,D, W[3].x+ K[59]);
    W[3].y += Wr1(W[2].w) + W[1].z + Wr2(W[3].z);
    RND(D,E,F,G,H,A,B,C, W[3].y+ K[60]);
    W[3].z += Wr1(W[3].x) + W[1].w + Wr2(W[3].w);
    RND(C,D,E,F,G,H,A,B, W[3].z+ K[61]);
    W[3].w += Wr1(W[3].y) + W[2].x + Wr2(W[0].x);
    RND(B,C,D,E,F,G,H,A, W[3].w+ K[62]);
#undef A
#undef B
#undef C
#undef D
#undef E
#undef F
#undef G
#undef H
    *state0 += (uint4)(K[73], K[77], K[78], K[79]);
    *state1 += (uint4)(K[66], K[67], K[80], K[81]);
}
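/*
 * fixedW[] appears to be the fully precomputed message schedule (with the
 * round constants already folded in, i.e. W[t] + K[t]) for the constant
 * final block of the output PBKDF2 step: the 32-bit block index 1, the
 * padding bit and the 1568-bit message length. SHA256_fixed() below simply
 * replays the 64 rounds with these values.
 */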
__constant uint fixedW[64] =
{
    0x428a2f99,0xf1374491,0xb5c0fbcf,0xe9b5dba5,0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5,
    0xd807aa98,0x12835b01,0x243185be,0x550c7dc3,0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf794,
    0xf59b89c2,0x73924787,0x23c6886e,0xa42ca65c,0x15ed3627,0x4d6edcbf,0xe28217fc,0xef02488f,
    0xb707775c,0x0468c23f,0xe7e72b4c,0x49e1f1a2,0x4b99c816,0x926d1570,0xaa0fc072,0xadb36e2c,
    0xad87a3ea,0xbcb1d3a3,0x7b993186,0x562b9420,0xbff3ca0c,0xda4b0c23,0x6cd8711a,0x8f337caa,
    0xc91b1417,0xc359dce1,0xa83253a7,0x3b13c12d,0x9d3d725d,0xd9031a84,0xb1a03340,0x16f58012,
    0xe64fb6a2,0xe84d923a,0xe93a5730,0x09837686,0x078ff753,0x29833341,0xd5de0b7e,0x6948ccf4,
    0xe0a1adbe,0x7c728e11,0x511c78e4,0x315b45bd,0xfca71413,0xea28f96a,0x79703128,0x4e1ef848,
};
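/*
 * SHA256_fixed(): one compression using the precomputed fixedW[] schedule
 * above; equivalent to SHA256() on that constant block, with no schedule
 * expansion needed at run time.
 */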
void SHA256_fixed(uint4*restrict state0,uint4*restrict state1)
{
    uint4 S0 = *state0;
    uint4 S1 = *state1;
#define A S0.x
#define B S0.y
#define C S0.z
#define D S0.w
#define E S1.x
#define F S1.y
#define G S1.z
#define H S1.w
    RND(A,B,C,D,E,F,G,H, fixedW[0]);
    RND(H,A,B,C,D,E,F,G, fixedW[1]);
    RND(G,H,A,B,C,D,E,F, fixedW[2]);
    RND(F,G,H,A,B,C,D,E, fixedW[3]);
    RND(E,F,G,H,A,B,C,D, fixedW[4]);
    RND(D,E,F,G,H,A,B,C, fixedW[5]);
    RND(C,D,E,F,G,H,A,B, fixedW[6]);
    RND(B,C,D,E,F,G,H,A, fixedW[7]);
    RND(A,B,C,D,E,F,G,H, fixedW[8]);
    RND(H,A,B,C,D,E,F,G, fixedW[9]);
    RND(G,H,A,B,C,D,E,F, fixedW[10]);
    RND(F,G,H,A,B,C,D,E, fixedW[11]);
    RND(E,F,G,H,A,B,C,D, fixedW[12]);
    RND(D,E,F,G,H,A,B,C, fixedW[13]);
    RND(C,D,E,F,G,H,A,B, fixedW[14]);
    RND(B,C,D,E,F,G,H,A, fixedW[15]);
    RND(A,B,C,D,E,F,G,H, fixedW[16]);
    RND(H,A,B,C,D,E,F,G, fixedW[17]);
    RND(G,H,A,B,C,D,E,F, fixedW[18]);
    RND(F,G,H,A,B,C,D,E, fixedW[19]);
    RND(E,F,G,H,A,B,C,D, fixedW[20]);
    RND(D,E,F,G,H,A,B,C, fixedW[21]);
    RND(C,D,E,F,G,H,A,B, fixedW[22]);
    RND(B,C,D,E,F,G,H,A, fixedW[23]);
    RND(A,B,C,D,E,F,G,H, fixedW[24]);
    RND(H,A,B,C,D,E,F,G, fixedW[25]);
    RND(G,H,A,B,C,D,E,F, fixedW[26]);
    RND(F,G,H,A,B,C,D,E, fixedW[27]);
    RND(E,F,G,H,A,B,C,D, fixedW[28]);
    RND(D,E,F,G,H,A,B,C, fixedW[29]);
    RND(C,D,E,F,G,H,A,B, fixedW[30]);
    RND(B,C,D,E,F,G,H,A, fixedW[31]);
    RND(A,B,C,D,E,F,G,H, fixedW[32]);
    RND(H,A,B,C,D,E,F,G, fixedW[33]);
    RND(G,H,A,B,C,D,E,F, fixedW[34]);
    RND(F,G,H,A,B,C,D,E, fixedW[35]);
    RND(E,F,G,H,A,B,C,D, fixedW[36]);
    RND(D,E,F,G,H,A,B,C, fixedW[37]);
    RND(C,D,E,F,G,H,A,B, fixedW[38]);
    RND(B,C,D,E,F,G,H,A, fixedW[39]);
    RND(A,B,C,D,E,F,G,H, fixedW[40]);
    RND(H,A,B,C,D,E,F,G, fixedW[41]);
    RND(G,H,A,B,C,D,E,F, fixedW[42]);
    RND(F,G,H,A,B,C,D,E, fixedW[43]);
    RND(E,F,G,H,A,B,C,D, fixedW[44]);
    RND(D,E,F,G,H,A,B,C, fixedW[45]);
    RND(C,D,E,F,G,H,A,B, fixedW[46]);
    RND(B,C,D,E,F,G,H,A, fixedW[47]);
    RND(A,B,C,D,E,F,G,H, fixedW[48]);
    RND(H,A,B,C,D,E,F,G, fixedW[49]);
    RND(G,H,A,B,C,D,E,F, fixedW[50]);
    RND(F,G,H,A,B,C,D,E, fixedW[51]);
    RND(E,F,G,H,A,B,C,D, fixedW[52]);
    RND(D,E,F,G,H,A,B,C, fixedW[53]);
    RND(C,D,E,F,G,H,A,B, fixedW[54]);
    RND(B,C,D,E,F,G,H,A, fixedW[55]);
    RND(A,B,C,D,E,F,G,H, fixedW[56]);
    RND(H,A,B,C,D,E,F,G, fixedW[57]);
    RND(G,H,A,B,C,D,E,F, fixedW[58]);
    RND(F,G,H,A,B,C,D,E, fixedW[59]);
    RND(E,F,G,H,A,B,C,D, fixedW[60]);
    RND(D,E,F,G,H,A,B,C, fixedW[61]);
    RND(C,D,E,F,G,H,A,B, fixedW[62]);
    RND(B,C,D,E,F,G,H,A, fixedW[63]);
#undef A
#undef B
#undef C
#undef D
#undef E
#undef F
#undef G
#undef H
    *state0 += S0;
    *state1 += S1;
}
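/*
 * shittify() rearranges the 8 uint4 vectors of X (two 64-byte Salsa20 blocks)
 * from the linear word order produced by PBKDF2 into the "diagonal" layout
 * that lets salsa() below perform each quarter-round with whole-vector
 * operations, and byte-swaps every word (SHA-256 state words are big-endian,
 * Salsa20 operates on little-endian words).
 */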
void shittify(uint4 B[8])
{
    uint4 tmp[4];
    tmp[0] = (uint4)(B[1].x,B[2].y,B[3].z,B[0].w);
    tmp[1] = (uint4)(B[2].x,B[3].y,B[0].z,B[1].w);
    tmp[2] = (uint4)(B[3].x,B[0].y,B[1].z,B[2].w);
    tmp[3] = (uint4)(B[0].x,B[1].y,B[2].z,B[3].w);
#pragma unroll
    for(uint i=0; i<4; ++i)
        B[i] = EndianSwap(tmp[i]);
    tmp[0] = (uint4)(B[5].x,B[6].y,B[7].z,B[4].w);
    tmp[1] = (uint4)(B[6].x,B[7].y,B[4].z,B[5].w);
    tmp[2] = (uint4)(B[7].x,B[4].y,B[5].z,B[6].w);
    tmp[3] = (uint4)(B[4].x,B[5].y,B[6].z,B[7].w);
#pragma unroll
    for(uint i=0; i<4; ++i)
        B[i+4] = EndianSwap(tmp[i]);
}
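/* unshittify() is the inverse: undo the diagonal shuffle and the byte swap. */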
void unshittify(uint4 B[8])
{
    uint4 tmp[4];
    tmp[0] = (uint4)(B[3].x,B[2].y,B[1].z,B[0].w);
    tmp[1] = (uint4)(B[0].x,B[3].y,B[2].z,B[1].w);
    tmp[2] = (uint4)(B[1].x,B[0].y,B[3].z,B[2].w);
    tmp[3] = (uint4)(B[2].x,B[1].y,B[0].z,B[3].w);
#pragma unroll
    for(uint i=0; i<4; ++i)
        B[i] = EndianSwap(tmp[i]);
    tmp[0] = (uint4)(B[7].x,B[6].y,B[5].z,B[4].w);
    tmp[1] = (uint4)(B[4].x,B[7].y,B[6].z,B[5].w);
    tmp[2] = (uint4)(B[5].x,B[4].y,B[7].z,B[6].w);
    tmp[3] = (uint4)(B[6].x,B[5].y,B[4].z,B[7].w);
#pragma unroll
    for(uint i=0; i<4; ++i)
        B[i+4] = EndianSwap(tmp[i]);
}
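/*
 * salsa() is scrypt's BlockMix for r = 1 on the shuffled layout:
 *   B[0..3] = Salsa20/8(B[0..3] ^ B[4..7])
 *   B[4..7] = Salsa20/8(B[4..7] ^ B[0..3]')
 * Each 4-iteration loop is four column+row double-rounds (i.e. the
 * Salsa20/8 core); the trailing += is the Salsa20 feed-forward.
 */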
void salsa(uint4 B[8])
{
    uint4 w[4];
#pragma unroll
    for(uint i=0; i<4; ++i)
        w[i] = (B[i]^=B[i+4]);
#pragma unroll
    for(uint i=0; i<4; ++i)
    {
        w[0] ^= rotl(w[3] +w[2] , 7U);
        w[1] ^= rotl(w[0] +w[3] , 9U);
        w[2] ^= rotl(w[1] +w[0] ,13U);
        w[3] ^= rotl(w[2] +w[1] ,18U);
        w[2] ^= rotl(w[3].wxyz+w[0].zwxy, 7U);
        w[1] ^= rotl(w[2].wxyz+w[3].zwxy, 9U);
        w[0] ^= rotl(w[1].wxyz+w[2].zwxy,13U);
        w[3] ^= rotl(w[0].wxyz+w[1].zwxy,18U);
    }
#pragma unroll
    for(uint i=0; i<4; ++i)
        w[i] = (B[i+4]^=(B[i]+=w[i]));
#pragma unroll
    for(uint i=0; i<4; ++i)
    {
        w[0] ^= rotl(w[3] +w[2] , 7U);
        w[1] ^= rotl(w[0] +w[3] , 9U);
        w[2] ^= rotl(w[1] +w[0] ,13U);
        w[3] ^= rotl(w[2] +w[1] ,18U);
        w[2] ^= rotl(w[3].wxyz+w[0].zwxy, 7U);
        w[1] ^= rotl(w[2].wxyz+w[3].zwxy, 9U);
        w[0] ^= rotl(w[1].wxyz+w[2].zwxy,13U);
        w[3] ^= rotl(w[0].wxyz+w[1].zwxy,18U);
    }
#pragma unroll
    for(uint i=0; i<4; ++i)
        B[i+4] += w[i];
}
#define Coord(x,y,z) x+y*(x ## SIZE)+z*(y ## SIZE)*(x ## SIZE)
#define CO Coord(z,x,y)
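/*
 * scrypt_core(): ROMix with N = 1024. The first loop writes every
 * LOOKUP_GAP-th intermediate state into the global scratchpad and advances
 * X by LOOKUP_GAP salsa() calls (the #if block handles the remainder when
 * 1024 is not a multiple of LOOKUP_GAP). The second loop does the 1024
 * random reads: j is taken from the low 10 bits of X[7].x (K[85] = 0x3FF),
 * the nearest stored state is reloaded and the skipped salsa() steps are
 * regenerated before XORing into X. The CO macro indexes the scratchpad as
 * [y][x][z] with x = gid % CONCURRENT_THREADS, interleaving the pads of
 * concurrently running work-items.
 */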
void scrypt_core(uint4 X[8], __global uint4*restrict lookup)
{
    shittify(X);
    const uint zSIZE = 8;
    const uint ySIZE = (1024/LOOKUP_GAP+(1024%LOOKUP_GAP>0));
    const uint xSIZE = CONCURRENT_THREADS;
    uint x = get_global_id(0)%xSIZE;
    for(uint y=0; y<1024/LOOKUP_GAP; ++y)
    {
#pragma unroll
        for(uint z=0; z<zSIZE; ++z)
            lookup[CO] = X[z];
        for(uint i=0; i<LOOKUP_GAP; ++i)
            salsa(X);
    }
#if (LOOKUP_GAP != 1) && (LOOKUP_GAP != 2) && (LOOKUP_GAP != 4) && (LOOKUP_GAP != 8)
    {
        uint y = (1024/LOOKUP_GAP);
#pragma unroll
        for(uint z=0; z<zSIZE; ++z)
            lookup[CO] = X[z];
        for(uint i=0; i<1024%LOOKUP_GAP; ++i)
            salsa(X);
    }
#endif
    for (uint i=0; i<1024; ++i)
    {
        uint4 V[8];
        uint j = X[7].x & K[85];
        uint y = (j/LOOKUP_GAP);
#pragma unroll
        for(uint z=0; z<zSIZE; ++z)
            V[z] = lookup[CO];
#if (LOOKUP_GAP == 1)
#elif (LOOKUP_GAP == 2)
        if (j&1)
            salsa(V);
#else
        uint val = j%LOOKUP_GAP;
        for (uint z=0; z<val; ++z)
            salsa(V);
#endif
#pragma unroll
        for(uint z=0; z<zSIZE; ++z)
            X[z] ^= V[z];
        salsa(X);
    }
    unshittify(X);
}
#define SCRYPT_FOUND (0xFF)
#define SETFOUND(Xnonce) output[output[SCRYPT_FOUND]++] = Xnonce
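/*
 * search(): the top-level kernel. Each work-item substitutes its global id
 * for the nonce (last word of the 80-byte block header held in input[0..4]),
 * derives the 128-byte scrypt buffer X with PBKDF2-HMAC-SHA256, runs
 * scrypt_core(), then feeds X back through PBKDF2 to produce the final hash.
 * Only the last 32-bit word of that hash is byte-swapped and compared against
 * the share target; winning nonces are appended to output[] via SETFOUND,
 * which uses output[SCRYPT_FOUND] (slot 0xFF) as a running counter.
 */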
__attribute__((reqd_work_group_size(WORKSIZE, 1, 1)))
__kernel void search(__global const uint4 * restrict input,
    volatile __global uint*restrict output, __global uint4*restrict padcache,
    const uint4 midstate0, const uint4 midstate16, const uint target)
{
    uint gid = get_global_id(0);
    uint4 X[8];
    uint4 tstate0, tstate1, ostate0, ostate1, tmp0, tmp1;
    uint4 data = (uint4)(input[4].x,input[4].y,input[4].z,gid);
    uint4 pad0 = midstate0, pad1 = midstate16;
    SHA256(&pad0,&pad1, data, (uint4)(K[84],0,0,0), (uint4)(0,0,0,0), (uint4)(0,0,0, K[86]));
    SHA256_fresh(&ostate0,&ostate1, pad0^ K[82], pad1^ K[82], K[82], K[82]);
    SHA256_fresh(&tstate0,&tstate1, pad0^ K[83], pad1^ K[83], K[83], K[83]);
    tmp0 = tstate0;
    tmp1 = tstate1;
    SHA256(&tstate0, &tstate1, input[0],input[1],input[2],input[3]);
#pragma unroll
    for (uint i=0; i<4; i++)
    {
        pad0 = tstate0;
        pad1 = tstate1;
        X[i*2 ] = ostate0;
        X[i*2+1] = ostate1;
        SHA256(&pad0,&pad1, data, (uint4)(i+1,K[84],0,0), (uint4)(0,0,0,0), (uint4)(0,0,0, K[87]));
        SHA256(X+i*2,X+i*2+1, pad0, pad1, (uint4)(K[84], 0U, 0U, 0U), (uint4)(0U, 0U, 0U, K[88]));
    }
    scrypt_core(X,padcache);
    SHA256(&tmp0,&tmp1, X[0], X[1], X[2], X[3]);
    SHA256(&tmp0,&tmp1, X[4], X[5], X[6], X[7]);
    SHA256_fixed(&tmp0,&tmp1);
    SHA256(&ostate0,&ostate1, tmp0, tmp1, (uint4)(K[84], 0U, 0U, 0U), (uint4)(0U, 0U, 0U, K[88]));
    bool result = (EndianSwap(ostate1.w) <= target);
    if (result)
        SETFOUND(gid);
}