crypt_s390.h

/*
 * Cryptographic API.
 *
 * Support for s390 cryptographic instructions.
 *
 *   Copyright (C) 2003 IBM Deutschland GmbH, IBM Corporation
 *   Author(s): Thomas Spatzier (tspat@de.ibm.com)
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ARCH_S390_CRYPT_S390_H
#define _CRYPTO_ARCH_S390_CRYPT_S390_H

#include <asm/errno.h>

#define CRYPT_S390_OP_MASK 0xFF00
#define CRYPT_S390_FUNC_MASK 0x00FF

/* s390 cryptographic operations */
enum crypt_s390_operations {
	CRYPT_S390_KM = 0x0100,
	CRYPT_S390_KMC = 0x0200,
	CRYPT_S390_KIMD = 0x0300,
	CRYPT_S390_KLMD = 0x0400,
	CRYPT_S390_KMAC = 0x0500
};

/* function codes for KM (CIPHER MESSAGE) instruction
 * 0x80 is the decipher modifier bit
 */
enum crypt_s390_km_func {
	KM_QUERY = CRYPT_S390_KM | 0,
	KM_DEA_ENCRYPT = CRYPT_S390_KM | 1,
	KM_DEA_DECRYPT = CRYPT_S390_KM | 1 | 0x80,
	KM_TDEA_128_ENCRYPT = CRYPT_S390_KM | 2,
	KM_TDEA_128_DECRYPT = CRYPT_S390_KM | 2 | 0x80,
	KM_TDEA_192_ENCRYPT = CRYPT_S390_KM | 3,
	KM_TDEA_192_DECRYPT = CRYPT_S390_KM | 3 | 0x80,
};

/* function codes for KMC (CIPHER MESSAGE WITH CHAINING)
 * instruction
 */
enum crypt_s390_kmc_func {
	KMC_QUERY = CRYPT_S390_KMC | 0,
	KMC_DEA_ENCRYPT = CRYPT_S390_KMC | 1,
	KMC_DEA_DECRYPT = CRYPT_S390_KMC | 1 | 0x80,
	KMC_TDEA_128_ENCRYPT = CRYPT_S390_KMC | 2,
	KMC_TDEA_128_DECRYPT = CRYPT_S390_KMC | 2 | 0x80,
	KMC_TDEA_192_ENCRYPT = CRYPT_S390_KMC | 3,
	KMC_TDEA_192_DECRYPT = CRYPT_S390_KMC | 3 | 0x80,
};

/* function codes for KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST)
 * instruction
 */
enum crypt_s390_kimd_func {
	KIMD_QUERY = CRYPT_S390_KIMD | 0,
	KIMD_SHA_1 = CRYPT_S390_KIMD | 1,
	KIMD_SHA_256 = CRYPT_S390_KIMD | 2,
};

/* function codes for KLMD (COMPUTE LAST MESSAGE DIGEST)
 * instruction
 */
enum crypt_s390_klmd_func {
	KLMD_QUERY = CRYPT_S390_KLMD | 0,
	KLMD_SHA_1 = CRYPT_S390_KLMD | 1,
	KLMD_SHA_256 = CRYPT_S390_KLMD | 2,
};

/* function codes for KMAC (COMPUTE MESSAGE AUTHENTICATION CODE)
 * instruction
 */
enum crypt_s390_kmac_func {
	KMAC_QUERY = CRYPT_S390_KMAC | 0,
	KMAC_DEA = CRYPT_S390_KMAC | 1,
	KMAC_TDEA_128 = CRYPT_S390_KMAC | 2,
	KMAC_TDEA_192 = CRYPT_S390_KMAC | 3
};

/* status word for s390 crypto instructions' QUERY functions */
struct crypt_s390_query_status {
	u64 high;
	u64 low;
};

/*
 * Standard fixup and ex_table sections for crypt_s390 inline functions.
 * label 0: the s390 crypto operation
 * label 1: just after label 0, to catch the illegal operation exception
 *          (unsupported model)
 * label 6: the return point after fixup
 * label 7: set error value if exception _in_ crypto operation
 * label 8: set error value if illegal operation exception
 * %0 is the variable to receive the error code
 * [e1] and [e2] are the error code values
 */
#ifndef __s390x__
#define __crypt_s390_fixup \
	".section .fixup,\"ax\" \n" \
	"7: lhi %0,%h[e1] \n" \
	" bras 1,9f \n" \
	" .long 6b \n" \
	"8: lhi %0,%h[e2] \n" \
	" bras 1,9f \n" \
	" .long 6b \n" \
	"9: l 1,0(1) \n" \
	" br 1 \n" \
	".previous \n" \
	".section __ex_table,\"a\" \n" \
	" .align 4 \n" \
	" .long 0b,7b \n" \
	" .long 1b,8b \n" \
	".previous"
#else /* __s390x__ */
#define __crypt_s390_fixup \
	".section .fixup,\"ax\" \n" \
	"7: lhi %0,%h[e1] \n" \
	" jg 6b \n" \
	"8: lhi %0,%h[e2] \n" \
	" jg 6b \n" \
	".previous\n" \
	".section __ex_table,\"a\" \n" \
	" .align 8 \n" \
	" .quad 0b,7b \n" \
	" .quad 1b,8b \n" \
	".previous"
#endif /* __s390x__ */

/*
 * Standard code for setting the result of s390 crypto instructions.
 * %0: the register which will receive the result
 * [result]: the register containing the result (e.g. second operand length
 *           to compute number of processed bytes).
 */
#ifndef __s390x__
#define __crypt_s390_set_result \
	" lr %0,%[result] \n"
#else /* __s390x__ */
#define __crypt_s390_set_result \
	" lgr %0,%[result] \n"
#endif

/*
 * Executes the KM (CIPHER MESSAGE) operation of the CPU.
 * @param func: the function code passed to KM; see crypt_s390_km_func
 * @param param: address of parameter block; see POP for details on each func
 * @param dest: address of destination memory area
 * @param src: address of source memory area
 * @param src_len: length of src operand in bytes
 * @returns < zero for failure, 0 for the query func, number of processed bytes
 *          for encryption/decryption funcs
 */
static inline int
crypt_s390_km(long func, void* param, u8* dest, const u8* src, long src_len)
{
	register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
	register void* __param asm("1") = param;
	register u8* __dest asm("4") = dest;
	register const u8* __src asm("2") = src;
	register long __src_len asm("3") = src_len;
	int ret;

	ret = 0;
	__asm__ __volatile__ (
		"0: .insn rre,0xB92E0000,%1,%2 \n" /* KM opcode */
		"1: brc 1,0b \n" /* handle partial completion */
		__crypt_s390_set_result
		"6: \n"
		__crypt_s390_fixup
		: "+d" (ret), "+a" (__dest), "+a" (__src),
		  [result] "+d" (__src_len)
		: [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
		  "a" (__param)
		: "cc", "memory"
	);
	if (ret >= 0 && func & CRYPT_S390_FUNC_MASK) {
		ret = src_len - ret;
	}
	return ret;
}
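
/*
 * Illustrative sketch, not part of the original header: a minimal KM
 * caller for single-DES in ECB mode. It only uses crypt_s390_km() as
 * defined above; the assumption that the KM_DEA parameter block is
 * simply the 8 byte key follows the Principles of Operation, and the
 * function name is a made-up example.
 */
static inline int
crypt_s390_km_dea_encrypt_example(u8 *key, u8 *dest, const u8 *src,
				  long src_len)
{
	/* src_len must be a multiple of the 8 byte DES block size;
	 * the return value is < 0 on error, else bytes processed. */
	return crypt_s390_km(KM_DEA_ENCRYPT, key, dest, src, src_len);
}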

/*
 * Executes the KMC (CIPHER MESSAGE WITH CHAINING) operation of the CPU.
 * @param func: the function code passed to KMC; see crypt_s390_kmc_func
 * @param param: address of parameter block; see POP for details on each func
 * @param dest: address of destination memory area
 * @param src: address of source memory area
 * @param src_len: length of src operand in bytes
 * @returns < zero for failure, 0 for the query func, number of processed bytes
 *          for encryption/decryption funcs
 */
static inline int
crypt_s390_kmc(long func, void* param, u8* dest, const u8* src, long src_len)
{
	register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
	register void* __param asm("1") = param;
	register u8* __dest asm("4") = dest;
	register const u8* __src asm("2") = src;
	register long __src_len asm("3") = src_len;
	int ret;

	ret = 0;
	__asm__ __volatile__ (
		"0: .insn rre,0xB92F0000,%1,%2 \n" /* KMC opcode */
		"1: brc 1,0b \n" /* handle partial completion */
		__crypt_s390_set_result
		"6: \n"
		__crypt_s390_fixup
		: "+d" (ret), "+a" (__dest), "+a" (__src),
		  [result] "+d" (__src_len)
		: [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
		  "a" (__param)
		: "cc", "memory"
	);
	if (ret >= 0 && func & CRYPT_S390_FUNC_MASK) {
		ret = src_len - ret;
	}
	return ret;
}
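
/*
 * Illustrative sketch, not part of the original header: KMC performs
 * CBC chaining in a single instruction. The layout below (8 byte
 * chaining value followed by the 8 byte key for KMC_DEA) is an
 * assumption based on the Principles of Operation; the struct and
 * function names are made-up examples.
 */
struct crypt_s390_kmc_dea_param_example {
	u8 iv[8];	/* chaining value; updated by the instruction */
	u8 key[8];	/* DES key */
};

static inline int
crypt_s390_kmc_dea_encrypt_example(struct crypt_s390_kmc_dea_param_example *p,
				   u8 *dest, const u8 *src, long src_len)
{
	/* src_len must be a multiple of 8; after the call p->iv holds
	 * the chaining value for a follow-on call on the same stream. */
	return crypt_s390_kmc(KMC_DEA_ENCRYPT, p, dest, src, src_len);
}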

/*
 * Executes the KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST) operation
 * of the CPU.
 * @param func: the function code passed to KIMD; see crypt_s390_kimd_func
 * @param param: address of parameter block; see POP for details on each func
 * @param src: address of source memory area
 * @param src_len: length of src operand in bytes
 * @returns < zero for failure, 0 for the query func, number of processed bytes
 *          for digest funcs
 */
static inline int
crypt_s390_kimd(long func, void* param, const u8* src, long src_len)
{
	register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
	register void* __param asm("1") = param;
	register const u8* __src asm("2") = src;
	register long __src_len asm("3") = src_len;
	int ret;

	ret = 0;
	__asm__ __volatile__ (
		"0: .insn rre,0xB93E0000,%1,%1 \n" /* KIMD opcode */
		"1: brc 1,0b \n" /* handle partial completion */
		__crypt_s390_set_result
		"6: \n"
		__crypt_s390_fixup
		: "+d" (ret), "+a" (__src), [result] "+d" (__src_len)
		: [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
		  "a" (__param)
		: "cc", "memory"
	);
	if (ret >= 0 && (func & CRYPT_S390_FUNC_MASK)) {
		ret = src_len - ret;
	}
	return ret;
}

/*
 * Executes the KLMD (COMPUTE LAST MESSAGE DIGEST) operation of the CPU.
 * @param func: the function code passed to KLMD; see crypt_s390_klmd_func
 * @param param: address of parameter block; see POP for details on each func
 * @param src: address of source memory area
 * @param src_len: length of src operand in bytes
 * @returns < zero for failure, 0 for the query func, number of processed bytes
 *          for digest funcs
 */
static inline int
crypt_s390_klmd(long func, void* param, const u8* src, long src_len)
{
	register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
	register void* __param asm("1") = param;
	register const u8* __src asm("2") = src;
	register long __src_len asm("3") = src_len;
	int ret;

	ret = 0;
	__asm__ __volatile__ (
		"0: .insn rre,0xB93F0000,%1,%1 \n" /* KLMD opcode */
		"1: brc 1,0b \n" /* handle partial completion */
		__crypt_s390_set_result
		"6: \n"
		__crypt_s390_fixup
		: "+d" (ret), "+a" (__src), [result] "+d" (__src_len)
		: [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
		  "a" (__param)
		: "cc", "memory"
	);
	if (ret >= 0 && func & CRYPT_S390_FUNC_MASK) {
		ret = src_len - ret;
	}
	return ret;
}
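
/*
 * Illustrative sketch, not part of the original header: computing a
 * SHA-1 digest with KIMD for the complete 64 byte blocks and KLMD for
 * the padded tail. The parameter block layout (20 byte H value plus a
 * 64 bit message bit length consumed by KLMD) and the initial H
 * constants are assumptions based on the Principles of Operation;
 * all names are made-up examples.
 */
struct crypt_s390_sha1_param_example {
	u32 h[5];	/* intermediate hash value H0..H4 */
	u64 mbl;	/* message length in bits, used by KLMD */
};

static inline int
crypt_s390_sha1_digest_example(struct crypt_s390_sha1_param_example *p,
			       const u8 *data, u64 len)
{
	u64 head = len & ~(u64) 63;	/* bytes forming complete blocks */
	int ret;

	/* standard SHA-1 initial hash value */
	p->h[0] = 0x67452301;
	p->h[1] = 0xEFCDAB89;
	p->h[2] = 0x98BADCFE;
	p->h[3] = 0x10325476;
	p->h[4] = 0xC3D2E1F0;
	p->mbl = len * 8;

	if (head) {
		ret = crypt_s390_kimd(KIMD_SHA_1, p, data, head);
		if (ret < 0)
			return ret;
	}
	/* KLMD pads the remainder and leaves the digest in p->h */
	return crypt_s390_klmd(KLMD_SHA_1, p, data + head, len - head);
}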

/*
 * Executes the KMAC (COMPUTE MESSAGE AUTHENTICATION CODE) operation
 * of the CPU.
 * @param func: the function code passed to KMAC; see crypt_s390_kmac_func
 * @param param: address of parameter block; see POP for details on each func
 * @param src: address of source memory area
 * @param src_len: length of src operand in bytes
 * @returns < zero for failure, 0 for the query func, number of processed bytes
 *          for MAC funcs
 */
static inline int
crypt_s390_kmac(long func, void* param, const u8* src, long src_len)
{
	register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
	register void* __param asm("1") = param;
	register const u8* __src asm("2") = src;
	register long __src_len asm("3") = src_len;
	int ret;

	ret = 0;
	__asm__ __volatile__ (
		"0: .insn rre,0xB91E0000,%5,%5 \n" /* KMAC opcode */
		"1: brc 1,0b \n" /* handle partial completion */
		__crypt_s390_set_result
		"6: \n"
		__crypt_s390_fixup
		: "+d" (ret), "+a" (__src), [result] "+d" (__src_len)
		: [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
		  "a" (__param)
		: "cc", "memory"
	);
	if (ret >= 0 && func & CRYPT_S390_FUNC_MASK) {
		ret = src_len - ret;
	}
	return ret;
}
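
/*
 * Illustrative sketch, not part of the original header: a DES based
 * MAC via KMAC. The parameter block layout (8 byte chaining value that
 * receives the MAC, followed by the 8 byte key) is an assumption based
 * on the Principles of Operation; names are made-up examples.
 */
struct crypt_s390_kmac_dea_param_example {
	u8 icv[8];	/* in: initial chaining value, out: the MAC */
	u8 key[8];	/* DES key */
};

static inline int
crypt_s390_kmac_dea_example(struct crypt_s390_kmac_dea_param_example *p,
			    const u8 *src, long src_len)
{
	/* KMAC does not pad: src_len must be a multiple of 8 bytes */
	return crypt_s390_kmac(KMAC_DEA, p, src, src_len);
}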

/**
 * Tests if a specific crypto function is implemented on the machine.
 * @param func: the function code of the specific function; a plain operation
 *              code (function 0) tests the operation in general
 * @return 1 if func available; 0 if func or op in general not available
 */
static inline int
crypt_s390_func_available(int func)
{
	int ret;
	struct crypt_s390_query_status status = {
		.high = 0,
		.low = 0
	};

	switch (func & CRYPT_S390_OP_MASK) {
	case CRYPT_S390_KM:
		ret = crypt_s390_km(KM_QUERY, &status, NULL, NULL, 0);
		break;
	case CRYPT_S390_KMC:
		ret = crypt_s390_kmc(KMC_QUERY, &status, NULL, NULL, 0);
		break;
	case CRYPT_S390_KIMD:
		ret = crypt_s390_kimd(KIMD_QUERY, &status, NULL, 0);
		break;
	case CRYPT_S390_KLMD:
		ret = crypt_s390_klmd(KLMD_QUERY, &status, NULL, 0);
		break;
	case CRYPT_S390_KMAC:
		ret = crypt_s390_kmac(KMAC_QUERY, &status, NULL, 0);
		break;
	default:
		ret = 0;
		return ret;
	}
	if (ret >= 0) {
		func &= CRYPT_S390_FUNC_MASK;
		func &= 0x7f;	/* mask the modifier bit */
		if (func < 64) {
			ret = (status.high >> (64 - func - 1)) & 0x1;
		} else {
			ret = (status.low >> (128 - func - 1)) & 0x1;
		}
	} else {
		ret = 0;
	}
	return ret;
}
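
/*
 * Illustrative sketch, not part of the original header: a probe helper
 * in the style a cipher module might use, enabling DES support only
 * when the corresponding KM function is present. The function name is
 * a made-up example.
 */
static inline int
crypt_s390_probe_des_example(void)
{
	/* The modifier bit is masked off by crypt_s390_func_available(),
	 * so testing the encrypt code covers decrypt as well. */
	if (!crypt_s390_func_available(KM_DEA_ENCRYPT))
		return -ENOSYS;
	return 0;
}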

#endif // _CRYPTO_ARCH_S390_CRYPT_S390_H