nv84_crypt.c

/*
 * Copyright 2010 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include "drmP.h"
#include "nouveau_drv.h"
#include "nouveau_util.h"
#include "nouveau_vm.h"
#include "nouveau_ramht.h"

struct nv84_crypt_engine {
        struct nouveau_exec_engine base;
};
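
/* Per-channel context setup.  A 256-byte context object is allocated and the
 * crypt-context words in the channel's instance block (0xa0..0xb4) are pointed
 * at it; per nouveau's reverse-engineered layout these appear to describe the
 * context's flags, limit and base address.
 */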
static int
nv84_crypt_context_new(struct nouveau_channel *chan, int engine)
{
        struct drm_device *dev = chan->dev;
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_gpuobj *ramin = chan->ramin;
        struct nouveau_gpuobj *ctx;
        int ret;

        NV_DEBUG(dev, "ch%d\n", chan->id);

        ret = nouveau_gpuobj_new(dev, chan, 256, 0, NVOBJ_FLAG_ZERO_ALLOC |
                                 NVOBJ_FLAG_ZERO_FREE, &ctx);
        if (ret)
                return ret;

        nv_wo32(ramin, 0xa0, 0x00190000);
        nv_wo32(ramin, 0xa4, ctx->vinst + ctx->size - 1);
        nv_wo32(ramin, 0xa8, ctx->vinst);
        nv_wo32(ramin, 0xac, 0);
        nv_wo32(ramin, 0xb0, 0);
        nv_wo32(ramin, 0xb4, 0);
        dev_priv->engine.instmem.flush(dev);

        atomic_inc(&chan->vm->engref[engine]);
        chan->engctx[engine] = ctx;
        return 0;
}
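
/* Per-channel context teardown.  Before the context object is released, the
 * engine's context pointers (0x102188/0x10218c) are cleared if they still
 * reference this channel, with context fetching toggled via 0x10200c around
 * the update; register roles per nouveau's reverse engineering.
 */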
static void
nv84_crypt_context_del(struct nouveau_channel *chan, int engine)
{
        struct nouveau_gpuobj *ctx = chan->engctx[engine];
        struct drm_device *dev = chan->dev;
        u32 inst;

        inst  = (chan->ramin->vinst >> 12);
        inst |= 0x80000000;

        /* mark context as invalid if still on the hardware; not
         * doing this causes issues the next time PCRYPT is used,
         * unsurprisingly :)
         */
        nv_wr32(dev, 0x10200c, 0x00000000);
        if (nv_rd32(dev, 0x102188) == inst)
                nv_mask(dev, 0x102188, 0x80000000, 0x00000000);
        if (nv_rd32(dev, 0x10218c) == inst)
                nv_mask(dev, 0x10218c, 0x80000000, 0x00000000);
        nv_wr32(dev, 0x10200c, 0x00000010);

        nouveau_gpuobj_ref(NULL, &ctx);

        atomic_dec(&chan->vm->engref[engine]);
        chan->engctx[engine] = NULL;
}
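
/* Create a software object of the requested class, tag it as belonging to the
 * crypt engine (engine 5 in nouveau's numbering) and insert it into the
 * channel's RAMHT so the handle can be resolved by the hardware.
 */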
static int
nv84_crypt_object_new(struct nouveau_channel *chan, int engine,
                      u32 handle, u16 class)
{
        struct drm_device *dev = chan->dev;
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_gpuobj *obj = NULL;
        int ret;

        ret = nouveau_gpuobj_new(dev, chan, 16, 16, NVOBJ_FLAG_ZERO_FREE, &obj);
        if (ret)
                return ret;
        obj->engine = 5;
        obj->class  = class;

        nv_wo32(obj, 0x00, class);
        dev_priv->engine.instmem.flush(dev);

        ret = nouveau_ramht_insert(chan, handle, obj);
        nouveau_gpuobj_ref(NULL, &obj);
        return ret;
}
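
/* Flush the VM TLB for PCRYPT (VM engine 0x0a) via the common NV50 helper. */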
static void
nv84_crypt_tlb_flush(struct drm_device *dev, int engine)
{
        nv50_vm_flush_engine(dev, 0x0a);
}
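
/* Names for the PCRYPT interrupt status bits reported in 0x102130. */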
static struct nouveau_bitfield nv84_crypt_intr[] = {
        { 0x00000001, "INVALID_STATE" },
        { 0x00000002, "ILLEGAL_MTHD" },
        { 0x00000004, "ILLEGAL_CLASS" },
        { 0x00000080, "QUERY" },
        { 0x00000100, "FAULT" },
        {}
};
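
/* PCRYPT interrupt handler: read the status, method and data registers and
 * the active context instance, print a rate-limited report, then acknowledge
 * the interrupt.  The final nv50_fb_vm_trap() call checks for and reports any
 * pending VM fault.
 */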
static void
nv84_crypt_isr(struct drm_device *dev)
{
        u32 stat = nv_rd32(dev, 0x102130);
        u32 mthd = nv_rd32(dev, 0x102190);
        u32 data = nv_rd32(dev, 0x102194);
        u64 inst = (u64)(nv_rd32(dev, 0x102188) & 0x7fffffff) << 12;
        int show = nouveau_ratelimit();
        int chid = nv50_graph_isr_chid(dev, inst);

        if (show) {
                NV_INFO(dev, "PCRYPT:");
                nouveau_bitfield_print(nv84_crypt_intr, stat);
                printk(KERN_CONT " ch %d (0x%010llx) mthd 0x%04x data 0x%08x\n",
                       chid, inst, mthd, data);
        }

        nv_wr32(dev, 0x102130, stat);
        nv_wr32(dev, 0x10200c, 0x10);

        nv50_fb_vm_trap(dev, show);
}
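
/* Engine shutdown: mask all PCRYPT interrupts (0x102140 appears to be the
 * interrupt enable mask, judging by its use in nv84_crypt_init()).
 */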
static int
nv84_crypt_fini(struct drm_device *dev, int engine, bool suspend)
{
        nv_wr32(dev, 0x102140, 0x00000000);
        return 0;
}
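
/* Engine initialisation: pulse the PCRYPT bit (bit 14) in the PMC enable
 * register to reset the unit, acknowledge any stale interrupts, unmask them
 * (0xffffffbf leaves bit 6 masked) and enable context fetching.
 */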
static int
nv84_crypt_init(struct drm_device *dev, int engine)
{
        nv_mask(dev, 0x000200, 0x00004000, 0x00000000);
        nv_mask(dev, 0x000200, 0x00004000, 0x00004000);

        nv_wr32(dev, 0x102130, 0xffffffff);
        nv_wr32(dev, 0x102140, 0xffffffbf);

        nv_wr32(dev, 0x10200c, 0x00000010);
        return 0;
}
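
/* Tear down the engine: deregister it from the core, release the PCRYPT
 * interrupt handler (IRQ source 14) and free the wrapper structure.
 */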
static void
nv84_crypt_destroy(struct drm_device *dev, int engine)
{
        struct nv84_crypt_engine *pcrypt = nv_engine(dev, engine);

        NVOBJ_ENGINE_DEL(dev, CRYPT);

        nouveau_irq_unregister(dev, 14);
        kfree(pcrypt);
}
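
/* Create and register the PCRYPT engine: fill in the exec-engine hooks,
 * register interrupt source 14 and expose the NV84 crypt object class
 * (0x74c1).
 */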
int
nv84_crypt_create(struct drm_device *dev)
{
        struct nv84_crypt_engine *pcrypt;

        pcrypt = kzalloc(sizeof(*pcrypt), GFP_KERNEL);
        if (!pcrypt)
                return -ENOMEM;

        pcrypt->base.destroy = nv84_crypt_destroy;
        pcrypt->base.init = nv84_crypt_init;
        pcrypt->base.fini = nv84_crypt_fini;
        pcrypt->base.context_new = nv84_crypt_context_new;
        pcrypt->base.context_del = nv84_crypt_context_del;
        pcrypt->base.object_new = nv84_crypt_object_new;
        pcrypt->base.tlb_flush = nv84_crypt_tlb_flush;

        nouveau_irq_register(dev, 14, nv84_crypt_isr);

        NVOBJ_ENGINE_ADD(dev, CRYPT, &pcrypt->base);
        NVOBJ_CLASS (dev, 0x74c1, CRYPT);
        return 0;
}