#ifndef _INTEL_RINGBUFFER_H_
#define _INTEL_RINGBUFFER_H_

struct intel_hw_status_page {
	void		*page_addr;
	struct		drm_gem_object *obj;
};

#define I915_READ_TAIL(ring) I915_READ(RING_TAIL(ring->mmio_base))
#define I915_WRITE_TAIL(ring, val) I915_WRITE(RING_TAIL(ring->mmio_base), val)
#define I915_READ_START(ring) I915_READ(RING_START(ring->mmio_base))
#define I915_WRITE_START(ring, val) I915_WRITE(RING_START(ring->mmio_base), val)
#define I915_READ_HEAD(ring) I915_READ(RING_HEAD(ring->mmio_base))
#define I915_WRITE_HEAD(ring, val) I915_WRITE(RING_HEAD(ring->mmio_base), val)
#define I915_READ_CTL(ring) I915_READ(RING_CTL(ring->mmio_base))
#define I915_WRITE_CTL(ring, val) I915_WRITE(RING_CTL(ring->mmio_base), val)
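
/*
 * Illustrative sketch (not part of the original header): the accessor
 * macros above wrap a ring's MMIO registers through its mmio_base, e.g.
 * to reset the hardware head and push a new tail.  I915_READ/I915_WRITE
 * and HEAD_ADDR are assumed to come from i915_drv.h and i915_reg.h:
 *
 *	u32 head = I915_READ_HEAD(ring) & HEAD_ADDR;
 *	I915_WRITE_HEAD(ring, 0);
 *	I915_WRITE_TAIL(ring, ring->tail);
 */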

struct drm_i915_gem_execbuffer2;

struct intel_ring_buffer {
	u32		mmio_base;
	unsigned long	size;
	void		*virtual_start;
	struct		drm_device *dev;
	struct		drm_gem_object *gem_object;

	unsigned int	head;
	unsigned int	tail;
	unsigned int	space;
	struct intel_hw_status_page status_page;

	u32		irq_gem_seqno;		/* last seq seen at irq time */
	u32		waiting_gem_seqno;
	int		user_irq_refcount;
	void		(*user_irq_get)(struct drm_device *dev,
			struct intel_ring_buffer *ring);
	void		(*user_irq_put)(struct drm_device *dev,
			struct intel_ring_buffer *ring);
	void		(*setup_status_page)(struct drm_device *dev,
			struct intel_ring_buffer *ring);

	int		(*init)(struct drm_device *dev,
			struct intel_ring_buffer *ring);

	void		(*set_tail)(struct drm_device *dev,
			struct intel_ring_buffer *ring,
			u32 value);
	unsigned int	(*get_active_head)(struct drm_device *dev,
			struct intel_ring_buffer *ring);
	void		(*flush)(struct drm_device *dev,
			struct intel_ring_buffer *ring,
			u32 invalidate_domains,
			u32 flush_domains);
	u32		(*add_request)(struct drm_device *dev,
			struct intel_ring_buffer *ring,
			u32 flush_domains);
	u32		(*get_seqno)(struct drm_device *dev,
			struct intel_ring_buffer *ring);
	int		(*dispatch_gem_execbuffer)(struct drm_device *dev,
			struct intel_ring_buffer *ring,
			struct drm_i915_gem_execbuffer2 *exec,
			struct drm_clip_rect *cliprects,
			uint64_t exec_offset);

	/**
	 * List of objects currently involved in rendering from the
	 * ringbuffer.
	 *
	 * Includes buffers having the contents of their GPU caches
	 * flushed, not necessarily primitives.  last_rendering_seqno
	 * represents when the rendering involved will be completed.
	 *
	 * A reference is held on the buffer while on this list.
	 */
	struct list_head active_list;

	/**
	 * List of breadcrumbs associated with GPU requests currently
	 * outstanding.
	 */
	struct list_head request_list;

	wait_queue_head_t irq_queue;
};
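
/*
 * Illustrative sketch only: a ring backend (such as the render ring in
 * intel_ringbuffer.c) is expected to fill in the vfuncs above with its
 * own implementations.  The initializer below is a hypothetical example;
 * the example_* function names are placeholders, and RENDER_RING_BASE is
 * assumed to come from i915_reg.h:
 *
 *	static struct intel_ring_buffer example_render_ring = {
 *		.mmio_base	= RENDER_RING_BASE,
 *		.size		= 32 * PAGE_SIZE,
 *		.init		= example_init,
 *		.set_tail	= example_set_tail,
 *		.flush		= example_flush,
 *		.add_request	= example_add_request,
 *		.get_seqno	= example_get_seqno,
 *	};
 */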

static inline u32
intel_read_status_page(struct intel_ring_buffer *ring,
		int reg)
{
	u32 *regs = ring->status_page.page_addr;
	return regs[reg];
}
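
/*
 * Illustrative sketch: the driver reads back the breadcrumb written by
 * add_request through this helper.  I915_GEM_HWS_INDEX is assumed to be
 * the status-page slot (from i915_drv.h) that holds the latest seqno:
 *
 *	u32 seqno = intel_read_status_page(ring, I915_GEM_HWS_INDEX);
 */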

int intel_init_ring_buffer(struct drm_device *dev,
			   struct intel_ring_buffer *ring);
void intel_cleanup_ring_buffer(struct drm_device *dev,
			       struct intel_ring_buffer *ring);
int intel_wait_ring_buffer(struct drm_device *dev,
			   struct intel_ring_buffer *ring, int n);
void intel_ring_begin(struct drm_device *dev,
		      struct intel_ring_buffer *ring, int n);

static inline void intel_ring_emit(struct drm_device *dev,
				   struct intel_ring_buffer *ring,
				   unsigned int data)
{
	unsigned int *virt = ring->virtual_start + ring->tail;
	*virt = data;
	ring->tail += 4;
}

void intel_fill_struct(struct drm_device *dev,
		       struct intel_ring_buffer *ring,
		       void *data,
		       unsigned int len);
void intel_ring_advance(struct drm_device *dev,
			struct intel_ring_buffer *ring);
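
/*
 * Illustrative usage sketch: commands are queued as a begin/emit/advance
 * sequence sized in dwords.  MI_FLUSH and MI_NOOP are assumed to come
 * from i915_reg.h:
 *
 *	intel_ring_begin(dev, ring, 2);
 *	intel_ring_emit(dev, ring, MI_FLUSH);
 *	intel_ring_emit(dev, ring, MI_NOOP);
 *	intel_ring_advance(dev, ring);
 */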

u32 intel_ring_get_seqno(struct drm_device *dev,
			 struct intel_ring_buffer *ring);

int intel_init_render_ring_buffer(struct drm_device *dev);
int intel_init_bsd_ring_buffer(struct drm_device *dev);
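
/*
 * Illustrative sketch: GEM bring-up initializes the render ring and, on
 * hardware that has one, the BSD ring.  HAS_BSD() is assumed to come
 * from i915_drv.h:
 *
 *	ret = intel_init_render_ring_buffer(dev);
 *	if (ret == 0 && HAS_BSD(dev))
 *		ret = intel_init_bsd_ring_buffer(dev);
 */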

#endif /* _INTEL_RINGBUFFER_H_ */