/* drivers/gpu/drm/i915/intel_ringbuffer.h */
#ifndef _INTEL_RINGBUFFER_H_1#define _INTEL_RINGBUFFER_H_23enum {4RCS = 0x0,5VCS,6BCS,7I915_NUM_RINGS,8};910struct intel_hw_status_page {11u32 __iomem *page_addr;12unsigned int gfx_addr;13struct drm_i915_gem_object *obj;14};1516#define I915_READ_TAIL(ring) I915_READ(RING_TAIL((ring)->mmio_base))17#define I915_WRITE_TAIL(ring, val) I915_WRITE(RING_TAIL((ring)->mmio_base), val)1819#define I915_READ_START(ring) I915_READ(RING_START((ring)->mmio_base))20#define I915_WRITE_START(ring, val) I915_WRITE(RING_START((ring)->mmio_base), val)2122#define I915_READ_HEAD(ring) I915_READ(RING_HEAD((ring)->mmio_base))23#define I915_WRITE_HEAD(ring, val) I915_WRITE(RING_HEAD((ring)->mmio_base), val)2425#define I915_READ_CTL(ring) I915_READ(RING_CTL((ring)->mmio_base))26#define I915_WRITE_CTL(ring, val) I915_WRITE(RING_CTL((ring)->mmio_base), val)2728#define I915_READ_IMR(ring) I915_READ(RING_IMR((ring)->mmio_base))29#define I915_WRITE_IMR(ring, val) I915_WRITE(RING_IMR((ring)->mmio_base), val)3031#define I915_READ_NOPID(ring) I915_READ(RING_NOPID((ring)->mmio_base))32#define I915_READ_SYNC_0(ring) I915_READ(RING_SYNC_0((ring)->mmio_base))33#define I915_READ_SYNC_1(ring) I915_READ(RING_SYNC_1((ring)->mmio_base))3435struct intel_ring_buffer {36const char *name;37enum intel_ring_id {38RING_RENDER = 0x1,39RING_BSD = 0x2,40RING_BLT = 0x4,41} id;42u32 mmio_base;43void __iomem *virtual_start;44struct drm_device *dev;45struct drm_i915_gem_object *obj;4647u32 head;48u32 tail;49int space;50int size;51int effective_size;52struct intel_hw_status_page status_page;5354spinlock_t irq_lock;55u32 irq_refcount;56u32 irq_mask;57u32 irq_seqno; /* last seq seem at irq time */58u32 trace_irq_seqno;59u32 waiting_seqno;60u32 sync_seqno[I915_NUM_RINGS-1];61bool __must_check (*irq_get)(struct intel_ring_buffer *ring);62void (*irq_put)(struct intel_ring_buffer *ring);6364int (*init)(struct intel_ring_buffer *ring);6566void (*write_tail)(struct intel_ring_buffer *ring,67u32 value);68int __must_check 
(*flush)(struct intel_ring_buffer *ring,69u32 invalidate_domains,70u32 flush_domains);71int (*add_request)(struct intel_ring_buffer *ring,72u32 *seqno);73u32 (*get_seqno)(struct intel_ring_buffer *ring);74int (*dispatch_execbuffer)(struct intel_ring_buffer *ring,75u32 offset, u32 length);76void (*cleanup)(struct intel_ring_buffer *ring);7778/**79* List of objects currently involved in rendering from the80* ringbuffer.81*82* Includes buffers having the contents of their GPU caches83* flushed, not necessarily primitives. last_rendering_seqno84* represents when the rendering involved will be completed.85*86* A reference is held on the buffer while on this list.87*/88struct list_head active_list;8990/**91* List of breadcrumbs associated with GPU requests currently92* outstanding.93*/94struct list_head request_list;9596/**97* List of objects currently pending a GPU write flush.98*99* All elements on this list will belong to either the100* active_list or flushing_list, last_rendering_seqno can101* be used to differentiate between the two elements.102*/103struct list_head gpu_write_list;104105/**106* Do we have some not yet emitted requests outstanding?107*/108u32 outstanding_lazy_request;109110wait_queue_head_t irq_queue;111drm_local_map_t map;112113void *private;114};115116static inline u32117intel_ring_sync_index(struct intel_ring_buffer *ring,118struct intel_ring_buffer *other)119{120int idx;121122/*123* cs -> 0 = vcs, 1 = bcs124* vcs -> 0 = bcs, 1 = cs,125* bcs -> 0 = cs, 1 = vcs.126*/127128idx = (other - ring) - 1;129if (idx < 0)130idx += I915_NUM_RINGS;131132return idx;133}134135static inline u32136intel_read_status_page(struct intel_ring_buffer *ring,137int reg)138{139return ioread32(ring->status_page.page_addr + reg);140}141142/**143* Reads a dword out of the status page, which is written to from the command144* queue by automatic updates, MI_REPORT_HEAD, MI_STORE_DATA_INDEX, or145* MI_STORE_DATA_IMM.146*147* The following dwords have a reserved meaning:148* 
0x00: ISR copy, updated when an ISR bit not set in the HWSTAM changes.149* 0x04: ring 0 head pointer150* 0x05: ring 1 head pointer (915-class)151* 0x06: ring 2 head pointer (915-class)152* 0x10-0x1b: Context status DWords (GM45)153* 0x1f: Last written status offset. (GM45)154*155* The area from dword 0x20 to 0x3ff is available for driver usage.156*/157#define READ_HWSP(dev_priv, reg) intel_read_status_page(LP_RING(dev_priv), reg)158#define READ_BREADCRUMB(dev_priv) READ_HWSP(dev_priv, I915_BREADCRUMB_INDEX)159#define I915_GEM_HWS_INDEX 0x20160#define I915_BREADCRUMB_INDEX 0x21161162void intel_cleanup_ring_buffer(struct intel_ring_buffer *ring);163164int __must_check intel_wait_ring_buffer(struct intel_ring_buffer *ring, int n);165static inline int intel_wait_ring_idle(struct intel_ring_buffer *ring)166{167return intel_wait_ring_buffer(ring, ring->size - 8);168}169170int __must_check intel_ring_begin(struct intel_ring_buffer *ring, int n);171172static inline void intel_ring_emit(struct intel_ring_buffer *ring,173u32 data)174{175iowrite32(data, ring->virtual_start + ring->tail);176ring->tail += 4;177}178179void intel_ring_advance(struct intel_ring_buffer *ring);180181u32 intel_ring_get_seqno(struct intel_ring_buffer *ring);182int intel_ring_sync(struct intel_ring_buffer *ring,183struct intel_ring_buffer *to,184u32 seqno);185186int intel_init_render_ring_buffer(struct drm_device *dev);187int intel_init_bsd_ring_buffer(struct drm_device *dev);188int intel_init_blt_ring_buffer(struct drm_device *dev);189190u32 intel_ring_get_active_head(struct intel_ring_buffer *ring);191void intel_ring_setup_status_page(struct intel_ring_buffer *ring);192193static inline void i915_trace_irq_get(struct intel_ring_buffer *ring, u32 seqno)194{195if (ring->trace_irq_seqno == 0 && ring->irq_get(ring))196ring->trace_irq_seqno = seqno;197}198199/* DRI warts */200int intel_render_ring_init_dri(struct drm_device *dev, u64 start, u32 size);201202#endif /* _INTEL_RINGBUFFER_H_ */203204205