/* Source: blob/21.2-virgl/src/gallium/auxiliary/util/u_inlines.h */
/**************************************************************************1*2* Copyright 2007 VMware, Inc.3* All Rights Reserved.4*5* Permission is hereby granted, free of charge, to any person obtaining a6* copy of this software and associated documentation files (the7* "Software"), to deal in the Software without restriction, including8* without limitation the rights to use, copy, modify, merge, publish,9* distribute, sub license, and/or sell copies of the Software, and to10* permit persons to whom the Software is furnished to do so, subject to11* the following conditions:12*13* The above copyright notice and this permission notice (including the14* next paragraph) shall be included in all copies or substantial portions15* of the Software.16*17* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS18* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF19* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.20* IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR21* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,22* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE23* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.24*25**************************************************************************/2627#ifndef U_INLINES_H28#define U_INLINES_H2930#include "pipe/p_context.h"31#include "pipe/p_defines.h"32#include "pipe/p_shader_tokens.h"33#include "pipe/p_state.h"34#include "pipe/p_screen.h"35#include "util/compiler.h"36#include "util/format/u_format.h"37#include "util/u_debug.h"38#include "util/u_debug_describe.h"39#include "util/u_debug_refcnt.h"40#include "util/u_atomic.h"41#include "util/u_box.h"42#include "util/u_math.h"434445#ifdef __cplusplus46extern "C" {47#endif484950/*51* Reference counting helper functions.52*/535455static inline void56pipe_reference_init(struct pipe_reference *dst, unsigned count)57{58dst->count = count;59}6061static inline 
boolean62pipe_is_referenced(struct pipe_reference *src)63{64return p_atomic_read(&src->count) != 0;65}6667/**68* Update reference counting.69* The old thing pointed to, if any, will be unreferenced.70* Both 'dst' and 'src' may be NULL.71* \return TRUE if the object's refcount hits zero and should be destroyed.72*/73static inline boolean74pipe_reference_described(struct pipe_reference *dst,75struct pipe_reference *src,76debug_reference_descriptor get_desc)77{78if (dst != src) {79/* bump the src.count first */80if (src) {81ASSERTED int count = p_atomic_inc_return(&src->count);82assert(count != 1); /* src had to be referenced */83debug_reference(src, get_desc, 1);84}8586if (dst) {87int count = p_atomic_dec_return(&dst->count);88assert(count != -1); /* dst had to be referenced */89debug_reference(dst, get_desc, -1);90if (!count)91return true;92}93}9495return false;96}9798static inline boolean99pipe_reference(struct pipe_reference *dst, struct pipe_reference *src)100{101return pipe_reference_described(dst, src,102(debug_reference_descriptor)103debug_describe_reference);104}105106static inline void107pipe_surface_reference(struct pipe_surface **dst, struct pipe_surface *src)108{109struct pipe_surface *old_dst = *dst;110111if (pipe_reference_described(old_dst ? &old_dst->reference : NULL,112src ? &src->reference : NULL,113(debug_reference_descriptor)114debug_describe_surface))115old_dst->context->surface_destroy(old_dst->context, old_dst);116*dst = src;117}118119/**120* Similar to pipe_surface_reference() but always set the pointer to NULL121* and pass in an explicit context. 
The explicit context avoids the problem122* of using a deleted context's surface_destroy() method when freeing a surface123* that's shared by multiple contexts.124*/125static inline void126pipe_surface_release(struct pipe_context *pipe, struct pipe_surface **ptr)127{128struct pipe_surface *old = *ptr;129130if (pipe_reference_described(&old->reference, NULL,131(debug_reference_descriptor)132debug_describe_surface))133pipe->surface_destroy(pipe, old);134*ptr = NULL;135}136137static inline void138pipe_resource_destroy(struct pipe_resource *res)139{140/* Avoid recursion, which would prevent inlining this function */141do {142struct pipe_resource *next = res->next;143144res->screen->resource_destroy(res->screen, res);145res = next;146} while (pipe_reference_described(res ? &res->reference : NULL,147NULL,148(debug_reference_descriptor)149debug_describe_resource));150}151152static inline void153pipe_resource_reference(struct pipe_resource **dst, struct pipe_resource *src)154{155struct pipe_resource *old_dst = *dst;156157if (pipe_reference_described(old_dst ? &old_dst->reference : NULL,158src ? &src->reference : NULL,159(debug_reference_descriptor)160debug_describe_resource)) {161pipe_resource_destroy(old_dst);162}163*dst = src;164}165166/**167* Subtract the given number of references.168*/169static inline void170pipe_drop_resource_references(struct pipe_resource *dst, int num_refs)171{172int count = p_atomic_add_return(&dst->reference.count, -num_refs);173174assert(count >= 0);175/* Underflows shouldn't happen, but let's be safe. 
*/176if (count <= 0)177pipe_resource_destroy(dst);178}179180/**181* Same as pipe_surface_release, but used when pipe_context doesn't exist182* anymore.183*/184static inline void185pipe_surface_release_no_context(struct pipe_surface **ptr)186{187struct pipe_surface *surf = *ptr;188189if (pipe_reference_described(&surf->reference, NULL,190(debug_reference_descriptor)191debug_describe_surface)) {192/* trivially destroy pipe_surface */193pipe_resource_reference(&surf->texture, NULL);194free(surf);195}196*ptr = NULL;197}198199/**200* Set *dst to \p src with proper reference counting.201*202* The caller must guarantee that \p src and *dst were created in203* the same context (if they exist), and that this must be the current context.204*/205static inline void206pipe_sampler_view_reference(struct pipe_sampler_view **dst,207struct pipe_sampler_view *src)208{209struct pipe_sampler_view *old_dst = *dst;210211if (pipe_reference_described(old_dst ? &old_dst->reference : NULL,212src ? &src->reference : NULL,213(debug_reference_descriptor)214debug_describe_sampler_view))215old_dst->context->sampler_view_destroy(old_dst->context, old_dst);216*dst = src;217}218219static inline void220pipe_so_target_reference(struct pipe_stream_output_target **dst,221struct pipe_stream_output_target *src)222{223struct pipe_stream_output_target *old_dst = *dst;224225if (pipe_reference_described(old_dst ? &old_dst->reference : NULL,226src ? 
&src->reference : NULL,227(debug_reference_descriptor)debug_describe_so_target))228old_dst->context->stream_output_target_destroy(old_dst->context, old_dst);229*dst = src;230}231232static inline void233pipe_vertex_buffer_unreference(struct pipe_vertex_buffer *dst)234{235if (dst->is_user_buffer)236dst->buffer.user = NULL;237else238pipe_resource_reference(&dst->buffer.resource, NULL);239}240241static inline void242pipe_vertex_buffer_reference(struct pipe_vertex_buffer *dst,243const struct pipe_vertex_buffer *src)244{245if (dst->buffer.resource == src->buffer.resource) {246/* Just copy the fields, don't touch reference counts. */247dst->stride = src->stride;248dst->is_user_buffer = src->is_user_buffer;249dst->buffer_offset = src->buffer_offset;250return;251}252253pipe_vertex_buffer_unreference(dst);254if (!src->is_user_buffer)255pipe_resource_reference(&dst->buffer.resource, src->buffer.resource);256memcpy(dst, src, sizeof(*src));257}258259static inline void260pipe_surface_reset(struct pipe_context *ctx, struct pipe_surface* ps,261struct pipe_resource *pt, unsigned level, unsigned layer)262{263pipe_resource_reference(&ps->texture, pt);264ps->format = pt->format;265ps->width = u_minify(pt->width0, level);266ps->height = u_minify(pt->height0, level);267ps->u.tex.level = level;268ps->u.tex.first_layer = ps->u.tex.last_layer = layer;269ps->context = ctx;270}271272static inline void273pipe_surface_init(struct pipe_context *ctx, struct pipe_surface* ps,274struct pipe_resource *pt, unsigned level, unsigned layer)275{276ps->texture = 0;277pipe_reference_init(&ps->reference, 1);278pipe_surface_reset(ctx, ps, pt, level, layer);279}280281/* Return true if the surfaces are equal. 
*/282static inline boolean283pipe_surface_equal(struct pipe_surface *s1, struct pipe_surface *s2)284{285return s1->texture == s2->texture &&286s1->format == s2->format &&287(s1->texture->target != PIPE_BUFFER ||288(s1->u.buf.first_element == s2->u.buf.first_element &&289s1->u.buf.last_element == s2->u.buf.last_element)) &&290(s1->texture->target == PIPE_BUFFER ||291(s1->u.tex.level == s2->u.tex.level &&292s1->u.tex.first_layer == s2->u.tex.first_layer &&293s1->u.tex.last_layer == s2->u.tex.last_layer));294}295296/*297* Convenience wrappers for screen buffer functions.298*/299300301/**302* Create a new resource.303* \param bind bitmask of PIPE_BIND_x flags304* \param usage a PIPE_USAGE_x value305*/306static inline struct pipe_resource *307pipe_buffer_create(struct pipe_screen *screen,308unsigned bind,309enum pipe_resource_usage usage,310unsigned size)311{312struct pipe_resource buffer;313memset(&buffer, 0, sizeof buffer);314buffer.target = PIPE_BUFFER;315buffer.format = PIPE_FORMAT_R8_UNORM; /* want TYPELESS or similar */316buffer.bind = bind;317buffer.usage = usage;318buffer.flags = 0;319buffer.width0 = size;320buffer.height0 = 1;321buffer.depth0 = 1;322buffer.array_size = 1;323return screen->resource_create(screen, &buffer);324}325326327static inline struct pipe_resource *328pipe_buffer_create_const0(struct pipe_screen *screen,329unsigned bind,330enum pipe_resource_usage usage,331unsigned size)332{333struct pipe_resource buffer;334memset(&buffer, 0, sizeof buffer);335buffer.target = PIPE_BUFFER;336buffer.format = PIPE_FORMAT_R8_UNORM;337buffer.bind = bind;338buffer.usage = usage;339buffer.flags = screen->get_param(screen, PIPE_CAP_CONSTBUF0_FLAGS);340buffer.width0 = size;341buffer.height0 = 1;342buffer.depth0 = 1;343buffer.array_size = 1;344return screen->resource_create(screen, &buffer);345}346347348/**349* Map a range of a resource.350* \param offset start of region, in bytes351* \param length size of region, in bytes352* \param access bitmask of PIPE_MAP_x 
flags353* \param transfer returns a transfer object354*/355static inline void *356pipe_buffer_map_range(struct pipe_context *pipe,357struct pipe_resource *buffer,358unsigned offset,359unsigned length,360unsigned access,361struct pipe_transfer **transfer)362{363struct pipe_box box;364void *map;365366assert(offset < buffer->width0);367assert(offset + length <= buffer->width0);368assert(length);369370u_box_1d(offset, length, &box);371372map = pipe->buffer_map(pipe, buffer, 0, access, &box, transfer);373if (!map) {374return NULL;375}376377return map;378}379380381/**382* Map whole resource.383* \param access bitmask of PIPE_MAP_x flags384* \param transfer returns a transfer object385*/386static inline void *387pipe_buffer_map(struct pipe_context *pipe,388struct pipe_resource *buffer,389unsigned access,390struct pipe_transfer **transfer)391{392return pipe_buffer_map_range(pipe, buffer, 0, buffer->width0,393access, transfer);394}395396397static inline void398pipe_buffer_unmap(struct pipe_context *pipe,399struct pipe_transfer *transfer)400{401pipe->buffer_unmap(pipe, transfer);402}403404static inline void405pipe_buffer_flush_mapped_range(struct pipe_context *pipe,406struct pipe_transfer *transfer,407unsigned offset,408unsigned length)409{410struct pipe_box box;411int transfer_offset;412413assert(length);414assert(transfer->box.x <= (int) offset);415assert((int) (offset + length) <= transfer->box.x + transfer->box.width);416417/* Match old screen->buffer_flush_mapped_range() behaviour, where418* offset parameter is relative to the start of the buffer, not the419* mapped range.420*/421transfer_offset = offset - transfer->box.x;422423u_box_1d(transfer_offset, length, &box);424425pipe->transfer_flush_region(pipe, transfer, &box);426}427428static inline void429pipe_buffer_write(struct pipe_context *pipe,430struct pipe_resource *buf,431unsigned offset,432unsigned size,433const void *data)434{435/* Don't set any other usage bits. Drivers should derive them. 
*/436pipe->buffer_subdata(pipe, buf, PIPE_MAP_WRITE, offset, size, data);437}438439/**440* Special case for writing non-overlapping ranges.441*442* We can avoid GPU/CPU synchronization when writing range that has never443* been written before.444*/445static inline void446pipe_buffer_write_nooverlap(struct pipe_context *pipe,447struct pipe_resource *buf,448unsigned offset, unsigned size,449const void *data)450{451pipe->buffer_subdata(pipe, buf,452(PIPE_MAP_WRITE |453PIPE_MAP_UNSYNCHRONIZED),454offset, size, data);455}456457/**458* Utility for simplifying pipe_context::resource_copy_region calls459*/460static inline void461pipe_buffer_copy(struct pipe_context *pipe,462struct pipe_resource *dst,463struct pipe_resource *src,464unsigned dst_offset,465unsigned src_offset,466unsigned size)467{468struct pipe_box box;469/* only these fields are used */470box.x = (int)src_offset;471box.width = (int)size;472pipe->resource_copy_region(pipe, dst, 0, dst_offset, 0, 0, src, 0, &box);473}474475/**476* Create a new resource and immediately put data into it477* \param bind bitmask of PIPE_BIND_x flags478* \param usage bitmask of PIPE_USAGE_x flags479*/480static inline struct pipe_resource *481pipe_buffer_create_with_data(struct pipe_context *pipe,482unsigned bind,483enum pipe_resource_usage usage,484unsigned size,485const void *ptr)486{487struct pipe_resource *res = pipe_buffer_create(pipe->screen,488bind, usage, size);489pipe_buffer_write_nooverlap(pipe, res, 0, size, ptr);490return res;491}492493static inline void494pipe_buffer_read(struct pipe_context *pipe,495struct pipe_resource *buf,496unsigned offset,497unsigned size,498void *data)499{500struct pipe_transfer *src_transfer;501ubyte *map;502503map = (ubyte *) pipe_buffer_map_range(pipe,504buf,505offset, size,506PIPE_MAP_READ,507&src_transfer);508if (!map)509return;510511memcpy(data, map, size);512pipe_buffer_unmap(pipe, src_transfer);513}514515516/**517* Map a resource for reading/writing.518* \param access bitmask of 
PIPE_MAP_x flags519*/520static inline void *521pipe_texture_map(struct pipe_context *context,522struct pipe_resource *resource,523unsigned level, unsigned layer,524unsigned access,525unsigned x, unsigned y,526unsigned w, unsigned h,527struct pipe_transfer **transfer)528{529struct pipe_box box;530u_box_2d_zslice(x, y, layer, w, h, &box);531return context->texture_map(context, resource, level, access,532&box, transfer);533}534535536/**537* Map a 3D (texture) resource for reading/writing.538* \param access bitmask of PIPE_MAP_x flags539*/540static inline void *541pipe_texture_map_3d(struct pipe_context *context,542struct pipe_resource *resource,543unsigned level,544unsigned access,545unsigned x, unsigned y, unsigned z,546unsigned w, unsigned h, unsigned d,547struct pipe_transfer **transfer)548{549struct pipe_box box;550u_box_3d(x, y, z, w, h, d, &box);551return context->texture_map(context, resource, level, access,552&box, transfer);553}554555static inline void556pipe_texture_unmap(struct pipe_context *context,557struct pipe_transfer *transfer)558{559context->texture_unmap(context, transfer);560}561562static inline void563pipe_set_constant_buffer(struct pipe_context *pipe,564enum pipe_shader_type shader, uint index,565struct pipe_resource *buf)566{567if (buf) {568struct pipe_constant_buffer cb;569cb.buffer = buf;570cb.buffer_offset = 0;571cb.buffer_size = buf->width0;572cb.user_buffer = NULL;573pipe->set_constant_buffer(pipe, shader, index, false, &cb);574} else {575pipe->set_constant_buffer(pipe, shader, index, false, NULL);576}577}578579580/**581* Get the polygon offset enable/disable flag for the given polygon fill mode.582* \param fill_mode one of PIPE_POLYGON_MODE_POINT/LINE/FILL583*/584static inline boolean585util_get_offset(const struct pipe_rasterizer_state *templ,586unsigned fill_mode)587{588switch(fill_mode) {589case PIPE_POLYGON_MODE_POINT:590return templ->offset_point;591case PIPE_POLYGON_MODE_LINE:592return templ->offset_line;593case 
PIPE_POLYGON_MODE_FILL:594return templ->offset_tri;595default:596assert(0);597return FALSE;598}599}600601static inline float602util_get_min_point_size(const struct pipe_rasterizer_state *state)603{604/* The point size should be clamped to this value at the rasterizer stage.605*/606return !state->point_quad_rasterization &&607!state->point_smooth &&608!state->multisample ? 1.0f : 0.0f;609}610611static inline void612util_query_clear_result(union pipe_query_result *result, unsigned type)613{614switch (type) {615case PIPE_QUERY_OCCLUSION_PREDICATE:616case PIPE_QUERY_OCCLUSION_PREDICATE_CONSERVATIVE:617case PIPE_QUERY_SO_OVERFLOW_PREDICATE:618case PIPE_QUERY_SO_OVERFLOW_ANY_PREDICATE:619case PIPE_QUERY_GPU_FINISHED:620result->b = FALSE;621break;622case PIPE_QUERY_OCCLUSION_COUNTER:623case PIPE_QUERY_TIMESTAMP:624case PIPE_QUERY_TIME_ELAPSED:625case PIPE_QUERY_PRIMITIVES_GENERATED:626case PIPE_QUERY_PRIMITIVES_EMITTED:627result->u64 = 0;628break;629case PIPE_QUERY_SO_STATISTICS:630memset(&result->so_statistics, 0, sizeof(result->so_statistics));631break;632case PIPE_QUERY_TIMESTAMP_DISJOINT:633memset(&result->timestamp_disjoint, 0, sizeof(result->timestamp_disjoint));634break;635case PIPE_QUERY_PIPELINE_STATISTICS:636memset(&result->pipeline_statistics, 0, sizeof(result->pipeline_statistics));637break;638default:639memset(result, 0, sizeof(*result));640}641}642643/** Convert PIPE_TEXTURE_x to TGSI_TEXTURE_x */644static inline enum tgsi_texture_type645util_pipe_tex_to_tgsi_tex(enum pipe_texture_target pipe_tex_target,646unsigned nr_samples)647{648switch (pipe_tex_target) {649case PIPE_BUFFER:650return TGSI_TEXTURE_BUFFER;651652case PIPE_TEXTURE_1D:653assert(nr_samples <= 1);654return TGSI_TEXTURE_1D;655656case PIPE_TEXTURE_2D:657return nr_samples > 1 ? 
TGSI_TEXTURE_2D_MSAA : TGSI_TEXTURE_2D;658659case PIPE_TEXTURE_RECT:660assert(nr_samples <= 1);661return TGSI_TEXTURE_RECT;662663case PIPE_TEXTURE_3D:664assert(nr_samples <= 1);665return TGSI_TEXTURE_3D;666667case PIPE_TEXTURE_CUBE:668assert(nr_samples <= 1);669return TGSI_TEXTURE_CUBE;670671case PIPE_TEXTURE_1D_ARRAY:672assert(nr_samples <= 1);673return TGSI_TEXTURE_1D_ARRAY;674675case PIPE_TEXTURE_2D_ARRAY:676return nr_samples > 1 ? TGSI_TEXTURE_2D_ARRAY_MSAA :677TGSI_TEXTURE_2D_ARRAY;678679case PIPE_TEXTURE_CUBE_ARRAY:680return TGSI_TEXTURE_CUBE_ARRAY;681682default:683assert(0 && "unexpected texture target");684return TGSI_TEXTURE_UNKNOWN;685}686}687688689static inline void690util_copy_constant_buffer(struct pipe_constant_buffer *dst,691const struct pipe_constant_buffer *src,692bool take_ownership)693{694if (src) {695if (take_ownership) {696pipe_resource_reference(&dst->buffer, NULL);697dst->buffer = src->buffer;698} else {699pipe_resource_reference(&dst->buffer, src->buffer);700}701dst->buffer_offset = src->buffer_offset;702dst->buffer_size = src->buffer_size;703dst->user_buffer = src->user_buffer;704}705else {706pipe_resource_reference(&dst->buffer, NULL);707dst->buffer_offset = 0;708dst->buffer_size = 0;709dst->user_buffer = NULL;710}711}712713static inline void714util_copy_shader_buffer(struct pipe_shader_buffer *dst,715const struct pipe_shader_buffer *src)716{717if (src) {718pipe_resource_reference(&dst->buffer, src->buffer);719dst->buffer_offset = src->buffer_offset;720dst->buffer_size = src->buffer_size;721}722else {723pipe_resource_reference(&dst->buffer, NULL);724dst->buffer_offset = 0;725dst->buffer_size = 0;726}727}728729static inline void730util_copy_image_view(struct pipe_image_view *dst,731const struct pipe_image_view *src)732{733if (src) {734pipe_resource_reference(&dst->resource, src->resource);735dst->format = src->format;736dst->access = src->access;737dst->shader_access = src->shader_access;738dst->u = src->u;739} else 
{740pipe_resource_reference(&dst->resource, NULL);741dst->format = PIPE_FORMAT_NONE;742dst->access = 0;743dst->shader_access = 0;744memset(&dst->u, 0, sizeof(dst->u));745}746}747748static inline unsigned749util_max_layer(const struct pipe_resource *r, unsigned level)750{751switch (r->target) {752case PIPE_TEXTURE_3D:753return u_minify(r->depth0, level) - 1;754case PIPE_TEXTURE_CUBE:755assert(r->array_size == 6);756FALLTHROUGH;757case PIPE_TEXTURE_1D_ARRAY:758case PIPE_TEXTURE_2D_ARRAY:759case PIPE_TEXTURE_CUBE_ARRAY:760return r->array_size - 1;761default:762return 0;763}764}765766static inline unsigned767util_num_layers(const struct pipe_resource *r, unsigned level)768{769return util_max_layer(r, level) + 1;770}771772static inline bool773util_texrange_covers_whole_level(const struct pipe_resource *tex,774unsigned level, unsigned x, unsigned y,775unsigned z, unsigned width,776unsigned height, unsigned depth)777{778return x == 0 && y == 0 && z == 0 &&779width == u_minify(tex->width0, level) &&780height == u_minify(tex->height0, level) &&781depth == util_num_layers(tex, level);782}783784/**785* Returns true if the blit will fully initialize all pixels in the resource.786*/787static inline bool788util_blit_covers_whole_resource(const struct pipe_blit_info *info)789{790/* No conditional rendering or scissoring. (We assume that the caller would791* have dropped any redundant scissoring)792*/793if (info->scissor_enable || info->window_rectangle_include || info->render_condition_enable || info->alpha_blend)794return false;795796const struct pipe_resource *dst = info->dst.resource;797/* A single blit can't initialize a miptree. */798if (dst->last_level != 0)799return false;800801assert(info->dst.level == 0);802803/* Make sure the dst box covers the whole resource. 
*/804if (!(util_texrange_covers_whole_level(dst, 0,8050, 0, 0,806info->dst.box.width, info->dst.box.height, info->dst.box.depth))) {807return false;808}809810/* Make sure the mask actually updates all the channels present in the dst format. */811if (info->mask & PIPE_MASK_RGBA) {812if ((info->mask & PIPE_MASK_RGBA) != PIPE_MASK_RGBA)813return false;814}815816if (info->mask & PIPE_MASK_ZS) {817const struct util_format_description *format_desc = util_format_description(info->dst.format);818uint32_t dst_has = 0;819if (util_format_has_depth(format_desc))820dst_has |= PIPE_MASK_Z;821if (util_format_has_stencil(format_desc))822dst_has |= PIPE_MASK_S;823if (dst_has & ~(info->mask & PIPE_MASK_ZS))824return false;825}826827return true;828}829830static inline bool831util_logicop_reads_dest(enum pipe_logicop op)832{833switch (op) {834case PIPE_LOGICOP_NOR:835case PIPE_LOGICOP_AND_INVERTED:836case PIPE_LOGICOP_AND_REVERSE:837case PIPE_LOGICOP_INVERT:838case PIPE_LOGICOP_XOR:839case PIPE_LOGICOP_NAND:840case PIPE_LOGICOP_AND:841case PIPE_LOGICOP_EQUIV:842case PIPE_LOGICOP_NOOP:843case PIPE_LOGICOP_OR_INVERTED:844case PIPE_LOGICOP_OR_REVERSE:845case PIPE_LOGICOP_OR:846return true;847case PIPE_LOGICOP_CLEAR:848case PIPE_LOGICOP_COPY_INVERTED:849case PIPE_LOGICOP_COPY:850case PIPE_LOGICOP_SET:851return false;852}853unreachable("bad logicop");854}855856static inline bool857util_writes_stencil(const struct pipe_stencil_state *s)858{859return s->enabled && s->writemask &&860((s->fail_op != PIPE_STENCIL_OP_KEEP) ||861(s->zpass_op != PIPE_STENCIL_OP_KEEP) ||862(s->zfail_op != PIPE_STENCIL_OP_KEEP));863}864865static inline bool866util_writes_depth_stencil(const struct pipe_depth_stencil_alpha_state *zsa)867{868if (zsa->depth_enabled && zsa->depth_writemask &&869(zsa->depth_func != PIPE_FUNC_NEVER))870return true;871872return util_writes_stencil(&zsa->stencil[0]) ||873util_writes_stencil(&zsa->stencil[1]);874}875876static inline struct pipe_context 
*877pipe_create_multimedia_context(struct pipe_screen *screen)878{879unsigned flags = 0;880881if (!screen->get_param(screen, PIPE_CAP_GRAPHICS))882flags |= PIPE_CONTEXT_COMPUTE_ONLY;883884return screen->context_create(screen, NULL, flags);885}886887static inline unsigned util_res_sample_count(struct pipe_resource *res)888{889return res->nr_samples > 0 ? res->nr_samples : 1;890}891892#ifdef __cplusplus893}894#endif895896#endif /* U_INLINES_H */897898899