// SPDX-License-Identifier: GPL-2.0 OR MIT
/**************************************************************************
 *
 * Copyright 2009 - 2015 VMware, Inc., Palo Alto, CA., USA
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/
#include <linux/sync_file.h>

#include "vmwgfx_drv.h"
#include "vmwgfx_reg.h"
#include <drm/ttm/ttm_bo_api.h>
#include <drm/ttm/ttm_placement.h>
#include "vmwgfx_so.h"
#include "vmwgfx_binding.h"
#include "vmwgfx_mksstat.h"

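/* Order (log2 of bucket count) of the per-submission resource hash table. */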
#define VMW_RES_HT_ORDER 12

/*
 * Helper macro to get dx_ctx_node if available; otherwise print an error
 * message. This is for use in command verifier functions, where the command
 * is invalid if dx_ctx_node is not set.
 */
#define VMW_GET_CTX_NODE(__sw_context)					\
({									\
	__sw_context->dx_ctx_node ? __sw_context->dx_ctx_node : ({	\
		VMW_DEBUG_USER("SM context is not set at %s\n", __func__); \
		__sw_context->dx_ctx_node;				\
	});								\
})

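/*
 * Declare a command variable @__var laid out as an SVGA3dCmdHeader
 * immediately followed by a command body of type @__type.
 */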
#define VMW_DECLARE_CMD_VAR(__var, __type)				\
	struct {							\
		SVGA3dCmdHeader header;					\
		__type body;						\
	} __var

/**
 * struct vmw_relocation - Buffer object relocation
 *
 * @head: List head for the command submission context's relocation list
 * @vbo: Non ref-counted pointer to buffer object
 * @mob_loc: Pointer to location for mob id to be modified
 * @location: Pointer to location for guest pointer to be modified
 */
struct vmw_relocation {
	struct list_head head;
	struct vmw_buffer_object *vbo;
	union {
		SVGAMobId *mob_loc;
		SVGAGuestPtr *location;
	};
};

/**
 * enum vmw_resource_relocation_type - Relocation type for resources
 *
 * @vmw_res_rel_normal: Traditional relocation. The resource id in the
 * command stream is replaced with the actual id after validation.
 * @vmw_res_rel_nop: NOP relocation. The command is unconditionally replaced
 * with a NOP.
 * @vmw_res_rel_cond_nop: Conditional NOP relocation. If the resource id after
 * validation is -1, the command is replaced with a NOP. Otherwise no action.
 * @vmw_res_rel_max: Last value in the enum - used for error checking
 */
enum vmw_resource_relocation_type {
	vmw_res_rel_normal,
	vmw_res_rel_nop,
	vmw_res_rel_cond_nop,
	vmw_res_rel_max
};

/**
 * struct vmw_resource_relocation - Relocation info for resources
 *
 * @head: List head for the software context's relocation list.
 * @res: Non-ref-counted pointer to the resource.
 * @offset: Offset of single byte entries into the command buffer where the id
 * that needs fixup is located.
 * @rel_type: Type of relocation.
 */
struct vmw_resource_relocation {
	struct list_head head;
	const struct vmw_resource *res;
	u32 offset:29;
	enum vmw_resource_relocation_type rel_type:3;
};

/**
 * struct vmw_ctx_validation_info - Extra validation metadata for contexts
 *
 * @head: List head of context list
 * @ctx: The context resource
 * @cur: The context's persistent binding state
 * @staged: The binding state changes of this command buffer
 */
struct vmw_ctx_validation_info {
	struct list_head head;
	struct vmw_resource *ctx;
	struct vmw_ctx_binding_state *cur;
	struct vmw_ctx_binding_state *staged;
};

/**
 * struct vmw_cmd_entry - Describe a command for the verifier
 *
 * @func: Call-back to handle the command.
 * @user_allow: Whether allowed from the execbuf ioctl.
 * @gb_disable: Whether disabled if guest-backed objects are available.
 * @gb_enable: Whether enabled iff guest-backed objects are available.
 * @cmd_name: Name of the command.
 */
struct vmw_cmd_entry {
	int (*func) (struct vmw_private *, struct vmw_sw_context *,
		     SVGA3dCmdHeader *);
	bool user_allow;
	bool gb_disable;
	bool gb_enable;
	const char *cmd_name;
};

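/*
 * Designated initializer for one struct vmw_cmd_entry in the command
 * verifier table, indexed by SVGA-3D command id.
 */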
#define VMW_CMD_DEF(_cmd, _func, _user_allow, _gb_disable, _gb_enable)	\
	[(_cmd) - SVGA_3D_CMD_BASE] = {(_func), (_user_allow),\
				       (_gb_disable), (_gb_enable), #_cmd}

static int vmw_resource_context_res_add(struct vmw_private *dev_priv,
					struct vmw_sw_context *sw_context,
					struct vmw_resource *ctx);
static int vmw_translate_mob_ptr(struct vmw_private *dev_priv,
				 struct vmw_sw_context *sw_context,
				 SVGAMobId *id,
				 struct vmw_buffer_object **vmw_bo_p);
/**
 * vmw_ptr_diff - Compute the offset from a to b in bytes
 *
 * @a: A starting pointer.
 * @b: A pointer offset in the same address space.
 *
 * Returns: The offset in bytes between the two pointers.
 */
static size_t vmw_ptr_diff(void *a, void *b)
{
	return (unsigned long) b - (unsigned long) a;
}

/**
 * vmw_execbuf_bindings_commit - Commit modified binding state
 *
 * @sw_context: The command submission context
 * @backoff: Whether this is part of the error path and binding state changes
 * should be ignored
 */
static void vmw_execbuf_bindings_commit(struct vmw_sw_context *sw_context,
					bool backoff)
{
	struct vmw_ctx_validation_info *entry;

	list_for_each_entry(entry, &sw_context->ctx_list, head) {
		if (!backoff)
			vmw_binding_state_commit(entry->cur, entry->staged);

		if (entry->staged != sw_context->staged_bindings)
			vmw_binding_state_free(entry->staged);
		else
			sw_context->staged_bindings_inuse = false;
	}

	/* List entries are freed with the validation context */
	INIT_LIST_HEAD(&sw_context->ctx_list);
}

/**
 * vmw_bind_dx_query_mob - Bind the DX query MOB if referenced
 *
 * @sw_context: The command submission context
 */
static void vmw_bind_dx_query_mob(struct vmw_sw_context *sw_context)
{
	if (sw_context->dx_query_mob)
		vmw_context_bind_dx_query(sw_context->dx_query_ctx,
					  sw_context->dx_query_mob);
}

/**
 * vmw_cmd_ctx_first_setup - Perform the setup needed when a context is added to
 * the validate list.
 *
 * @dev_priv: Pointer to the device private.
 * @sw_context: The command submission context
 * @res: Pointer to the resource
 * @node: The validation node holding the context resource metadata
 */
static int vmw_cmd_ctx_first_setup(struct vmw_private *dev_priv,
				   struct vmw_sw_context *sw_context,
				   struct vmw_resource *res,
				   struct vmw_ctx_validation_info *node)
{
	int ret;

	ret = vmw_resource_context_res_add(dev_priv, sw_context, res);
	if (unlikely(ret != 0))
		goto out_err;

	if (!sw_context->staged_bindings) {
		sw_context->staged_bindings = vmw_binding_state_alloc(dev_priv);
		if (IS_ERR(sw_context->staged_bindings)) {
			ret = PTR_ERR(sw_context->staged_bindings);
			sw_context->staged_bindings = NULL;
			goto out_err;
		}
	}

	if (sw_context->staged_bindings_inuse) {
		node->staged = vmw_binding_state_alloc(dev_priv);
		if (IS_ERR(node->staged)) {
			ret = PTR_ERR(node->staged);
			node->staged = NULL;
			goto out_err;
		}
	} else {
		node->staged = sw_context->staged_bindings;
		sw_context->staged_bindings_inuse = true;
	}

	node->ctx = res;
	node->cur = vmw_context_binding_state(res);
	list_add_tail(&node->head, &sw_context->ctx_list);

	return 0;

out_err:
	return ret;
}

/**
 * vmw_execbuf_res_size - calculate extra size for the resource validation node
 *
 * @dev_priv: Pointer to the device private struct.
 * @res_type: The resource type.
 *
 * Guest-backed contexts and DX contexts require extra size to store execbuf
 * private information in the validation node. Typically the binding manager
 * associated data structures.
 *
 * Returns: The extra size requirement based on resource type.
 */
static unsigned int vmw_execbuf_res_size(struct vmw_private *dev_priv,
					 enum vmw_res_type res_type)
{
	return (res_type == vmw_res_dx_context ||
		(res_type == vmw_res_context && dev_priv->has_mob)) ?
		sizeof(struct vmw_ctx_validation_info) : 0;
}

/**
 * vmw_execbuf_rcache_update - Update a resource-node cache entry
 *
 * @rcache: Pointer to the entry to update.
 * @res: Pointer to the resource.
 * @private: Pointer to the execbuf-private space in the resource validation
 * node.
 */
static void vmw_execbuf_rcache_update(struct vmw_res_cache_entry *rcache,
				      struct vmw_resource *res,
				      void *private)
{
	rcache->res = res;
	rcache->private = private;
	rcache->valid = 1;
	rcache->valid_handle = 0;
}

/**
 * vmw_execbuf_res_noref_val_add - Add a resource described by an unreferenced
 * rcu-protected pointer to the validation list.
 *
 * @sw_context: Pointer to the software context.
 * @res: Unreferenced rcu-protected pointer to the resource.
 * @dirty: Whether to change dirty status.
 *
 * Returns: 0 on success. Negative error code on failure. Typical error codes
 * are %-EINVAL on inconsistency and %-ESRCH if the resource was doomed.
 */
static int vmw_execbuf_res_noref_val_add(struct vmw_sw_context *sw_context,
					 struct vmw_resource *res,
					 u32 dirty)
{
	struct vmw_private *dev_priv = res->dev_priv;
	int ret;
	enum vmw_res_type res_type = vmw_res_type(res);
	struct vmw_res_cache_entry *rcache;
	struct vmw_ctx_validation_info *ctx_info;
	bool first_usage;
	unsigned int priv_size;

	rcache = &sw_context->res_cache[res_type];
	if (likely(rcache->valid && rcache->res == res)) {
		if (dirty)
			vmw_validation_res_set_dirty(sw_context->ctx,
						     rcache->private, dirty);
		vmw_user_resource_noref_release();
		return 0;
	}

	priv_size = vmw_execbuf_res_size(dev_priv, res_type);
	ret = vmw_validation_add_resource(sw_context->ctx, res, priv_size,
					  dirty, (void **)&ctx_info,
					  &first_usage);
	vmw_user_resource_noref_release();
	if (ret)
		return ret;

	if (priv_size && first_usage) {
		ret = vmw_cmd_ctx_first_setup(dev_priv, sw_context, res,
					      ctx_info);
		if (ret) {
			VMW_DEBUG_USER("Failed first usage context setup.\n");
			return ret;
		}
	}

	vmw_execbuf_rcache_update(rcache, res, ctx_info);
	return 0;
}

/**
 * vmw_execbuf_res_noctx_val_add - Add a non-context resource to the resource
 * validation list if it's not already on it
 *
 * @sw_context: Pointer to the software context.
 * @res: Pointer to the resource.
 * @dirty: Whether to change dirty status.
 *
 * Returns: Zero on success. Negative error code on failure.
 */
static int vmw_execbuf_res_noctx_val_add(struct vmw_sw_context *sw_context,
					 struct vmw_resource *res,
					 u32 dirty)
{
	struct vmw_res_cache_entry *rcache;
	enum vmw_res_type res_type = vmw_res_type(res);
	void *ptr;
	int ret;

	rcache = &sw_context->res_cache[res_type];
	if (likely(rcache->valid && rcache->res == res)) {
		if (dirty)
			vmw_validation_res_set_dirty(sw_context->ctx,
						     rcache->private, dirty);
		return 0;
	}

	ret = vmw_validation_add_resource(sw_context->ctx, res, 0, dirty,
					  &ptr, NULL);
	if (ret)
		return ret;

	vmw_execbuf_rcache_update(rcache, res, ptr);

	return 0;
}

/**
 * vmw_view_res_val_add - Add a view and the surface it's pointing to, to the
 * validation list
 *
 * @sw_context: The software context holding the validation list.
 * @view: Pointer to the view resource.
 *
 * Returns 0 if success, negative error code otherwise.
 */
static int vmw_view_res_val_add(struct vmw_sw_context *sw_context,
				struct vmw_resource *view)
{
	int ret;

	/*
	 * First add the resource the view is pointing to, otherwise it may be
	 * swapped out when the view is validated.
	 */
	ret = vmw_execbuf_res_noctx_val_add(sw_context, vmw_view_srf(view),
					    vmw_view_dirtying(view));
	if (ret)
		return ret;

	return vmw_execbuf_res_noctx_val_add(sw_context, view,
					     VMW_RES_DIRTY_NONE);
}

/**
 * vmw_view_id_val_add - Look up a view and add it and the surface it's
 * pointing to, to the validation list.
 *
 * @sw_context: The software context holding the validation list.
 * @view_type: The view type to look up.
 * @id: view id of the view.
 *
 * The view is represented by a view id and the DX context it's created on, or
 * scheduled for creation on. If there is no DX context set, the function will
 * return an -EINVAL error pointer.
 *
 * Returns: Unreferenced pointer to the resource on success, negative error
 * pointer on failure.
 */
static struct vmw_resource *
vmw_view_id_val_add(struct vmw_sw_context *sw_context,
		    enum vmw_view_type view_type, u32 id)
{
	struct vmw_ctx_validation_info *ctx_node = sw_context->dx_ctx_node;
	struct vmw_resource *view;
	int ret;

	if (!ctx_node)
		return ERR_PTR(-EINVAL);

	view = vmw_view_lookup(sw_context->man, view_type, id);
	if (IS_ERR(view))
		return view;

	ret = vmw_view_res_val_add(sw_context, view);
	if (ret)
		return ERR_PTR(ret);

	return view;
}

/**
 * vmw_resource_context_res_add - Put resources previously bound to a context on
 * the validation list
 *
 * @dev_priv: Pointer to a device private structure
 * @sw_context: Pointer to a software context used for this command submission
 * @ctx: Pointer to the context resource
 *
 * This function puts all resources that were previously bound to @ctx on the
 * resource validation list. This is part of the context state reemission.
 */
static int vmw_resource_context_res_add(struct vmw_private *dev_priv,
					struct vmw_sw_context *sw_context,
					struct vmw_resource *ctx)
{
	struct list_head *binding_list;
	struct vmw_ctx_bindinfo *entry;
	int ret = 0;
	struct vmw_resource *res;
	u32 i;
	u32 cotable_max = has_sm5_context(ctx->dev_priv) ?
		SVGA_COTABLE_MAX : SVGA_COTABLE_DX10_MAX;

	/* Add all cotables to the validation list. */
	if (has_sm4_context(dev_priv) &&
	    vmw_res_type(ctx) == vmw_res_dx_context) {
		for (i = 0; i < cotable_max; ++i) {
			res = vmw_context_cotable(ctx, i);
			if (IS_ERR(res))
				continue;

			ret = vmw_execbuf_res_noctx_val_add(sw_context, res,
							    VMW_RES_DIRTY_SET);
			if (unlikely(ret != 0))
				return ret;
		}
	}

	/* Add all resources bound to the context to the validation list */
	mutex_lock(&dev_priv->binding_mutex);
	binding_list = vmw_context_binding_list(ctx);

	list_for_each_entry(entry, binding_list, ctx_list) {
		if (vmw_res_type(entry->res) == vmw_res_view)
			ret = vmw_view_res_val_add(sw_context, entry->res);
		else
			ret = vmw_execbuf_res_noctx_val_add
				(sw_context, entry->res,
				 vmw_binding_dirtying(entry->bt));
		if (unlikely(ret != 0))
			break;
	}

	if (has_sm4_context(dev_priv) &&
	    vmw_res_type(ctx) == vmw_res_dx_context) {
		struct vmw_buffer_object *dx_query_mob;

		dx_query_mob = vmw_context_get_dx_query_mob(ctx);
		if (dx_query_mob)
			ret = vmw_validation_add_bo(sw_context->ctx,
						    dx_query_mob, true, false);
	}

	mutex_unlock(&dev_priv->binding_mutex);
	return ret;
}

/**
 * vmw_resource_relocation_add - Add a relocation to the relocation list
 *
 * @sw_context: Pointer to the software context.
 * @res: The resource.
 * @offset: Offset into the command buffer currently being parsed where the id
 * that needs fixup is located. Granularity is one byte.
 * @rel_type: Relocation type.
 */
static int vmw_resource_relocation_add(struct vmw_sw_context *sw_context,
				       const struct vmw_resource *res,
				       unsigned long offset,
				       enum vmw_resource_relocation_type
				       rel_type)
{
	struct vmw_resource_relocation *rel;

	rel = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*rel));
	if (unlikely(!rel)) {
		VMW_DEBUG_USER("Failed to allocate a resource relocation.\n");
		return -ENOMEM;
	}

	rel->res = res;
	rel->offset = offset;
	rel->rel_type = rel_type;
	list_add_tail(&rel->head, &sw_context->res_relocations);

	return 0;
}

/**
 * vmw_resource_relocations_free - Free all relocations on a list
 *
 * @list: Pointer to the head of the relocation list
 */
static void vmw_resource_relocations_free(struct list_head *list)
{
	/* Memory is validation context memory, so no need to free it */
	INIT_LIST_HEAD(list);
}

/**
 * vmw_resource_relocations_apply - Apply all relocations on a list
 *
 * @cb: Pointer to the start of the command buffer being patched. This need not
 * be the same buffer as the one being parsed when the relocation list was
 * built, but the contents must be the same modulo the resource ids.
 * @list: Pointer to the head of the relocation list.
 */
static void vmw_resource_relocations_apply(uint32_t *cb,
					   struct list_head *list)
{
	struct vmw_resource_relocation *rel;

	/* Validate the struct vmw_resource_relocation member size */
	BUILD_BUG_ON(SVGA_CB_MAX_SIZE >= (1 << 29));
	BUILD_BUG_ON(vmw_res_rel_max >= (1 << 3));

	list_for_each_entry(rel, list, head) {
		u32 *addr = (u32 *)((unsigned long) cb + rel->offset);
		switch (rel->rel_type) {
		case vmw_res_rel_normal:
			*addr = rel->res->id;
			break;
		case vmw_res_rel_nop:
			*addr = SVGA_3D_CMD_NOP;
			break;
		default:
			if (rel->res->id == -1)
				*addr = SVGA_3D_CMD_NOP;
			break;
		}
	}
}

static int vmw_cmd_invalid(struct vmw_private *dev_priv,
			   struct vmw_sw_context *sw_context,
			   SVGA3dCmdHeader *header)
{
	return -EINVAL;
}

static int vmw_cmd_ok(struct vmw_private *dev_priv,
		      struct vmw_sw_context *sw_context,
		      SVGA3dCmdHeader *header)
{
	return 0;
}

/**
 * vmw_resources_reserve - Reserve all resources on the sw_context's resource
 * list.
 *
 * @sw_context: Pointer to the software context.
 *
 * Note that since vmware's command submission currently is protected by the
 * cmdbuf mutex, no fancy deadlock avoidance is required for resources, since
 * only a single thread at once will attempt this.
 */
static int vmw_resources_reserve(struct vmw_sw_context *sw_context)
{
	int ret;

	ret = vmw_validation_res_reserve(sw_context->ctx, true);
	if (ret)
		return ret;

	if (sw_context->dx_query_mob) {
		struct vmw_buffer_object *expected_dx_query_mob;

		expected_dx_query_mob =
			vmw_context_get_dx_query_mob(sw_context->dx_query_ctx);
		if (expected_dx_query_mob &&
		    expected_dx_query_mob != sw_context->dx_query_mob) {
			ret = -EINVAL;
		}
	}

	return ret;
}

/**
 * vmw_cmd_res_check - Check that a resource is present and if so, put it on the
 * resource validate list unless it's already there.
 *
 * @dev_priv: Pointer to a device private structure.
 * @sw_context: Pointer to the software context.
 * @res_type: Resource type.
 * @dirty: Whether to change dirty status.
 * @converter: User-space visible type specific information.
 * @id_loc: Pointer to the location in the command buffer currently being parsed
 * from where the user-space resource id handle is located.
 * @p_res: Pointer to pointer to resource validation node. Populated on
 * exit.
 */
static int
vmw_cmd_res_check(struct vmw_private *dev_priv,
		  struct vmw_sw_context *sw_context,
		  enum vmw_res_type res_type,
		  u32 dirty,
		  const struct vmw_user_resource_conv *converter,
		  uint32_t *id_loc,
		  struct vmw_resource **p_res)
{
	struct vmw_res_cache_entry *rcache = &sw_context->res_cache[res_type];
	struct vmw_resource *res;
	int ret;

	if (p_res)
		*p_res = NULL;

	if (*id_loc == SVGA3D_INVALID_ID) {
		if (res_type == vmw_res_context) {
			VMW_DEBUG_USER("Illegal context invalid id.\n");
			return -EINVAL;
		}
		return 0;
	}

	if (likely(rcache->valid_handle && *id_loc == rcache->handle)) {
		res = rcache->res;
		if (dirty)
			vmw_validation_res_set_dirty(sw_context->ctx,
						     rcache->private, dirty);
	} else {
		unsigned int size = vmw_execbuf_res_size(dev_priv, res_type);

		ret = vmw_validation_preload_res(sw_context->ctx, size);
		if (ret)
			return ret;

		res = vmw_user_resource_noref_lookup_handle
			(dev_priv, sw_context->fp->tfile, *id_loc, converter);
		if (IS_ERR(res)) {
			VMW_DEBUG_USER("Could not find/use resource 0x%08x.\n",
				       (unsigned int) *id_loc);
			return PTR_ERR(res);
		}

		ret = vmw_execbuf_res_noref_val_add(sw_context, res, dirty);
		if (unlikely(ret != 0))
			return ret;

		if (rcache->valid && rcache->res == res) {
			rcache->valid_handle = true;
			rcache->handle = *id_loc;
		}
	}

	ret = vmw_resource_relocation_add(sw_context, res,
					  vmw_ptr_diff(sw_context->buf_start,
						       id_loc),
					  vmw_res_rel_normal);
	if (p_res)
		*p_res = res;

	return 0;
}

/**
 * vmw_rebind_all_dx_query - Rebind DX query associated with the context
 *
 * @ctx_res: context the query belongs to
 *
 * This function assumes binding_mutex is held.
 */
static int vmw_rebind_all_dx_query(struct vmw_resource *ctx_res)
{
	struct vmw_private *dev_priv = ctx_res->dev_priv;
	struct vmw_buffer_object *dx_query_mob;
	VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXBindAllQuery);

	dx_query_mob = vmw_context_get_dx_query_mob(ctx_res);

	if (!dx_query_mob || dx_query_mob->dx_query_ctx)
		return 0;

	cmd = VMW_CMD_CTX_RESERVE(dev_priv, sizeof(*cmd), ctx_res->id);
	if (cmd == NULL)
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_BIND_ALL_QUERY;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.cid = ctx_res->id;
	cmd->body.mobid = dx_query_mob->base.resource->start;
	vmw_cmd_commit(dev_priv, sizeof(*cmd));

	vmw_context_bind_dx_query(ctx_res, dx_query_mob);

	return 0;
}

/**
 * vmw_rebind_contexts - Rebind all resources previously bound to referenced
 * contexts.
 *
 * @sw_context: Pointer to the software context.
 *
 * Rebind context binding points that have been scrubbed because of eviction.
 */
static int vmw_rebind_contexts(struct vmw_sw_context *sw_context)
{
	struct vmw_ctx_validation_info *val;
	int ret;

	list_for_each_entry(val, &sw_context->ctx_list, head) {
		ret = vmw_binding_rebind_all(val->cur);
		if (unlikely(ret != 0)) {
			if (ret != -ERESTARTSYS)
				VMW_DEBUG_USER("Failed to rebind context.\n");
			return ret;
		}

		ret = vmw_rebind_all_dx_query(val->ctx);
		if (ret != 0) {
			VMW_DEBUG_USER("Failed to rebind queries.\n");
			return ret;
		}
	}

	return 0;
}

/**
 * vmw_view_bindings_add - Add an array of view bindings to a context binding
 * state tracker.
 *
 * @sw_context: The execbuf state used for this command.
 * @view_type: View type for the bindings.
 * @binding_type: Binding type for the bindings.
 * @shader_slot: The shader slot to use for the bindings.
 * @view_ids: Array of view ids to be bound.
 * @num_views: Number of view ids in @view_ids.
 * @first_slot: The binding slot to be used for the first view id in @view_ids.
 */
static int vmw_view_bindings_add(struct vmw_sw_context *sw_context,
				 enum vmw_view_type view_type,
				 enum vmw_ctx_binding_type binding_type,
				 uint32 shader_slot,
				 uint32 view_ids[], u32 num_views,
				 u32 first_slot)
{
	struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context);
	u32 i;

	if (!ctx_node)
		return -EINVAL;

	for (i = 0; i < num_views; ++i) {
		struct vmw_ctx_bindinfo_view binding;
		struct vmw_resource *view = NULL;

		if (view_ids[i] != SVGA3D_INVALID_ID) {
			view = vmw_view_id_val_add(sw_context, view_type,
						   view_ids[i]);
			if (IS_ERR(view)) {
				VMW_DEBUG_USER("View not found.\n");
				return PTR_ERR(view);
			}
		}
		binding.bi.ctx = ctx_node->ctx;
		binding.bi.res = view;
		binding.bi.bt = binding_type;
		binding.shader_slot = shader_slot;
		binding.slot = first_slot + i;
		vmw_binding_add(ctx_node->staged, &binding.bi,
				shader_slot, binding.slot);
	}

	return 0;
}

| 827 | /** |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 828 | * vmw_cmd_cid_check - Check a command header for valid context information. |
| 829 | * |
| 830 | * @dev_priv: Pointer to a device private structure. |
| 831 | * @sw_context: Pointer to the software context. |
| 832 | * @header: A command header with an embedded user-space context handle. |
| 833 | * |
| 834 | * Convenience function: Call vmw_cmd_res_check with the user-space context |
| 835 | * handle embedded in @header. |
| 836 | */ |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 837 | static int vmw_cmd_cid_check(struct vmw_private *dev_priv, |
| 838 | struct vmw_sw_context *sw_context, |
| 839 | SVGA3dCmdHeader *header) |
| 840 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 841 | VMW_DECLARE_CMD_VAR(*cmd, uint32_t) = |
| 842 | container_of(header, typeof(*cmd), header); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 843 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 844 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 845 | VMW_RES_DIRTY_SET, user_context_converter, |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 846 | &cmd->body, NULL); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 847 | } |
| 848 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 849 | /** |
| 850 | * vmw_execbuf_info_from_res - Get the private validation metadata for a |
| 851 | * recently validated resource |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 852 | * |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 853 | * @sw_context: Pointer to the command submission context |
| 854 | * @res: The resource |
| 855 | * |
| 856 | * The resource pointed to by @res needs to be present in the command submission |
| 857 | * context's resource cache and hence the last resource of that type to be |
| 858 | * processed by the validation code. |
| 859 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 860 | * Return: a pointer to the private metadata of the resource, or NULL if it |
| 861 | * wasn't found |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 862 | */ |
| 863 | static struct vmw_ctx_validation_info * |
| 864 | vmw_execbuf_info_from_res(struct vmw_sw_context *sw_context, |
| 865 | struct vmw_resource *res) |
| 866 | { |
| 867 | struct vmw_res_cache_entry *rcache = |
| 868 | &sw_context->res_cache[vmw_res_type(res)]; |
| 869 | |
| 870 | if (rcache->valid && rcache->res == res) |
| 871 | return rcache->private; |
| 872 | |
| 873 | WARN_ON_ONCE(true); |
| 874 | return NULL; |
| 875 | } |
| 876 | |
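/**
 * vmw_cmd_set_render_target_check - Validate a set render target command.
 *
 * @dev_priv: Pointer to a device private struct.
 * @sw_context: The software context used for this command submission.
 * @header: Pointer to the command header in the command stream.
 *
 * Checks the context and the render target surface and, on MOB-capable
 * devices, adds a render target binding to the context's staged bindings.
 */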
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 877 | static int vmw_cmd_set_render_target_check(struct vmw_private *dev_priv, |
| 878 | struct vmw_sw_context *sw_context, |
| 879 | SVGA3dCmdHeader *header) |
| 880 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 881 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdSetRenderTarget); |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 882 | struct vmw_resource *ctx; |
| 883 | struct vmw_resource *res; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 884 | int ret; |
| 885 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 886 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | b5c3b1a6 | 2013-10-08 02:27:17 -0700 | [diff] [blame] | 887 | |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 888 | if (cmd->body.type >= SVGA3D_RT_MAX) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 889 | VMW_DEBUG_USER("Illegal render target type %u.\n", |
| 890 | (unsigned int) cmd->body.type); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 891 | return -EINVAL; |
| 892 | } |
| 893 | |
Thomas Hellstrom | b5c3b1a6 | 2013-10-08 02:27:17 -0700 | [diff] [blame] | 894 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 895 | VMW_RES_DIRTY_SET, user_context_converter, |
| 896 | &cmd->body.cid, &ctx); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 897 | if (unlikely(ret != 0)) |
| 898 | return ret; |
| 899 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 900 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 901 | VMW_RES_DIRTY_SET, user_surface_converter, |
| 902 | &cmd->body.target.sid, &res); |
Thomas Hellstrom | e8c66ef | 2018-09-26 16:32:40 +0200 | [diff] [blame] | 903 | if (unlikely(ret)) |
Thomas Hellstrom | b5c3b1a6 | 2013-10-08 02:27:17 -0700 | [diff] [blame] | 904 | return ret; |
| 905 | |
| 906 | if (dev_priv->has_mob) { |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 907 | struct vmw_ctx_bindinfo_view binding; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 908 | struct vmw_ctx_validation_info *node; |
Thomas Hellstrom | b5c3b1a6 | 2013-10-08 02:27:17 -0700 | [diff] [blame] | 909 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 910 | node = vmw_execbuf_info_from_res(sw_context, ctx); |
| 911 | if (!node) |
| 912 | return -EINVAL; |
| 913 | |
| 914 | binding.bi.ctx = ctx; |
| 915 | binding.bi.res = res; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 916 | binding.bi.bt = vmw_ctx_binding_rt; |
| 917 | binding.slot = cmd->body.type; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 918 | vmw_binding_add(node->staged, &binding.bi, 0, binding.slot); |
Thomas Hellstrom | b5c3b1a6 | 2013-10-08 02:27:17 -0700 | [diff] [blame] | 919 | } |
| 920 | |
| 921 | return 0; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 922 | } |
| 923 | |
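/**
 * vmw_cmd_surface_copy_check - Validate a surface copy command.
 *
 * @dev_priv: Pointer to a device private struct.
 * @sw_context: The software context used for this command submission.
 * @header: Pointer to the command header in the command stream.
 *
 * Checks the source and destination surface handles; only the destination
 * surface is marked dirty.
 */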
| 924 | static int vmw_cmd_surface_copy_check(struct vmw_private *dev_priv, |
| 925 | struct vmw_sw_context *sw_context, |
| 926 | SVGA3dCmdHeader *header) |
| 927 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 928 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdSurfaceCopy); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 929 | int ret; |
| 930 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 931 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | c9146cd | 2015-03-02 23:45:04 -0800 | [diff] [blame] | 932 | |
Thomas Hellstrom | 6bf6bf0 | 2015-06-26 02:22:40 -0700 | [diff] [blame] | 933 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 934 | VMW_RES_DIRTY_NONE, user_surface_converter, |
| 935 | &cmd->body.src.sid, NULL); |
Thomas Hellstrom | 6bf6bf0 | 2015-06-26 02:22:40 -0700 | [diff] [blame] | 936 | if (ret) |
| 937 | return ret; |
Thomas Hellstrom | c9146cd | 2015-03-02 23:45:04 -0800 | [diff] [blame] | 938 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 939 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 940 | VMW_RES_DIRTY_SET, user_surface_converter, |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 941 | &cmd->body.dest.sid, NULL); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 942 | } |
| 943 | |
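/**
 * vmw_cmd_buffer_copy_check - Validate a DX buffer copy command.
 *
 * @dev_priv: Pointer to a device private struct.
 * @sw_context: The software context used for this command submission.
 * @header: Pointer to the command header in the command stream.
 */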
Neha Bhende | 0fca749e | 2015-08-10 10:51:07 -0700 | [diff] [blame] | 944 | static int vmw_cmd_buffer_copy_check(struct vmw_private *dev_priv, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 945 | struct vmw_sw_context *sw_context, |
| 946 | SVGA3dCmdHeader *header) |
Neha Bhende | 0fca749e | 2015-08-10 10:51:07 -0700 | [diff] [blame] | 947 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 948 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXBufferCopy); |
Neha Bhende | 0fca749e | 2015-08-10 10:51:07 -0700 | [diff] [blame] | 949 | int ret; |
| 950 | |
| 951 | cmd = container_of(header, typeof(*cmd), header); |
| 952 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 953 | VMW_RES_DIRTY_NONE, user_surface_converter, |
Neha Bhende | 0fca749e | 2015-08-10 10:51:07 -0700 | [diff] [blame] | 954 | &cmd->body.src, NULL); |
| 955 | if (ret != 0) |
| 956 | return ret; |
| 957 | |
| 958 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 959 | VMW_RES_DIRTY_SET, user_surface_converter, |
Neha Bhende | 0fca749e | 2015-08-10 10:51:07 -0700 | [diff] [blame] | 960 | &cmd->body.dest, NULL); |
| 961 | } |
| 962 | |
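/**
 * vmw_cmd_pred_copy_check - Validate a DX predicated copy region command.
 *
 * @dev_priv: Pointer to a device private struct.
 * @sw_context: The software context used for this command submission.
 * @header: Pointer to the command header in the command stream.
 */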
| 963 | static int vmw_cmd_pred_copy_check(struct vmw_private *dev_priv, |
| 964 | struct vmw_sw_context *sw_context, |
| 965 | SVGA3dCmdHeader *header) |
| 966 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 967 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXPredCopyRegion); |
Neha Bhende | 0fca749e | 2015-08-10 10:51:07 -0700 | [diff] [blame] | 968 | int ret; |
| 969 | |
| 970 | cmd = container_of(header, typeof(*cmd), header); |
| 971 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 972 | VMW_RES_DIRTY_NONE, user_surface_converter, |
Neha Bhende | 0fca749e | 2015-08-10 10:51:07 -0700 | [diff] [blame] | 973 | &cmd->body.srcSid, NULL); |
| 974 | if (ret != 0) |
| 975 | return ret; |
| 976 | |
| 977 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 978 | VMW_RES_DIRTY_SET, user_surface_converter, |
Neha Bhende | 0fca749e | 2015-08-10 10:51:07 -0700 | [diff] [blame] | 979 | &cmd->body.dstSid, NULL); |
| 980 | } |
| 981 | |
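/**
 * vmw_cmd_stretch_blt_check - Validate a surface stretch blit command.
 *
 * @dev_priv: Pointer to a device private struct.
 * @sw_context: The software context used for this command submission.
 * @header: Pointer to the command header in the command stream.
 */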
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 982 | static int vmw_cmd_stretch_blt_check(struct vmw_private *dev_priv, |
| 983 | struct vmw_sw_context *sw_context, |
| 984 | SVGA3dCmdHeader *header) |
| 985 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 986 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdSurfaceStretchBlt); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 987 | int ret; |
| 988 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 989 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 990 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 991 | VMW_RES_DIRTY_NONE, user_surface_converter, |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 992 | &cmd->body.src.sid, NULL); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 993 | if (unlikely(ret != 0)) |
| 994 | return ret; |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 995 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 996 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 997 | VMW_RES_DIRTY_SET, user_surface_converter, |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 998 | &cmd->body.dest.sid, NULL); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 999 | } |
| 1000 | |
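/**
 * vmw_cmd_blt_surf_screen_check - Validate a blit surface to screen command.
 *
 * @dev_priv: Pointer to a device private struct.
 * @sw_context: The software context used for this command submission.
 * @header: Pointer to the command header in the command stream.
 */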
| 1001 | static int vmw_cmd_blt_surf_screen_check(struct vmw_private *dev_priv, |
| 1002 | struct vmw_sw_context *sw_context, |
| 1003 | SVGA3dCmdHeader *header) |
| 1004 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1005 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdBlitSurfaceToScreen) = |
| 1006 | container_of(header, typeof(*cmd), header); |
Jakob Bornecrantz | 0cff60c | 2011-10-04 20:13:27 +0200 | [diff] [blame] | 1007 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1008 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1009 | VMW_RES_DIRTY_NONE, user_surface_converter, |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1010 | &cmd->body.srcImage.sid, NULL); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 1011 | } |
| 1012 | |
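/**
 * vmw_cmd_present_check - Validate a present command.
 *
 * @dev_priv: Pointer to a device private struct.
 * @sw_context: The software context used for this command submission.
 * @header: Pointer to the command header in the command stream.
 */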
| 1013 | static int vmw_cmd_present_check(struct vmw_private *dev_priv, |
| 1014 | struct vmw_sw_context *sw_context, |
| 1015 | SVGA3dCmdHeader *header) |
| 1016 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1017 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdPresent) = |
| 1018 | container_of(header, typeof(*cmd), header); |
Jakob Bornecrantz | 0cff60c | 2011-10-04 20:13:27 +0200 | [diff] [blame] | 1019 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1020 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1021 | VMW_RES_DIRTY_NONE, user_surface_converter, |
| 1022 | &cmd->body.sid, NULL); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 1023 | } |
| 1024 | |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1025 | /** |
| 1026 | * vmw_query_bo_switch_prepare - Prepare to switch pinned buffer for queries. |
| 1027 | * |
| 1028 | * @dev_priv: The device private structure. |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1029 | * @new_query_bo: The new buffer holding query results. |
| 1030 | * @sw_context: The software context used for this command submission. |
| 1031 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1032 | * This function checks whether @new_query_bo is suitable for holding query |
| 1033 | * results, and whether another buffer is currently pinned for query results. If so, |
| 1034 | * the function prepares the state of @sw_context for switching pinned buffers |
| 1035 | * after successful submission of the current command batch. |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1036 | */ |
| 1037 | static int vmw_query_bo_switch_prepare(struct vmw_private *dev_priv, |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 1038 | struct vmw_buffer_object *new_query_bo, |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1039 | struct vmw_sw_context *sw_context) |
| 1040 | { |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1041 | struct vmw_res_cache_entry *ctx_entry = |
| 1042 | &sw_context->res_cache[vmw_res_context]; |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1043 | int ret; |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1044 | |
| 1045 | BUG_ON(!ctx_entry->valid); |
| 1046 | sw_context->last_query_ctx = ctx_entry->res; |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1047 | |
| 1048 | if (unlikely(new_query_bo != sw_context->cur_query_bo)) { |
| 1049 | |
Christian König | d311675 | 2021-04-12 15:11:47 +0200 | [diff] [blame] | 1050 | if (unlikely(new_query_bo->base.resource->num_pages > 4)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 1051 | VMW_DEBUG_USER("Query buffer too large.\n"); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1052 | return -EINVAL; |
| 1053 | } |
| 1054 | |
| 1055 | if (unlikely(sw_context->cur_query_bo != NULL)) { |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1056 | sw_context->needs_post_query_barrier = true; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1057 | ret = vmw_validation_add_bo(sw_context->ctx, |
| 1058 | sw_context->cur_query_bo, |
| 1059 | dev_priv->has_mob, false); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1060 | if (unlikely(ret != 0)) |
| 1061 | return ret; |
| 1062 | } |
| 1063 | sw_context->cur_query_bo = new_query_bo; |
| 1064 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1065 | ret = vmw_validation_add_bo(sw_context->ctx, |
| 1066 | dev_priv->dummy_query_bo, |
| 1067 | dev_priv->has_mob, false); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1068 | if (unlikely(ret != 0)) |
| 1069 | return ret; |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1070 | } |
| 1071 | |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1072 | return 0; |
| 1073 | } |
| 1074 | |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1075 | /** |
| 1076 | * vmw_query_bo_switch_commit - Finalize switching pinned query buffer |
| 1077 | * |
| 1078 | * @dev_priv: The device private structure. |
| 1079 | * @sw_context: The software context used for this command submission batch. |
| 1080 | * |
| 1081 | * This function will check if we're switching query buffers, and will then |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1082 | * issue a dummy occlusion query wait used as a query barrier. When the fence |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1083 | * object following that query wait has signaled, we are sure that all preceding |
| 1084 | * queries have finished, and the old query buffer can be unpinned. However, |
| 1085 | * since both the new query buffer and the old one are fenced with that fence, |
| 1086 | * we can do an asynchronous unpin now, and be sure that the old query buffer |
| 1087 | * won't be moved until the fence has signaled. |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1088 | * |
| 1089 | * As mentioned above, both the new and the old query buffers need to be fenced |
| 1090 | * using a sequence emitted *after* calling this function. |
| 1091 | */ |
| 1092 | static void vmw_query_bo_switch_commit(struct vmw_private *dev_priv, |
| 1093 | struct vmw_sw_context *sw_context) |
| 1094 | { |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1095 | /* |
| 1096 | * The validate list should still hold references to all |
| 1097 | * contexts here. |
| 1098 | */ |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1099 | if (sw_context->needs_post_query_barrier) { |
| 1100 | struct vmw_res_cache_entry *ctx_entry = |
| 1101 | &sw_context->res_cache[vmw_res_context]; |
| 1102 | struct vmw_resource *ctx; |
| 1103 | int ret; |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1104 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1105 | BUG_ON(!ctx_entry->valid); |
| 1106 | ctx = ctx_entry->res; |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1107 | |
Zack Rusin | 8426ed9 | 2020-11-18 12:54:19 -0500 | [diff] [blame] | 1108 | ret = vmw_cmd_emit_dummy_query(dev_priv, ctx->id); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1109 | |
| 1110 | if (unlikely(ret != 0)) |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 1111 | VMW_DEBUG_USER("Out of fifo space for dummy query.\n"); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1112 | } |
| 1113 | |
| 1114 | if (dev_priv->pinned_bo != sw_context->cur_query_bo) { |
| 1115 | if (dev_priv->pinned_bo) { |
Thomas Hellstrom | 459d0fa | 2015-06-26 00:25:37 -0700 | [diff] [blame] | 1116 | vmw_bo_pin_reserved(dev_priv->pinned_bo, false); |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 1117 | vmw_bo_unreference(&dev_priv->pinned_bo); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1118 | } |
| 1119 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1120 | if (!sw_context->needs_post_query_barrier) { |
Thomas Hellstrom | 459d0fa | 2015-06-26 00:25:37 -0700 | [diff] [blame] | 1121 | vmw_bo_pin_reserved(sw_context->cur_query_bo, true); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1122 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1123 | /* |
| 1124 | * We also pin the dummy_query_bo buffer so that we |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1125 | * don't need to validate it when emitting dummy queries |
| 1126 | * in context destroy paths. |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1127 | */ |
Thomas Hellstrom | 459d0fa | 2015-06-26 00:25:37 -0700 | [diff] [blame] | 1128 | if (!dev_priv->dummy_query_bo_pinned) { |
| 1129 | vmw_bo_pin_reserved(dev_priv->dummy_query_bo, |
| 1130 | true); |
| 1131 | dev_priv->dummy_query_bo_pinned = true; |
| 1132 | } |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1133 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1134 | BUG_ON(sw_context->last_query_ctx == NULL); |
| 1135 | dev_priv->query_cid = sw_context->last_query_ctx->id; |
| 1136 | dev_priv->query_cid_valid = true; |
| 1137 | dev_priv->pinned_bo = |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 1138 | vmw_bo_reference(sw_context->cur_query_bo); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1139 | } |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1140 | } |
| 1141 | } |
| 1142 | |
| 1143 | /** |
Zack Rusin | 2cd80db | 2021-05-05 15:10:07 -0400 | [diff] [blame] | 1144 | * vmw_translate_mob_ptr - Prepare to translate a user-space buffer handle |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1145 | * to a MOB id. |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1146 | * |
| 1147 | * @dev_priv: Pointer to a device private structure. |
| 1148 | * @sw_context: The software context used for this command batch validation. |
| 1149 | * @id: Pointer to the user-space handle to be translated. |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1150 | * @vmw_bo_p: Points to a location that, on successful return, will carry a |
| 1151 | * non-reference-counted pointer to the buffer object identified by the |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1152 | * user-space handle in @id. |
| 1153 | * |
| 1154 | * This function saves information needed to translate a user-space buffer |
| 1155 | * handle to a MOB id. The translation does not take place immediately, but |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1156 | * during a call to vmw_apply_relocations(). |
| 1157 | * |
| 1158 | * This function builds a relocation list and a list of buffers to validate. The |
| 1159 | * former needs to be freed using either vmw_apply_relocations() or |
| 1160 | * vmw_free_relocations(). The latter needs to be freed using |
| 1161 | * vmw_clear_validations(). |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1162 | */ |
| 1163 | static int vmw_translate_mob_ptr(struct vmw_private *dev_priv, |
| 1164 | struct vmw_sw_context *sw_context, |
| 1165 | SVGAMobId *id, |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 1166 | struct vmw_buffer_object **vmw_bo_p) |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1167 | { |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1168 | struct vmw_buffer_object *vmw_bo; |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1169 | uint32_t handle = *id; |
| 1170 | struct vmw_relocation *reloc; |
| 1171 | int ret; |
| 1172 | |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1173 | vmw_validation_preload_bo(sw_context->ctx); |
| 1174 | vmw_bo = vmw_user_bo_noref_lookup(sw_context->fp->tfile, handle); |
| 1175 | if (IS_ERR(vmw_bo)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 1176 | VMW_DEBUG_USER("Could not find or use MOB buffer.\n"); |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1177 | return PTR_ERR(vmw_bo); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1178 | } |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1179 | |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1180 | ret = vmw_validation_add_bo(sw_context->ctx, vmw_bo, true, false); |
| 1181 | vmw_user_bo_noref_release(); |
| 1182 | if (unlikely(ret != 0)) |
| 1183 | return ret; |
| 1184 | |
Thomas Hellstrom | fc18afc | 2018-09-26 15:36:52 +0200 | [diff] [blame] | 1185 | reloc = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*reloc)); |
| 1186 | if (!reloc) |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1187 | return -ENOMEM; |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1188 | |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1189 | reloc->mob_loc = id; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1190 | reloc->vbo = vmw_bo; |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1191 | |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1192 | *vmw_bo_p = vmw_bo; |
Thomas Hellstrom | fc18afc | 2018-09-26 15:36:52 +0200 | [diff] [blame] | 1193 | list_add_tail(&reloc->head, &sw_context->bo_relocations); |
| 1194 | |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1195 | return 0; |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1196 | } |
| 1197 | |
| 1198 | /** |
Zack Rusin | 2cd80db | 2021-05-05 15:10:07 -0400 | [diff] [blame] | 1199 | * vmw_translate_guest_ptr - Prepare to translate a user-space buffer handle |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1200 | * to a valid SVGAGuestPtr |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1201 | * |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1202 | * @dev_priv: Pointer to a device private structure. |
| 1203 | * @sw_context: The software context used for this command batch validation. |
| 1204 | * @ptr: Pointer to the user-space handle to be translated. |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1205 | * @vmw_bo_p: Points to a location that, on successful return, will carry a |
| 1206 | * non-reference-counted pointer to the DMA buffer identified by the user-space |
| 1207 | * handle in @ptr. |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1208 | * |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1209 | * This function saves information needed to translate a user-space buffer |
| 1210 | * handle to a valid SVGAGuestPtr. The translation does not take place |
| 1211 | * immediately, but during a call to vmw_apply_relocations(). |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1212 | * |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1213 | * This function builds a relocation list and a list of buffers to validate. |
| 1214 | * The former needs to be freed using either vmw_apply_relocations() or |
| 1215 | * vmw_free_relocations(). The latter needs to be freed using |
| 1216 | * vmw_clear_validations(). |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1217 | */ |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1218 | static int vmw_translate_guest_ptr(struct vmw_private *dev_priv, |
| 1219 | struct vmw_sw_context *sw_context, |
| 1220 | SVGAGuestPtr *ptr, |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 1221 | struct vmw_buffer_object **vmw_bo_p) |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 1222 | { |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1223 | struct vmw_buffer_object *vmw_bo; |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1224 | uint32_t handle = ptr->gmrId; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 1225 | struct vmw_relocation *reloc; |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1226 | int ret; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 1227 | |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1228 | vmw_validation_preload_bo(sw_context->ctx); |
| 1229 | vmw_bo = vmw_user_bo_noref_lookup(sw_context->fp->tfile, handle); |
| 1230 | if (IS_ERR(vmw_bo)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 1231 | VMW_DEBUG_USER("Could not find or use GMR region.\n"); |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1232 | return PTR_ERR(vmw_bo); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 1233 | } |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 1234 | |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1235 | ret = vmw_validation_add_bo(sw_context->ctx, vmw_bo, false, false); |
| 1236 | vmw_user_bo_noref_release(); |
| 1237 | if (unlikely(ret != 0)) |
| 1238 | return ret; |
| 1239 | |
Thomas Hellstrom | fc18afc | 2018-09-26 15:36:52 +0200 | [diff] [blame] | 1240 | reloc = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*reloc)); |
| 1241 | if (!reloc) |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1242 | return -ENOMEM; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 1243 | |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1244 | reloc->location = ptr; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1245 | reloc->vbo = vmw_bo; |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1246 | *vmw_bo_p = vmw_bo; |
Thomas Hellstrom | fc18afc | 2018-09-26 15:36:52 +0200 | [diff] [blame] | 1247 | list_add_tail(&reloc->head, &sw_context->bo_relocations); |
| 1248 | |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1249 | return 0; |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1250 | } |
| 1251 | |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1252 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1253 | * vmw_cmd_dx_define_query - validate SVGA_3D_CMD_DX_DEFINE_QUERY command. |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1254 | * |
| 1255 | * @dev_priv: Pointer to a device private struct. |
| 1256 | * @sw_context: The software context used for this command submission. |
| 1257 | * @header: Pointer to the command header in the command stream. |
| 1258 | * |
| 1259 | * This function adds the new query into the query COTABLE. |
| 1260 | */ |
| 1261 | static int vmw_cmd_dx_define_query(struct vmw_private *dev_priv, |
| 1262 | struct vmw_sw_context *sw_context, |
| 1263 | SVGA3dCmdHeader *header) |
| 1264 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1265 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXDefineQuery); |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 1266 | struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context); |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1267 | struct vmw_resource *cotable_res; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1268 | int ret; |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1269 | |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 1270 | if (!ctx_node) |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1271 | return -EINVAL; |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1272 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1273 | cmd = container_of(header, typeof(*cmd), header); |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1274 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1275 | if (cmd->body.type < SVGA3D_QUERYTYPE_MIN || |
| 1276 | cmd->body.type >= SVGA3D_QUERYTYPE_MAX) |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1277 | return -EINVAL; |
| 1278 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1279 | cotable_res = vmw_context_cotable(ctx_node->ctx, SVGA_COTABLE_DXQUERY); |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1280 | ret = vmw_cotable_notify(cotable_res, cmd->body.queryId); |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1281 | |
| 1282 | return ret; |
| 1283 | } |
| 1284 | |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1285 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1286 | * vmw_cmd_dx_bind_query - validate SVGA_3D_CMD_DX_BIND_QUERY command. |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1287 | * |
| 1288 | * @dev_priv: Pointer to a device private struct. |
| 1289 | * @sw_context: The software context used for this command submission. |
| 1290 | * @header: Pointer to the command header in the command stream. |
| 1291 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1292 | * The query bind operation will eventually associate the query ID with its |
| 1293 | * backing MOB. In this function, we take the user mode MOB ID and use |
| 1294 | * vmw_translate_mob_ptr() to translate it to its kernel mode equivalent. |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1295 | */ |
| 1296 | static int vmw_cmd_dx_bind_query(struct vmw_private *dev_priv, |
| 1297 | struct vmw_sw_context *sw_context, |
| 1298 | SVGA3dCmdHeader *header) |
| 1299 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1300 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXBindQuery); |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 1301 | struct vmw_buffer_object *vmw_bo; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1302 | int ret; |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1303 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1304 | cmd = container_of(header, typeof(*cmd), header); |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1305 | |
| 1306 | /* |
| 1307 | * Look up the buffer pointed to by cmd->body.mobid, put it on the relocation |
| 1308 | * list so its kernel mode MOB ID can be filled in later. |
| 1309 | */ |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1310 | ret = vmw_translate_mob_ptr(dev_priv, sw_context, &cmd->body.mobid, |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1311 | &vmw_bo); |
| 1312 | |
| 1313 | if (ret != 0) |
| 1314 | return ret; |
| 1315 | |
| 1316 | sw_context->dx_query_mob = vmw_bo; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1317 | sw_context->dx_query_ctx = sw_context->dx_ctx_node->ctx; |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1318 | return 0; |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 1319 | } |
| 1320 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1321 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1322 | * vmw_cmd_begin_gb_query - validate SVGA_3D_CMD_BEGIN_GB_QUERY command. |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1323 | * |
| 1324 | * @dev_priv: Pointer to a device private struct. |
| 1325 | * @sw_context: The software context used for this command submission. |
| 1326 | * @header: Pointer to the command header in the command stream. |
| 1327 | */ |
| 1328 | static int vmw_cmd_begin_gb_query(struct vmw_private *dev_priv, |
| 1329 | struct vmw_sw_context *sw_context, |
| 1330 | SVGA3dCmdHeader *header) |
| 1331 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1332 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdBeginGBQuery) = |
| 1333 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1334 | |
| 1335 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1336 | VMW_RES_DIRTY_SET, user_context_converter, |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1337 | &cmd->body.cid, NULL); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1338 | } |
| 1339 | |
| 1340 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1341 | * vmw_cmd_begin_query - validate SVGA_3D_CMD_BEGIN_QUERY command. |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1342 | * |
| 1343 | * @dev_priv: Pointer to a device private struct. |
| 1344 | * @sw_context: The software context used for this command submission. |
| 1345 | * @header: Pointer to the command header in the command stream. |
| 1346 | */ |
| 1347 | static int vmw_cmd_begin_query(struct vmw_private *dev_priv, |
| 1348 | struct vmw_sw_context *sw_context, |
| 1349 | SVGA3dCmdHeader *header) |
| 1350 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1351 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdBeginQuery) = |
| 1352 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1353 | |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1354 | if (unlikely(dev_priv->has_mob)) { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1355 | VMW_DECLARE_CMD_VAR(gb_cmd, SVGA3dCmdBeginGBQuery); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1356 | |
| 1357 | BUG_ON(sizeof(gb_cmd) != sizeof(*cmd)); |
| 1358 | |
| 1359 | gb_cmd.header.id = SVGA_3D_CMD_BEGIN_GB_QUERY; |
| 1360 | gb_cmd.header.size = cmd->header.size; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1361 | gb_cmd.body.cid = cmd->body.cid; |
| 1362 | gb_cmd.body.type = cmd->body.type; |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1363 | |
| 1364 | memcpy(cmd, &gb_cmd, sizeof(*cmd)); |
| 1365 | return vmw_cmd_begin_gb_query(dev_priv, sw_context, header); |
| 1366 | } |
| 1367 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1368 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1369 | VMW_RES_DIRTY_SET, user_context_converter, |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1370 | &cmd->body.cid, NULL); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1371 | } |
| 1372 | |
| 1373 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1374 | * vmw_cmd_end_gb_query - validate SVGA_3D_CMD_END_GB_QUERY command. |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1375 | * |
| 1376 | * @dev_priv: Pointer to a device private struct. |
| 1377 | * @sw_context: The software context used for this command submission. |
| 1378 | * @header: Pointer to the command header in the command stream. |
| 1379 | */ |
| 1380 | static int vmw_cmd_end_gb_query(struct vmw_private *dev_priv, |
| 1381 | struct vmw_sw_context *sw_context, |
| 1382 | SVGA3dCmdHeader *header) |
| 1383 | { |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 1384 | struct vmw_buffer_object *vmw_bo; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1385 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdEndGBQuery); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1386 | int ret; |
| 1387 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1388 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1389 | ret = vmw_cmd_cid_check(dev_priv, sw_context, header); |
| 1390 | if (unlikely(ret != 0)) |
| 1391 | return ret; |
| 1392 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1393 | ret = vmw_translate_mob_ptr(dev_priv, sw_context, &cmd->body.mobid, |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1394 | &vmw_bo); |
| 1395 | if (unlikely(ret != 0)) |
| 1396 | return ret; |
| 1397 | |
Thomas Hellstrom | 459d0fa | 2015-06-26 00:25:37 -0700 | [diff] [blame] | 1398 | ret = vmw_query_bo_switch_prepare(dev_priv, vmw_bo, sw_context); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1399 | |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1400 | return ret; |
| 1401 | } |
| 1402 | |
| 1403 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1404 | * vmw_cmd_end_query - validate SVGA_3D_CMD_END_QUERY command. |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1405 | * |
| 1406 | * @dev_priv: Pointer to a device private struct. |
| 1407 | * @sw_context: The software context used for this command submission. |
| 1408 | * @header: Pointer to the command header in the command stream. |
| 1409 | */ |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1410 | static int vmw_cmd_end_query(struct vmw_private *dev_priv, |
| 1411 | struct vmw_sw_context *sw_context, |
| 1412 | SVGA3dCmdHeader *header) |
| 1413 | { |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 1414 | struct vmw_buffer_object *vmw_bo; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1415 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdEndQuery); |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1416 | int ret; |
| 1417 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1418 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1419 | if (dev_priv->has_mob) { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1420 | VMW_DECLARE_CMD_VAR(gb_cmd, SVGA3dCmdEndGBQuery); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1421 | |
| 1422 | BUG_ON(sizeof(gb_cmd) != sizeof(*cmd)); |
| 1423 | |
| 1424 | gb_cmd.header.id = SVGA_3D_CMD_END_GB_QUERY; |
| 1425 | gb_cmd.header.size = cmd->header.size; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1426 | gb_cmd.body.cid = cmd->body.cid; |
| 1427 | gb_cmd.body.type = cmd->body.type; |
| 1428 | gb_cmd.body.mobid = cmd->body.guestResult.gmrId; |
| 1429 | gb_cmd.body.offset = cmd->body.guestResult.offset; |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1430 | |
| 1431 | memcpy(cmd, &gb_cmd, sizeof(*cmd)); |
| 1432 | return vmw_cmd_end_gb_query(dev_priv, sw_context, header); |
| 1433 | } |
| 1434 | |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1435 | ret = vmw_cmd_cid_check(dev_priv, sw_context, header); |
| 1436 | if (unlikely(ret != 0)) |
| 1437 | return ret; |
| 1438 | |
| 1439 | ret = vmw_translate_guest_ptr(dev_priv, sw_context, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1440 | &cmd->body.guestResult, &vmw_bo); |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1441 | if (unlikely(ret != 0)) |
| 1442 | return ret; |
| 1443 | |
Thomas Hellstrom | 459d0fa | 2015-06-26 00:25:37 -0700 | [diff] [blame] | 1444 | ret = vmw_query_bo_switch_prepare(dev_priv, vmw_bo, sw_context); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1445 | |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 1446 | return ret; |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1447 | } |
| 1448 | |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1449 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1450 | * vmw_cmd_wait_gb_query - validate SVGA_3D_CMD_WAIT_GB_QUERY command. |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1451 | * |
| 1452 | * @dev_priv: Pointer to a device private struct. |
| 1453 | * @sw_context: The software context used for this command submission. |
| 1454 | * @header: Pointer to the command header in the command stream. |
| 1455 | */ |
| 1456 | static int vmw_cmd_wait_gb_query(struct vmw_private *dev_priv, |
| 1457 | struct vmw_sw_context *sw_context, |
| 1458 | SVGA3dCmdHeader *header) |
| 1459 | { |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 1460 | struct vmw_buffer_object *vmw_bo; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1461 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdWaitForGBQuery); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1462 | int ret; |
| 1463 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1464 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1465 | ret = vmw_cmd_cid_check(dev_priv, sw_context, header); |
| 1466 | if (unlikely(ret != 0)) |
| 1467 | return ret; |
| 1468 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1469 | ret = vmw_translate_mob_ptr(dev_priv, sw_context, &cmd->body.mobid, |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1470 | &vmw_bo); |
| 1471 | if (unlikely(ret != 0)) |
| 1472 | return ret; |
| 1473 | |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1474 | return 0; |
| 1475 | } |
| 1476 | |
| 1477 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1478 | * vmw_cmd_wait_query - validate SVGA_3D_CMD_WAIT_QUERY command. |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1479 | * |
| 1480 | * @dev_priv: Pointer to a device private struct. |
| 1481 | * @sw_context: The software context used for this command submission. |
| 1482 | * @header: Pointer to the command header in the command stream. |
| 1483 | */ |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1484 | static int vmw_cmd_wait_query(struct vmw_private *dev_priv, |
| 1485 | struct vmw_sw_context *sw_context, |
| 1486 | SVGA3dCmdHeader *header) |
| 1487 | { |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 1488 | struct vmw_buffer_object *vmw_bo; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1489 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdWaitForQuery); |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1490 | int ret; |
| 1491 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1492 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1493 | if (dev_priv->has_mob) { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1494 | VMW_DECLARE_CMD_VAR(gb_cmd, SVGA3dCmdWaitForGBQuery); |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1495 | |
| 1496 | BUG_ON(sizeof(gb_cmd) != sizeof(*cmd)); |
| 1497 | |
| 1498 | gb_cmd.header.id = SVGA_3D_CMD_WAIT_FOR_GB_QUERY; |
| 1499 | gb_cmd.header.size = cmd->header.size; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1500 | gb_cmd.body.cid = cmd->body.cid; |
| 1501 | gb_cmd.body.type = cmd->body.type; |
| 1502 | gb_cmd.body.mobid = cmd->body.guestResult.gmrId; |
| 1503 | gb_cmd.body.offset = cmd->body.guestResult.offset; |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 1504 | |
| 1505 | memcpy(cmd, &gb_cmd, sizeof(*cmd)); |
| 1506 | return vmw_cmd_wait_gb_query(dev_priv, sw_context, header); |
| 1507 | } |
| 1508 | |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1509 | ret = vmw_cmd_cid_check(dev_priv, sw_context, header); |
| 1510 | if (unlikely(ret != 0)) |
| 1511 | return ret; |
| 1512 | |
| 1513 | ret = vmw_translate_guest_ptr(dev_priv, sw_context, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1514 | &cmd->body.guestResult, &vmw_bo); |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1515 | if (unlikely(ret != 0)) |
| 1516 | return ret; |
| 1517 | |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1518 | return 0; |
| 1519 | } |
| 1520 | |
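/**
 * vmw_cmd_dma - Validate a surface DMA command.
 *
 * @dev_priv: Pointer to a device private struct.
 * @sw_context: The software context used for this command submission.
 * @header: Pointer to the command header in the command stream.
 *
 * Translates the guest pointer to a buffer object, verifies that the DMA
 * transfer fits within that buffer, clamps the suffix's maximumOffset and
 * validates the host surface, marking it dirty only for transfers to
 * host VRAM.
 */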
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1521 | static int vmw_cmd_dma(struct vmw_private *dev_priv, |
| 1522 | struct vmw_sw_context *sw_context, |
| 1523 | SVGA3dCmdHeader *header) |
| 1524 | { |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 1525 | struct vmw_buffer_object *vmw_bo = NULL; |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1526 | struct vmw_surface *srf = NULL; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1527 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdSurfaceDMA); |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1528 | int ret; |
Thomas Hellstrom | cbd75e9 | 2014-04-15 18:25:48 +0200 | [diff] [blame] | 1529 | SVGA3dCmdSurfaceDMASuffix *suffix; |
| 1530 | uint32_t bo_size; |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1531 | bool dirty; |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1532 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1533 | cmd = container_of(header, typeof(*cmd), header); |
| 1534 | suffix = (SVGA3dCmdSurfaceDMASuffix *)((unsigned long) &cmd->body + |
Thomas Hellstrom | cbd75e9 | 2014-04-15 18:25:48 +0200 | [diff] [blame] | 1535 | header->size - sizeof(*suffix)); |
| 1536 | |
| 1537 | /* Make sure the device and the verifier stay in sync. */ |
| 1538 | if (unlikely(suffix->suffixSize != sizeof(*suffix))) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 1539 | VMW_DEBUG_USER("Invalid DMA suffix size.\n"); |
Thomas Hellstrom | cbd75e9 | 2014-04-15 18:25:48 +0200 | [diff] [blame] | 1540 | return -EINVAL; |
| 1541 | } |
| 1542 | |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1543 | ret = vmw_translate_guest_ptr(dev_priv, sw_context, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1544 | &cmd->body.guest.ptr, &vmw_bo); |
Thomas Hellstrom | 4e4ddd4 | 2010-02-21 14:54:55 +0000 | [diff] [blame] | 1545 | if (unlikely(ret != 0)) |
| 1546 | return ret; |
| 1547 | |
Thomas Hellstrom | cbd75e9 | 2014-04-15 18:25:48 +0200 | [diff] [blame] | 1548 | /* Make sure DMA doesn't cross BO boundaries. */ |
Christian König | e11bfb9 | 2020-12-09 15:07:50 +0100 | [diff] [blame] | 1549 | bo_size = vmw_bo->base.base.size; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1550 | if (unlikely(cmd->body.guest.ptr.offset > bo_size)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 1551 | VMW_DEBUG_USER("Invalid DMA offset.\n"); |
Thomas Hellstrom | cbd75e9 | 2014-04-15 18:25:48 +0200 | [diff] [blame] | 1552 | return -EINVAL; |
| 1553 | } |
| 1554 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1555 | bo_size -= cmd->body.guest.ptr.offset; |
Thomas Hellstrom | cbd75e9 | 2014-04-15 18:25:48 +0200 | [diff] [blame] | 1556 | if (unlikely(suffix->maximumOffset > bo_size)) |
| 1557 | suffix->maximumOffset = bo_size; |
| 1558 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1559 | dirty = (cmd->body.transfer == SVGA3D_WRITE_HOST_VRAM) ? |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1560 | VMW_RES_DIRTY_SET : 0; |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1561 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1562 | dirty, user_surface_converter, |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1563 | &cmd->body.host.sid, NULL); |
Thomas Hellstrom | 5bb39e8 | 2011-10-04 20:13:33 +0200 | [diff] [blame] | 1564 | if (unlikely(ret != 0)) { |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1565 | if (unlikely(ret != -ERESTARTSYS)) |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 1566 | VMW_DEBUG_USER("could not find surface for DMA.\n"); |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1567 | return ret; |
Thomas Hellstrom | 5bb39e8 | 2011-10-04 20:13:33 +0200 | [diff] [blame] | 1568 | } |
| 1569 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1570 | srf = vmw_res_to_srf(sw_context->res_cache[vmw_res_surface].res); |
Thomas Hellstrom | be38ab6 | 2011-08-31 07:42:54 +0000 | [diff] [blame] | 1571 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1572 | vmw_kms_cursor_snoop(srf, sw_context->fp->tfile, &vmw_bo->base, header); |
Thomas Hellstrom | be38ab6 | 2011-08-31 07:42:54 +0000 | [diff] [blame] | 1573 | |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1574 | return 0; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 1575 | } |
| 1576 | |
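/**
 * vmw_cmd_draw - Validate a draw primitives command.
 *
 * @dev_priv: Pointer to a device private struct.
 * @sw_context: The software context used for this command submission.
 * @header: Pointer to the command header in the command stream.
 *
 * Checks the context and every surface referenced by the vertex declarations
 * and index ranges following the command body.
 */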
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1577 | static int vmw_cmd_draw(struct vmw_private *dev_priv, |
| 1578 | struct vmw_sw_context *sw_context, |
| 1579 | SVGA3dCmdHeader *header) |
| 1580 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1581 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDrawPrimitives); |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1582 | SVGA3dVertexDecl *decl = (SVGA3dVertexDecl *)( |
| 1583 | (unsigned long)header + sizeof(*cmd)); |
| 1584 | SVGA3dPrimitiveRange *range; |
| 1585 | uint32_t i; |
| 1586 | uint32_t maxnum; |
| 1587 | int ret; |
| 1588 | |
| 1589 | ret = vmw_cmd_cid_check(dev_priv, sw_context, header); |
| 1590 | if (unlikely(ret != 0)) |
| 1591 | return ret; |
| 1592 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1593 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1594 | maxnum = (header->size - sizeof(cmd->body)) / sizeof(*decl); |
| 1595 | |
| 1596 | if (unlikely(cmd->body.numVertexDecls > maxnum)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 1597 | VMW_DEBUG_USER("Illegal number of vertex declarations.\n"); |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1598 | return -EINVAL; |
| 1599 | } |
| 1600 | |
| 1601 | for (i = 0; i < cmd->body.numVertexDecls; ++i, ++decl) { |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1602 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1603 | VMW_RES_DIRTY_NONE, |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1604 | user_surface_converter, |
| 1605 | &decl->array.surfaceId, NULL); |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1606 | if (unlikely(ret != 0)) |
| 1607 | return ret; |
| 1608 | } |
| 1609 | |
| 1610 | maxnum = (header->size - sizeof(cmd->body) - |
| 1611 | cmd->body.numVertexDecls * sizeof(*decl)) / sizeof(*range); |
| 1612 | if (unlikely(cmd->body.numRanges > maxnum)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 1613 | VMW_DEBUG_USER("Illegal number of index ranges.\n"); |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1614 | return -EINVAL; |
| 1615 | } |
| 1616 | |
| 1617 | range = (SVGA3dPrimitiveRange *) decl; |
| 1618 | for (i = 0; i < cmd->body.numRanges; ++i, ++range) { |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1619 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1620 | VMW_RES_DIRTY_NONE, |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1621 | user_surface_converter, |
| 1622 | &range->indexArray.surfaceId, NULL); |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1623 | if (unlikely(ret != 0)) |
| 1624 | return ret; |
| 1625 | } |
| 1626 | return 0; |
| 1627 | } |
| 1628 | |
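/**
 * vmw_cmd_tex_state - Validate a set texture state command.
 *
 * @dev_priv: Pointer to a device private struct.
 * @sw_context: The software context used for this command submission.
 * @header: Pointer to the command header in the command stream.
 *
 * Checks the context and every SVGA3D_TS_BIND_TEXTURE state entry and, on
 * MOB-capable devices, adds the corresponding texture bindings to the
 * context's staged bindings.
 */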
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1629 | static int vmw_cmd_tex_state(struct vmw_private *dev_priv, |
| 1630 | struct vmw_sw_context *sw_context, |
| 1631 | SVGA3dCmdHeader *header) |
| 1632 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1633 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdSetTextureState); |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1634 | SVGA3dTextureState *last_state = (SVGA3dTextureState *) |
| 1635 | ((unsigned long) header + header->size + sizeof(header)); |
| 1636 | SVGA3dTextureState *cur_state = (SVGA3dTextureState *) |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1637 | ((unsigned long) header + sizeof(*cmd)); |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1638 | struct vmw_resource *ctx; |
| 1639 | struct vmw_resource *res; |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1640 | int ret; |
| 1641 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1642 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | b5c3b1a6 | 2013-10-08 02:27:17 -0700 | [diff] [blame] | 1643 | |
| 1644 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1645 | VMW_RES_DIRTY_SET, user_context_converter, |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1646 | &cmd->body.cid, &ctx); |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1647 | if (unlikely(ret != 0)) |
| 1648 | return ret; |
| 1649 | |
| 1650 | for (; cur_state < last_state; ++cur_state) { |
| 1651 | if (likely(cur_state->name != SVGA3D_TS_BIND_TEXTURE)) |
| 1652 | continue; |
| 1653 | |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1654 | if (cur_state->stage >= SVGA3D_NUM_TEXTURE_UNITS) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 1655 | VMW_DEBUG_USER("Illegal texture/sampler unit %u.\n", |
| 1656 | (unsigned int) cur_state->stage); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1657 | return -EINVAL; |
| 1658 | } |
| 1659 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1660 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1661 | VMW_RES_DIRTY_NONE, |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1662 | user_surface_converter, |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1663 | &cur_state->value, &res); |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1664 | if (unlikely(ret != 0)) |
| 1665 | return ret; |
Thomas Hellstrom | b5c3b1a6 | 2013-10-08 02:27:17 -0700 | [diff] [blame] | 1666 | |
| 1667 | if (dev_priv->has_mob) { |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1668 | struct vmw_ctx_bindinfo_tex binding; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1669 | struct vmw_ctx_validation_info *node; |
Thomas Hellstrom | b5c3b1a6 | 2013-10-08 02:27:17 -0700 | [diff] [blame] | 1670 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1671 | node = vmw_execbuf_info_from_res(sw_context, ctx); |
| 1672 | if (!node) |
| 1673 | return -EINVAL; |
| 1674 | |
| 1675 | binding.bi.ctx = ctx; |
| 1676 | binding.bi.res = res; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1677 | binding.bi.bt = vmw_ctx_binding_tex; |
| 1678 | binding.texture_stage = cur_state->stage; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1679 | vmw_binding_add(node->staged, &binding.bi, 0, |
| 1680 | binding.texture_stage); |
Thomas Hellstrom | b5c3b1a6 | 2013-10-08 02:27:17 -0700 | [diff] [blame] | 1681 | } |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 1682 | } |
| 1683 | |
| 1684 | return 0; |
| 1685 | } |
| 1686 | |
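/*
 * Editor's note - illustrative, not driver code: vmw_cmd_tex_state() above
 * walks the SVGA3dTextureState entries packed between the fixed command body
 * and the end of the command.  Per the fields used above, each entry looks
 * roughly like:
 */
#if 0	/* illustrative only */
struct tex_state_entry_sketch {
	uint32 stage;	/* texture/sampler unit, checked against SVGA3D_NUM_TEXTURE_UNITS */
	uint32 name;	/* only SVGA3D_TS_BIND_TEXTURE entries are acted upon */
	uint32 value;	/* surface id when name == SVGA3D_TS_BIND_TEXTURE */
};
#endif
/*
 * Bound surfaces are validated and, on guest-backed (MOB) hardware, also
 * recorded as per-stage texture bindings on the context's staged binding
 * state.
 */
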
Jakob Bornecrantz | 4084fb8 | 2011-10-04 20:13:19 +0200 | [diff] [blame] | 1687 | static int vmw_cmd_check_define_gmrfb(struct vmw_private *dev_priv, |
| 1688 | struct vmw_sw_context *sw_context, |
| 1689 | void *buf) |
| 1690 | { |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 1691 | struct vmw_buffer_object *vmw_bo; |
Jakob Bornecrantz | 4084fb8 | 2011-10-04 20:13:19 +0200 | [diff] [blame] | 1692 | |
| 1693 | struct { |
| 1694 | uint32_t header; |
| 1695 | SVGAFifoCmdDefineGMRFB body; |
| 1696 | } *cmd = buf; |
| 1697 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1698 | return vmw_translate_guest_ptr(dev_priv, sw_context, &cmd->body.ptr, |
Thomas Hellstrom | b139d43 | 2018-09-26 16:27:54 +0200 | [diff] [blame] | 1699 | &vmw_bo); |
Jakob Bornecrantz | 4084fb8 | 2011-10-04 20:13:19 +0200 | [diff] [blame] | 1700 | } |
| 1701 | |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1702 | /** |
| 1703 | * vmw_cmd_res_switch_backup - Utility function to handle backup buffer |
| 1704 | * switching |
| 1705 | * |
| 1706 | * @dev_priv: Pointer to a device private struct. |
| 1707 | * @sw_context: The software context being used for this batch. |
Lee Jones | 7450bf7 | 2021-01-15 18:12:36 +0000 | [diff] [blame] | 1708 | * @res: Pointer to the resource. |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1709 | * @buf_id: Pointer to the user-space backup buffer handle in the command |
| 1710 | * stream. |
| 1711 | * @backup_offset: Offset of backup into MOB. |
| 1712 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1713 | * This function prepares for registering a switch of backup buffers in the |
| 1714 | * resource metadata just prior to unreserving. vmw_cmd_switch_backup() wraps
| 1715 | * this function, looking up the resource from a user-space handle first.
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1716 | */ |
| 1717 | static int vmw_cmd_res_switch_backup(struct vmw_private *dev_priv, |
| 1718 | struct vmw_sw_context *sw_context, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1719 | struct vmw_resource *res, uint32_t *buf_id, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1720 | unsigned long backup_offset) |
| 1721 | { |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1722 | struct vmw_buffer_object *vbo; |
| 1723 | void *info; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1724 | int ret; |
| 1725 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1726 | info = vmw_execbuf_info_from_res(sw_context, res); |
| 1727 | if (!info) |
| 1728 | return -EINVAL; |
| 1729 | |
| 1730 | ret = vmw_translate_mob_ptr(dev_priv, sw_context, buf_id, &vbo); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1731 | if (ret) |
| 1732 | return ret; |
| 1733 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1734 | vmw_validation_res_switch_backup(sw_context->ctx, info, vbo, |
| 1735 | backup_offset); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1736 | return 0; |
| 1737 | } |
| 1738 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1739 | /** |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1740 | * vmw_cmd_switch_backup - Utility function to handle backup buffer switching |
| 1741 | * |
| 1742 | * @dev_priv: Pointer to a device private struct. |
| 1743 | * @sw_context: The software context being used for this batch. |
| 1744 | * @res_type: The resource type. |
| 1745 | * @converter: Information about user-space binding for this resource type. |
| 1746 | * @res_id: Pointer to the user-space resource handle in the command stream. |
| 1747 | * @buf_id: Pointer to the user-space backup buffer handle in the command |
| 1748 | * stream. |
| 1749 | * @backup_offset: Offset of backup into MOB. |
| 1750 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1751 | * This function prepares for registering a switch of backup buffers in the |
| 1752 | * resource metadata just prior to unreserving. It's basically a wrapper around |
| 1753 | * vmw_cmd_res_switch_backup with a different interface. |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1754 | */ |
| 1755 | static int vmw_cmd_switch_backup(struct vmw_private *dev_priv, |
| 1756 | struct vmw_sw_context *sw_context, |
| 1757 | enum vmw_res_type res_type, |
| 1758 | const struct vmw_user_resource_conv |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1759 | *converter, uint32_t *res_id, uint32_t *buf_id, |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1760 | unsigned long backup_offset) |
| 1761 | { |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1762 | struct vmw_resource *res; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1763 | int ret; |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1764 | |
| 1765 | ret = vmw_cmd_res_check(dev_priv, sw_context, res_type, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1766 | VMW_RES_DIRTY_NONE, converter, res_id, &res); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1767 | if (ret) |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1768 | return ret; |
| 1769 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1770 | return vmw_cmd_res_switch_backup(dev_priv, sw_context, res, buf_id, |
| 1771 | backup_offset); |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1772 | } |
| 1773 | |
| 1774 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1775 | * vmw_cmd_bind_gb_surface - Validate SVGA_3D_CMD_BIND_GB_SURFACE command |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1776 | * |
| 1777 | * @dev_priv: Pointer to a device private struct. |
| 1778 | * @sw_context: The software context being used for this batch. |
| 1779 | * @header: Pointer to the command header in the command stream. |
| 1780 | */ |
| 1781 | static int vmw_cmd_bind_gb_surface(struct vmw_private *dev_priv, |
| 1782 | struct vmw_sw_context *sw_context, |
| 1783 | SVGA3dCmdHeader *header) |
| 1784 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1785 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdBindGBSurface) = |
| 1786 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1787 | |
| 1788 | return vmw_cmd_switch_backup(dev_priv, sw_context, vmw_res_surface, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1789 | user_surface_converter, &cmd->body.sid, |
| 1790 | &cmd->body.mobid, 0); |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1791 | } |
| 1792 | |
| 1793 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1794 | * vmw_cmd_update_gb_image - Validate SVGA_3D_CMD_UPDATE_GB_IMAGE command |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1795 | * |
| 1796 | * @dev_priv: Pointer to a device private struct. |
| 1797 | * @sw_context: The software context being used for this batch. |
| 1798 | * @header: Pointer to the command header in the command stream. |
| 1799 | */ |
| 1800 | static int vmw_cmd_update_gb_image(struct vmw_private *dev_priv, |
| 1801 | struct vmw_sw_context *sw_context, |
| 1802 | SVGA3dCmdHeader *header) |
| 1803 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1804 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdUpdateGBImage) = |
| 1805 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1806 | |
| 1807 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1808 | VMW_RES_DIRTY_NONE, user_surface_converter, |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1809 | &cmd->body.image.sid, NULL); |
| 1810 | } |
| 1811 | |
| 1812 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1813 | * vmw_cmd_update_gb_surface - Validate SVGA_3D_CMD_UPDATE_GB_SURFACE command |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1814 | * |
| 1815 | * @dev_priv: Pointer to a device private struct. |
| 1816 | * @sw_context: The software context being used for this batch. |
| 1817 | * @header: Pointer to the command header in the command stream. |
| 1818 | */ |
| 1819 | static int vmw_cmd_update_gb_surface(struct vmw_private *dev_priv, |
| 1820 | struct vmw_sw_context *sw_context, |
| 1821 | SVGA3dCmdHeader *header) |
| 1822 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1823 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdUpdateGBSurface) = |
| 1824 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1825 | |
| 1826 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1827 | VMW_RES_DIRTY_CLEAR, user_surface_converter, |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1828 | &cmd->body.sid, NULL); |
| 1829 | } |
| 1830 | |
| 1831 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1832 | * vmw_cmd_readback_gb_image - Validate SVGA_3D_CMD_READBACK_GB_IMAGE command |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1833 | * |
| 1834 | * @dev_priv: Pointer to a device private struct. |
| 1835 | * @sw_context: The software context being used for this batch. |
| 1836 | * @header: Pointer to the command header in the command stream. |
| 1837 | */ |
| 1838 | static int vmw_cmd_readback_gb_image(struct vmw_private *dev_priv, |
| 1839 | struct vmw_sw_context *sw_context, |
| 1840 | SVGA3dCmdHeader *header) |
| 1841 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1842 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdReadbackGBImage) = |
| 1843 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1844 | |
| 1845 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1846 | VMW_RES_DIRTY_NONE, user_surface_converter, |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1847 | &cmd->body.image.sid, NULL); |
| 1848 | } |
| 1849 | |
| 1850 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1851 | * vmw_cmd_readback_gb_surface - Validate SVGA_3D_CMD_READBACK_GB_SURFACE |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1852 | * command |
| 1853 | * |
| 1854 | * @dev_priv: Pointer to a device private struct. |
| 1855 | * @sw_context: The software context being used for this batch. |
| 1856 | * @header: Pointer to the command header in the command stream. |
| 1857 | */ |
| 1858 | static int vmw_cmd_readback_gb_surface(struct vmw_private *dev_priv, |
| 1859 | struct vmw_sw_context *sw_context, |
| 1860 | SVGA3dCmdHeader *header) |
| 1861 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1862 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdReadbackGBSurface) = |
| 1863 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1864 | |
| 1865 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1866 | VMW_RES_DIRTY_CLEAR, user_surface_converter, |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1867 | &cmd->body.sid, NULL); |
| 1868 | } |
| 1869 | |
| 1870 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1871 | * vmw_cmd_invalidate_gb_image - Validate SVGA_3D_CMD_INVALIDATE_GB_IMAGE |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1872 | * command |
| 1873 | * |
| 1874 | * @dev_priv: Pointer to a device private struct. |
| 1875 | * @sw_context: The software context being used for this batch. |
| 1876 | * @header: Pointer to the command header in the command stream. |
| 1877 | */ |
| 1878 | static int vmw_cmd_invalidate_gb_image(struct vmw_private *dev_priv, |
| 1879 | struct vmw_sw_context *sw_context, |
| 1880 | SVGA3dCmdHeader *header) |
| 1881 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1882 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdInvalidateGBImage) = |
| 1883 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1884 | |
| 1885 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1886 | VMW_RES_DIRTY_NONE, user_surface_converter, |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1887 | &cmd->body.image.sid, NULL); |
| 1888 | } |
| 1889 | |
| 1890 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1891 | * vmw_cmd_invalidate_gb_surface - Validate SVGA_3D_CMD_INVALIDATE_GB_SURFACE |
| 1892 | * command |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1893 | * |
| 1894 | * @dev_priv: Pointer to a device private struct. |
| 1895 | * @sw_context: The software context being used for this batch. |
| 1896 | * @header: Pointer to the command header in the command stream. |
| 1897 | */ |
| 1898 | static int vmw_cmd_invalidate_gb_surface(struct vmw_private *dev_priv, |
| 1899 | struct vmw_sw_context *sw_context, |
| 1900 | SVGA3dCmdHeader *header) |
| 1901 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1902 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdInvalidateGBSurface) = |
| 1903 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1904 | |
| 1905 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1906 | VMW_RES_DIRTY_CLEAR, user_surface_converter, |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1907 | &cmd->body.sid, NULL); |
| 1908 | } |
| 1909 | |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1910 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1911 | * vmw_cmd_shader_define - Validate SVGA_3D_CMD_SHADER_DEFINE command |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1912 | * |
| 1913 | * @dev_priv: Pointer to a device private struct. |
| 1914 | * @sw_context: The software context being used for this batch. |
| 1915 | * @header: Pointer to the command header in the command stream. |
| 1916 | */ |
| 1917 | static int vmw_cmd_shader_define(struct vmw_private *dev_priv, |
| 1918 | struct vmw_sw_context *sw_context, |
| 1919 | SVGA3dCmdHeader *header) |
| 1920 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1921 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDefineShader); |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1922 | int ret; |
| 1923 | size_t size; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1924 | struct vmw_resource *ctx; |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1925 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1926 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1927 | |
| 1928 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1929 | VMW_RES_DIRTY_SET, user_context_converter, |
| 1930 | &cmd->body.cid, &ctx); |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1931 | if (unlikely(ret != 0)) |
| 1932 | return ret; |
| 1933 | |
| 1934 | if (unlikely(!dev_priv->has_mob)) |
| 1935 | return 0; |
| 1936 | |
| 1937 | size = cmd->header.size - sizeof(cmd->body); |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1938 | ret = vmw_compat_shader_add(dev_priv, vmw_context_res_man(ctx), |
| 1939 | cmd->body.shid, cmd + 1, cmd->body.type, |
| 1940 | size, &sw_context->staged_cmd_res); |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1941 | if (unlikely(ret != 0)) |
| 1942 | return ret; |
| 1943 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1944 | return vmw_resource_relocation_add(sw_context, NULL, |
Thomas Hellstrom | e7a4528 | 2016-10-10 10:44:00 -0700 | [diff] [blame] | 1945 | vmw_ptr_diff(sw_context->buf_start, |
Thomas Hellstrom | a194403 | 2016-10-10 11:06:45 -0700 | [diff] [blame] | 1946 | &cmd->header.id), |
| 1947 | vmw_res_rel_nop); |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1948 | } |
| 1949 | |
| 1950 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1951 | * vmw_cmd_shader_destroy - Validate SVGA_3D_CMD_SHADER_DESTROY command |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1952 | * |
| 1953 | * @dev_priv: Pointer to a device private struct. |
| 1954 | * @sw_context: The software context being used for this batch. |
| 1955 | * @header: Pointer to the command header in the command stream. |
| 1956 | */ |
| 1957 | static int vmw_cmd_shader_destroy(struct vmw_private *dev_priv, |
| 1958 | struct vmw_sw_context *sw_context, |
| 1959 | SVGA3dCmdHeader *header) |
| 1960 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1961 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDestroyShader); |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1962 | int ret; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 1963 | struct vmw_resource *ctx; |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1964 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1965 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1966 | |
| 1967 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 1968 | VMW_RES_DIRTY_SET, user_context_converter, |
| 1969 | &cmd->body.cid, &ctx); |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1970 | if (unlikely(ret != 0)) |
| 1971 | return ret; |
| 1972 | |
| 1973 | if (unlikely(!dev_priv->has_mob)) |
| 1974 | return 0; |
| 1975 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1976 | ret = vmw_shader_remove(vmw_context_res_man(ctx), cmd->body.shid, |
| 1977 | cmd->body.type, &sw_context->staged_cmd_res); |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1978 | if (unlikely(ret != 0)) |
| 1979 | return ret; |
| 1980 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1981 | return vmw_resource_relocation_add(sw_context, NULL, |
Thomas Hellstrom | e7a4528 | 2016-10-10 10:44:00 -0700 | [diff] [blame] | 1982 | vmw_ptr_diff(sw_context->buf_start, |
Thomas Hellstrom | a194403 | 2016-10-10 11:06:45 -0700 | [diff] [blame] | 1983 | &cmd->header.id), |
| 1984 | vmw_res_rel_nop); |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 1985 | } |
| 1986 | |
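/*
 * Editor's note - illustrative, not driver code: on guest-backed (MOB)
 * hardware the legacy SHADER_DEFINE / SHADER_DESTROY paths above do not pass
 * the commands through to the device.  The shader bytecode is absorbed into a
 * per-context "compat" shader via vmw_compat_shader_add() / vmw_shader_remove(),
 * and the vmw_res_rel_nop relocation recorded at the offset of cmd->header.id
 * arranges for the original command to be turned into a no-op in the stream
 * that is finally submitted to the device.
 */
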
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 1987 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 1988 | * vmw_cmd_set_shader - Validate SVGA_3D_CMD_SET_SHADER command |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 1989 | * |
| 1990 | * @dev_priv: Pointer to a device private struct. |
| 1991 | * @sw_context: The software context being used for this batch. |
| 1992 | * @header: Pointer to the command header in the command stream. |
| 1993 | */ |
| 1994 | static int vmw_cmd_set_shader(struct vmw_private *dev_priv, |
| 1995 | struct vmw_sw_context *sw_context, |
| 1996 | SVGA3dCmdHeader *header) |
| 1997 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 1998 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdSetShader); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 1999 | struct vmw_ctx_bindinfo_shader binding; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2000 | struct vmw_resource *ctx, *res = NULL; |
| 2001 | struct vmw_ctx_validation_info *ctx_info; |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 2002 | int ret; |
| 2003 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2004 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 2005 | |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2006 | if (cmd->body.type >= SVGA3D_SHADERTYPE_PREDX_MAX) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 2007 | VMW_DEBUG_USER("Illegal shader type %u.\n", |
| 2008 | (unsigned int) cmd->body.type); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2009 | return -EINVAL; |
| 2010 | } |
| 2011 | |
Thomas Hellstrom | b5c3b1a6 | 2013-10-08 02:27:17 -0700 | [diff] [blame] | 2012 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2013 | VMW_RES_DIRTY_SET, user_context_converter, |
| 2014 | &cmd->body.cid, &ctx); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 2015 | if (unlikely(ret != 0)) |
| 2016 | return ret; |
| 2017 | |
Thomas Hellstrom | 18e4a46 | 2014-06-09 12:39:22 +0200 | [diff] [blame] | 2018 | if (!dev_priv->has_mob) |
| 2019 | return 0; |
Thomas Hellstrom | c74c162 | 2012-11-21 12:10:26 +0100 | [diff] [blame] | 2020 | |
Thomas Hellstrom | 18e4a46 | 2014-06-09 12:39:22 +0200 | [diff] [blame] | 2021 | if (cmd->body.shid != SVGA3D_INVALID_ID) { |
Thomas Hellstrom | e41c20c | 2019-04-04 13:25:43 +0000 | [diff] [blame] | 2022 | /* |
| 2023 | * This is the compat shader path - per-device guest-backed
| 2024 | * shaders, while user-space believes it is using per-context
| 2025 | * host-backed shaders.
| 2026 | */ |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2027 | res = vmw_shader_lookup(vmw_context_res_man(ctx), |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2028 | cmd->body.shid, cmd->body.type); |
Thomas Hellstrom | 18e4a46 | 2014-06-09 12:39:22 +0200 | [diff] [blame] | 2029 | if (!IS_ERR(res)) { |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2030 | ret = vmw_execbuf_res_noctx_val_add(sw_context, res, |
| 2031 | VMW_RES_DIRTY_NONE); |
Thomas Hellstrom | 18e4a46 | 2014-06-09 12:39:22 +0200 | [diff] [blame] | 2032 | if (unlikely(ret != 0)) |
| 2033 | return ret; |
Thomas Hellstrom | e41c20c | 2019-04-04 13:25:43 +0000 | [diff] [blame] | 2034 | |
| 2035 | ret = vmw_resource_relocation_add |
| 2036 | (sw_context, res, |
| 2037 | vmw_ptr_diff(sw_context->buf_start, |
| 2038 | &cmd->body.shid), |
| 2039 | vmw_res_rel_normal); |
| 2040 | if (unlikely(ret != 0)) |
| 2041 | return ret; |
Thomas Hellstrom | 18e4a46 | 2014-06-09 12:39:22 +0200 | [diff] [blame] | 2042 | } |
Thomas Hellstrom | b5c3b1a6 | 2013-10-08 02:27:17 -0700 | [diff] [blame] | 2043 | } |
Thomas Hellstrom | c74c162 | 2012-11-21 12:10:26 +0100 | [diff] [blame] | 2044 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2045 | if (IS_ERR_OR_NULL(res)) { |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2046 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_shader, |
| 2047 | VMW_RES_DIRTY_NONE, |
| 2048 | user_shader_converter, &cmd->body.shid, |
| 2049 | &res); |
Thomas Hellstrom | 18e4a46 | 2014-06-09 12:39:22 +0200 | [diff] [blame] | 2050 | if (unlikely(ret != 0)) |
| 2051 | return ret; |
| 2052 | } |
| 2053 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2054 | ctx_info = vmw_execbuf_info_from_res(sw_context, ctx); |
| 2055 | if (!ctx_info) |
| 2056 | return -EINVAL; |
| 2057 | |
| 2058 | binding.bi.ctx = ctx; |
| 2059 | binding.bi.res = res; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2060 | binding.bi.bt = vmw_ctx_binding_shader; |
| 2061 | binding.shader_slot = cmd->body.type - SVGA3D_SHADERTYPE_MIN; |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2062 | vmw_binding_add(ctx_info->staged, &binding.bi, binding.shader_slot, 0); |
| 2063 | |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2064 | return 0; |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 2065 | } |
| 2066 | |
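/*
 * Editor's note - an illustrative summary, not driver code: the shader lookup
 * order in vmw_cmd_set_shader() above is roughly:
 *
 *	if (dev_priv->has_mob && cmd->body.shid != SVGA3D_INVALID_ID)
 *		try the per-context compat shader via vmw_shader_lookup();
 *	if that did not yield a resource
 *		fall back to the legacy user_shader_converter lookup;
 *	then record a vmw_ctx_binding_shader binding for the shader slot on
 *	the context's staged binding state.
 *
 * For the compat path, the vmw_res_rel_normal relocation on cmd->body.shid
 * lets the device id of the looked-up shader be patched into the command
 * stream at submission time.
 */
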
Thomas Hellstrom | c74c162 | 2012-11-21 12:10:26 +0100 | [diff] [blame] | 2067 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2068 | * vmw_cmd_set_shader_const - Validate SVGA_3D_CMD_SET_SHADER_CONST command |
Thomas Hellstrom | 0ccbbae | 2014-01-30 11:13:43 +0100 | [diff] [blame] | 2069 | * |
| 2070 | * @dev_priv: Pointer to a device private struct. |
| 2071 | * @sw_context: The software context being used for this batch. |
| 2072 | * @header: Pointer to the command header in the command stream. |
| 2073 | */ |
| 2074 | static int vmw_cmd_set_shader_const(struct vmw_private *dev_priv, |
| 2075 | struct vmw_sw_context *sw_context, |
| 2076 | SVGA3dCmdHeader *header) |
| 2077 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2078 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdSetShaderConst); |
Thomas Hellstrom | 0ccbbae | 2014-01-30 11:13:43 +0100 | [diff] [blame] | 2079 | int ret; |
| 2080 | |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2081 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | 0ccbbae | 2014-01-30 11:13:43 +0100 | [diff] [blame] | 2082 | |
| 2083 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2084 | VMW_RES_DIRTY_SET, user_context_converter, |
| 2085 | &cmd->body.cid, NULL); |
Thomas Hellstrom | 0ccbbae | 2014-01-30 11:13:43 +0100 | [diff] [blame] | 2086 | if (unlikely(ret != 0)) |
| 2087 | return ret; |
| 2088 | |
| 2089 | if (dev_priv->has_mob) |
| 2090 | header->id = SVGA_3D_CMD_SET_GB_SHADERCONSTS_INLINE; |
| 2091 | |
| 2092 | return 0; |
| 2093 | } |
| 2094 | |
| 2095 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2096 | * vmw_cmd_bind_gb_shader - Validate SVGA_3D_CMD_BIND_GB_SHADER command |
Thomas Hellstrom | c74c162 | 2012-11-21 12:10:26 +0100 | [diff] [blame] | 2097 | * |
| 2098 | * @dev_priv: Pointer to a device private struct. |
| 2099 | * @sw_context: The software context being used for this batch. |
| 2100 | * @header: Pointer to the command header in the command stream. |
| 2101 | */ |
| 2102 | static int vmw_cmd_bind_gb_shader(struct vmw_private *dev_priv, |
| 2103 | struct vmw_sw_context *sw_context, |
| 2104 | SVGA3dCmdHeader *header) |
| 2105 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2106 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdBindGBShader) = |
| 2107 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | c74c162 | 2012-11-21 12:10:26 +0100 | [diff] [blame] | 2108 | |
| 2109 | return vmw_cmd_switch_backup(dev_priv, sw_context, vmw_res_shader, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2110 | user_shader_converter, &cmd->body.shid, |
| 2111 | &cmd->body.mobid, cmd->body.offsetInBytes); |
Thomas Hellstrom | c74c162 | 2012-11-21 12:10:26 +0100 | [diff] [blame] | 2112 | } |
| 2113 | |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2114 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2115 | * vmw_cmd_dx_set_single_constant_buffer - Validate |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2116 | * SVGA_3D_CMD_DX_SET_SINGLE_CONSTANT_BUFFER command. |
| 2117 | * |
| 2118 | * @dev_priv: Pointer to a device private struct. |
| 2119 | * @sw_context: The software context being used for this batch. |
| 2120 | * @header: Pointer to the command header in the command stream. |
| 2121 | */ |
| 2122 | static int |
| 2123 | vmw_cmd_dx_set_single_constant_buffer(struct vmw_private *dev_priv, |
| 2124 | struct vmw_sw_context *sw_context, |
| 2125 | SVGA3dCmdHeader *header) |
| 2126 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2127 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXSetSingleConstantBuffer); |
Deepak Rawat | d2e90ab | 2018-12-13 13:43:20 -0800 | [diff] [blame] | 2128 | SVGA3dShaderType max_shader_num = has_sm5_context(dev_priv) ? |
| 2129 | SVGA3D_NUM_SHADERTYPE : SVGA3D_NUM_SHADERTYPE_DX10; |
| 2130 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2131 | struct vmw_resource *res = NULL; |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2132 | struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2133 | struct vmw_ctx_bindinfo_cb binding; |
| 2134 | int ret; |
| 2135 | |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2136 | if (!ctx_node) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2137 | return -EINVAL; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2138 | |
| 2139 | cmd = container_of(header, typeof(*cmd), header); |
| 2140 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2141 | VMW_RES_DIRTY_NONE, user_surface_converter, |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2142 | &cmd->body.sid, &res); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2143 | if (unlikely(ret != 0)) |
| 2144 | return ret; |
| 2145 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2146 | binding.bi.ctx = ctx_node->ctx; |
| 2147 | binding.bi.res = res; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2148 | binding.bi.bt = vmw_ctx_binding_cb; |
| 2149 | binding.shader_slot = cmd->body.type - SVGA3D_SHADERTYPE_MIN; |
| 2150 | binding.offset = cmd->body.offsetInBytes; |
| 2151 | binding.size = cmd->body.sizeInBytes; |
| 2152 | binding.slot = cmd->body.slot; |
| 2153 | |
Deepak Rawat | d2e90ab | 2018-12-13 13:43:20 -0800 | [diff] [blame] | 2154 | if (binding.shader_slot >= max_shader_num || |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2155 | binding.slot >= SVGA3D_DX_MAX_CONSTBUFFERS) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 2156 | VMW_DEBUG_USER("Illegal const buffer shader %u slot %u.\n", |
| 2157 | (unsigned int) cmd->body.type, |
| 2158 | (unsigned int) binding.slot); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2159 | return -EINVAL; |
| 2160 | } |
| 2161 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2162 | vmw_binding_add(ctx_node->staged, &binding.bi, binding.shader_slot, |
| 2163 | binding.slot); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2164 | |
| 2165 | return 0; |
| 2166 | } |
| 2167 | |
| 2168 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2169 | * vmw_cmd_dx_set_shader_res - Validate SVGA_3D_CMD_DX_SET_SHADER_RESOURCES |
| 2170 | * command |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2171 | * |
| 2172 | * @dev_priv: Pointer to a device private struct. |
| 2173 | * @sw_context: The software context being used for this batch. |
| 2174 | * @header: Pointer to the command header in the command stream. |
| 2175 | */ |
| 2176 | static int vmw_cmd_dx_set_shader_res(struct vmw_private *dev_priv, |
| 2177 | struct vmw_sw_context *sw_context, |
| 2178 | SVGA3dCmdHeader *header) |
| 2179 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2180 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXSetShaderResources) = |
| 2181 | container_of(header, typeof(*cmd), header); |
Deepak Rawat | d2e90ab | 2018-12-13 13:43:20 -0800 | [diff] [blame] | 2182 | SVGA3dShaderType max_allowed = has_sm5_context(dev_priv) ? |
| 2183 | SVGA3D_SHADERTYPE_MAX : SVGA3D_SHADERTYPE_DX10_MAX; |
| 2184 | |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2185 | u32 num_sr_view = (cmd->header.size - sizeof(cmd->body)) / |
| 2186 | sizeof(SVGA3dShaderResourceViewId); |
| 2187 | |
| 2188 | if ((u64) cmd->body.startView + (u64) num_sr_view > |
| 2189 | (u64) SVGA3D_DX_MAX_SRVIEWS || |
Deepak Rawat | d2e90ab | 2018-12-13 13:43:20 -0800 | [diff] [blame] | 2190 | cmd->body.type >= max_allowed) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 2191 | VMW_DEBUG_USER("Invalid shader binding.\n"); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2192 | return -EINVAL; |
| 2193 | } |
| 2194 | |
| 2195 | return vmw_view_bindings_add(sw_context, vmw_view_sr, |
| 2196 | vmw_ctx_binding_sr, |
| 2197 | cmd->body.type - SVGA3D_SHADERTYPE_MIN, |
| 2198 | (void *) &cmd[1], num_sr_view, |
| 2199 | cmd->body.startView); |
| 2200 | } |
| 2201 | |
| 2202 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2203 | * vmw_cmd_dx_set_shader - Validate SVGA_3D_CMD_DX_SET_SHADER command |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2204 | * |
| 2205 | * @dev_priv: Pointer to a device private struct. |
| 2206 | * @sw_context: The software context being used for this batch. |
| 2207 | * @header: Pointer to the command header in the command stream. |
| 2208 | */ |
| 2209 | static int vmw_cmd_dx_set_shader(struct vmw_private *dev_priv, |
| 2210 | struct vmw_sw_context *sw_context, |
| 2211 | SVGA3dCmdHeader *header) |
| 2212 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2213 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXSetShader); |
Deepak Rawat | d2e90ab | 2018-12-13 13:43:20 -0800 | [diff] [blame] | 2214 | SVGA3dShaderType max_allowed = has_sm5_context(dev_priv) ? |
| 2215 | SVGA3D_SHADERTYPE_MAX : SVGA3D_SHADERTYPE_DX10_MAX; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2216 | struct vmw_resource *res = NULL; |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2217 | struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2218 | struct vmw_ctx_bindinfo_shader binding; |
| 2219 | int ret = 0; |
| 2220 | |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2221 | if (!ctx_node) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2222 | return -EINVAL; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2223 | |
| 2224 | cmd = container_of(header, typeof(*cmd), header); |
| 2225 | |
Deepak Rawat | d2e90ab | 2018-12-13 13:43:20 -0800 | [diff] [blame] | 2226 | if (cmd->body.type >= max_allowed || |
Murray McAllister | 5ed7f4b | 2019-05-20 21:57:34 +1200 | [diff] [blame] | 2227 | cmd->body.type < SVGA3D_SHADERTYPE_MIN) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 2228 | VMW_DEBUG_USER("Illegal shader type %u.\n", |
| 2229 | (unsigned int) cmd->body.type); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2230 | return -EINVAL; |
| 2231 | } |
| 2232 | |
| 2233 | if (cmd->body.shaderId != SVGA3D_INVALID_ID) { |
| 2234 | res = vmw_shader_lookup(sw_context->man, cmd->body.shaderId, 0); |
| 2235 | if (IS_ERR(res)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 2236 | VMW_DEBUG_USER("Could not find shader for binding.\n"); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2237 | return PTR_ERR(res); |
| 2238 | } |
| 2239 | |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2240 | ret = vmw_execbuf_res_noctx_val_add(sw_context, res, |
| 2241 | VMW_RES_DIRTY_NONE); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2242 | if (ret) |
Thomas Hellstrom | 508108e | 2018-09-26 16:28:45 +0200 | [diff] [blame] | 2243 | return ret; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2244 | } |
| 2245 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2246 | binding.bi.ctx = ctx_node->ctx; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2247 | binding.bi.res = res; |
| 2248 | binding.bi.bt = vmw_ctx_binding_dx_shader; |
| 2249 | binding.shader_slot = cmd->body.type - SVGA3D_SHADERTYPE_MIN; |
| 2250 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2251 | vmw_binding_add(ctx_node->staged, &binding.bi, binding.shader_slot, 0); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2252 | |
Thomas Hellstrom | 508108e | 2018-09-26 16:28:45 +0200 | [diff] [blame] | 2253 | return 0; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2254 | } |
| 2255 | |
| 2256 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2257 | * vmw_cmd_dx_set_vertex_buffers - Validate SVGA_3D_CMD_DX_SET_VERTEX_BUFFERS
| 2258 | * command |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2259 | * |
| 2260 | * @dev_priv: Pointer to a device private struct. |
| 2261 | * @sw_context: The software context being used for this batch. |
| 2262 | * @header: Pointer to the command header in the command stream. |
| 2263 | */ |
| 2264 | static int vmw_cmd_dx_set_vertex_buffers(struct vmw_private *dev_priv, |
| 2265 | struct vmw_sw_context *sw_context, |
| 2266 | SVGA3dCmdHeader *header) |
| 2267 | { |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2268 | struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2269 | struct vmw_ctx_bindinfo_vb binding; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2270 | struct vmw_resource *res; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2271 | struct { |
| 2272 | SVGA3dCmdHeader header; |
| 2273 | SVGA3dCmdDXSetVertexBuffers body; |
| 2274 | SVGA3dVertexBuffer buf[]; |
| 2275 | } *cmd; |
| 2276 | int i, ret, num; |
| 2277 | |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2278 | if (!ctx_node) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2279 | return -EINVAL; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2280 | |
| 2281 | cmd = container_of(header, typeof(*cmd), header); |
| 2282 | num = (cmd->header.size - sizeof(cmd->body)) / |
| 2283 | sizeof(SVGA3dVertexBuffer); |
| 2284 | if ((u64)num + (u64)cmd->body.startBuffer > |
| 2285 | (u64)SVGA3D_DX_MAX_VERTEXBUFFERS) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 2286 | VMW_DEBUG_USER("Invalid number of vertex buffers.\n"); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2287 | return -EINVAL; |
| 2288 | } |
| 2289 | |
| 2290 | for (i = 0; i < num; i++) { |
| 2291 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2292 | VMW_RES_DIRTY_NONE, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2293 | user_surface_converter, |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2294 | &cmd->buf[i].sid, &res); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2295 | if (unlikely(ret != 0)) |
| 2296 | return ret; |
| 2297 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2298 | binding.bi.ctx = ctx_node->ctx; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2299 | binding.bi.bt = vmw_ctx_binding_vb; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2300 | binding.bi.res = res; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2301 | binding.offset = cmd->buf[i].offset; |
| 2302 | binding.stride = cmd->buf[i].stride; |
| 2303 | binding.slot = i + cmd->body.startBuffer; |
| 2304 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2305 | vmw_binding_add(ctx_node->staged, &binding.bi, 0, binding.slot); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2306 | } |
| 2307 | |
| 2308 | return 0; |
| 2309 | } |
| 2310 | |
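/*
 * Editor's note - illustrative, not driver code: the range check in
 * vmw_cmd_dx_set_vertex_buffers() is deliberately done in 64 bits.  Both
 * "num" (derived from header->size) and cmd->body.startBuffer are 32-bit,
 * user-controlled values, so
 *
 *	(u64)num + (u64)cmd->body.startBuffer > (u64)SVGA3D_DX_MAX_VERTEXBUFFERS
 *
 * cannot wrap, whereas the same comparison done in 32 bits could overflow and
 * sneak past the limit.  vmw_cmd_dx_set_shader_res() above uses the same
 * pattern for shader resource views.
 */
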
| 2311 | /** |
Zack Rusin | 2cd80db | 2021-05-05 15:10:07 -0400 | [diff] [blame] | 2312 | * vmw_cmd_dx_set_index_buffer - Validate |
Brian Paul | 8bd6287 | 2017-07-17 07:36:10 -0700 | [diff] [blame] | 2313 | * SVGA_3D_CMD_DX_IA_SET_INDEX_BUFFER command. |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2314 | * |
| 2315 | * @dev_priv: Pointer to a device private struct. |
| 2316 | * @sw_context: The software context being used for this batch. |
| 2317 | * @header: Pointer to the command header in the command stream. |
| 2318 | */ |
| 2319 | static int vmw_cmd_dx_set_index_buffer(struct vmw_private *dev_priv, |
| 2320 | struct vmw_sw_context *sw_context, |
| 2321 | SVGA3dCmdHeader *header) |
| 2322 | { |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2323 | struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2324 | struct vmw_ctx_bindinfo_ib binding; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2325 | struct vmw_resource *res; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2326 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXSetIndexBuffer); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2327 | int ret; |
| 2328 | |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2329 | if (!ctx_node) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2330 | return -EINVAL; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2331 | |
| 2332 | cmd = container_of(header, typeof(*cmd), header); |
| 2333 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2334 | VMW_RES_DIRTY_NONE, user_surface_converter, |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2335 | &cmd->body.sid, &res); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2336 | if (unlikely(ret != 0)) |
| 2337 | return ret; |
| 2338 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2339 | binding.bi.ctx = ctx_node->ctx; |
| 2340 | binding.bi.res = res; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2341 | binding.bi.bt = vmw_ctx_binding_ib; |
| 2342 | binding.offset = cmd->body.offset; |
| 2343 | binding.format = cmd->body.format; |
| 2344 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2345 | vmw_binding_add(ctx_node->staged, &binding.bi, 0, 0); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2346 | |
| 2347 | return 0; |
| 2348 | } |
| 2349 | |
| 2350 | /** |
Zack Rusin | 2cd80db | 2021-05-05 15:10:07 -0400 | [diff] [blame] | 2351 | * vmw_cmd_dx_set_rendertargets - Validate SVGA_3D_CMD_DX_SET_RENDERTARGETS |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2352 | * command |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2353 | * |
| 2354 | * @dev_priv: Pointer to a device private struct. |
| 2355 | * @sw_context: The software context being used for this batch. |
| 2356 | * @header: Pointer to the command header in the command stream. |
| 2357 | */ |
| 2358 | static int vmw_cmd_dx_set_rendertargets(struct vmw_private *dev_priv, |
| 2359 | struct vmw_sw_context *sw_context, |
| 2360 | SVGA3dCmdHeader *header) |
| 2361 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2362 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXSetRenderTargets) = |
| 2363 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2364 | u32 num_rt_view = (cmd->header.size - sizeof(cmd->body)) / |
| 2365 | sizeof(SVGA3dRenderTargetViewId); |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2366 | int ret; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2367 | |
| 2368 | if (num_rt_view > SVGA3D_MAX_SIMULTANEOUS_RENDER_TARGETS) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 2369 | VMW_DEBUG_USER("Invalid DX Rendertarget binding.\n"); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2370 | return -EINVAL; |
| 2371 | } |
| 2372 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2373 | ret = vmw_view_bindings_add(sw_context, vmw_view_ds, vmw_ctx_binding_ds, |
| 2374 | 0, &cmd->body.depthStencilViewId, 1, 0); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2375 | if (ret) |
| 2376 | return ret; |
| 2377 | |
| 2378 | return vmw_view_bindings_add(sw_context, vmw_view_rt, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2379 | vmw_ctx_binding_dx_rt, 0, (void *)&cmd[1], |
| 2380 | num_rt_view, 0); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2381 | } |
| 2382 | |
| 2383 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2384 | * vmw_cmd_dx_clear_rendertarget_view - Validate |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2385 | * SVGA_3D_CMD_DX_CLEAR_RENDERTARGET_VIEW command |
| 2386 | * |
| 2387 | * @dev_priv: Pointer to a device private struct. |
| 2388 | * @sw_context: The software context being used for this batch. |
| 2389 | * @header: Pointer to the command header in the command stream. |
| 2390 | */ |
| 2391 | static int vmw_cmd_dx_clear_rendertarget_view(struct vmw_private *dev_priv, |
| 2392 | struct vmw_sw_context *sw_context, |
| 2393 | SVGA3dCmdHeader *header) |
| 2394 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2395 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXClearRenderTargetView) = |
| 2396 | container_of(header, typeof(*cmd), header); |
Lukas Bulwahn | a26ca96 | 2019-12-08 11:53:28 +0100 | [diff] [blame] | 2397 | struct vmw_resource *ret; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2398 | |
Lukas Bulwahn | a26ca96 | 2019-12-08 11:53:28 +0100 | [diff] [blame] | 2399 | ret = vmw_view_id_val_add(sw_context, vmw_view_rt, |
| 2400 | cmd->body.renderTargetViewId); |
| 2401 | |
| 2402 | return PTR_ERR_OR_ZERO(ret); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2403 | } |
| 2404 | |
| 2405 | /** |
Zack Rusin | 2cd80db | 2021-05-05 15:10:07 -0400 | [diff] [blame] | 2406 | * vmw_cmd_dx_clear_depthstencil_view - Validate |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2407 | * SVGA_3D_CMD_DX_CLEAR_DEPTHSTENCIL_VIEW command |
| 2408 | * |
| 2409 | * @dev_priv: Pointer to a device private struct. |
| 2410 | * @sw_context: The software context being used for this batch. |
| 2411 | * @header: Pointer to the command header in the command stream. |
| 2412 | */ |
| 2413 | static int vmw_cmd_dx_clear_depthstencil_view(struct vmw_private *dev_priv, |
| 2414 | struct vmw_sw_context *sw_context, |
| 2415 | SVGA3dCmdHeader *header) |
| 2416 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2417 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXClearDepthStencilView) = |
| 2418 | container_of(header, typeof(*cmd), header); |
Lukas Bulwahn | a26ca96 | 2019-12-08 11:53:28 +0100 | [diff] [blame] | 2419 | struct vmw_resource *ret; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2420 | |
Lukas Bulwahn | a26ca96 | 2019-12-08 11:53:28 +0100 | [diff] [blame] | 2421 | ret = vmw_view_id_val_add(sw_context, vmw_view_ds, |
| 2422 | cmd->body.depthStencilViewId); |
| 2423 | |
| 2424 | return PTR_ERR_OR_ZERO(ret); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2425 | } |
| 2426 | |
| 2427 | static int vmw_cmd_dx_view_define(struct vmw_private *dev_priv, |
| 2428 | struct vmw_sw_context *sw_context, |
| 2429 | SVGA3dCmdHeader *header) |
| 2430 | { |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2431 | struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context); |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2432 | struct vmw_resource *srf; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2433 | struct vmw_resource *res; |
| 2434 | enum vmw_view_type view_type; |
| 2435 | int ret; |
| 2436 | /* |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2437 | * This is based on the fact that all affected define commands have the |
| 2438 | * same initial command body layout. |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2439 | */ |
| 2440 | struct { |
| 2441 | SVGA3dCmdHeader header; |
| 2442 | uint32 defined_id; |
| 2443 | uint32 sid; |
| 2444 | } *cmd; |
| 2445 | |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2446 | if (!ctx_node) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2447 | return -EINVAL; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2448 | |
| 2449 | view_type = vmw_view_cmd_to_type(header->id); |
Dan Carpenter | 0d9cac0 | 2018-01-10 12:40:04 +0300 | [diff] [blame] | 2450 | if (view_type == vmw_view_max) |
| 2451 | return -EINVAL; |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2452 | |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2453 | cmd = container_of(header, typeof(*cmd), header); |
Murray McAllister | bcd6aa7 | 2019-05-11 18:01:37 +1200 | [diff] [blame] | 2454 | if (unlikely(cmd->sid == SVGA3D_INVALID_ID)) { |
| 2455 | VMW_DEBUG_USER("Invalid surface id.\n"); |
| 2456 | return -EINVAL; |
| 2457 | } |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2458 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2459 | VMW_RES_DIRTY_NONE, user_surface_converter, |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2460 | &cmd->sid, &srf); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2461 | if (unlikely(ret != 0)) |
| 2462 | return ret; |
| 2463 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2464 | res = vmw_context_cotable(ctx_node->ctx, vmw_view_cotables[view_type]); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2465 | ret = vmw_cotable_notify(res, cmd->defined_id); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2466 | if (unlikely(ret != 0)) |
| 2467 | return ret; |
| 2468 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2469 | return vmw_view_add(sw_context->man, ctx_node->ctx, srf, view_type, |
| 2470 | cmd->defined_id, header, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2471 | header->size + sizeof(*header), |
| 2472 | &sw_context->staged_cmd_res); |
| 2473 | } |
| 2474 | |
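/*
 * Editor's note - illustrative sketch, not driver code: vmw_cmd_dx_view_define()
 * can serve every SVGA_3D_CMD_DX_DEFINE_*_VIEW opcode because those commands
 * all begin with the same two body members:
 */
#if 0	/* illustrative only */
struct dx_view_define_prefix_sketch {
	SVGA3dCmdHeader header;		/* header.id selects the view type */
	uint32 defined_id;		/* slot in the per-context view cotable */
	uint32 sid;			/* backing surface id */
};
#endif
/*
 * vmw_view_cmd_to_type(header->id) recovers the view type, vmw_cotable_notify()
 * lets the matching cotable grow to cover defined_id, and vmw_view_add() stages
 * the view while keeping a copy of the whole command (hence the
 * header->size + sizeof(*header) length passed above).
 */
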
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 2475 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2476 | * vmw_cmd_dx_set_so_targets - Validate SVGA_3D_CMD_DX_SET_SOTARGETS command. |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 2477 | * |
| 2478 | * @dev_priv: Pointer to a device private struct. |
| 2479 | * @sw_context: The software context being used for this batch. |
| 2480 | * @header: Pointer to the command header in the command stream. |
| 2481 | */ |
| 2482 | static int vmw_cmd_dx_set_so_targets(struct vmw_private *dev_priv, |
| 2483 | struct vmw_sw_context *sw_context, |
| 2484 | SVGA3dCmdHeader *header) |
| 2485 | { |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2486 | struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context); |
Deepak Rawat | 403fef5 | 2018-12-18 10:13:13 -0800 | [diff] [blame] | 2487 | struct vmw_ctx_bindinfo_so_target binding; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2488 | struct vmw_resource *res; |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 2489 | struct { |
| 2490 | SVGA3dCmdHeader header; |
| 2491 | SVGA3dCmdDXSetSOTargets body; |
| 2492 | SVGA3dSoTarget targets[]; |
| 2493 | } *cmd; |
| 2494 | int i, ret, num; |
| 2495 | |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2496 | if (!ctx_node) |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 2497 | return -EINVAL; |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 2498 | |
| 2499 | cmd = container_of(header, typeof(*cmd), header); |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2500 | num = (cmd->header.size - sizeof(cmd->body)) / sizeof(SVGA3dSoTarget); |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 2501 | |
| 2502 | if (num > SVGA3D_DX_MAX_SOTARGETS) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 2503 | VMW_DEBUG_USER("Invalid DX SO binding.\n"); |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 2504 | return -EINVAL; |
| 2505 | } |
| 2506 | |
| 2507 | for (i = 0; i < num; i++) { |
| 2508 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2509 | VMW_RES_DIRTY_SET, |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 2510 | user_surface_converter, |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2511 | &cmd->targets[i].sid, &res); |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 2512 | if (unlikely(ret != 0)) |
| 2513 | return ret; |
| 2514 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2515 | binding.bi.ctx = ctx_node->ctx; |
| 2516 | binding.bi.res = res; |
Zheng Yongjun | ed2684e | 2020-12-11 16:57:51 +0800 | [diff] [blame] | 2517 | binding.bi.bt = vmw_ctx_binding_so_target; |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 2518 | binding.offset = cmd->targets[i].offset; |
| 2519 | binding.size = cmd->targets[i].sizeInBytes; |
| 2520 | binding.slot = i; |
| 2521 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2522 | vmw_binding_add(ctx_node->staged, &binding.bi, 0, binding.slot); |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 2523 | } |
| 2524 | |
| 2525 | return 0; |
| 2526 | } |
| 2527 | |
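| | /** |
| | * vmw_cmd_dx_so_define - Validate SVGA_3D_CMD_DX_DEFINE_* commands that create |
| | * DX state objects backed by a context cotable entry. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |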
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2528 | static int vmw_cmd_dx_so_define(struct vmw_private *dev_priv, |
| 2529 | struct vmw_sw_context *sw_context, |
| 2530 | SVGA3dCmdHeader *header) |
| 2531 | { |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2532 | struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2533 | struct vmw_resource *res; |
| 2534 | /* |
| 2535 | * This is based on the fact that all affected define commands have |
| 2536 | * the same initial command body layout. |
| 2537 | */ |
| 2538 | struct { |
| 2539 | SVGA3dCmdHeader header; |
| 2540 | uint32 defined_id; |
| 2541 | } *cmd; |
| 2542 | enum vmw_so_type so_type; |
| 2543 | int ret; |
| 2544 | |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2545 | if (!ctx_node) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2546 | return -EINVAL; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2547 | |
| 2548 | so_type = vmw_so_cmd_to_type(header->id); |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2549 | res = vmw_context_cotable(ctx_node->ctx, vmw_so_cotables[so_type]); |
Zack Rusin | 7423104 | 2021-06-09 13:23:02 -0400 | [diff] [blame^] | 2550 | if (IS_ERR(res)) |
| 2551 | return PTR_ERR(res); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2552 | cmd = container_of(header, typeof(*cmd), header); |
| 2553 | ret = vmw_cotable_notify(res, cmd->defined_id); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2554 | |
| 2555 | return ret; |
| 2556 | } |
| 2557 | |
| 2558 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2559 | * vmw_cmd_dx_check_subresource - Validate SVGA_3D_CMD_DX_[X]_SUBRESOURCE |
| 2560 | * command |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2561 | * |
| 2562 | * @dev_priv: Pointer to a device private struct. |
| 2563 | * @sw_context: The software context being used for this batch. |
| 2564 | * @header: Pointer to the command header in the command stream. |
| 2565 | */ |
| 2566 | static int vmw_cmd_dx_check_subresource(struct vmw_private *dev_priv, |
| 2567 | struct vmw_sw_context *sw_context, |
| 2568 | SVGA3dCmdHeader *header) |
| 2569 | { |
| 2570 | struct { |
| 2571 | SVGA3dCmdHeader header; |
| 2572 | union { |
| 2573 | SVGA3dCmdDXReadbackSubResource r_body; |
| 2574 | SVGA3dCmdDXInvalidateSubResource i_body; |
| 2575 | SVGA3dCmdDXUpdateSubResource u_body; |
| 2576 | SVGA3dSurfaceId sid; |
| 2577 | }; |
| 2578 | } *cmd; |
| 2579 | |
| 2580 | BUILD_BUG_ON(offsetof(typeof(*cmd), r_body.sid) != |
| 2581 | offsetof(typeof(*cmd), sid)); |
| 2582 | BUILD_BUG_ON(offsetof(typeof(*cmd), i_body.sid) != |
| 2583 | offsetof(typeof(*cmd), sid)); |
| 2584 | BUILD_BUG_ON(offsetof(typeof(*cmd), u_body.sid) != |
| 2585 | offsetof(typeof(*cmd), sid)); |
| 2586 | |
| 2587 | cmd = container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2588 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2589 | VMW_RES_DIRTY_NONE, user_surface_converter, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2590 | &cmd->sid, NULL); |
| 2591 | } |
| 2592 | |
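| | /** |
| | * vmw_cmd_dx_cid_check - Validate a command that only requires the DX context |
| | * of this batch to be set. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |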
| 2593 | static int vmw_cmd_dx_cid_check(struct vmw_private *dev_priv, |
| 2594 | struct vmw_sw_context *sw_context, |
| 2595 | SVGA3dCmdHeader *header) |
| 2596 | { |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2597 | struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2598 | |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2599 | if (!ctx_node) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2600 | return -EINVAL; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2601 | |
| 2602 | return 0; |
| 2603 | } |
| 2604 | |
| 2605 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2606 | * vmw_cmd_dx_view_remove - Validate a view remove command and schedule the view |
| 2607 | * resource for removal. |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2608 | * |
| 2609 | * @dev_priv: Pointer to a device private struct. |
| 2610 | * @sw_context: The software context being used for this batch. |
| 2611 | * @header: Pointer to the command header in the command stream. |
| 2612 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2613 | * Check that the view exists, and if it was not created using this command |
| 2614 | * batch, conditionally make this command a NOP. |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2615 | */ |
| 2616 | static int vmw_cmd_dx_view_remove(struct vmw_private *dev_priv, |
| 2617 | struct vmw_sw_context *sw_context, |
| 2618 | SVGA3dCmdHeader *header) |
| 2619 | { |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2620 | struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2621 | struct { |
| 2622 | SVGA3dCmdHeader header; |
| 2623 | union vmw_view_destroy body; |
| 2624 | } *cmd = container_of(header, typeof(*cmd), header); |
| 2625 | enum vmw_view_type view_type = vmw_view_cmd_to_type(header->id); |
| 2626 | struct vmw_resource *view; |
| 2627 | int ret; |
| 2628 | |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2629 | if (!ctx_node) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2630 | return -EINVAL; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2631 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2632 | ret = vmw_view_remove(sw_context->man, cmd->body.view_id, view_type, |
| 2633 | &sw_context->staged_cmd_res, &view); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2634 | if (ret || !view) |
| 2635 | return ret; |
| 2636 | |
| 2637 | /* |
Thomas Hellstrom | a194403 | 2016-10-10 11:06:45 -0700 | [diff] [blame] | 2638 | * If the view wasn't created during this command batch, it might |
| 2639 | * have been removed due to a context swapout, so add a |
| 2640 | * relocation to conditionally make this command a NOP to avoid |
| 2641 | * device errors. |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2642 | */ |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2643 | return vmw_resource_relocation_add(sw_context, view, |
Thomas Hellstrom | a194403 | 2016-10-10 11:06:45 -0700 | [diff] [blame] | 2644 | vmw_ptr_diff(sw_context->buf_start, |
| 2645 | &cmd->header.id), |
| 2646 | vmw_res_rel_cond_nop); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2647 | } |
| 2648 | |
| 2649 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2650 | * vmw_cmd_dx_define_shader - Validate SVGA_3D_CMD_DX_DEFINE_SHADER command |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2651 | * |
| 2652 | * @dev_priv: Pointer to a device private struct. |
| 2653 | * @sw_context: The software context being used for this batch. |
| 2654 | * @header: Pointer to the command header in the command stream. |
| 2655 | */ |
| 2656 | static int vmw_cmd_dx_define_shader(struct vmw_private *dev_priv, |
| 2657 | struct vmw_sw_context *sw_context, |
| 2658 | SVGA3dCmdHeader *header) |
| 2659 | { |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2660 | struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2661 | struct vmw_resource *res; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2662 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXDefineShader) = |
| 2663 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2664 | int ret; |
| 2665 | |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2666 | if (!ctx_node) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2667 | return -EINVAL; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2668 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2669 | res = vmw_context_cotable(ctx_node->ctx, SVGA_COTABLE_DXSHADER); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2670 | ret = vmw_cotable_notify(res, cmd->body.shaderId); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2671 | if (ret) |
| 2672 | return ret; |
| 2673 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2674 | return vmw_dx_shader_add(sw_context->man, ctx_node->ctx, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2675 | cmd->body.shaderId, cmd->body.type, |
| 2676 | &sw_context->staged_cmd_res); |
| 2677 | } |
| 2678 | |
| 2679 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2680 | * vmw_cmd_dx_destroy_shader - Validate SVGA_3D_CMD_DX_DESTROY_SHADER command |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2681 | * |
| 2682 | * @dev_priv: Pointer to a device private struct. |
| 2683 | * @sw_context: The software context being used for this batch. |
| 2684 | * @header: Pointer to the command header in the command stream. |
| 2685 | */ |
| 2686 | static int vmw_cmd_dx_destroy_shader(struct vmw_private *dev_priv, |
| 2687 | struct vmw_sw_context *sw_context, |
| 2688 | SVGA3dCmdHeader *header) |
| 2689 | { |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2690 | struct vmw_ctx_validation_info *ctx_node = VMW_GET_CTX_NODE(sw_context); |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2691 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXDestroyShader) = |
| 2692 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2693 | int ret; |
| 2694 | |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2695 | if (!ctx_node) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2696 | return -EINVAL; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2697 | |
| 2698 | ret = vmw_shader_remove(sw_context->man, cmd->body.shaderId, 0, |
| 2699 | &sw_context->staged_cmd_res); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2700 | |
| 2701 | return ret; |
| 2702 | } |
| 2703 | |
| 2704 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2705 | * vmw_cmd_dx_bind_shader - Validate SVGA_3D_CMD_DX_BIND_SHADER command |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2706 | * |
| 2707 | * @dev_priv: Pointer to a device private struct. |
| 2708 | * @sw_context: The software context being used for this batch. |
| 2709 | * @header: Pointer to the command header in the command stream. |
| 2710 | */ |
| 2711 | static int vmw_cmd_dx_bind_shader(struct vmw_private *dev_priv, |
| 2712 | struct vmw_sw_context *sw_context, |
| 2713 | SVGA3dCmdHeader *header) |
| 2714 | { |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 2715 | struct vmw_resource *ctx; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2716 | struct vmw_resource *res; |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2717 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXBindShader) = |
| 2718 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2719 | int ret; |
| 2720 | |
| 2721 | if (cmd->body.cid != SVGA3D_INVALID_ID) { |
| 2722 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_context, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2723 | VMW_RES_DIRTY_SET, |
| 2724 | user_context_converter, &cmd->body.cid, |
| 2725 | &ctx); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2726 | if (ret) |
| 2727 | return ret; |
| 2728 | } else { |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2729 | struct vmw_ctx_validation_info *ctx_node = |
| 2730 | VMW_GET_CTX_NODE(sw_context); |
| 2731 | |
| 2732 | if (!ctx_node) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2733 | return -EINVAL; |
Deepak Rawat | 6f74fd9 | 2019-02-08 12:53:57 -0800 | [diff] [blame] | 2734 | |
| 2735 | ctx = ctx_node->ctx; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2736 | } |
| 2737 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2738 | res = vmw_shader_lookup(vmw_context_res_man(ctx), cmd->body.shid, 0); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2739 | if (IS_ERR(res)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 2740 | VMW_DEBUG_USER("Could not find shader to bind.\n"); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2741 | return PTR_ERR(res); |
| 2742 | } |
| 2743 | |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2744 | ret = vmw_execbuf_res_noctx_val_add(sw_context, res, |
| 2745 | VMW_RES_DIRTY_NONE); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2746 | if (ret) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 2747 | VMW_DEBUG_USER("Error creating resource validation node.\n"); |
Thomas Hellstrom | 508108e | 2018-09-26 16:28:45 +0200 | [diff] [blame] | 2748 | return ret; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2749 | } |
| 2750 | |
Thomas Hellstrom | 508108e | 2018-09-26 16:28:45 +0200 | [diff] [blame] | 2751 | return vmw_cmd_res_switch_backup(dev_priv, sw_context, res, |
| 2752 | &cmd->body.mobid, |
| 2753 | cmd->body.offsetInBytes); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 2754 | } |
| 2755 | |
Charmaine Lee | f3b33550 | 2016-02-12 08:11:56 +0100 | [diff] [blame] | 2756 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2757 | * vmw_cmd_dx_genmips - Validate SVGA_3D_CMD_DX_GENMIPS command |
Charmaine Lee | f3b33550 | 2016-02-12 08:11:56 +0100 | [diff] [blame] | 2758 | * |
| 2759 | * @dev_priv: Pointer to a device private struct. |
| 2760 | * @sw_context: The software context being used for this batch. |
| 2761 | * @header: Pointer to the command header in the command stream. |
| 2762 | */ |
| 2763 | static int vmw_cmd_dx_genmips(struct vmw_private *dev_priv, |
| 2764 | struct vmw_sw_context *sw_context, |
| 2765 | SVGA3dCmdHeader *header) |
| 2766 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2767 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXGenMips) = |
| 2768 | container_of(header, typeof(*cmd), header); |
Thomas Hellstrom | 75156a8 | 2021-05-04 23:57:36 -0400 | [diff] [blame] | 2769 | struct vmw_resource *view; |
| 2770 | struct vmw_res_cache_entry *rcache; |
Charmaine Lee | f3b33550 | 2016-02-12 08:11:56 +0100 | [diff] [blame] | 2771 | |
Thomas Hellstrom | 75156a8 | 2021-05-04 23:57:36 -0400 | [diff] [blame] | 2772 | view = vmw_view_id_val_add(sw_context, vmw_view_sr, |
| 2773 | cmd->body.shaderResourceViewId); |
| 2774 | if (IS_ERR(view)) |
| 2775 | return PTR_ERR(view); |
Lukas Bulwahn | a26ca96 | 2019-12-08 11:53:28 +0100 | [diff] [blame] | 2776 | |
Thomas Hellstrom | 75156a8 | 2021-05-04 23:57:36 -0400 | [diff] [blame] | 2777 | /* |
| 2778 | * Normally the shader-resource view is not gpu-dirtying, but for |
| 2779 | * this particular command it is... |
| 2780 | * So mark the last looked-up surface, which is the surface |
| 2781 | * the view points to, gpu-dirty. |
| 2782 | */ |
| 2783 | rcache = &sw_context->res_cache[vmw_res_surface]; |
| 2784 | vmw_validation_res_set_dirty(sw_context->ctx, rcache->private, |
| 2785 | VMW_RES_DIRTY_SET); |
| 2786 | return 0; |
Charmaine Lee | f3b33550 | 2016-02-12 08:11:56 +0100 | [diff] [blame] | 2787 | } |
| 2788 | |
Charmaine Lee | 1f982e4 | 2016-10-10 10:37:03 -0700 | [diff] [blame] | 2789 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2790 | * vmw_cmd_dx_transfer_from_buffer - Validate |
| 2791 | * SVGA_3D_CMD_DX_TRANSFER_FROM_BUFFER command |
Charmaine Lee | 1f982e4 | 2016-10-10 10:37:03 -0700 | [diff] [blame] | 2792 | * |
| 2793 | * @dev_priv: Pointer to a device private struct. |
| 2794 | * @sw_context: The software context being used for this batch. |
| 2795 | * @header: Pointer to the command header in the command stream. |
| 2796 | */ |
| 2797 | static int vmw_cmd_dx_transfer_from_buffer(struct vmw_private *dev_priv, |
| 2798 | struct vmw_sw_context *sw_context, |
| 2799 | SVGA3dCmdHeader *header) |
| 2800 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2801 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdDXTransferFromBuffer) = |
| 2802 | container_of(header, typeof(*cmd), header); |
Charmaine Lee | 1f982e4 | 2016-10-10 10:37:03 -0700 | [diff] [blame] | 2803 | int ret; |
| 2804 | |
| 2805 | ret = vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2806 | VMW_RES_DIRTY_NONE, user_surface_converter, |
Charmaine Lee | 1f982e4 | 2016-10-10 10:37:03 -0700 | [diff] [blame] | 2807 | &cmd->body.srcSid, NULL); |
| 2808 | if (ret != 0) |
| 2809 | return ret; |
| 2810 | |
| 2811 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2812 | VMW_RES_DIRTY_SET, user_surface_converter, |
Charmaine Lee | 1f982e4 | 2016-10-10 10:37:03 -0700 | [diff] [blame] | 2813 | &cmd->body.destSid, NULL); |
| 2814 | } |
| 2815 | |
Neha Bhende | 0d81d34 | 2018-06-18 17:14:56 -0700 | [diff] [blame] | 2816 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 2817 | * vmw_cmd_intra_surface_copy - Validate SVGA_3D_CMD_INTRA_SURFACE_COPY command |
Neha Bhende | 0d81d34 | 2018-06-18 17:14:56 -0700 | [diff] [blame] | 2818 | * |
| 2819 | * @dev_priv: Pointer to a device private struct. |
| 2820 | * @sw_context: The software context being used for this batch. |
| 2821 | * @header: Pointer to the command header in the command stream. |
| 2822 | */ |
| 2823 | static int vmw_cmd_intra_surface_copy(struct vmw_private *dev_priv, |
| 2824 | struct vmw_sw_context *sw_context, |
| 2825 | SVGA3dCmdHeader *header) |
| 2826 | { |
Deepak Rawat | d01316d | 2019-02-08 15:50:40 -0800 | [diff] [blame] | 2827 | VMW_DECLARE_CMD_VAR(*cmd, SVGA3dCmdIntraSurfaceCopy) = |
| 2828 | container_of(header, typeof(*cmd), header); |
Neha Bhende | 0d81d34 | 2018-06-18 17:14:56 -0700 | [diff] [blame] | 2829 | |
| 2830 | if (!(dev_priv->capabilities2 & SVGA_CAP2_INTRA_SURFACE_COPY)) |
| 2831 | return -EINVAL; |
| 2832 | |
| 2833 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 2834 | VMW_RES_DIRTY_SET, user_surface_converter, |
| 2835 | &cmd->body.surface.sid, NULL); |
Neha Bhende | 0d81d34 | 2018-06-18 17:14:56 -0700 | [diff] [blame] | 2836 | } |
| 2837 | |
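| | /** |
| | * vmw_cmd_sm5 - Validate a command that requires SM5 support but needs no |
| | * further checking. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |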
Deepak Rawat | b6fad73 | 2018-12-13 14:00:18 -0800 | [diff] [blame] | 2838 | static int vmw_cmd_sm5(struct vmw_private *dev_priv, |
| 2839 | struct vmw_sw_context *sw_context, |
| 2840 | SVGA3dCmdHeader *header) |
| 2841 | { |
| 2842 | if (!has_sm5_context(dev_priv)) |
| 2843 | return -EINVAL; |
| 2844 | |
| 2845 | return 0; |
| 2846 | } |
| 2847 | |
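| | /** |
| | * vmw_cmd_sm5_view_define - Validate an SM5 view define command and hand it |
| | * on to vmw_cmd_dx_view_define(). |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |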
Deepak Rawat | 5e8ec0d | 2018-12-13 13:51:08 -0800 | [diff] [blame] | 2848 | static int vmw_cmd_sm5_view_define(struct vmw_private *dev_priv, |
| 2849 | struct vmw_sw_context *sw_context, |
| 2850 | SVGA3dCmdHeader *header) |
| 2851 | { |
| 2852 | if (!has_sm5_context(dev_priv)) |
| 2853 | return -EINVAL; |
| 2854 | |
| 2855 | return vmw_cmd_dx_view_define(dev_priv, sw_context, header); |
| 2856 | } |
| 2857 | |
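| | /** |
| | * vmw_cmd_sm5_view_remove - Validate an SM5 view remove command and hand it |
| | * on to vmw_cmd_dx_view_remove(). |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |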
| 2858 | static int vmw_cmd_sm5_view_remove(struct vmw_private *dev_priv, |
| 2859 | struct vmw_sw_context *sw_context, |
| 2860 | SVGA3dCmdHeader *header) |
| 2861 | { |
| 2862 | if (!has_sm5_context(dev_priv)) |
| 2863 | return -EINVAL; |
| 2864 | |
| 2865 | return vmw_cmd_dx_view_remove(dev_priv, sw_context, header); |
| 2866 | } |
| 2867 | |
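| | /** |
| | * vmw_cmd_clear_uav_uint - Validate SVGA_3D_CMD_DX_CLEAR_UA_VIEW_UINT command. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |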
| 2868 | static int vmw_cmd_clear_uav_uint(struct vmw_private *dev_priv, |
| 2869 | struct vmw_sw_context *sw_context, |
| 2870 | SVGA3dCmdHeader *header) |
| 2871 | { |
| 2872 | struct { |
| 2873 | SVGA3dCmdHeader header; |
| 2874 | SVGA3dCmdDXClearUAViewUint body; |
| 2875 | } *cmd = container_of(header, typeof(*cmd), header); |
| 2876 | struct vmw_resource *ret; |
| 2877 | |
| 2878 | if (!has_sm5_context(dev_priv)) |
| 2879 | return -EINVAL; |
| 2880 | |
| 2881 | ret = vmw_view_id_val_add(sw_context, vmw_view_ua, |
| 2882 | cmd->body.uaViewId); |
| 2883 | |
| 2884 | return PTR_ERR_OR_ZERO(ret); |
| 2885 | } |
| 2886 | |
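| | /** |
| | * vmw_cmd_clear_uav_float - Validate SVGA_3D_CMD_DX_CLEAR_UA_VIEW_FLOAT |
| | * command. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |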
| 2887 | static int vmw_cmd_clear_uav_float(struct vmw_private *dev_priv, |
| 2888 | struct vmw_sw_context *sw_context, |
| 2889 | SVGA3dCmdHeader *header) |
| 2890 | { |
| 2891 | struct { |
| 2892 | SVGA3dCmdHeader header; |
| 2893 | SVGA3dCmdDXClearUAViewFloat body; |
| 2894 | } *cmd = container_of(header, typeof(*cmd), header); |
| 2895 | struct vmw_resource *ret; |
| 2896 | |
| 2897 | if (!has_sm5_context(dev_priv)) |
| 2898 | return -EINVAL; |
| 2899 | |
| 2900 | ret = vmw_view_id_val_add(sw_context, vmw_view_ua, |
| 2901 | cmd->body.uaViewId); |
| 2902 | |
| 2903 | return PTR_ERR_OR_ZERO(ret); |
| 2904 | } |
| 2905 | |
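| | /** |
| | * vmw_cmd_set_uav - Validate SVGA_3D_CMD_DX_SET_UA_VIEWS command. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |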
| 2906 | static int vmw_cmd_set_uav(struct vmw_private *dev_priv, |
| 2907 | struct vmw_sw_context *sw_context, |
| 2908 | SVGA3dCmdHeader *header) |
| 2909 | { |
| 2910 | struct { |
| 2911 | SVGA3dCmdHeader header; |
| 2912 | SVGA3dCmdDXSetUAViews body; |
| 2913 | } *cmd = container_of(header, typeof(*cmd), header); |
| 2914 | u32 num_uav = (cmd->header.size - sizeof(cmd->body)) / |
| 2915 | sizeof(SVGA3dUAViewId); |
| 2916 | int ret; |
| 2917 | |
| 2918 | if (!has_sm5_context(dev_priv)) |
| 2919 | return -EINVAL; |
| 2920 | |
| 2921 | if (num_uav > SVGA3D_MAX_UAVIEWS) { |
| 2922 | VMW_DEBUG_USER("Invalid UAV binding.\n"); |
| 2923 | return -EINVAL; |
| 2924 | } |
| 2925 | |
| 2926 | ret = vmw_view_bindings_add(sw_context, vmw_view_ua, |
| 2927 | vmw_ctx_binding_uav, 0, (void *)&cmd[1], |
| 2928 | num_uav, 0); |
| 2929 | if (ret) |
| 2930 | return ret; |
| 2931 | |
| 2932 | vmw_binding_add_uav_index(sw_context->dx_ctx_node->staged, 0, |
| 2933 | cmd->body.uavSpliceIndex); |
| 2934 | |
| 2935 | return ret; |
| 2936 | } |
| 2937 | |
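| | /** |
| | * vmw_cmd_set_cs_uav - Validate SVGA_3D_CMD_DX_SET_CS_UA_VIEWS command. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |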
| 2938 | static int vmw_cmd_set_cs_uav(struct vmw_private *dev_priv, |
| 2939 | struct vmw_sw_context *sw_context, |
| 2940 | SVGA3dCmdHeader *header) |
| 2941 | { |
| 2942 | struct { |
| 2943 | SVGA3dCmdHeader header; |
| 2944 | SVGA3dCmdDXSetCSUAViews body; |
| 2945 | } *cmd = container_of(header, typeof(*cmd), header); |
| 2946 | u32 num_uav = (cmd->header.size - sizeof(cmd->body)) / |
| 2947 | sizeof(SVGA3dUAViewId); |
| 2948 | int ret; |
| 2949 | |
| 2950 | if (!has_sm5_context(dev_priv)) |
| 2951 | return -EINVAL; |
| 2952 | |
| 2953 | if (num_uav > SVGA3D_MAX_UAVIEWS) { |
| 2954 | VMW_DEBUG_USER("Invalid UAV binding.\n"); |
| 2955 | return -EINVAL; |
| 2956 | } |
| 2957 | |
| 2958 | ret = vmw_view_bindings_add(sw_context, vmw_view_ua, |
| 2959 | vmw_ctx_binding_cs_uav, 0, (void *)&cmd[1], |
| 2960 | num_uav, 0); |
| 2961 | if (ret) |
| 2962 | return ret; |
| 2963 | |
| 2964 | vmw_binding_add_uav_index(sw_context->dx_ctx_node->staged, 1, |
| 2965 | cmd->body.startIndex); |
| 2966 | |
| 2967 | return ret; |
| 2968 | } |
| 2969 | |
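| | /** |
| | * vmw_cmd_dx_define_streamoutput - Validate |
| | * SVGA_3D_CMD_DX_DEFINE_STREAMOUTPUT_WITH_MOB command. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |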
Deepak Rawat | e8bead9 | 2018-12-13 14:04:31 -0800 | [diff] [blame] | 2970 | static int vmw_cmd_dx_define_streamoutput(struct vmw_private *dev_priv, |
| 2971 | struct vmw_sw_context *sw_context, |
| 2972 | SVGA3dCmdHeader *header) |
| 2973 | { |
| 2974 | struct vmw_ctx_validation_info *ctx_node = sw_context->dx_ctx_node; |
| 2975 | struct vmw_resource *res; |
| 2976 | struct { |
| 2977 | SVGA3dCmdHeader header; |
| 2978 | SVGA3dCmdDXDefineStreamOutputWithMob body; |
| 2979 | } *cmd = container_of(header, typeof(*cmd), header); |
| 2980 | int ret; |
| 2981 | |
| 2982 | if (!has_sm5_context(dev_priv)) |
| 2983 | return -EINVAL; |
| 2984 | |
| 2985 | if (!ctx_node) { |
| 2986 | DRM_ERROR("DX Context not set.\n"); |
| 2987 | return -EINVAL; |
| 2988 | } |
| 2989 | |
| 2990 | res = vmw_context_cotable(ctx_node->ctx, SVGA_COTABLE_STREAMOUTPUT); |
| 2991 | ret = vmw_cotable_notify(res, cmd->body.soid); |
| 2992 | if (ret) |
| 2993 | return ret; |
| 2994 | |
| 2995 | return vmw_dx_streamoutput_add(sw_context->man, ctx_node->ctx, |
| 2996 | cmd->body.soid, |
| 2997 | &sw_context->staged_cmd_res); |
| 2998 | } |
| 2999 | |
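| | /** |
| | * vmw_cmd_dx_destroy_streamoutput - Validate |
| | * SVGA_3D_CMD_DX_DESTROY_STREAMOUTPUT command. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |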
| 3000 | static int vmw_cmd_dx_destroy_streamoutput(struct vmw_private *dev_priv, |
| 3001 | struct vmw_sw_context *sw_context, |
| 3002 | SVGA3dCmdHeader *header) |
| 3003 | { |
| 3004 | struct vmw_ctx_validation_info *ctx_node = sw_context->dx_ctx_node; |
| 3005 | struct vmw_resource *res; |
| 3006 | struct { |
| 3007 | SVGA3dCmdHeader header; |
| 3008 | SVGA3dCmdDXDestroyStreamOutput body; |
| 3009 | } *cmd = container_of(header, typeof(*cmd), header); |
| 3010 | |
| 3011 | if (!ctx_node) { |
| 3012 | DRM_ERROR("DX Context not set.\n"); |
| 3013 | return -EINVAL; |
| 3014 | } |
| 3015 | |
| 3016 | /* |
| 3017 | * When the device does not support SM5, the streamoutput-with-mob commands |
| 3018 | * are not available to user-space. Simply return in this case. |
| 3019 | */ |
| 3020 | if (!has_sm5_context(dev_priv)) |
| 3021 | return 0; |
| 3022 | |
| 3023 | /* |
| 3024 | * On an SM5-capable device, if the lookup fails then user-space probably used |
| 3025 | * the old streamoutput define command. Return without an error. |
| 3026 | */ |
| 3027 | res = vmw_dx_streamoutput_lookup(vmw_context_res_man(ctx_node->ctx), |
| 3028 | cmd->body.soid); |
| 3029 | if (IS_ERR(res)) |
| 3030 | return 0; |
| 3031 | |
| 3032 | return vmw_dx_streamoutput_remove(sw_context->man, cmd->body.soid, |
| 3033 | &sw_context->staged_cmd_res); |
| 3034 | } |
| 3035 | |
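| | /** |
| | * vmw_cmd_dx_bind_streamoutput - Validate SVGA_3D_CMD_DX_BIND_STREAMOUTPUT |
| | * command. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |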
| 3036 | static int vmw_cmd_dx_bind_streamoutput(struct vmw_private *dev_priv, |
| 3037 | struct vmw_sw_context *sw_context, |
| 3038 | SVGA3dCmdHeader *header) |
| 3039 | { |
| 3040 | struct vmw_ctx_validation_info *ctx_node = sw_context->dx_ctx_node; |
| 3041 | struct vmw_resource *res; |
| 3042 | struct { |
| 3043 | SVGA3dCmdHeader header; |
| 3044 | SVGA3dCmdDXBindStreamOutput body; |
| 3045 | } *cmd = container_of(header, typeof(*cmd), header); |
| 3046 | int ret; |
| 3047 | |
| 3048 | if (!has_sm5_context(dev_priv)) |
| 3049 | return -EINVAL; |
| 3050 | |
| 3051 | if (!ctx_node) { |
| 3052 | DRM_ERROR("DX Context not set.\n"); |
| 3053 | return -EINVAL; |
| 3054 | } |
| 3055 | |
| 3056 | res = vmw_dx_streamoutput_lookup(vmw_context_res_man(ctx_node->ctx), |
| 3057 | cmd->body.soid); |
| 3058 | if (IS_ERR(res)) { |
Colin Ian King | 1ae96fc | 2020-08-05 12:31:55 +0100 | [diff] [blame] | 3059 | DRM_ERROR("Could not find streamoutput to bind.\n"); |
Deepak Rawat | e8bead9 | 2018-12-13 14:04:31 -0800 | [diff] [blame] | 3060 | return PTR_ERR(res); |
| 3061 | } |
| 3062 | |
| 3063 | vmw_dx_streamoutput_set_size(res, cmd->body.sizeInBytes); |
| 3064 | |
| 3065 | ret = vmw_execbuf_res_noctx_val_add(sw_context, res, |
| 3066 | VMW_RES_DIRTY_NONE); |
| 3067 | if (ret) { |
| 3068 | DRM_ERROR("Error creating resource validation node.\n"); |
| 3069 | return ret; |
| 3070 | } |
| 3071 | |
| 3072 | return vmw_cmd_res_switch_backup(dev_priv, sw_context, res, |
| 3073 | &cmd->body.mobid, |
| 3074 | cmd->body.offsetInBytes); |
| 3075 | } |
| 3076 | |
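| | /** |
| | * vmw_cmd_dx_set_streamoutput - Validate SVGA_3D_CMD_DX_SET_STREAMOUTPUT |
| | * command. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |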
| 3077 | static int vmw_cmd_dx_set_streamoutput(struct vmw_private *dev_priv, |
| 3078 | struct vmw_sw_context *sw_context, |
| 3079 | SVGA3dCmdHeader *header) |
| 3080 | { |
| 3081 | struct vmw_ctx_validation_info *ctx_node = sw_context->dx_ctx_node; |
| 3082 | struct vmw_resource *res; |
| 3083 | struct vmw_ctx_bindinfo_so binding; |
| 3084 | struct { |
| 3085 | SVGA3dCmdHeader header; |
| 3086 | SVGA3dCmdDXSetStreamOutput body; |
| 3087 | } *cmd = container_of(header, typeof(*cmd), header); |
| 3088 | int ret; |
| 3089 | |
| 3090 | if (!ctx_node) { |
| 3091 | DRM_ERROR("DX Context not set.\n"); |
| 3092 | return -EINVAL; |
| 3093 | } |
| 3094 | |
| 3095 | if (cmd->body.soid == SVGA3D_INVALID_ID) |
| 3096 | return 0; |
| 3097 | |
| 3098 | /* |
| 3099 | * When the device does not support SM5, the streamoutput-with-mob commands |
| 3100 | * are not available to user-space. Simply return in this case. |
| 3101 | */ |
| 3102 | if (!has_sm5_context(dev_priv)) |
| 3103 | return 0; |
| 3104 | |
| 3105 | /* |
| 3106 | * On an SM5-capable device, if the lookup fails then user-space probably used |
| 3107 | * the old streamoutput define command. Return without an error. |
| 3108 | */ |
| 3109 | res = vmw_dx_streamoutput_lookup(vmw_context_res_man(ctx_node->ctx), |
| 3110 | cmd->body.soid); |
| 3111 | if (IS_ERR(res)) |
| 3112 | return 0; |
| 3114 | |
| 3115 | ret = vmw_execbuf_res_noctx_val_add(sw_context, res, |
| 3116 | VMW_RES_DIRTY_NONE); |
| 3117 | if (ret) { |
| 3118 | DRM_ERROR("Error creating resource validation node.\n"); |
| 3119 | return ret; |
| 3120 | } |
| 3121 | |
| 3122 | binding.bi.ctx = ctx_node->ctx; |
| 3123 | binding.bi.res = res; |
| 3124 | binding.bi.bt = vmw_ctx_binding_so; |
| 3125 | binding.slot = 0; /* Only one SO set to context at a time. */ |
| 3126 | |
| 3127 | vmw_binding_add(sw_context->dx_ctx_node->staged, &binding.bi, 0, |
| 3128 | binding.slot); |
| 3129 | |
| 3130 | return ret; |
| 3131 | } |
| 3132 | |
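| | /** |
| | * vmw_cmd_indexed_instanced_indirect - Validate |
| | * SVGA_3D_CMD_DX_DRAW_INDEXED_INSTANCED_INDIRECT command. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |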
Deepak Rawat | b6fad73 | 2018-12-13 14:00:18 -0800 | [diff] [blame] | 3133 | static int vmw_cmd_indexed_instanced_indirect(struct vmw_private *dev_priv, |
| 3134 | struct vmw_sw_context *sw_context, |
| 3135 | SVGA3dCmdHeader *header) |
| 3136 | { |
| 3137 | struct vmw_draw_indexed_instanced_indirect_cmd { |
| 3138 | SVGA3dCmdHeader header; |
| 3139 | SVGA3dCmdDXDrawIndexedInstancedIndirect body; |
| 3140 | } *cmd = container_of(header, typeof(*cmd), header); |
| 3141 | |
| 3142 | if (!has_sm5_context(dev_priv)) |
| 3143 | return -EINVAL; |
| 3144 | |
| 3145 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
| 3146 | VMW_RES_DIRTY_NONE, user_surface_converter, |
| 3147 | &cmd->body.argsBufferSid, NULL); |
| 3148 | } |
| 3149 | |
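| | /** |
| | * vmw_cmd_instanced_indirect - Validate |
| | * SVGA_3D_CMD_DX_DRAW_INSTANCED_INDIRECT command. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |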
| 3150 | static int vmw_cmd_instanced_indirect(struct vmw_private *dev_priv, |
| 3151 | struct vmw_sw_context *sw_context, |
| 3152 | SVGA3dCmdHeader *header) |
| 3153 | { |
| 3154 | struct vmw_draw_instanced_indirect_cmd { |
| 3155 | SVGA3dCmdHeader header; |
| 3156 | SVGA3dCmdDXDrawInstancedIndirect body; |
| 3157 | } *cmd = container_of(header, typeof(*cmd), header); |
| 3158 | |
| 3159 | if (!has_sm5_context(dev_priv)) |
| 3160 | return -EINVAL; |
| 3161 | |
| 3162 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
| 3163 | VMW_RES_DIRTY_NONE, user_surface_converter, |
| 3164 | &cmd->body.argsBufferSid, NULL); |
| 3165 | } |
| 3166 | |
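| | /** |
| | * vmw_cmd_dispatch_indirect - Validate SVGA_3D_CMD_DX_DISPATCH_INDIRECT |
| | * command. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @header: Pointer to the command header in the command stream. |
| | */ |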
| 3167 | static int vmw_cmd_dispatch_indirect(struct vmw_private *dev_priv, |
| 3168 | struct vmw_sw_context *sw_context, |
| 3169 | SVGA3dCmdHeader *header) |
| 3170 | { |
| 3171 | struct vmw_dispatch_indirect_cmd { |
| 3172 | SVGA3dCmdHeader header; |
| 3173 | SVGA3dCmdDXDispatchIndirect body; |
| 3174 | } *cmd = container_of(header, typeof(*cmd), header); |
| 3175 | |
| 3176 | if (!has_sm5_context(dev_priv)) |
| 3177 | return -EINVAL; |
| 3178 | |
| 3179 | return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface, |
| 3180 | VMW_RES_DIRTY_NONE, user_surface_converter, |
| 3181 | &cmd->body.argsBufferSid, NULL); |
| 3182 | } |
| 3183 | |
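| | /** |
| | * vmw_cmd_check_not_3d - Validate a 2D (non-3D) SVGA command and determine |
| | * its size. |
| | * |
| | * @dev_priv: Pointer to a device private struct. |
| | * @sw_context: The software context being used for this batch. |
| | * @buf: Pointer to the command in the command stream. |
| | * @size: In: remaining size of the command stream. Out: size of this command. |
| | */ |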
Jakob Bornecrantz | 4084fb8 | 2011-10-04 20:13:19 +0200 | [diff] [blame] | 3184 | static int vmw_cmd_check_not_3d(struct vmw_private *dev_priv, |
| 3185 | struct vmw_sw_context *sw_context, |
| 3186 | void *buf, uint32_t *size) |
| 3187 | { |
| 3188 | uint32_t size_remaining = *size; |
Jakob Bornecrantz | 4084fb8 | 2011-10-04 20:13:19 +0200 | [diff] [blame] | 3189 | uint32_t cmd_id; |
| 3190 | |
Thomas Hellstrom | b9eb1a6 | 2015-04-02 02:39:45 -0700 | [diff] [blame] | 3191 | cmd_id = ((uint32_t *)buf)[0]; |
Jakob Bornecrantz | 4084fb8 | 2011-10-04 20:13:19 +0200 | [diff] [blame] | 3192 | switch (cmd_id) { |
| 3193 | case SVGA_CMD_UPDATE: |
| 3194 | *size = sizeof(uint32_t) + sizeof(SVGAFifoCmdUpdate); |
Jakob Bornecrantz | 4084fb8 | 2011-10-04 20:13:19 +0200 | [diff] [blame] | 3195 | break; |
| 3196 | case SVGA_CMD_DEFINE_GMRFB: |
| 3197 | *size = sizeof(uint32_t) + sizeof(SVGAFifoCmdDefineGMRFB); |
| 3198 | break; |
| 3199 | case SVGA_CMD_BLIT_GMRFB_TO_SCREEN: |
| 3200 | *size = sizeof(uint32_t) + sizeof(SVGAFifoCmdBlitGMRFBToScreen); |
| 3201 | break; |
| 3202 | case SVGA_CMD_BLIT_SCREEN_TO_GMRFB: |
| 3203 | *size = sizeof(uint32_t) + sizeof(SVGAFifoCmdBlitGMRFBToScreen); |
| 3204 | break; |
| 3205 | default: |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3206 | VMW_DEBUG_USER("Unsupported SVGA command: %u.\n", cmd_id); |
Jakob Bornecrantz | 4084fb8 | 2011-10-04 20:13:19 +0200 | [diff] [blame] | 3207 | return -EINVAL; |
| 3208 | } |
| 3209 | |
| 3210 | if (*size > size_remaining) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3211 | VMW_DEBUG_USER("Invalid SVGA command (size mismatch): %u.\n", |
| 3212 | cmd_id); |
Jakob Bornecrantz | 4084fb8 | 2011-10-04 20:13:19 +0200 | [diff] [blame] | 3213 | return -EINVAL; |
| 3214 | } |
| 3215 | |
Jakob Bornecrantz | 0cff60c | 2011-10-04 20:13:27 +0200 | [diff] [blame] | 3216 | if (unlikely(!sw_context->kernel)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3217 | VMW_DEBUG_USER("Kernel only SVGA command: %u.\n", cmd_id); |
Jakob Bornecrantz | 4084fb8 | 2011-10-04 20:13:19 +0200 | [diff] [blame] | 3218 | return -EPERM; |
| 3219 | } |
| 3220 | |
| 3221 | if (cmd_id == SVGA_CMD_DEFINE_GMRFB) |
| 3222 | return vmw_cmd_check_define_gmrfb(dev_priv, sw_context, buf); |
| 3223 | |
| 3224 | return 0; |
| 3225 | } |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3226 | |
Thomas Hellstrom | 4fbd9d2 | 2014-02-12 12:37:01 +0100 | [diff] [blame] | 3227 | static const struct vmw_cmd_entry vmw_cmd_entries[SVGA_3D_CMD_MAX] = { |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3228 | VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DEFINE, &vmw_cmd_invalid, |
| 3229 | false, false, false), |
| 3230 | VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DESTROY, &vmw_cmd_invalid, |
| 3231 | false, false, false), |
| 3232 | VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_COPY, &vmw_cmd_surface_copy_check, |
| 3233 | true, false, false), |
| 3234 | VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_STRETCHBLT, &vmw_cmd_stretch_blt_check, |
| 3235 | true, false, false), |
| 3236 | VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DMA, &vmw_cmd_dma, |
| 3237 | true, false, false), |
| 3238 | VMW_CMD_DEF(SVGA_3D_CMD_CONTEXT_DEFINE, &vmw_cmd_invalid, |
| 3239 | false, false, false), |
| 3240 | VMW_CMD_DEF(SVGA_3D_CMD_CONTEXT_DESTROY, &vmw_cmd_invalid, |
| 3241 | false, false, false), |
| 3242 | VMW_CMD_DEF(SVGA_3D_CMD_SETTRANSFORM, &vmw_cmd_cid_check, |
| 3243 | true, false, false), |
| 3244 | VMW_CMD_DEF(SVGA_3D_CMD_SETZRANGE, &vmw_cmd_cid_check, |
| 3245 | true, false, false), |
| 3246 | VMW_CMD_DEF(SVGA_3D_CMD_SETRENDERSTATE, &vmw_cmd_cid_check, |
| 3247 | true, false, false), |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3248 | VMW_CMD_DEF(SVGA_3D_CMD_SETRENDERTARGET, |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3249 | &vmw_cmd_set_render_target_check, true, false, false), |
| 3250 | VMW_CMD_DEF(SVGA_3D_CMD_SETTEXTURESTATE, &vmw_cmd_tex_state, |
| 3251 | true, false, false), |
| 3252 | VMW_CMD_DEF(SVGA_3D_CMD_SETMATERIAL, &vmw_cmd_cid_check, |
| 3253 | true, false, false), |
| 3254 | VMW_CMD_DEF(SVGA_3D_CMD_SETLIGHTDATA, &vmw_cmd_cid_check, |
| 3255 | true, false, false), |
| 3256 | VMW_CMD_DEF(SVGA_3D_CMD_SETLIGHTENABLED, &vmw_cmd_cid_check, |
| 3257 | true, false, false), |
| 3258 | VMW_CMD_DEF(SVGA_3D_CMD_SETVIEWPORT, &vmw_cmd_cid_check, |
| 3259 | true, false, false), |
| 3260 | VMW_CMD_DEF(SVGA_3D_CMD_SETCLIPPLANE, &vmw_cmd_cid_check, |
| 3261 | true, false, false), |
| 3262 | VMW_CMD_DEF(SVGA_3D_CMD_CLEAR, &vmw_cmd_cid_check, |
| 3263 | true, false, false), |
| 3264 | VMW_CMD_DEF(SVGA_3D_CMD_PRESENT, &vmw_cmd_present_check, |
| 3265 | false, false, false), |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 3266 | VMW_CMD_DEF(SVGA_3D_CMD_SHADER_DEFINE, &vmw_cmd_shader_define, |
| 3267 | true, false, false), |
| 3268 | VMW_CMD_DEF(SVGA_3D_CMD_SHADER_DESTROY, &vmw_cmd_shader_destroy, |
| 3269 | true, false, false), |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3270 | VMW_CMD_DEF(SVGA_3D_CMD_SET_SHADER, &vmw_cmd_set_shader, |
| 3271 | true, false, false), |
Thomas Hellstrom | 0ccbbae | 2014-01-30 11:13:43 +0100 | [diff] [blame] | 3272 | VMW_CMD_DEF(SVGA_3D_CMD_SET_SHADER_CONST, &vmw_cmd_set_shader_const, |
| 3273 | true, false, false), |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3274 | VMW_CMD_DEF(SVGA_3D_CMD_DRAW_PRIMITIVES, &vmw_cmd_draw, |
| 3275 | true, false, false), |
| 3276 | VMW_CMD_DEF(SVGA_3D_CMD_SETSCISSORRECT, &vmw_cmd_cid_check, |
| 3277 | true, false, false), |
| 3278 | VMW_CMD_DEF(SVGA_3D_CMD_BEGIN_QUERY, &vmw_cmd_begin_query, |
| 3279 | true, false, false), |
| 3280 | VMW_CMD_DEF(SVGA_3D_CMD_END_QUERY, &vmw_cmd_end_query, |
| 3281 | true, false, false), |
| 3282 | VMW_CMD_DEF(SVGA_3D_CMD_WAIT_FOR_QUERY, &vmw_cmd_wait_query, |
| 3283 | true, false, false), |
| 3284 | VMW_CMD_DEF(SVGA_3D_CMD_PRESENT_READBACK, &vmw_cmd_ok, |
| 3285 | true, false, false), |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3286 | VMW_CMD_DEF(SVGA_3D_CMD_BLIT_SURFACE_TO_SCREEN, |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3287 | &vmw_cmd_blt_surf_screen_check, false, false, false), |
| 3288 | VMW_CMD_DEF(SVGA_3D_CMD_SURFACE_DEFINE_V2, &vmw_cmd_invalid, |
| 3289 | false, false, false), |
| 3290 | VMW_CMD_DEF(SVGA_3D_CMD_GENERATE_MIPMAPS, &vmw_cmd_invalid, |
| 3291 | false, false, false), |
| 3292 | VMW_CMD_DEF(SVGA_3D_CMD_ACTIVATE_SURFACE, &vmw_cmd_invalid, |
| 3293 | false, false, false), |
| 3294 | VMW_CMD_DEF(SVGA_3D_CMD_DEACTIVATE_SURFACE, &vmw_cmd_invalid, |
| 3295 | false, false, false), |
| 3296 | VMW_CMD_DEF(SVGA_3D_CMD_SCREEN_DMA, &vmw_cmd_invalid, |
| 3297 | false, false, false), |
Deepak Rawat | dc75e73 | 2018-06-13 13:53:28 -0700 | [diff] [blame] | 3298 | VMW_CMD_DEF(SVGA_3D_CMD_DEAD1, &vmw_cmd_invalid, |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3299 | false, false, false), |
Deepak Rawat | dc75e73 | 2018-06-13 13:53:28 -0700 | [diff] [blame] | 3300 | VMW_CMD_DEF(SVGA_3D_CMD_DEAD2, &vmw_cmd_invalid, |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3301 | false, false, false), |
Deepak Rawat | 3d14395 | 2018-12-13 11:55:57 -0800 | [diff] [blame] | 3302 | VMW_CMD_DEF(SVGA_3D_CMD_DEAD12, &vmw_cmd_invalid, false, false, false), |
| 3303 | VMW_CMD_DEF(SVGA_3D_CMD_DEAD13, &vmw_cmd_invalid, false, false, false), |
| 3304 | VMW_CMD_DEF(SVGA_3D_CMD_DEAD14, &vmw_cmd_invalid, false, false, false), |
| 3305 | VMW_CMD_DEF(SVGA_3D_CMD_DEAD15, &vmw_cmd_invalid, false, false, false), |
| 3306 | VMW_CMD_DEF(SVGA_3D_CMD_DEAD16, &vmw_cmd_invalid, false, false, false), |
| 3307 | VMW_CMD_DEF(SVGA_3D_CMD_DEAD17, &vmw_cmd_invalid, false, false, false), |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3308 | VMW_CMD_DEF(SVGA_3D_CMD_SET_OTABLE_BASE, &vmw_cmd_invalid, |
| 3309 | false, false, true), |
| 3310 | VMW_CMD_DEF(SVGA_3D_CMD_READBACK_OTABLE, &vmw_cmd_invalid, |
| 3311 | false, false, true), |
| 3312 | VMW_CMD_DEF(SVGA_3D_CMD_DEFINE_GB_MOB, &vmw_cmd_invalid, |
| 3313 | false, false, true), |
| 3314 | VMW_CMD_DEF(SVGA_3D_CMD_DESTROY_GB_MOB, &vmw_cmd_invalid, |
| 3315 | false, false, true), |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 3316 | VMW_CMD_DEF(SVGA_3D_CMD_REDEFINE_GB_MOB64, &vmw_cmd_invalid, |
| 3317 | false, false, true), |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3318 | VMW_CMD_DEF(SVGA_3D_CMD_UPDATE_GB_MOB_MAPPING, &vmw_cmd_invalid, |
| 3319 | false, false, true), |
| 3320 | VMW_CMD_DEF(SVGA_3D_CMD_DEFINE_GB_SURFACE, &vmw_cmd_invalid, |
| 3321 | false, false, true), |
| 3322 | VMW_CMD_DEF(SVGA_3D_CMD_DESTROY_GB_SURFACE, &vmw_cmd_invalid, |
| 3323 | false, false, true), |
| 3324 | VMW_CMD_DEF(SVGA_3D_CMD_BIND_GB_SURFACE, &vmw_cmd_bind_gb_surface, |
| 3325 | true, false, true), |
| 3326 | VMW_CMD_DEF(SVGA_3D_CMD_COND_BIND_GB_SURFACE, &vmw_cmd_invalid, |
| 3327 | false, false, true), |
| 3328 | VMW_CMD_DEF(SVGA_3D_CMD_UPDATE_GB_IMAGE, &vmw_cmd_update_gb_image, |
| 3329 | true, false, true), |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 3330 | VMW_CMD_DEF(SVGA_3D_CMD_UPDATE_GB_SURFACE, |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3331 | &vmw_cmd_update_gb_surface, true, false, true), |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 3332 | VMW_CMD_DEF(SVGA_3D_CMD_READBACK_GB_IMAGE, |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3333 | &vmw_cmd_readback_gb_image, true, false, true), |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 3334 | VMW_CMD_DEF(SVGA_3D_CMD_READBACK_GB_SURFACE, |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3335 | &vmw_cmd_readback_gb_surface, true, false, true), |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 3336 | VMW_CMD_DEF(SVGA_3D_CMD_INVALIDATE_GB_IMAGE, |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3337 | &vmw_cmd_invalidate_gb_image, true, false, true), |
Thomas Hellstrom | a97e219 | 2012-11-21 11:45:13 +0100 | [diff] [blame] | 3338 | VMW_CMD_DEF(SVGA_3D_CMD_INVALIDATE_GB_SURFACE, |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3339 | &vmw_cmd_invalidate_gb_surface, true, false, true), |
| 3340 | VMW_CMD_DEF(SVGA_3D_CMD_DEFINE_GB_CONTEXT, &vmw_cmd_invalid, |
| 3341 | false, false, true), |
| 3342 | VMW_CMD_DEF(SVGA_3D_CMD_DESTROY_GB_CONTEXT, &vmw_cmd_invalid, |
| 3343 | false, false, true), |
| 3344 | VMW_CMD_DEF(SVGA_3D_CMD_BIND_GB_CONTEXT, &vmw_cmd_invalid, |
| 3345 | false, false, true), |
| 3346 | VMW_CMD_DEF(SVGA_3D_CMD_READBACK_GB_CONTEXT, &vmw_cmd_invalid, |
| 3347 | false, false, true), |
| 3348 | VMW_CMD_DEF(SVGA_3D_CMD_INVALIDATE_GB_CONTEXT, &vmw_cmd_invalid, |
| 3349 | false, false, true), |
| 3350 | VMW_CMD_DEF(SVGA_3D_CMD_DEFINE_GB_SHADER, &vmw_cmd_invalid, |
| 3351 | false, false, true), |
| 3352 | VMW_CMD_DEF(SVGA_3D_CMD_BIND_GB_SHADER, &vmw_cmd_bind_gb_shader, |
| 3353 | true, false, true), |
| 3354 | VMW_CMD_DEF(SVGA_3D_CMD_DESTROY_GB_SHADER, &vmw_cmd_invalid, |
| 3355 | false, false, true), |
Thomas Hellstrom | f2a0dcb | 2014-01-15 10:04:07 +0100 | [diff] [blame] | 3356 | VMW_CMD_DEF(SVGA_3D_CMD_SET_OTABLE_BASE64, &vmw_cmd_invalid, |
Thomas Hellstrom | 8ba0731 | 2013-10-08 02:25:35 -0700 | [diff] [blame] | 3357 | false, false, false), |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3358 | VMW_CMD_DEF(SVGA_3D_CMD_BEGIN_GB_QUERY, &vmw_cmd_begin_gb_query, |
| 3359 | true, false, true), |
| 3360 | VMW_CMD_DEF(SVGA_3D_CMD_END_GB_QUERY, &vmw_cmd_end_gb_query, |
| 3361 | true, false, true), |
| 3362 | VMW_CMD_DEF(SVGA_3D_CMD_WAIT_FOR_GB_QUERY, &vmw_cmd_wait_gb_query, |
| 3363 | true, false, true), |
| 3364 | VMW_CMD_DEF(SVGA_3D_CMD_NOP, &vmw_cmd_ok, |
| 3365 | true, false, true), |
Thomas Hellstrom | 5f55be5f | 2017-08-24 08:06:30 +0200 | [diff] [blame] | 3366 | VMW_CMD_DEF(SVGA_3D_CMD_NOP_ERROR, &vmw_cmd_ok, |
| 3367 | true, false, true), |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3368 | VMW_CMD_DEF(SVGA_3D_CMD_ENABLE_GART, &vmw_cmd_invalid, |
| 3369 | false, false, true), |
| 3370 | VMW_CMD_DEF(SVGA_3D_CMD_DISABLE_GART, &vmw_cmd_invalid, |
| 3371 | false, false, true), |
| 3372 | VMW_CMD_DEF(SVGA_3D_CMD_MAP_MOB_INTO_GART, &vmw_cmd_invalid, |
| 3373 | false, false, true), |
| 3374 | VMW_CMD_DEF(SVGA_3D_CMD_UNMAP_GART_RANGE, &vmw_cmd_invalid, |
| 3375 | false, false, true), |
| 3376 | VMW_CMD_DEF(SVGA_3D_CMD_DEFINE_GB_SCREENTARGET, &vmw_cmd_invalid, |
| 3377 | false, false, true), |
| 3378 | VMW_CMD_DEF(SVGA_3D_CMD_DESTROY_GB_SCREENTARGET, &vmw_cmd_invalid, |
| 3379 | false, false, true), |
| 3380 | VMW_CMD_DEF(SVGA_3D_CMD_BIND_GB_SCREENTARGET, &vmw_cmd_invalid, |
| 3381 | false, false, true), |
| 3382 | VMW_CMD_DEF(SVGA_3D_CMD_UPDATE_GB_SCREENTARGET, &vmw_cmd_invalid, |
| 3383 | false, false, true), |
| 3384 | VMW_CMD_DEF(SVGA_3D_CMD_READBACK_GB_IMAGE_PARTIAL, &vmw_cmd_invalid, |
| 3385 | false, false, true), |
| 3386 | VMW_CMD_DEF(SVGA_3D_CMD_INVALIDATE_GB_IMAGE_PARTIAL, &vmw_cmd_invalid, |
| 3387 | false, false, true), |
| 3388 | VMW_CMD_DEF(SVGA_3D_CMD_SET_GB_SHADERCONSTS_INLINE, &vmw_cmd_cid_check, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3389 | true, false, true), |
| 3390 | VMW_CMD_DEF(SVGA_3D_CMD_GB_SCREEN_DMA, &vmw_cmd_invalid, |
| 3391 | false, false, true), |
| 3392 | VMW_CMD_DEF(SVGA_3D_CMD_BIND_GB_SURFACE_WITH_PITCH, &vmw_cmd_invalid, |
| 3393 | false, false, true), |
| 3394 | VMW_CMD_DEF(SVGA_3D_CMD_GB_MOB_FENCE, &vmw_cmd_invalid, |
| 3395 | false, false, true), |
| 3396 | VMW_CMD_DEF(SVGA_3D_CMD_DEFINE_GB_SURFACE_V2, &vmw_cmd_invalid, |
| 3397 | false, false, true), |
| 3398 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3399 | /* SM commands */ |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3400 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_CONTEXT, &vmw_cmd_invalid, |
| 3401 | false, false, true), |
| 3402 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_CONTEXT, &vmw_cmd_invalid, |
| 3403 | false, false, true), |
| 3404 | VMW_CMD_DEF(SVGA_3D_CMD_DX_BIND_CONTEXT, &vmw_cmd_invalid, |
| 3405 | false, false, true), |
| 3406 | VMW_CMD_DEF(SVGA_3D_CMD_DX_READBACK_CONTEXT, &vmw_cmd_invalid, |
| 3407 | false, false, true), |
| 3408 | VMW_CMD_DEF(SVGA_3D_CMD_DX_INVALIDATE_CONTEXT, &vmw_cmd_invalid, |
| 3409 | false, false, true), |
| 3410 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_SINGLE_CONSTANT_BUFFER, |
| 3411 | &vmw_cmd_dx_set_single_constant_buffer, true, false, true), |
| 3412 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_SHADER_RESOURCES, |
| 3413 | &vmw_cmd_dx_set_shader_res, true, false, true), |
| 3414 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_SHADER, &vmw_cmd_dx_set_shader, |
| 3415 | true, false, true), |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 3416 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_SAMPLERS, &vmw_cmd_dx_cid_check, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3417 | true, false, true), |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 3418 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DRAW, &vmw_cmd_dx_cid_check, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3419 | true, false, true), |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 3420 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DRAW_INDEXED, &vmw_cmd_dx_cid_check, |
| 3421 | true, false, true), |
| 3422 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DRAW_INSTANCED, &vmw_cmd_dx_cid_check, |
| 3423 | true, false, true), |
| 3424 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DRAW_INDEXED_INSTANCED, |
| 3425 | &vmw_cmd_dx_cid_check, true, false, true), |
| 3426 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DRAW_AUTO, &vmw_cmd_dx_cid_check, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3427 | true, false, true), |
| 3428 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_VERTEX_BUFFERS, |
| 3429 | &vmw_cmd_dx_set_vertex_buffers, true, false, true), |
| 3430 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_INDEX_BUFFER, |
| 3431 | &vmw_cmd_dx_set_index_buffer, true, false, true), |
| 3432 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_RENDERTARGETS, |
| 3433 | &vmw_cmd_dx_set_rendertargets, true, false, true), |
| 3434 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_BLEND_STATE, &vmw_cmd_dx_cid_check, |
| 3435 | true, false, true), |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3436 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_DEPTHSTENCIL_STATE, |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 3437 | &vmw_cmd_dx_cid_check, true, false, true), |
| 3438 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_RASTERIZER_STATE, |
| 3439 | &vmw_cmd_dx_cid_check, true, false, true), |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 3440 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_QUERY, &vmw_cmd_dx_define_query, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3441 | true, false, true), |
Charmaine Lee | e02e588 | 2016-04-12 08:19:08 -0700 | [diff] [blame] | 3442 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_QUERY, &vmw_cmd_dx_cid_check, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3443 | true, false, true), |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 3444 | VMW_CMD_DEF(SVGA_3D_CMD_DX_BIND_QUERY, &vmw_cmd_dx_bind_query, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3445 | true, false, true), |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 3446 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_QUERY_OFFSET, |
Charmaine Lee | e02e588 | 2016-04-12 08:19:08 -0700 | [diff] [blame] | 3447 | &vmw_cmd_dx_cid_check, true, false, true), |
| 3448 | VMW_CMD_DEF(SVGA_3D_CMD_DX_BEGIN_QUERY, &vmw_cmd_dx_cid_check, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3449 | true, false, true), |
Charmaine Lee | e02e588 | 2016-04-12 08:19:08 -0700 | [diff] [blame] | 3450 | VMW_CMD_DEF(SVGA_3D_CMD_DX_END_QUERY, &vmw_cmd_dx_cid_check, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3451 | true, false, true), |
| 3452 | VMW_CMD_DEF(SVGA_3D_CMD_DX_READBACK_QUERY, &vmw_cmd_invalid, |
| 3453 | true, false, true), |
Charmaine Lee | 1883598 | 2016-04-12 08:14:23 -0700 | [diff] [blame] | 3454 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_PREDICATION, &vmw_cmd_dx_cid_check, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3455 | true, false, true), |
| 3456 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_VIEWPORTS, &vmw_cmd_dx_cid_check, |
| 3457 | true, false, true), |
| 3458 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_SCISSORRECTS, &vmw_cmd_dx_cid_check, |
| 3459 | true, false, true), |
| 3460 | VMW_CMD_DEF(SVGA_3D_CMD_DX_CLEAR_RENDERTARGET_VIEW, |
| 3461 | &vmw_cmd_dx_clear_rendertarget_view, true, false, true), |
| 3462 | VMW_CMD_DEF(SVGA_3D_CMD_DX_CLEAR_DEPTHSTENCIL_VIEW, |
| 3463 | &vmw_cmd_dx_clear_depthstencil_view, true, false, true), |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3464 | VMW_CMD_DEF(SVGA_3D_CMD_DX_PRED_COPY, &vmw_cmd_invalid, |
| 3465 | true, false, true), |
Charmaine Lee | f3b33550 | 2016-02-12 08:11:56 +0100 | [diff] [blame] | 3466 | VMW_CMD_DEF(SVGA_3D_CMD_DX_GENMIPS, &vmw_cmd_dx_genmips, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3467 | true, false, true), |
| 3468 | VMW_CMD_DEF(SVGA_3D_CMD_DX_UPDATE_SUBRESOURCE, |
| 3469 | &vmw_cmd_dx_check_subresource, true, false, true), |
| 3470 | VMW_CMD_DEF(SVGA_3D_CMD_DX_READBACK_SUBRESOURCE, |
| 3471 | &vmw_cmd_dx_check_subresource, true, false, true), |
| 3472 | VMW_CMD_DEF(SVGA_3D_CMD_DX_INVALIDATE_SUBRESOURCE, |
| 3473 | &vmw_cmd_dx_check_subresource, true, false, true), |
| 3474 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_SHADERRESOURCE_VIEW, |
| 3475 | &vmw_cmd_dx_view_define, true, false, true), |
| 3476 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_SHADERRESOURCE_VIEW, |
| 3477 | &vmw_cmd_dx_view_remove, true, false, true), |
| 3478 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_RENDERTARGET_VIEW, |
| 3479 | &vmw_cmd_dx_view_define, true, false, true), |
| 3480 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_RENDERTARGET_VIEW, |
| 3481 | &vmw_cmd_dx_view_remove, true, false, true), |
| 3482 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_DEPTHSTENCIL_VIEW, |
| 3483 | &vmw_cmd_dx_view_define, true, false, true), |
| 3484 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_DEPTHSTENCIL_VIEW, |
| 3485 | &vmw_cmd_dx_view_remove, true, false, true), |
| 3486 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_ELEMENTLAYOUT, |
| 3487 | &vmw_cmd_dx_so_define, true, false, true), |
| 3488 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_ELEMENTLAYOUT, |
| 3489 | &vmw_cmd_dx_cid_check, true, false, true), |
| 3490 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_BLEND_STATE, |
| 3491 | &vmw_cmd_dx_so_define, true, false, true), |
| 3492 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_BLEND_STATE, |
| 3493 | &vmw_cmd_dx_cid_check, true, false, true), |
| 3494 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_DEPTHSTENCIL_STATE, |
| 3495 | &vmw_cmd_dx_so_define, true, false, true), |
| 3496 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_DEPTHSTENCIL_STATE, |
| 3497 | &vmw_cmd_dx_cid_check, true, false, true), |
| 3498 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_RASTERIZER_STATE, |
| 3499 | &vmw_cmd_dx_so_define, true, false, true), |
| 3500 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_RASTERIZER_STATE, |
| 3501 | &vmw_cmd_dx_cid_check, true, false, true), |
| 3502 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_SAMPLER_STATE, |
| 3503 | &vmw_cmd_dx_so_define, true, false, true), |
| 3504 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_SAMPLER_STATE, |
| 3505 | &vmw_cmd_dx_cid_check, true, false, true), |
| 3506 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_SHADER, |
| 3507 | &vmw_cmd_dx_define_shader, true, false, true), |
| 3508 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_SHADER, |
| 3509 | &vmw_cmd_dx_destroy_shader, true, false, true), |
| 3510 | VMW_CMD_DEF(SVGA_3D_CMD_DX_BIND_SHADER, |
| 3511 | &vmw_cmd_dx_bind_shader, true, false, true), |
| 3512 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_STREAMOUTPUT, |
| 3513 | &vmw_cmd_dx_so_define, true, false, true), |
| 3514 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_STREAMOUTPUT, |
Deepak Rawat | e8bead9 | 2018-12-13 14:04:31 -0800 | [diff] [blame] | 3515 | &vmw_cmd_dx_destroy_streamoutput, true, false, true), |
| 3516 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_STREAMOUTPUT, |
| 3517 | &vmw_cmd_dx_set_streamoutput, true, false, true), |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 3518 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_SOTARGETS, |
| 3519 | &vmw_cmd_dx_set_so_targets, true, false, true), |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3520 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_INPUT_LAYOUT, |
| 3521 | &vmw_cmd_dx_cid_check, true, false, true), |
| 3522 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_TOPOLOGY, |
| 3523 | &vmw_cmd_dx_cid_check, true, false, true), |
Neha Bhende | 0fca749e | 2015-08-10 10:51:07 -0700 | [diff] [blame] | 3524 | VMW_CMD_DEF(SVGA_3D_CMD_DX_BUFFER_COPY, |
| 3525 | &vmw_cmd_buffer_copy_check, true, false, true), |
| 3526 | VMW_CMD_DEF(SVGA_3D_CMD_DX_PRED_COPY_REGION, |
| 3527 | &vmw_cmd_pred_copy_check, true, false, true), |
Charmaine Lee | 1f982e4 | 2016-10-10 10:37:03 -0700 | [diff] [blame] | 3528 | VMW_CMD_DEF(SVGA_3D_CMD_DX_TRANSFER_FROM_BUFFER, |
| 3529 | &vmw_cmd_dx_transfer_from_buffer, |
| 3530 | true, false, true), |
Neha Bhende | 0d81d34 | 2018-06-18 17:14:56 -0700 | [diff] [blame] | 3531 | VMW_CMD_DEF(SVGA_3D_CMD_INTRA_SURFACE_COPY, &vmw_cmd_intra_surface_copy, |
| 3532 | true, false, true), |
Deepak Rawat | 5e8ec0d | 2018-12-13 13:51:08 -0800 | [diff] [blame] | 3533 | |
| 3534 | /* |
| 3535 | * SM5 commands |
| 3536 | */ |
| 3537 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_UA_VIEW, &vmw_cmd_sm5_view_define, |
| 3538 | true, false, true), |
| 3539 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DESTROY_UA_VIEW, &vmw_cmd_sm5_view_remove, |
| 3540 | true, false, true), |
| 3541 | VMW_CMD_DEF(SVGA_3D_CMD_DX_CLEAR_UA_VIEW_UINT, &vmw_cmd_clear_uav_uint, |
| 3542 | true, false, true), |
| 3543 | VMW_CMD_DEF(SVGA_3D_CMD_DX_CLEAR_UA_VIEW_FLOAT, |
| 3544 | &vmw_cmd_clear_uav_float, true, false, true), |
| 3545 | VMW_CMD_DEF(SVGA_3D_CMD_DX_COPY_STRUCTURE_COUNT, &vmw_cmd_invalid, true, |
| 3546 | false, true), |
| 3547 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_UA_VIEWS, &vmw_cmd_set_uav, true, false, |
| 3548 | true), |
Deepak Rawat | b6fad73 | 2018-12-13 14:00:18 -0800 | [diff] [blame] | 3549 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DRAW_INDEXED_INSTANCED_INDIRECT, |
| 3550 | &vmw_cmd_indexed_instanced_indirect, true, false, true), |
| 3551 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DRAW_INSTANCED_INDIRECT, |
| 3552 | &vmw_cmd_instanced_indirect, true, false, true), |
| 3553 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DISPATCH, &vmw_cmd_sm5, true, false, true), |
| 3554 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DISPATCH_INDIRECT, |
| 3555 | &vmw_cmd_dispatch_indirect, true, false, true), |
Deepak Rawat | 5e8ec0d | 2018-12-13 13:51:08 -0800 | [diff] [blame] | 3556 | VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_CS_UA_VIEWS, &vmw_cmd_set_cs_uav, true, |
| 3557 | false, true), |
Deepak Rawat | b6fad73 | 2018-12-13 14:00:18 -0800 | [diff] [blame] | 3558 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_DEPTHSTENCIL_VIEW_V2, |
| 3559 | &vmw_cmd_sm5_view_define, true, false, true), |
Deepak Rawat | e8bead9 | 2018-12-13 14:04:31 -0800 | [diff] [blame] | 3560 | VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_STREAMOUTPUT_WITH_MOB, |
| 3561 | &vmw_cmd_dx_define_streamoutput, true, false, true), |
| 3562 | VMW_CMD_DEF(SVGA_3D_CMD_DX_BIND_STREAMOUTPUT, |
| 3563 | &vmw_cmd_dx_bind_streamoutput, true, false, true), |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3564 | }; |
| 3565 | |
Thomas Hellstrom | 65b97a2 | 2017-08-24 08:06:29 +0200 | [diff] [blame] | 3566 | bool vmw_cmd_describe(const void *buf, u32 *size, char const **cmd) |
| 3567 | { |
| 3568 | u32 cmd_id = ((u32 *) buf)[0]; |
| 3569 | |
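	/* Command ids at or above SVGA_CMD_MAX are 3D commands with a header. */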
| 3570 | if (cmd_id >= SVGA_CMD_MAX) { |
| 3571 | SVGA3dCmdHeader *header = (SVGA3dCmdHeader *) buf; |
| 3572 | const struct vmw_cmd_entry *entry; |
| 3573 | |
| 3574 | *size = header->size + sizeof(SVGA3dCmdHeader); |
| 3575 | cmd_id = header->id; |
| 3576 | if (cmd_id >= SVGA_3D_CMD_MAX) |
| 3577 | return false; |
| 3578 | |
| 3579 | cmd_id -= SVGA_3D_CMD_BASE; |
| 3580 | entry = &vmw_cmd_entries[cmd_id]; |
| 3581 | *cmd = entry->cmd_name; |
| 3582 | return true; |
| 3583 | } |
| 3584 | |
| 3585 | switch (cmd_id) { |
| 3586 | case SVGA_CMD_UPDATE: |
| 3587 | *cmd = "SVGA_CMD_UPDATE"; |
| 3588 | *size = sizeof(u32) + sizeof(SVGAFifoCmdUpdate); |
| 3589 | break; |
| 3590 | case SVGA_CMD_DEFINE_GMRFB: |
| 3591 | *cmd = "SVGA_CMD_DEFINE_GMRFB"; |
| 3592 | *size = sizeof(u32) + sizeof(SVGAFifoCmdDefineGMRFB); |
| 3593 | break; |
| 3594 | case SVGA_CMD_BLIT_GMRFB_TO_SCREEN: |
| 3595 | *cmd = "SVGA_CMD_BLIT_GMRFB_TO_SCREEN"; |
| 3596 | *size = sizeof(u32) + sizeof(SVGAFifoCmdBlitGMRFBToScreen); |
| 3597 | break; |
| 3598 | case SVGA_CMD_BLIT_SCREEN_TO_GMRFB: |
| 3599 | *cmd = "SVGA_CMD_BLIT_SCREEN_TO_GMRFB"; |
| 3600 | *size = sizeof(u32) + sizeof(SVGAFifoCmdBlitGMRFBToScreen); |
| 3601 | break; |
| 3602 | default: |
| 3603 | *cmd = "UNKNOWN"; |
| 3604 | *size = 0; |
| 3605 | return false; |
| 3606 | } |
| 3607 | |
| 3608 | return true; |
| 3609 | } |
| 3610 | |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3611 | static int vmw_cmd_check(struct vmw_private *dev_priv, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3612 | struct vmw_sw_context *sw_context, void *buf, |
| 3613 | uint32_t *size) |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3614 | { |
| 3615 | uint32_t cmd_id; |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 3616 | uint32_t size_remaining = *size; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3617 | SVGA3dCmdHeader *header = (SVGA3dCmdHeader *) buf; |
| 3618 | int ret; |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3619 | const struct vmw_cmd_entry *entry; |
| 3620 | bool gb = dev_priv->capabilities & SVGA_CAP_GBOBJECTS; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3621 | |
Thomas Hellstrom | b9eb1a6 | 2015-04-02 02:39:45 -0700 | [diff] [blame] | 3622 | cmd_id = ((uint32_t *)buf)[0]; |
Jakob Bornecrantz | 4084fb8 | 2011-10-04 20:13:19 +0200 | [diff] [blame] | 3623 | /* Handle any non-3D commands. */
| 3624 | if (unlikely(cmd_id < SVGA_CMD_MAX)) |
| 3625 | return vmw_cmd_check_not_3d(dev_priv, sw_context, buf, size); |
| 3626 | 
Thomas Hellstrom | b9eb1a6 | 2015-04-02 02:39:45 -0700 | [diff] [blame] | 3628 | cmd_id = header->id; |
| 3629 | *size = header->size + sizeof(SVGA3dCmdHeader); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3630 | |
| 3631 | cmd_id -= SVGA_3D_CMD_BASE; |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 3632 | if (unlikely(*size > size_remaining)) |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3633 | goto out_invalid; |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 3634 | |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3635 | if (unlikely(cmd_id >= SVGA_3D_CMD_MAX - SVGA_3D_CMD_BASE)) |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3636 | goto out_invalid; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3637 | |
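	/*
	 * Look up the per-command verifier entry and enforce its privilege and
	 * guest-backed object constraints before dispatching.
	 */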
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3638 | entry = &vmw_cmd_entries[cmd_id]; |
Thomas Hellstrom | 36e952c | 2014-02-12 13:19:36 +0100 | [diff] [blame] | 3639 | if (unlikely(!entry->func)) |
| 3640 | goto out_invalid; |
| 3641 | |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3642 | if (unlikely(!entry->user_allow && !sw_context->kernel)) |
| 3643 | goto out_privileged; |
| 3644 | |
| 3645 | if (unlikely(entry->gb_disable && gb)) |
| 3646 | goto out_old; |
| 3647 | |
| 3648 | if (unlikely(entry->gb_enable && !gb)) |
| 3649 | goto out_new; |
| 3650 | |
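	/* Dispatch to the command-specific verifier. */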
| 3651 | ret = entry->func(dev_priv, sw_context, header); |
Deepak Rawat | 45399b1 | 2019-02-11 12:57:38 -0800 | [diff] [blame] | 3652 | if (unlikely(ret != 0)) { |
| 3653 | VMW_DEBUG_USER("SVGA3D command: %d failed with error %d\n", |
| 3654 | cmd_id + SVGA_3D_CMD_BASE, ret); |
| 3655 | return ret; |
| 3656 | } |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3657 | |
| 3658 | return 0; |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3659 | out_invalid: |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3660 | VMW_DEBUG_USER("Invalid SVGA3D command: %d\n", |
| 3661 | cmd_id + SVGA_3D_CMD_BASE); |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3662 | return -EINVAL; |
| 3663 | out_privileged: |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3664 | VMW_DEBUG_USER("Privileged SVGA3D command: %d\n", |
| 3665 | cmd_id + SVGA_3D_CMD_BASE); |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3666 | return -EPERM; |
| 3667 | out_old: |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3668 | VMW_DEBUG_USER("Deprecated (disallowed) SVGA3D command: %d\n", |
| 3669 | cmd_id + SVGA_3D_CMD_BASE); |
Thomas Hellstrom | c373d4e | 2012-11-21 12:22:35 +0100 | [diff] [blame] | 3670 | return -EINVAL; |
| 3671 | out_new: |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3672 | VMW_DEBUG_USER("SVGA3D command: %d not supported by virtual device.\n", |
| 3673 | cmd_id + SVGA_3D_CMD_BASE); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3674 | return -EINVAL; |
| 3675 | } |
| 3676 | |
| 3677 | static int vmw_cmd_check_all(struct vmw_private *dev_priv, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3678 | struct vmw_sw_context *sw_context, void *buf, |
Thomas Hellstrom | be38ab6 | 2011-08-31 07:42:54 +0000 | [diff] [blame] | 3679 | uint32_t size) |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3680 | { |
| 3681 | int32_t cur_size = size; |
| 3682 | int ret; |
| 3683 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 3684 | sw_context->buf_start = buf; |
| 3685 | |
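	/*
	 * Walk the batch one command at a time; vmw_cmd_check() verifies each
	 * command and updates size to that command's length so the loop can
	 * advance to the next one.
	 */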
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3686 | while (cur_size > 0) { |
Thomas Hellstrom | 7a73ba7 | 2009-12-22 16:53:41 +0100 | [diff] [blame] | 3687 | size = cur_size; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3688 | ret = vmw_cmd_check(dev_priv, sw_context, buf, &size); |
| 3689 | if (unlikely(ret != 0)) |
| 3690 | return ret; |
| 3691 | buf = (void *)((unsigned long) buf + size); |
| 3692 | cur_size -= size; |
| 3693 | } |
| 3694 | |
| 3695 | if (unlikely(cur_size != 0)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3696 | VMW_DEBUG_USER("Command verifier out of sync.\n"); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3697 | return -EINVAL; |
| 3698 | } |
| 3699 | |
| 3700 | return 0; |
| 3701 | } |
| 3702 | |
| 3703 | static void vmw_free_relocations(struct vmw_sw_context *sw_context) |
| 3704 | { |
Thomas Hellstrom | fc18afc | 2018-09-26 15:36:52 +0200 | [diff] [blame] | 3705 | /* Memory is validation context memory, so no need to free it */ |
Thomas Hellstrom | fc18afc | 2018-09-26 15:36:52 +0200 | [diff] [blame] | 3706 | INIT_LIST_HEAD(&sw_context->bo_relocations); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3707 | } |
| 3708 | |
| 3709 | static void vmw_apply_relocations(struct vmw_sw_context *sw_context) |
| 3710 | { |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3711 | struct vmw_relocation *reloc; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3712 | struct ttm_buffer_object *bo; |
| 3713 | |
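	/*
	 * Patch each relocation with the buffer's final placement: a VRAM
	 * offset, a GMR id, or a MOB id.
	 */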
Thomas Hellstrom | fc18afc | 2018-09-26 15:36:52 +0200 | [diff] [blame] | 3714 | list_for_each_entry(reloc, &sw_context->bo_relocations, head) { |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 3715 | bo = &reloc->vbo->base; |
Christian König | d311675 | 2021-04-12 15:11:47 +0200 | [diff] [blame] | 3716 | switch (bo->resource->mem_type) { |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 3717 | case TTM_PL_VRAM: |
Christian König | d311675 | 2021-04-12 15:11:47 +0200 | [diff] [blame] | 3718 | reloc->location->offset += bo->resource->start << PAGE_SHIFT; |
Thomas Hellstrom | 135cba0 | 2010-10-26 21:21:47 +0200 | [diff] [blame] | 3719 | reloc->location->gmrId = SVGA_GMR_FRAMEBUFFER; |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 3720 | break; |
| 3721 | case VMW_PL_GMR: |
Christian König | d311675 | 2021-04-12 15:11:47 +0200 | [diff] [blame] | 3722 | reloc->location->gmrId = bo->resource->start; |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 3723 | break; |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 3724 | case VMW_PL_MOB: |
Christian König | d311675 | 2021-04-12 15:11:47 +0200 | [diff] [blame] | 3725 | *reloc->mob_loc = bo->resource->start; |
Thomas Hellstrom | ddcda24 | 2012-11-21 11:26:55 +0100 | [diff] [blame] | 3726 | break; |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 3727 | default: |
| 3728 | BUG(); |
| 3729 | } |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 3730 | } |
| 3731 | vmw_free_relocations(sw_context); |
| 3732 | } |
| 3733 | |
Thomas Hellstrom | be38ab6 | 2011-08-31 07:42:54 +0000 | [diff] [blame] | 3734 | static int vmw_resize_cmd_bounce(struct vmw_sw_context *sw_context, |
| 3735 | uint32_t size) |
| 3736 | { |
| 3737 | if (likely(sw_context->cmd_bounce_size >= size)) |
| 3738 | return 0; |
| 3739 | |
| 3740 | if (sw_context->cmd_bounce_size == 0) |
| 3741 | sw_context->cmd_bounce_size = VMWGFX_CMD_BOUNCE_INIT_SIZE; |
| 3742 | |
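	/* Grow by roughly 1.5x, page aligned, until the requested size fits. */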
| 3743 | while (sw_context->cmd_bounce_size < size) { |
| 3744 | sw_context->cmd_bounce_size = |
| 3745 | PAGE_ALIGN(sw_context->cmd_bounce_size + |
| 3746 | (sw_context->cmd_bounce_size >> 1)); |
| 3747 | } |
| 3748 | |
Markus Elfring | 0bc3299 | 2016-07-22 13:31:00 +0200 | [diff] [blame] | 3749 | vfree(sw_context->cmd_bounce); |
Thomas Hellstrom | be38ab6 | 2011-08-31 07:42:54 +0000 | [diff] [blame] | 3750 | sw_context->cmd_bounce = vmalloc(sw_context->cmd_bounce_size); |
| 3751 | |
| 3752 | if (sw_context->cmd_bounce == NULL) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3753 | VMW_DEBUG_USER("Failed to allocate command bounce buffer.\n"); |
Thomas Hellstrom | be38ab6 | 2011-08-31 07:42:54 +0000 | [diff] [blame] | 3754 | sw_context->cmd_bounce_size = 0; |
| 3755 | return -ENOMEM; |
| 3756 | } |
| 3757 | |
| 3758 | return 0; |
| 3759 | } |
| 3760 | |
Lee Jones | 7450bf7 | 2021-01-15 18:12:36 +0000 | [diff] [blame] | 3761 | /* |
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 3762 | * vmw_execbuf_fence_commands - create and submit a command stream fence |
| 3763 | * |
| 3764 | * Creates a fence object and submits a command stream marker. |
| 3765 | * If this fails for some reason, we sync the fifo and return NULL.
| 3766 | * It is then safe to fence buffers with a NULL pointer. |
Jakob Bornecrantz | 6070e9f | 2011-10-04 20:13:16 +0200 | [diff] [blame] | 3767 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3768 | * If @p_handle is not NULL, @file_priv must also not be NULL. A userspace
| 3769 | * fence handle is created only when @p_handle is not NULL.
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 3770 | */ |
| 3771 | |
| 3772 | int vmw_execbuf_fence_commands(struct drm_file *file_priv, |
| 3773 | struct vmw_private *dev_priv, |
| 3774 | struct vmw_fence_obj **p_fence, |
| 3775 | uint32_t *p_handle) |
| 3776 | { |
| 3777 | uint32_t sequence; |
| 3778 | int ret; |
| 3779 | bool synced = false; |
| 3780 | |
Jakob Bornecrantz | 6070e9f | 2011-10-04 20:13:16 +0200 | [diff] [blame] | 3781 | /* p_handle implies file_priv. */ |
| 3782 | BUG_ON(p_handle != NULL && file_priv == NULL); |
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 3783 | |
Zack Rusin | 8426ed9 | 2020-11-18 12:54:19 -0500 | [diff] [blame] | 3784 | ret = vmw_cmd_send_fence(dev_priv, &sequence); |
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 3785 | if (unlikely(ret != 0)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3786 | VMW_DEBUG_USER("Fence submission error. Syncing.\n"); |
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 3787 | synced = true; |
| 3788 | } |
| 3789 | |
| 3790 | if (p_handle != NULL) |
| 3791 | ret = vmw_user_fence_create(file_priv, dev_priv->fman, |
Maarten Lankhorst | c060a4e | 2014-03-26 13:06:24 +0100 | [diff] [blame] | 3792 | sequence, p_fence, p_handle); |
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 3793 | else |
Maarten Lankhorst | c060a4e | 2014-03-26 13:06:24 +0100 | [diff] [blame] | 3794 | ret = vmw_fence_create(dev_priv->fman, sequence, p_fence); |
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 3795 | |
| 3796 | if (unlikely(ret != 0 && !synced)) { |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3797 | (void) vmw_fallback_wait(dev_priv, false, false, sequence, |
| 3798 | false, VMW_FENCE_WAIT_TIMEOUT); |
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 3799 | *p_fence = NULL; |
| 3800 | } |
| 3801 | |
Thomas Hellstrom | 728354c | 2019-01-31 10:55:37 +0100 | [diff] [blame] | 3802 | return ret; |
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 3803 | } |
| 3804 | |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3805 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3806 | * vmw_execbuf_copy_fence_user - copy fence object information to user-space. |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3807 | * |
| 3808 | * @dev_priv: Pointer to a vmw_private struct. |
| 3809 | * @vmw_fp: Pointer to the struct vmw_fpriv representing the calling file. |
| 3810 | * @ret: Return value from fence object creation. |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3811 | * @user_fence_rep: User space address of a struct drm_vmw_fence_rep to which |
| 3812 | * the information should be copied. |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3813 | * @fence: Pointer to the fence object.
| 3814 | * @fence_handle: User-space fence handle. |
Sinclair Yeh | c906965d | 2017-07-05 01:49:32 -0700 | [diff] [blame] | 3815 | * @out_fence_fd: exported file descriptor for the fence. -1 if not used |
| 3816 | * @sync_file: Only used to clean up in case of an error in this function. |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3817 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3818 | * This function copies fence information to user-space. If copying fails, the |
| 3819 | * user-space struct drm_vmw_fence_rep::error member is hopefully left |
| 3820 | * untouched, and if it's preloaded with -EFAULT by user-space, the error
| 3821 | * will hopefully be detected. |
| 3822 | * |
| 3823 | * Also if copying fails, user-space will be unable to signal the fence object |
| 3824 | * so we wait for it immediately, and then unreference the user-space reference. |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3825 | */ |
Thomas Hellstrom | 57c5ee7 | 2011-10-10 12:23:26 +0200 | [diff] [blame] | 3826 | void |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3827 | vmw_execbuf_copy_fence_user(struct vmw_private *dev_priv, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3828 | struct vmw_fpriv *vmw_fp, int ret, |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3829 | struct drm_vmw_fence_rep __user *user_fence_rep, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3830 | struct vmw_fence_obj *fence, uint32_t fence_handle, |
| 3831 | int32_t out_fence_fd, struct sync_file *sync_file) |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3832 | { |
| 3833 | struct drm_vmw_fence_rep fence_rep; |
| 3834 | |
| 3835 | if (user_fence_rep == NULL) |
| 3836 | return; |
| 3837 | |
Dan Carpenter | 80d9b24 | 2011-10-18 09:10:12 +0300 | [diff] [blame] | 3838 | memset(&fence_rep, 0, sizeof(fence_rep)); |
| 3839 | |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3840 | fence_rep.error = ret; |
Sinclair Yeh | c906965d | 2017-07-05 01:49:32 -0700 | [diff] [blame] | 3841 | fence_rep.fd = out_fence_fd; |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3842 | if (ret == 0) { |
| 3843 | BUG_ON(fence == NULL); |
| 3844 | |
| 3845 | fence_rep.handle = fence_handle; |
Maarten Lankhorst | 2298e80 | 2014-03-26 14:07:44 +0100 | [diff] [blame] | 3846 | fence_rep.seqno = fence->base.seqno; |
Zack Rusin | 2cd80db | 2021-05-05 15:10:07 -0400 | [diff] [blame] | 3847 | vmw_update_seqno(dev_priv); |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3848 | fence_rep.passed_seqno = dev_priv->last_read_seqno; |
| 3849 | } |
| 3850 | |
| 3851 | /* |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3852 | * copy_to_user errors will be detected by user space not seeing |
| 3853 | * fence_rep::error filled in. Typically user-space would have pre-set |
| 3854 | * that member to -EFAULT. |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3855 | */ |
| 3856 | ret = copy_to_user(user_fence_rep, &fence_rep, |
| 3857 | sizeof(fence_rep)); |
| 3858 | |
| 3859 | /* |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3860 | * User-space lost the fence object. We need to sync and unreference the |
| 3861 | * handle. |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3862 | */ |
| 3863 | if (unlikely(ret != 0) && (fence_rep.error == 0)) { |
Sinclair Yeh | c906965d | 2017-07-05 01:49:32 -0700 | [diff] [blame] | 3864 | if (sync_file) |
| 3865 | fput(sync_file->file); |
| 3866 | |
| 3867 | if (fence_rep.fd != -1) { |
| 3868 | put_unused_fd(fence_rep.fd); |
| 3869 | fence_rep.fd = -1; |
| 3870 | } |
| 3871 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3872 | ttm_ref_object_base_unref(vmw_fp->tfile, fence_handle, |
| 3873 | TTM_REF_USAGE); |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3874 | VMW_DEBUG_USER("Fence copy error. Syncing.\n"); |
Maarten Lankhorst | c060a4e | 2014-03-26 13:06:24 +0100 | [diff] [blame] | 3875 | (void) vmw_fence_obj_wait(fence, false, false, |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 3876 | VMW_FENCE_WAIT_TIMEOUT); |
| 3877 | } |
| 3878 | } |
| 3879 | |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3880 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3881 | * vmw_execbuf_submit_fifo - Patch a command batch and submit it using the fifo. |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3882 | * |
| 3883 | * @dev_priv: Pointer to a device private structure. |
| 3884 | * @kernel_commands: Pointer to the unpatched command batch. |
| 3885 | * @command_size: Size of the unpatched command batch. |
| 3886 | * @sw_context: Structure holding the relocation lists. |
| 3887 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3888 | * Side effects: If this function returns 0, then the command batch pointed to |
| 3889 | * by @kernel_commands will have been modified. |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3890 | */ |
| 3891 | static int vmw_execbuf_submit_fifo(struct vmw_private *dev_priv, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3892 | void *kernel_commands, u32 command_size, |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3893 | struct vmw_sw_context *sw_context) |
| 3894 | { |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3895 | void *cmd; |
Thomas Hellstrom | 18e4a46 | 2014-06-09 12:39:22 +0200 | [diff] [blame] | 3896 | |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3897 | if (sw_context->dx_ctx_node) |
Zack Rusin | 8426ed9 | 2020-11-18 12:54:19 -0500 | [diff] [blame] | 3898 | cmd = VMW_CMD_CTX_RESERVE(dev_priv, command_size, |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 3899 | sw_context->dx_ctx_node->ctx->id); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3900 | else |
Zack Rusin | 8426ed9 | 2020-11-18 12:54:19 -0500 | [diff] [blame] | 3901 | cmd = VMW_CMD_RESERVE(dev_priv, command_size); |
Deepak Rawat | 11c4541 | 2019-02-14 16:15:39 -0800 | [diff] [blame] | 3902 | |
| 3903 | if (!cmd) |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3904 | return -ENOMEM; |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3905 | |
| 3906 | vmw_apply_relocations(sw_context); |
| 3907 | memcpy(cmd, kernel_commands, command_size); |
| 3908 | vmw_resource_relocations_apply(cmd, &sw_context->res_relocations); |
| 3909 | vmw_resource_relocations_free(&sw_context->res_relocations); |
Zack Rusin | 8426ed9 | 2020-11-18 12:54:19 -0500 | [diff] [blame] | 3910 | vmw_cmd_commit(dev_priv, command_size); |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3911 | |
| 3912 | return 0; |
| 3913 | } |
| 3914 | |
| 3915 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3916 | * vmw_execbuf_submit_cmdbuf - Patch a command batch and submit it using the |
| 3917 | * command buffer manager. |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3918 | * |
| 3919 | * @dev_priv: Pointer to a device private structure. |
| 3920 | * @header: Opaque handle to the command buffer allocation. |
| 3921 | * @command_size: Size of the unpatched command batch. |
| 3922 | * @sw_context: Structure holding the relocation lists. |
| 3923 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3924 | * Side effects: If this function returns 0, then the command buffer represented |
| 3925 | * by @header will have been modified. |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3926 | */ |
| 3927 | static int vmw_execbuf_submit_cmdbuf(struct vmw_private *dev_priv, |
| 3928 | struct vmw_cmdbuf_header *header, |
| 3929 | u32 command_size, |
| 3930 | struct vmw_sw_context *sw_context) |
| 3931 | { |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 3932 | u32 id = ((sw_context->dx_ctx_node) ? sw_context->dx_ctx_node->ctx->id : |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 3933 | SVGA3D_INVALID_ID); |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3934 | void *cmd = vmw_cmdbuf_reserve(dev_priv->cman, command_size, id, false, |
| 3935 | header); |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3936 | |
| 3937 | vmw_apply_relocations(sw_context); |
| 3938 | vmw_resource_relocations_apply(cmd, &sw_context->res_relocations); |
| 3939 | vmw_resource_relocations_free(&sw_context->res_relocations); |
| 3940 | vmw_cmdbuf_commit(dev_priv->cman, command_size, header, false); |
| 3941 | |
| 3942 | return 0; |
| 3943 | } |
| 3944 | |
| 3945 | /** |
| 3946 | * vmw_execbuf_cmdbuf - Prepare, if possible, a user-space command batch for |
| 3947 | * submission using a command buffer. |
| 3948 | * |
| 3949 | * @dev_priv: Pointer to a device private structure. |
| 3950 | * @user_commands: User-space pointer to the commands to be submitted. |
| 3951 | * @command_size: Size of the unpatched command batch. |
| 3952 | * @header: Out parameter returning the opaque pointer to the command buffer. |
| 3953 | * |
| 3954 | * This function checks whether we can use the command buffer manager for |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3955 | * submission and if so, creates a command buffer of suitable size and copies |
| 3956 | * the user data into that buffer. |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3957 | * |
| 3958 | * On successful return, the function returns a pointer to the data in the |
| 3959 | * command buffer and *@header is set to non-NULL. |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3960 | * |
Lee Jones | 7450bf7 | 2021-01-15 18:12:36 +0000 | [diff] [blame] | 3961 | * @kernel_commands: If command buffers cannot be used, the function returns
| 3962 | * the value of @kernel_commands passed in. That value may be NULL, in which
| 3963 | * case *@header is set to NULL.
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3964 | * |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3965 | * If an error is encountered, the function will return a pointer error value. |
| 3966 | * If the function is interrupted by a signal while sleeping, it will return |
| 3967 | * -ERESTARTSYS cast to a pointer error value.
| 3968 | */ |
Thomas Hellstrom | b9eb1a6 | 2015-04-02 02:39:45 -0700 | [diff] [blame] | 3969 | static void *vmw_execbuf_cmdbuf(struct vmw_private *dev_priv, |
| 3970 | void __user *user_commands, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3971 | void *kernel_commands, u32 command_size, |
Thomas Hellstrom | b9eb1a6 | 2015-04-02 02:39:45 -0700 | [diff] [blame] | 3972 | struct vmw_cmdbuf_header **header) |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3973 | { |
| 3974 | size_t cmdbuf_size; |
| 3975 | int ret; |
| 3976 | |
| 3977 | *header = NULL; |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3978 | if (command_size > SVGA_CB_MAX_SIZE) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3979 | VMW_DEBUG_USER("Command buffer is too large.\n"); |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3980 | return ERR_PTR(-EINVAL); |
| 3981 | } |
| 3982 | |
Thomas Hellstrom | 51ab70b | 2016-10-10 10:51:24 -0700 | [diff] [blame] | 3983 | if (!dev_priv->cman || kernel_commands) |
| 3984 | return kernel_commands; |
| 3985 | |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3986 | /* If possible, add a little space for fencing. */ |
| 3987 | cmdbuf_size = command_size + 512; |
| 3988 | cmdbuf_size = min_t(size_t, cmdbuf_size, SVGA_CB_MAX_SIZE); |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3989 | kernel_commands = vmw_cmdbuf_alloc(dev_priv->cman, cmdbuf_size, true, |
| 3990 | header); |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3991 | if (IS_ERR(kernel_commands)) |
| 3992 | return kernel_commands; |
| 3993 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 3994 | ret = copy_from_user(kernel_commands, user_commands, command_size); |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3995 | if (ret) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 3996 | VMW_DEBUG_USER("Failed copying commands.\n"); |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 3997 | vmw_cmdbuf_header_free(*header); |
| 3998 | *header = NULL; |
| 3999 | return ERR_PTR(-EFAULT); |
| 4000 | } |
| 4001 | |
| 4002 | return kernel_commands; |
| 4003 | } |
Thomas Hellstrom | 18e4a46 | 2014-06-09 12:39:22 +0200 | [diff] [blame] | 4004 | |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4005 | static int vmw_execbuf_tie_context(struct vmw_private *dev_priv, |
| 4006 | struct vmw_sw_context *sw_context, |
| 4007 | uint32_t handle) |
| 4008 | { |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4009 | struct vmw_resource *res; |
| 4010 | int ret; |
Thomas Hellstrom | e8c66ef | 2018-09-26 16:32:40 +0200 | [diff] [blame] | 4011 | unsigned int size; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4012 | |
| 4013 | if (handle == SVGA3D_INVALID_ID) |
| 4014 | return 0; |
| 4015 | |
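	/*
	 * Reserve validation memory up front, then look up the DX context by
	 * handle and add it to the validation list.
	 */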
Thomas Hellstrom | e8c66ef | 2018-09-26 16:32:40 +0200 | [diff] [blame] | 4016 | size = vmw_execbuf_res_size(dev_priv, vmw_res_dx_context); |
| 4017 | ret = vmw_validation_preload_res(sw_context->ctx, size); |
| 4018 | if (ret) |
| 4019 | return ret; |
| 4020 | |
| 4021 | res = vmw_user_resource_noref_lookup_handle |
| 4022 | (dev_priv, sw_context->fp->tfile, handle, |
| 4023 | user_context_converter); |
Chengguang Xu | 4efa666 | 2019-03-01 10:14:06 -0800 | [diff] [blame] | 4024 | if (IS_ERR(res)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 4025 | VMW_DEBUG_USER("Could not find or use DX context 0x%08x.\n",
| 4026 | (unsigned int) handle); |
Thomas Hellstrom | e8c66ef | 2018-09-26 16:32:40 +0200 | [diff] [blame] | 4027 | return PTR_ERR(res); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4028 | } |
| 4029 | |
Thomas Hellstrom | a9f58c4 | 2019-02-20 08:21:26 +0100 | [diff] [blame] | 4030 | ret = vmw_execbuf_res_noref_val_add(sw_context, res, VMW_RES_DIRTY_SET); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4031 | if (unlikely(ret != 0)) |
Thomas Hellstrom | e8c66ef | 2018-09-26 16:32:40 +0200 | [diff] [blame] | 4032 | return ret; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4033 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4034 | sw_context->dx_ctx_node = vmw_execbuf_info_from_res(sw_context, res); |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4035 | sw_context->man = vmw_context_res_man(res); |
Thomas Hellstrom | e8c66ef | 2018-09-26 16:32:40 +0200 | [diff] [blame] | 4036 | |
| 4037 | return 0; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4038 | } |
| 4039 | |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4040 | int vmw_execbuf_process(struct drm_file *file_priv, |
| 4041 | struct vmw_private *dev_priv, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4042 | void __user *user_commands, void *kernel_commands, |
| 4043 | uint32_t command_size, uint64_t throttle_us, |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4044 | uint32_t dx_context_handle, |
Jakob Bornecrantz | bb1bd2f | 2012-02-09 16:56:43 +0100 | [diff] [blame] | 4045 | struct drm_vmw_fence_rep __user *user_fence_rep, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4046 | struct vmw_fence_obj **out_fence, uint32_t flags) |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4047 | { |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4048 | struct vmw_sw_context *sw_context = &dev_priv->ctx; |
Jakob Bornecrantz | bb1bd2f | 2012-02-09 16:56:43 +0100 | [diff] [blame] | 4049 | struct vmw_fence_obj *fence = NULL; |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 4050 | struct vmw_cmdbuf_header *header; |
Nathan Chancellor | a5020f4 | 2019-03-11 20:24:46 -0700 | [diff] [blame] | 4051 | uint32_t handle = 0; |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4052 | int ret; |
Sinclair Yeh | c906965d | 2017-07-05 01:49:32 -0700 | [diff] [blame] | 4053 | int32_t out_fence_fd = -1; |
| 4054 | struct sync_file *sync_file = NULL; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4055 | DECLARE_VAL_CONTEXT(val_ctx, &sw_context->res_ht, 1); |
Sinclair Yeh | c906965d | 2017-07-05 01:49:32 -0700 | [diff] [blame] | 4056 | |
Thomas Hellstrom | fd56746 | 2018-12-12 11:52:08 +0100 | [diff] [blame] | 4057 | vmw_validation_set_val_mem(&val_ctx, &dev_priv->vvm); |
| 4058 | |
Sinclair Yeh | c906965d | 2017-07-05 01:49:32 -0700 | [diff] [blame] | 4059 | if (flags & DRM_VMW_EXECBUF_FLAG_EXPORT_FENCE_FD) { |
| 4060 | out_fence_fd = get_unused_fd_flags(O_CLOEXEC); |
| 4061 | if (out_fence_fd < 0) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 4062 | VMW_DEBUG_USER("Failed to get a fence fd.\n"); |
Sinclair Yeh | c906965d | 2017-07-05 01:49:32 -0700 | [diff] [blame] | 4063 | return out_fence_fd; |
| 4064 | } |
| 4065 | } |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4066 | |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 4067 | if (throttle_us) { |
Zack Rusin | 359dc60 | 2020-11-10 22:14:46 -0500 | [diff] [blame] | 4068 | VMW_DEBUG_USER("Throttling is no longer supported.\n"); |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 4069 | } |
Charmaine Lee | 2f633e5 | 2015-08-10 10:45:11 -0700 | [diff] [blame] | 4070 | |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 4071 | kernel_commands = vmw_execbuf_cmdbuf(dev_priv, user_commands, |
| 4072 | kernel_commands, command_size, |
| 4073 | &header); |
Sinclair Yeh | c906965d | 2017-07-05 01:49:32 -0700 | [diff] [blame] | 4074 | if (IS_ERR(kernel_commands)) { |
| 4075 | ret = PTR_ERR(kernel_commands); |
| 4076 | goto out_free_fence_fd; |
| 4077 | } |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 4078 | |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4079 | ret = mutex_lock_interruptible(&dev_priv->cmdbuf_mutex); |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 4080 | if (ret) { |
| 4081 | ret = -ERESTARTSYS; |
| 4082 | goto out_free_header; |
| 4083 | } |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4084 | |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 4085 | sw_context->kernel = false; |
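	/*
	 * A NULL kernel_commands pointer means the batch still lives in
	 * user-space and must be copied into the bounce buffer.
	 */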
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4086 | if (kernel_commands == NULL) { |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4087 | ret = vmw_resize_cmd_bounce(sw_context, command_size); |
| 4088 | if (unlikely(ret != 0)) |
| 4089 | goto out_unlock; |
| 4090 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4091 | ret = copy_from_user(sw_context->cmd_bounce, user_commands, |
| 4092 | command_size); |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4093 | if (unlikely(ret != 0)) { |
| 4094 | ret = -EFAULT; |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 4095 | VMW_DEBUG_USER("Failed copying commands.\n"); |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4096 | goto out_unlock; |
| 4097 | } |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4098 | |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4099 | kernel_commands = sw_context->cmd_bounce; |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4100 | } else if (!header) { |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4101 | sw_context->kernel = true; |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4102 | } |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4103 | |
Thomas Hellstrom | d5bde95 | 2014-01-31 10:12:10 +0100 | [diff] [blame] | 4104 | sw_context->fp = vmw_fpriv(file_priv); |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4105 | INIT_LIST_HEAD(&sw_context->ctx_list); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4106 | sw_context->cur_query_bo = dev_priv->pinned_bo; |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4107 | sw_context->last_query_ctx = NULL; |
| 4108 | sw_context->needs_post_query_barrier = false; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4109 | sw_context->dx_ctx_node = NULL; |
Sinclair Yeh | fd11a3c | 2015-08-10 10:56:15 -0700 | [diff] [blame] | 4110 | sw_context->dx_query_mob = NULL; |
| 4111 | sw_context->dx_query_ctx = NULL; |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4112 | memset(sw_context->res_cache, 0, sizeof(sw_context->res_cache)); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4113 | INIT_LIST_HEAD(&sw_context->res_relocations); |
Thomas Hellstrom | fc18afc | 2018-09-26 15:36:52 +0200 | [diff] [blame] | 4114 | INIT_LIST_HEAD(&sw_context->bo_relocations); |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4115 | |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4116 | if (sw_context->staged_bindings) |
| 4117 | vmw_binding_state_reset(sw_context->staged_bindings); |
| 4118 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4119 | if (!sw_context->res_ht_initialized) { |
| 4120 | ret = drm_ht_create(&sw_context->res_ht, VMW_RES_HT_ORDER); |
| 4121 | if (unlikely(ret != 0)) |
| 4122 | goto out_unlock; |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4123 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4124 | sw_context->res_ht_initialized = true; |
| 4125 | } |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4126 | |
Thomas Hellstrom | 18e4a46 | 2014-06-09 12:39:22 +0200 | [diff] [blame] | 4127 | INIT_LIST_HEAD(&sw_context->staged_cmd_res); |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4128 | sw_context->ctx = &val_ctx; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4129 | ret = vmw_execbuf_tie_context(dev_priv, sw_context, dx_context_handle); |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4130 | if (unlikely(ret != 0)) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4131 | goto out_err_nores; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4132 | |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4133 | ret = vmw_cmd_check_all(dev_priv, sw_context, kernel_commands, |
| 4134 | command_size); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4135 | if (unlikely(ret != 0)) |
Thomas Hellstrom | cf5e341 | 2014-01-30 10:58:19 +0100 | [diff] [blame] | 4136 | goto out_err_nores; |
Thomas Hellstrom | be38ab6 | 2011-08-31 07:42:54 +0000 | [diff] [blame] | 4137 | |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4138 | ret = vmw_resources_reserve(sw_context); |
| 4139 | if (unlikely(ret != 0)) |
Thomas Hellstrom | cf5e341 | 2014-01-30 10:58:19 +0100 | [diff] [blame] | 4140 | goto out_err_nores; |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4141 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4142 | ret = vmw_validation_bo_reserve(&val_ctx, true); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4143 | if (unlikely(ret != 0)) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4144 | goto out_err_nores; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4145 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4146 | ret = vmw_validation_bo_validate(&val_ctx, true); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4147 | if (unlikely(ret != 0)) |
| 4148 | goto out_err; |
| 4149 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4150 | ret = vmw_validation_res_validate(&val_ctx, true); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4151 | if (unlikely(ret != 0)) |
| 4152 | goto out_err; |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4153 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4154 | vmw_validation_drop_ht(&val_ctx); |
Thomas Hellstrom | 1925d45 | 2010-05-28 11:21:57 +0200 | [diff] [blame] | 4155 | |
Thomas Hellstrom | 173fb7d | 2013-10-08 02:32:36 -0700 | [diff] [blame] | 4156 | ret = mutex_lock_interruptible(&dev_priv->binding_mutex); |
| 4157 | if (unlikely(ret != 0)) { |
| 4158 | ret = -ERESTARTSYS; |
| 4159 | goto out_err; |
| 4160 | } |
| 4161 | |
Thomas Hellstrom | 30f82d81 | 2014-02-05 08:13:56 +0100 | [diff] [blame] | 4162 | if (dev_priv->has_mob) { |
| 4163 | ret = vmw_rebind_contexts(sw_context); |
| 4164 | if (unlikely(ret != 0)) |
Dan Carpenter | b2ad988 | 2014-02-11 19:03:47 +0300 | [diff] [blame] | 4165 | goto out_unlock_binding; |
Thomas Hellstrom | 30f82d81 | 2014-02-05 08:13:56 +0100 | [diff] [blame] | 4166 | } |
| 4167 | |
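	/*
	 * Submit through the fifo when no command buffer header was allocated,
	 * otherwise through the command buffer manager.
	 */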
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 4168 | if (!header) { |
| 4169 | ret = vmw_execbuf_submit_fifo(dev_priv, kernel_commands, |
| 4170 | command_size, sw_context); |
| 4171 | } else { |
| 4172 | ret = vmw_execbuf_submit_cmdbuf(dev_priv, header, command_size, |
| 4173 | sw_context); |
| 4174 | header = NULL; |
Thomas Hellstrom | be38ab6 | 2011-08-31 07:42:54 +0000 | [diff] [blame] | 4175 | } |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4176 | mutex_unlock(&dev_priv->binding_mutex); |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 4177 | if (ret) |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4178 | goto out_err; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4179 | |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4180 | vmw_query_bo_switch_commit(dev_priv, sw_context); |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4181 | ret = vmw_execbuf_fence_commands(file_priv, dev_priv, &fence, |
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 4182 | (user_fence_rep) ? &handle : NULL); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4183 | /* |
| 4184 | * This error is harmless, because if fence submission fails, |
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 4185 | * vmw_execbuf_fence_commands() will sync. The error will be propagated to
| 4186 | * user-space in @user_fence_rep.
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4187 | */ |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4188 | if (ret != 0) |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 4189 | VMW_DEBUG_USER("Fence submission error. Syncing.\n"); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4190 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4191 | vmw_execbuf_bindings_commit(sw_context, false); |
| 4192 | vmw_bind_dx_query_mob(sw_context); |
| 4193 | vmw_validation_res_unreserve(&val_ctx, false); |
Thomas Hellstrom | 173fb7d | 2013-10-08 02:32:36 -0700 | [diff] [blame] | 4194 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4195 | vmw_validation_bo_fence(sw_context->ctx, fence); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4196 | |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4197 | if (unlikely(dev_priv->pinned_bo != NULL && !dev_priv->query_cid_valid)) |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4198 | __vmw_execbuf_release_pinned_bo(dev_priv, fence); |
| 4199 | |
Sinclair Yeh | c906965d | 2017-07-05 01:49:32 -0700 | [diff] [blame] | 4200 | /* |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4201 | * If anything fails here, give up trying to export the fence and do a |
| 4202 | * sync, since user-space will not be able to sync the fence itself. This
| 4203 | * This ensures we are still functionally correct. |
Sinclair Yeh | c906965d | 2017-07-05 01:49:32 -0700 | [diff] [blame] | 4204 | */ |
| 4205 | if (flags & DRM_VMW_EXECBUF_FLAG_EXPORT_FENCE_FD) { |
| 4206 | |
| 4207 | sync_file = sync_file_create(&fence->base); |
| 4208 | if (!sync_file) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 4209 | VMW_DEBUG_USER("Sync file create failed for fence\n"); |
Sinclair Yeh | c906965d | 2017-07-05 01:49:32 -0700 | [diff] [blame] | 4210 | put_unused_fd(out_fence_fd); |
| 4211 | out_fence_fd = -1; |
| 4212 | |
| 4213 | (void) vmw_fence_obj_wait(fence, false, false, |
| 4214 | VMW_FENCE_WAIT_TIMEOUT); |
| 4215 | } else { |
| 4216 | /* Link the fence with the FD created earlier */ |
| 4217 | fd_install(out_fence_fd, sync_file->file); |
| 4218 | } |
| 4219 | } |
| 4220 | |
Thomas Hellstrom | 8bf445c | 2011-10-10 12:23:25 +0200 | [diff] [blame] | 4221 | vmw_execbuf_copy_fence_user(dev_priv, vmw_fpriv(file_priv), ret, |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4222 | user_fence_rep, fence, handle, out_fence_fd, |
| 4223 | sync_file); |
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 4224 | |
Jakob Bornecrantz | bb1bd2f | 2012-02-09 16:56:43 +0100 | [diff] [blame] | 4225 | /* Don't unreference when handing fence out */ |
| 4226 | if (unlikely(out_fence != NULL)) { |
| 4227 | *out_fence = fence; |
| 4228 | fence = NULL; |
| 4229 | } else if (likely(fence != NULL)) { |
Thomas Hellstrom | ae2a104 | 2011-09-01 20:18:44 +0000 | [diff] [blame] | 4230 | vmw_fence_obj_unreference(&fence); |
Jakob Bornecrantz | bb1bd2f | 2012-02-09 16:56:43 +0100 | [diff] [blame] | 4231 | } |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4232 | |
Thomas Hellstrom | 18e4a46 | 2014-06-09 12:39:22 +0200 | [diff] [blame] | 4233 | vmw_cmdbuf_res_commit(&sw_context->staged_cmd_res); |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4234 | mutex_unlock(&dev_priv->cmdbuf_mutex); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4235 | |
| 4236 | /* |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4237 | * Unreference resources outside of the cmdbuf_mutex to avoid deadlocks |
| 4238 | * in resource destruction paths. |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4239 | */ |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4240 | vmw_validation_unref_lists(&val_ctx); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4241 | |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4242 | return 0; |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4243 | |
Thomas Hellstrom | 173fb7d | 2013-10-08 02:32:36 -0700 | [diff] [blame] | 4244 | out_unlock_binding: |
| 4245 | mutex_unlock(&dev_priv->binding_mutex); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4246 | out_err: |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4247 | vmw_validation_bo_backoff(&val_ctx); |
Thomas Hellstrom | cf5e341 | 2014-01-30 10:58:19 +0100 | [diff] [blame] | 4248 | out_err_nores: |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4249 | vmw_execbuf_bindings_commit(sw_context, true); |
| 4250 | vmw_validation_res_unreserve(&val_ctx, true); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4251 | vmw_resource_relocations_free(&sw_context->res_relocations); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4252 | vmw_free_relocations(sw_context); |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4253 | if (unlikely(dev_priv->pinned_bo != NULL && !dev_priv->query_cid_valid)) |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4254 | __vmw_execbuf_release_pinned_bo(dev_priv, NULL); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4255 | out_unlock: |
Thomas Hellstrom | 18e4a46 | 2014-06-09 12:39:22 +0200 | [diff] [blame] | 4256 | vmw_cmdbuf_res_revert(&sw_context->staged_cmd_res); |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4257 | vmw_validation_drop_ht(&val_ctx); |
| 4258 | WARN_ON(!list_empty(&sw_context->ctx_list)); |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4259 | mutex_unlock(&dev_priv->cmdbuf_mutex); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4260 | |
| 4261 | /* |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4262 | * Unreference resources outside of the cmdbuf_mutex to avoid deadlocks |
| 4263 | * in resource destruction paths. |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4264 | */ |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4265 | vmw_validation_unref_lists(&val_ctx); |
Thomas Hellstrom | 3eab3d9 | 2015-06-25 11:57:56 -0700 | [diff] [blame] | 4266 | out_free_header: |
| 4267 | if (header) |
| 4268 | vmw_cmdbuf_header_free(header); |
Sinclair Yeh | c906965d | 2017-07-05 01:49:32 -0700 | [diff] [blame] | 4269 | out_free_fence_fd: |
| 4270 | if (out_fence_fd >= 0) |
| 4271 | put_unused_fd(out_fence_fd); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4272 | |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4273 | return ret; |
| 4274 | } |
| 4275 | |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4276 | /** |
| 4277 | * vmw_execbuf_unpin_panic - Idle the fifo and unpin the query buffer. |
| 4278 | * |
| 4279 | * @dev_priv: The device private structure. |
| 4280 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4281 | * This function is called to idle the fifo and unpin the query buffer if the |
| 4282 | * normal way to do this hits an error, which should typically be extremely |
| 4283 | * rare. |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4284 | */ |
| 4285 | static void vmw_execbuf_unpin_panic(struct vmw_private *dev_priv) |
| 4286 | { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 4287 | VMW_DEBUG_USER("Can't unpin query buffer. Trying to recover.\n"); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4288 | |
| 4289 | (void) vmw_fallback_wait(dev_priv, false, true, 0, false, 10*HZ); |
Thomas Hellstrom | 459d0fa | 2015-06-26 00:25:37 -0700 | [diff] [blame] | 4290 | vmw_bo_pin_reserved(dev_priv->pinned_bo, false); |
| 4291 | if (dev_priv->dummy_query_bo_pinned) { |
| 4292 | vmw_bo_pin_reserved(dev_priv->dummy_query_bo, false); |
| 4293 | dev_priv->dummy_query_bo_pinned = false; |
| 4294 | } |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4295 | } |
| 4296 | |
| 4297 | |
| 4298 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4299 | * __vmw_execbuf_release_pinned_bo - Flush queries and unpin the pinned query |
| 4300 | * bo. |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4301 | * |
| 4302 | * @dev_priv: The device private structure. |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4303 | * @fence: If non-NULL should point to a struct vmw_fence_obj issued _after_ a |
| 4304 | * query barrier that flushes all queries touching the current buffer pointed to |
| 4305 | * by @dev_priv->pinned_bo |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4306 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4307 | * This function should be used to unpin the pinned query bo, or as a query |
| 4308 | * barrier when we need to make sure that all queries have finished before the |
| 4309 | * next fifo command. (For example on hardware context destructions where the |
| 4310 | * hardware may otherwise leak unfinished queries). |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4311 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4312 | * This function does not return any failure codes, but makes attempts to do
| 4313 | * safe unpinning in case of errors.
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4314 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4315 | * The function will synchronize on the previous query barrier, and will thus |
| 4316 | * not finish until that barrier has executed. |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4317 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4318 | * The @dev_priv->cmdbuf_mutex needs to be held by the current thread before
| 4319 | * calling this function. |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4320 | */ |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4321 | void __vmw_execbuf_release_pinned_bo(struct vmw_private *dev_priv, |
| 4322 | struct vmw_fence_obj *fence) |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4323 | { |
| 4324 | int ret = 0; |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4325 | struct vmw_fence_obj *lfence = NULL; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4326 | DECLARE_VAL_CONTEXT(val_ctx, NULL, 0); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4327 | |
| 4328 | if (dev_priv->pinned_bo == NULL) |
| 4329 | goto out_unlock; |
| 4330 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4331 | ret = vmw_validation_add_bo(&val_ctx, dev_priv->pinned_bo, false, |
| 4332 | false); |
| 4333 | if (ret) |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4334 | goto out_no_reserve; |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4335 | |
| 4336 | ret = vmw_validation_add_bo(&val_ctx, dev_priv->dummy_query_bo, false, |
| 4337 | false); |
| 4338 | if (ret) |
| 4339 | goto out_no_reserve; |
| 4340 | |
| 4341 | ret = vmw_validation_bo_reserve(&val_ctx, false); |
| 4342 | if (ret) |
| 4343 | goto out_no_reserve; |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4344 | |
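	/*
	 * If a query context is still active, emit a dummy query as a
	 * barrier so that all outstanding queries touching the pinned
	 * buffer are flushed before it is unpinned.
	 */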
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4345 | if (dev_priv->query_cid_valid) { |
| 4346 | BUG_ON(fence != NULL); |
Zack Rusin | 8426ed9 | 2020-11-18 12:54:19 -0500 | [diff] [blame] | 4347 | ret = vmw_cmd_emit_dummy_query(dev_priv, dev_priv->query_cid); |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4348 | if (ret) |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4349 | goto out_no_emit; |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4350 | dev_priv->query_cid_valid = false; |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4351 | } |
| 4352 | |
Thomas Hellstrom | 459d0fa | 2015-06-26 00:25:37 -0700 | [diff] [blame] | 4353 | vmw_bo_pin_reserved(dev_priv->pinned_bo, false); |
| 4354 | if (dev_priv->dummy_query_bo_pinned) { |
| 4355 | vmw_bo_pin_reserved(dev_priv->dummy_query_bo, false); |
| 4356 | dev_priv->dummy_query_bo_pinned = false; |
| 4357 | } |
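	/*
	 * If the caller did not supply a fence issued after the query
	 * barrier, create a local one so the just-unpinned buffers remain
	 * fenced until the barrier has executed.
	 */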
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4358 | if (fence == NULL) { |
| 4359 | (void) vmw_execbuf_fence_commands(NULL, dev_priv, &lfence, |
| 4360 | NULL); |
| 4361 | fence = lfence; |
| 4362 | } |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4363 | vmw_validation_bo_fence(&val_ctx, fence); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4364 | if (lfence != NULL) |
| 4365 | vmw_fence_obj_unreference(&lfence); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4366 | |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4367 | vmw_validation_unref_lists(&val_ctx); |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 4368 | vmw_bo_unreference(&dev_priv->pinned_bo); |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4369 | |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4370 | out_unlock: |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4371 | return; |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4372 | out_no_emit: |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4373 | vmw_validation_bo_backoff(&val_ctx); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4374 | out_no_reserve: |
Thomas Hellstrom | 9c079b8 | 2018-09-26 15:28:55 +0200 | [diff] [blame] | 4375 | vmw_validation_unref_lists(&val_ctx); |
| 4376 | vmw_execbuf_unpin_panic(dev_priv); |
Thomas Hellstrom | f1d34bf | 2018-06-19 15:02:16 +0200 | [diff] [blame] | 4377 | vmw_bo_unreference(&dev_priv->pinned_bo); |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4378 | } |
| 4379 | |
| 4380 | /** |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4381 | * vmw_execbuf_release_pinned_bo - Flush queries and unpin the pinned query bo. |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4382 | * |
| 4383 | * @dev_priv: The device private structure. |
| 4384 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4385 | * This function should be used to unpin the pinned query bo, or as a query |
| 4386 | * barrier when we need to make sure that all queries have finished before the |
| 4387 | * next fifo command. (For example, on hardware context destruction, where the |
| 4388 | * hardware may otherwise leak unfinished queries.) |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4389 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4390 | * This function does not return any failure codes, but attempts to do safe |
| 4391 | * unpinning in case of errors. |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4392 | * |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4393 | * The function will synchronize on the previous query barrier, and will thus |
| 4394 | * not finish until that barrier has executed. |
Thomas Hellstrom | c0951b7 | 2012-11-20 12:19:35 +0000 | [diff] [blame] | 4395 | */ |
| 4396 | void vmw_execbuf_release_pinned_bo(struct vmw_private *dev_priv) |
| 4397 | { |
| 4398 | mutex_lock(&dev_priv->cmdbuf_mutex); |
| 4399 | if (dev_priv->query_cid_valid) |
| 4400 | __vmw_execbuf_release_pinned_bo(dev_priv, NULL); |
Thomas Hellstrom | e2fa3a7 | 2011-10-04 20:13:30 +0200 | [diff] [blame] | 4401 | mutex_unlock(&dev_priv->cmdbuf_mutex); |
| 4402 | } |
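/*
 * Illustrative only: a minimal sketch of how a teardown path might use the
 * helper above as a query barrier. The surrounding function name is
 * hypothetical and not part of this driver.
 *
 *	static void example_hw_context_teardown(struct vmw_private *dev_priv)
 *	{
 *		// Ensure no unfinished queries can leak from the hardware
 *		// context that is about to be destroyed.
 *		vmw_execbuf_release_pinned_bo(dev_priv);
 *	}
 */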
| 4403 | |
Emil Velikov | cbfbe47 | 2019-05-22 17:41:17 +0100 | [diff] [blame] | 4404 | int vmw_execbuf_ioctl(struct drm_device *dev, void *data, |
| 4405 | struct drm_file *file_priv) |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4406 | { |
| 4407 | struct vmw_private *dev_priv = vmw_priv(dev); |
Emil Velikov | cbfbe47 | 2019-05-22 17:41:17 +0100 | [diff] [blame] | 4408 | struct drm_vmw_execbuf_arg *arg = data; |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4409 | int ret; |
Sinclair Yeh | 58585116 | 2017-07-05 01:45:40 -0700 | [diff] [blame] | 4410 | struct dma_fence *in_fence = NULL; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4411 | |
Martin Krastev | 7a7a933 | 2021-06-09 13:23:00 -0400 | [diff] [blame] | 4412 | MKS_STAT_TIME_DECL(MKSSTAT_KERN_EXECBUF); |
| 4413 | MKS_STAT_TIME_PUSH(MKSSTAT_KERN_EXECBUF); |
| 4414 | |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4415 | /* |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4416 | * Extend the ioctl argument while maintaining backwards compatibility: |
Emil Velikov | cbfbe47 | 2019-05-22 17:41:17 +0100 | [diff] [blame] | 4417 | * We take different code paths depending on the value of arg->version. |
| 4418 | * |
| 4419 | * Note: The ioctl argument is extended and zero-padded by core DRM. |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4420 | */ |
Emil Velikov | cbfbe47 | 2019-05-22 17:41:17 +0100 | [diff] [blame] | 4421 | if (unlikely(arg->version > DRM_VMW_EXECBUF_VERSION || |
| 4422 | arg->version == 0)) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 4423 | VMW_DEBUG_USER("Incorrect execbuf version.\n"); |
Martin Krastev | 7a7a933 | 2021-06-09 13:23:00 -0400 | [diff] [blame] | 4424 | ret = -EINVAL; |
| 4425 | goto mksstats_out; |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4426 | } |
| 4427 | |
Emil Velikov | cbfbe47 | 2019-05-22 17:41:17 +0100 | [diff] [blame] | 4428 | switch (arg->version) { |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4429 | case 1: |
Emil Velikov | cbfbe47 | 2019-05-22 17:41:17 +0100 | [diff] [blame] | 4430 | /* For v1 core DRM has extended + zero-padded the data */ |
| 4431 | arg->context_handle = (uint32_t) -1; |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4432 | break; |
| 4433 | case 2: |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4434 | default: |
Emil Velikov | cbfbe47 | 2019-05-22 17:41:17 +0100 | [diff] [blame] | 4435 | /* For v2 and later core DRM would have correctly copied it */ |
Thomas Hellstrom | d80efd5 | 2015-08-10 10:39:35 -0700 | [diff] [blame] | 4436 | break; |
| 4437 | } |
| 4438 | |
Sinclair Yeh | 58585116 | 2017-07-05 01:45:40 -0700 | [diff] [blame] | 4439 | /* If a fence FD was imported from elsewhere, wait on it first */ |
Emil Velikov | cbfbe47 | 2019-05-22 17:41:17 +0100 | [diff] [blame] | 4440 | if (arg->flags & DRM_VMW_EXECBUF_FLAG_IMPORT_FENCE_FD) { |
| 4441 | in_fence = sync_file_get_fence(arg->imported_fence_fd); |
Sinclair Yeh | 58585116 | 2017-07-05 01:45:40 -0700 | [diff] [blame] | 4442 | |
| 4443 | if (!in_fence) { |
Deepak Rawat | 5724f89 | 2019-02-11 11:46:27 -0800 | [diff] [blame] | 4444 | VMW_DEBUG_USER("Cannot get imported fence\n"); |
Martin Krastev | 7a7a933 | 2021-06-09 13:23:00 -0400 | [diff] [blame] | 4445 | ret = -EINVAL; |
| 4446 | goto mksstats_out; |
Sinclair Yeh | 58585116 | 2017-07-05 01:45:40 -0700 | [diff] [blame] | 4447 | } |
| 4448 | |
| 4449 | ret = vmw_wait_dma_fence(dev_priv->fman, in_fence); |
| 4450 | if (ret) |
| 4451 | goto out; |
| 4452 | } |
| 4453 | |
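	/* Validate and submit the user-space command buffer. */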
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4454 | ret = vmw_execbuf_process(file_priv, dev_priv, |
Emil Velikov | cbfbe47 | 2019-05-22 17:41:17 +0100 | [diff] [blame] | 4455 | (void __user *)(unsigned long)arg->commands, |
| 4456 | NULL, arg->command_size, arg->throttle_us, |
| 4457 | arg->context_handle, |
| 4458 | (void __user *)(unsigned long)arg->fence_rep, |
| 4459 | NULL, arg->flags); |
Deepak Rawat | 680360a | 2019-02-13 13:20:42 -0800 | [diff] [blame] | 4460 | |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4461 | if (unlikely(ret != 0)) |
Sinclair Yeh | 58585116 | 2017-07-05 01:45:40 -0700 | [diff] [blame] | 4462 | goto out; |
Thomas Hellstrom | 922ade0 | 2011-10-04 20:13:17 +0200 | [diff] [blame] | 4463 | |
| 4464 | vmw_kms_cursor_post_execbuf(dev_priv); |
| 4465 | |
Sinclair Yeh | 58585116 | 2017-07-05 01:45:40 -0700 | [diff] [blame] | 4466 | out: |
| 4467 | if (in_fence) |
| 4468 | dma_fence_put(in_fence); |
Martin Krastev | 7a7a933 | 2021-06-09 13:23:00 -0400 | [diff] [blame] | 4469 | |
| 4470 | mksstats_out: |
| 4471 | MKS_STAT_TIME_POP(MKSSTAT_KERN_EXECBUF); |
Sinclair Yeh | 58585116 | 2017-07-05 01:45:40 -0700 | [diff] [blame] | 4472 | return ret; |
Jakob Bornecrantz | fb1d973 | 2009-12-10 00:19:58 +0000 | [diff] [blame] | 4473 | } |
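/*
 * Illustrative only: a hedged userspace sketch of driving this ioctl with a
 * version 2 argument and an imported sync-file fence. The struct fields and
 * the flag follow the vmwgfx uapi header; the command-buffer contents, the
 * handles and the libdrm drmCommandWrite() wrapper are assumptions made for
 * the example, not code from this file.
 *
 *	struct drm_vmw_fence_rep fence_rep = {};
 *	struct drm_vmw_execbuf_arg arg = {
 *		.commands = (unsigned long)cmd_buf,
 *		.command_size = cmd_size,
 *		.fence_rep = (unsigned long)&fence_rep,
 *		.version = 2,
 *		.flags = DRM_VMW_EXECBUF_FLAG_IMPORT_FENCE_FD,
 *		.context_handle = ctx_handle,
 *		.imported_fence_fd = in_fd,
 *	};
 *
 *	ret = drmCommandWrite(drm_fd, DRM_VMW_EXECBUF, &arg, sizeof(arg));
 */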