// SPDX-License-Identifier: GPL-2.0-or-later

#include <linux/dma-resv.h>

#include <drm/drm_atomic_state_helper.h>
#include <drm/drm_atomic_uapi.h>
#include <drm/drm_gem.h>
#include <drm/drm_gem_atomic_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_simple_kms_helper.h>

#include "drm_internal.h"

/**
 * DOC: overview
 *
 * The GEM atomic helpers library implements generic atomic-commit
 * functions for drivers that use GEM objects. Currently, it provides
 * synchronization helpers, and plane state and framebuffer BO mappings
 * for planes with shadow buffers.
 *
 * Before scanout, a plane's framebuffer needs to be synchronized with
 * possible writers that draw into the framebuffer. All drivers should
 * call drm_gem_plane_helper_prepare_fb() from their implementation of
 * struct &drm_plane_helper_funcs.prepare_fb. It sets the plane's fence from
 * the framebuffer so that the DRM core can synchronize access automatically.
 *
 * drm_gem_plane_helper_prepare_fb() can also be used directly as
 * implementation of prepare_fb. For drivers based on
 * struct drm_simple_display_pipe, drm_gem_simple_display_pipe_prepare_fb()
 * provides equivalent functionality.
 *
 * .. code-block:: c
 *
 *	#include <drm/drm_gem_atomic_helper.h>
 *
 *	struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *		...,
 *		.prepare_fb = drm_gem_plane_helper_prepare_fb,
 *	};
 *
 *	struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
 *		...,
 *		.prepare_fb = drm_gem_simple_display_pipe_prepare_fb,
 *	};
 *
 * A driver using a shadow buffer copies the content of the shadow buffers
 * into the HW's framebuffer memory during an atomic update. This requires
 * a mapping of the shadow buffer into kernel address space. The mappings
 * cannot be established by commit-tail functions, such as atomic_update,
 * as this would violate locking rules around dma_buf_vmap().
 *
 * The helpers for shadow-buffered planes establish and release mappings,
 * and provide struct drm_shadow_plane_state, which stores the plane's mapping
 * for commit-tail functions.
 *
 * Shadow-buffered planes can easily be enabled by using the provided macros
 * %DRM_GEM_SHADOW_PLANE_FUNCS and %DRM_GEM_SHADOW_PLANE_HELPER_FUNCS.
 * These macros set up the plane and plane-helper callbacks to point to the
 * shadow-buffer helpers.
 *
 * .. code-block:: c
 *
 *	#include <drm/drm_gem_atomic_helper.h>
 *
 *	struct drm_plane_funcs driver_plane_funcs = {
 *		...,
 *		DRM_GEM_SHADOW_PLANE_FUNCS,
 *	};
 *
 *	struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *		...,
 *		DRM_GEM_SHADOW_PLANE_HELPER_FUNCS,
 *	};
 *
 * In the driver's atomic-update function, shadow-buffer mappings are available
 * from the plane state. Use to_drm_shadow_plane_state() to upcast from
 * struct drm_plane_state.
 *
 * .. code-block:: c
 *
 *	void driver_plane_atomic_update(struct drm_plane *plane,
 *					struct drm_plane_state *old_plane_state)
 *	{
 *		struct drm_plane_state *plane_state = plane->state;
 *		struct drm_shadow_plane_state *shadow_plane_state =
 *			to_drm_shadow_plane_state(plane_state);
 *
 *		// access shadow buffer via shadow_plane_state->map
 *	}
 *
 * A mapping address for each of the framebuffer's buffer objects is stored in
 * struct &drm_shadow_plane_state.map. The mappings are valid while the state
 * is being used.
 *
 * Drivers that use struct drm_simple_display_pipe can use
 * %DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS to initialize the respective
 * callbacks. Access to shadow-buffer mappings is similar to regular
 * atomic_update.
 *
 * .. code-block:: c
 *
 *	struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
 *		...,
 *		DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS,
 *	};
 *
 *	void driver_pipe_enable(struct drm_simple_display_pipe *pipe,
 *				struct drm_crtc_state *crtc_state,
 *				struct drm_plane_state *plane_state)
 *	{
 *		struct drm_shadow_plane_state *shadow_plane_state =
 *			to_drm_shadow_plane_state(plane_state);
 *
 *		// access shadow buffer via shadow_plane_state->map
 *	}
 */

/*
 * Plane Helpers
 */

/**
 * drm_gem_plane_helper_prepare_fb() - Prepare a GEM backed framebuffer
 * @plane: Plane
 * @state: Plane state the fence will be attached to
 *
 * This function extracts the exclusive fence from &drm_gem_object.resv and
 * attaches it to plane state for the atomic helper to wait on. This is
 * necessary to correctly implement implicit synchronization for any buffers
 * shared as a struct &dma_buf. This function can be used as the
 * &drm_plane_helper_funcs.prepare_fb callback.
 *
 * There is no need for a &drm_plane_helper_funcs.cleanup_fb hook for simple
 * GEM based framebuffer drivers which have their buffers always pinned in
 * memory.
 *
 * This function is the default implementation for GEM drivers of
 * &drm_plane_helper_funcs.prepare_fb if no callback is provided.
 *
 * See drm_atomic_set_fence_for_plane() for a discussion of implicit and
 * explicit fencing in atomic modeset updates.
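 *
 * As a rough sketch, a driver that needs additional per-commit setup can
 * call this helper from its own prepare_fb implementation before doing its
 * driver-specific work; driver_plane_prepare_fb() and driver_pin_fb() below
 * are hypothetical names:
 *
 * .. code-block:: c
 *
 *	static int driver_plane_prepare_fb(struct drm_plane *plane,
 *					   struct drm_plane_state *state)
 *	{
 *		int ret;
 *
 *		// attach the framebuffer's implicit fence to the plane state
 *		ret = drm_gem_plane_helper_prepare_fb(plane, state);
 *		if (ret)
 *			return ret;
 *
 *		// hypothetical driver-specific setup, e.g. pinning the buffer
 *		return driver_pin_fb(state->fb);
 *	}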
 */
int drm_gem_plane_helper_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
{
	struct drm_gem_object *obj;
	struct dma_fence *fence;

	if (!state->fb)
		return 0;

	obj = drm_gem_fb_get_obj(state->fb, 0);
	fence = dma_resv_get_excl_unlocked(obj->resv);
	drm_atomic_set_fence_for_plane(state, fence);

	return 0;
}
EXPORT_SYMBOL_GPL(drm_gem_plane_helper_prepare_fb);

/**
 * drm_gem_simple_display_pipe_prepare_fb - prepare_fb helper for &drm_simple_display_pipe
 * @pipe: Simple display pipe
 * @plane_state: Plane state
 *
 * This function uses drm_gem_plane_helper_prepare_fb() to extract the exclusive fence
 * from &drm_gem_object.resv and attaches it to plane state for the atomic
 * helper to wait on. This is necessary to correctly implement implicit
 * synchronization for any buffers shared as a struct &dma_buf. Drivers can use
 * this as their &drm_simple_display_pipe_funcs.prepare_fb callback.
 *
 * See drm_atomic_set_fence_for_plane() for a discussion of implicit and
 * explicit fencing in atomic modeset updates.
 */
int drm_gem_simple_display_pipe_prepare_fb(struct drm_simple_display_pipe *pipe,
					   struct drm_plane_state *plane_state)
{
	return drm_gem_plane_helper_prepare_fb(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_display_pipe_prepare_fb);

/*
 * Shadow-buffered Planes
 */

/**
 * __drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
 * @plane: the plane
 * @new_shadow_plane_state: the new shadow-buffered plane state
 *
 * This function duplicates shadow-buffered plane state. This is helpful for drivers
 * that subclass struct drm_shadow_plane_state.
 *
 * The function does not duplicate existing mappings of the shadow buffers.
 * Mappings are maintained during the atomic commit by the plane's prepare_fb
 * and cleanup_fb helpers. See drm_gem_prepare_shadow_fb() and drm_gem_cleanup_shadow_fb()
 * for corresponding helpers.
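 *
 * A minimal sketch of a driver that embeds the shadow-plane state in its own
 * plane state might look as follows; struct driver_plane_state and
 * driver_plane_duplicate_state() are hypothetical names:
 *
 * .. code-block:: c
 *
 *	struct driver_plane_state {
 *		struct drm_shadow_plane_state shadow;
 *		// driver-private fields
 *	};
 *
 *	static struct drm_plane_state *
 *	driver_plane_duplicate_state(struct drm_plane *plane)
 *	{
 *		struct driver_plane_state *new_state;
 *
 *		if (!plane->state)
 *			return NULL;
 *
 *		new_state = kzalloc(sizeof(*new_state), GFP_KERNEL);
 *		if (!new_state)
 *			return NULL;
 *
 *		// duplicates the base shadow-plane state, but not the mappings
 *		__drm_gem_duplicate_shadow_plane_state(plane, &new_state->shadow);
 *
 *		return &new_state->shadow.base;
 *	}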
 */
void
__drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane,
				       struct drm_shadow_plane_state *new_shadow_plane_state)
{
	__drm_atomic_helper_plane_duplicate_state(plane, &new_shadow_plane_state->base);
}
EXPORT_SYMBOL(__drm_gem_duplicate_shadow_plane_state);

/**
 * drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
 * @plane: the plane
 *
 * This function implements struct &drm_plane_funcs.atomic_duplicate_state for
 * shadow-buffered planes. It assumes the existing state to be of type
 * struct drm_shadow_plane_state and it allocates the new state to be of this
 * type.
 *
 * The function does not duplicate existing mappings of the shadow buffers.
 * Mappings are maintained during the atomic commit by the plane's prepare_fb
 * and cleanup_fb helpers. See drm_gem_prepare_shadow_fb() and drm_gem_cleanup_shadow_fb()
 * for corresponding helpers.
 *
 * Returns:
 * A pointer to a new plane state on success, or NULL otherwise.
 */
struct drm_plane_state *
drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane)
{
	struct drm_plane_state *plane_state = plane->state;
	struct drm_shadow_plane_state *new_shadow_plane_state;

	if (!plane_state)
		return NULL;

	new_shadow_plane_state = kzalloc(sizeof(*new_shadow_plane_state), GFP_KERNEL);
	if (!new_shadow_plane_state)
		return NULL;

	__drm_gem_duplicate_shadow_plane_state(plane, new_shadow_plane_state);

	return &new_shadow_plane_state->base;
}
EXPORT_SYMBOL(drm_gem_duplicate_shadow_plane_state);

/**
 * __drm_gem_destroy_shadow_plane_state - cleans up shadow-buffered plane state
 * @shadow_plane_state: the shadow-buffered plane state
 *
 * This function cleans up shadow-buffered plane state. This is helpful for drivers
 * that subclass struct drm_shadow_plane_state.
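 *
 * The destroy counterpart for a driver that subclasses struct
 * drm_shadow_plane_state might look like this sketch; struct
 * driver_plane_state and its shadow member are hypothetical names:
 *
 * .. code-block:: c
 *
 *	static void driver_plane_destroy_state(struct drm_plane *plane,
 *					       struct drm_plane_state *plane_state)
 *	{
 *		struct driver_plane_state *state =
 *			container_of(to_drm_shadow_plane_state(plane_state),
 *				     struct driver_plane_state, shadow);
 *
 *		__drm_gem_destroy_shadow_plane_state(&state->shadow);
 *		kfree(state);
 *	}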
 */
void __drm_gem_destroy_shadow_plane_state(struct drm_shadow_plane_state *shadow_plane_state)
{
	__drm_atomic_helper_plane_destroy_state(&shadow_plane_state->base);
}
EXPORT_SYMBOL(__drm_gem_destroy_shadow_plane_state);

/**
 * drm_gem_destroy_shadow_plane_state - deletes shadow-buffered plane state
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_funcs.atomic_destroy_state
 * for shadow-buffered planes. It expects that mappings of shadow buffers
 * have been released already.
 */
void drm_gem_destroy_shadow_plane_state(struct drm_plane *plane,
					struct drm_plane_state *plane_state)
{
	struct drm_shadow_plane_state *shadow_plane_state =
		to_drm_shadow_plane_state(plane_state);

	__drm_gem_destroy_shadow_plane_state(shadow_plane_state);
	kfree(shadow_plane_state);
}
EXPORT_SYMBOL(drm_gem_destroy_shadow_plane_state);

/**
 * __drm_gem_reset_shadow_plane - resets a shadow-buffered plane
 * @plane: the plane
 * @shadow_plane_state: the shadow-buffered plane state
 *
 * This function resets state for shadow-buffered planes. This is helpful
 * for drivers that subclass struct drm_shadow_plane_state.
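 *
 * A reset callback for such a hypothetical subclass might allocate and
 * initialize the embedded state as in this sketch; driver_plane_reset(),
 * driver_plane_destroy_state() and struct driver_plane_state are
 * hypothetical names:
 *
 * .. code-block:: c
 *
 *	static void driver_plane_reset(struct drm_plane *plane)
 *	{
 *		struct driver_plane_state *state;
 *
 *		if (plane->state) {
 *			driver_plane_destroy_state(plane, plane->state);
 *			plane->state = NULL;
 *		}
 *
 *		state = kzalloc(sizeof(*state), GFP_KERNEL);
 *		if (!state)
 *			return;
 *
 *		__drm_gem_reset_shadow_plane(plane, &state->shadow);
 *	}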
 */
void __drm_gem_reset_shadow_plane(struct drm_plane *plane,
				  struct drm_shadow_plane_state *shadow_plane_state)
{
	__drm_atomic_helper_plane_reset(plane, &shadow_plane_state->base);
}
EXPORT_SYMBOL(__drm_gem_reset_shadow_plane);

/**
 * drm_gem_reset_shadow_plane - resets a shadow-buffered plane
 * @plane: the plane
 *
 * This function implements struct &drm_plane_funcs.reset for
 * shadow-buffered planes. It assumes the current plane state to be
 * of type struct drm_shadow_plane_state and it allocates the new state
 * of this type.
 */
void drm_gem_reset_shadow_plane(struct drm_plane *plane)
{
	struct drm_shadow_plane_state *shadow_plane_state;

	if (plane->state) {
		drm_gem_destroy_shadow_plane_state(plane, plane->state);
		plane->state = NULL; /* must be set to NULL here */
	}

	shadow_plane_state = kzalloc(sizeof(*shadow_plane_state), GFP_KERNEL);
	if (!shadow_plane_state)
		return;
	__drm_gem_reset_shadow_plane(plane, shadow_plane_state);
}
EXPORT_SYMBOL(drm_gem_reset_shadow_plane);

/**
 * drm_gem_prepare_shadow_fb - prepares shadow framebuffers
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_helper_funcs.prepare_fb. It
 * maps all buffer objects of the plane's framebuffer into kernel address
 * space and stores them in &struct drm_shadow_plane_state.map. The
 * framebuffer will be synchronized as part of the atomic commit.
 *
 * See drm_gem_cleanup_shadow_fb() for cleanup.
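 *
 * Drivers that do not use %DRM_GEM_SHADOW_PLANE_HELPER_FUNCS can hook up the
 * shadow-buffer helpers individually, roughly as in this sketch:
 *
 * .. code-block:: c
 *
 *	struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *		...,
 *		.prepare_fb = drm_gem_prepare_shadow_fb,
 *		.cleanup_fb = drm_gem_cleanup_shadow_fb,
 *	};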
 *
 * Returns:
 * 0 on success, or a negative errno code otherwise.
 */
int drm_gem_prepare_shadow_fb(struct drm_plane *plane, struct drm_plane_state *plane_state)
{
	struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
	struct drm_framebuffer *fb = plane_state->fb;
	struct drm_gem_object *obj;
	struct dma_buf_map map;
	int ret;
	size_t i;

	if (!fb)
		return 0;

	ret = drm_gem_plane_helper_prepare_fb(plane, plane_state);
	if (ret)
		return ret;

	for (i = 0; i < ARRAY_SIZE(shadow_plane_state->map); ++i) {
		obj = drm_gem_fb_get_obj(fb, i);
		if (!obj)
			continue;
		ret = drm_gem_vmap(obj, &map);
		if (ret)
			goto err_drm_gem_vunmap;
		shadow_plane_state->map[i] = map;
	}

	return 0;

err_drm_gem_vunmap:
	while (i) {
		--i;
		obj = drm_gem_fb_get_obj(fb, i);
		if (!obj)
			continue;
		drm_gem_vunmap(obj, &shadow_plane_state->map[i]);
	}
	return ret;
}
EXPORT_SYMBOL(drm_gem_prepare_shadow_fb);

/**
 * drm_gem_cleanup_shadow_fb - releases shadow framebuffers
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_helper_funcs.cleanup_fb. It
 * unmaps all buffer objects of the plane's framebuffer.
 *
 * See drm_gem_prepare_shadow_fb() for more information.
 */
void drm_gem_cleanup_shadow_fb(struct drm_plane *plane, struct drm_plane_state *plane_state)
{
	struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
	struct drm_framebuffer *fb = plane_state->fb;
	size_t i = ARRAY_SIZE(shadow_plane_state->map);
	struct drm_gem_object *obj;

	if (!fb)
		return;

	while (i) {
		--i;
		obj = drm_gem_fb_get_obj(fb, i);
		if (!obj)
			continue;
		drm_gem_vunmap(obj, &shadow_plane_state->map[i]);
	}
}
EXPORT_SYMBOL(drm_gem_cleanup_shadow_fb);

/**
 * drm_gem_simple_kms_prepare_shadow_fb - prepares shadow framebuffers
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.prepare_fb. It
 * maps all buffer objects of the plane's framebuffer into kernel address
 * space and stores them in struct drm_shadow_plane_state.map. The
 * framebuffer will be synchronized as part of the atomic commit.
 *
 * See drm_gem_simple_kms_cleanup_shadow_fb() for cleanup.
 *
 * Returns:
 * 0 on success, or a negative errno code otherwise.
 */
int drm_gem_simple_kms_prepare_shadow_fb(struct drm_simple_display_pipe *pipe,
					 struct drm_plane_state *plane_state)
{
	return drm_gem_prepare_shadow_fb(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_prepare_shadow_fb);

/**
 * drm_gem_simple_kms_cleanup_shadow_fb - releases shadow framebuffers
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.cleanup_fb. It
 * unmaps all buffer objects of the plane's framebuffer.
 *
 * See drm_gem_simple_kms_prepare_shadow_fb().
 */
void drm_gem_simple_kms_cleanup_shadow_fb(struct drm_simple_display_pipe *pipe,
					  struct drm_plane_state *plane_state)
{
	drm_gem_cleanup_shadow_fb(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_cleanup_shadow_fb);

/**
 * drm_gem_simple_kms_reset_shadow_plane - resets a shadow-buffered plane
 * @pipe: the simple display pipe
 *
 * This function implements struct drm_simple_display_pipe_funcs.reset_plane
 * for shadow-buffered planes.
 */
void drm_gem_simple_kms_reset_shadow_plane(struct drm_simple_display_pipe *pipe)
{
	drm_gem_reset_shadow_plane(&pipe->plane);
}
EXPORT_SYMBOL(drm_gem_simple_kms_reset_shadow_plane);

/**
 * drm_gem_simple_kms_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
 * @pipe: the simple display pipe
 *
 * This function implements struct drm_simple_display_pipe_funcs.duplicate_plane_state
 * for shadow-buffered planes. It does not duplicate existing mappings of the shadow
 * buffers. Mappings are maintained during the atomic commit by the plane's prepare_fb
 * and cleanup_fb helpers.
 *
 * Returns:
 * A pointer to a new plane state on success, or NULL otherwise.
 */
struct drm_plane_state *
drm_gem_simple_kms_duplicate_shadow_plane_state(struct drm_simple_display_pipe *pipe)
{
	return drm_gem_duplicate_shadow_plane_state(&pipe->plane);
}
EXPORT_SYMBOL(drm_gem_simple_kms_duplicate_shadow_plane_state);

/**
 * drm_gem_simple_kms_destroy_shadow_plane_state - destroys shadow-buffered plane state
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.destroy_plane_state
 * for shadow-buffered planes. It expects that mappings of shadow buffers
 * have been released already.
 */
void drm_gem_simple_kms_destroy_shadow_plane_state(struct drm_simple_display_pipe *pipe,
						   struct drm_plane_state *plane_state)
{
	drm_gem_destroy_shadow_plane_state(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_destroy_shadow_plane_state);