drm/i915: Move more GEM objects under gem/
[linux-2.6-block.git] / drivers / gpu / drm / i915 / selftests / i915_vma.c
CommitLineData
e3c7a1c5
CW
/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 */
24
#include <linux/prime_numbers.h>

#include "gem/selftests/mock_context.h"

#include "i915_selftest.h"

#include "mock_gem_device.h"
#include "mock_gtt.h"

/*
 * Sanity-check that a freshly looked-up vma matches the object and context
 * it was requested for: right address space, right size, and the default
 * (NORMAL) ggtt view. Returns true if all checks pass; logs each failure.
 */
static bool assert_vma(struct i915_vma *vma,
		       struct drm_i915_gem_object *obj,
		       struct i915_gem_context *ctx)
{
	bool ok = true;

	/* The vma must live in the context's ppgtt address space. */
	if (vma->vm != &ctx->ppgtt->vm) {
		pr_err("VMA created with wrong VM\n");
		ok = false;
	}

	/* A full-object vma covers exactly the backing object's size. */
	if (vma->size != obj->base.size) {
		pr_err("VMA created with wrong size, found %llu, expected %zu\n",
		       vma->size, obj->base.size);
		ok = false;
	}

	/* No view was requested, so the normal view is expected. */
	if (vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) {
		pr_err("VMA created with wrong type [%d]\n",
		       vma->ggtt_view.type);
		ok = false;
	}

	return ok;
}
59
/*
 * Wrapper around i915_vma_instance() that cross-checks the returned vma
 * against the lookup parameters, both manually and via i915_vma_compare().
 * The manual checks set @ok; if they fail but i915_vma_compare() did not
 * notice, the final branch reports the missed discrepancy. Returns the vma
 * or an ERR_PTR on inconsistency.
 */
static struct i915_vma *
checked_vma_instance(struct drm_i915_gem_object *obj,
		     struct i915_address_space *vm,
		     const struct i915_ggtt_view *view)
{
	struct i915_vma *vma;
	bool ok = true;

	vma = i915_vma_instance(obj, vm, view);
	if (IS_ERR(vma))
		return vma;

	/* Manual checks, will be reinforced by i915_vma_compare! */
	if (vma->vm != vm) {
		pr_err("VMA's vm [%p] does not match request [%p]\n",
		       vma->vm, vm);
		ok = false;
	}

	/* A vma bound into the GGTT must report itself as a ggtt vma. */
	if (i915_is_ggtt(vm) != i915_vma_is_ggtt(vma)) {
		pr_err("VMA ggtt status [%d] does not match parent [%d]\n",
		       i915_vma_is_ggtt(vma), i915_is_ggtt(vm));
		ok = false;
	}

	/* The vma must compare equal to the parameters used to create it... */
	if (i915_vma_compare(vma, vm, view)) {
		pr_err("i915_vma_compare failed with create parameters!\n");
		return ERR_PTR(-EINVAL);
	}

	/* ...and to its own stored view (reflexivity). */
	if (i915_vma_compare(vma, vma->vm,
			     i915_vma_is_ggtt(vma) ? &vma->ggtt_view : NULL)) {
		pr_err("i915_vma_compare failed with itself\n");
		return ERR_PTR(-EINVAL);
	}

	/* Manual checks failed yet i915_vma_compare() saw no difference. */
	if (!ok) {
		pr_err("i915_vma_compare failed to detect the difference!\n");
		return ERR_PTR(-EINVAL);
	}

	return vma;
}
103
/*
 * For every (object, context) pair, look up (or create) the vma in the
 * context's ppgtt and validate it. The pinned==0 pass pins each vma with
 * PIN_USER; the pinned==1 pass looks the same vma up again and unpins it,
 * so both the create and lookup paths are exercised and every vma ends
 * the function unpinned. Returns 0 on success or a negative errno.
 *
 * NOTE(review): on a pin failure, vmas pinned earlier in the walk stay
 * pinned — presumably acceptable since the mock device is torn down by
 * the caller; confirm if reusing this helper elsewhere.
 */
static int create_vmas(struct drm_i915_private *i915,
		       struct list_head *objects,
		       struct list_head *contexts)
{
	struct drm_i915_gem_object *obj;
	struct i915_gem_context *ctx;
	int pinned;

	list_for_each_entry(obj, objects, st_link) {
		for (pinned = 0; pinned <= 1; pinned++) {
			list_for_each_entry(ctx, contexts, link) {
				struct i915_address_space *vm = &ctx->ppgtt->vm;
				struct i915_vma *vma;
				int err;

				vma = checked_vma_instance(obj, vm, NULL);
				if (IS_ERR(vma))
					return PTR_ERR(vma);

				if (!assert_vma(vma, obj, ctx)) {
					pr_err("VMA lookup/create failed\n");
					return -EINVAL;
				}

				if (!pinned) {
					/* First pass: bind and pin the vma. */
					err = i915_vma_pin(vma, 0, 0, PIN_USER);
					if (err) {
						pr_err("Failed to pin VMA\n");
						return err;
					}
				} else {
					/* Second pass: release the pin taken above. */
					i915_vma_unpin(vma);
				}
			}
		}
	}

	return 0;
}
143
/*
 * Subtest: exercise creating many vma amongst many objects and contexts,
 * checking the vma creation and lookup routines. Object and context counts
 * both walk the prime numbers so every combination of list sizes is novel.
 * @arg is the mock GGTT passed in by i915_subtests().
 */
static int igt_vma_create(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	struct drm_i915_private *i915 = ggtt->vm.i915;
	struct drm_i915_gem_object *obj, *on;
	struct i915_gem_context *ctx, *cn;
	unsigned long num_obj, num_ctx;
	unsigned long no, nc;
	IGT_TIMEOUT(end_time);
	LIST_HEAD(contexts);
	LIST_HEAD(objects);
	int err = -ENOMEM;

	no = 0;
	for_each_prime_number(num_obj, ULONG_MAX - 1) {
		/* Top up the object list to the next prime count. */
		for (; no < num_obj; no++) {
			obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
			if (IS_ERR(obj))
				goto out;

			list_add(&obj->st_link, &objects);
		}

		nc = 0;
		for_each_prime_number(num_ctx, MAX_CONTEXT_HW_ID) {
			/* Top up the context list to the next prime count. */
			for (; nc < num_ctx; nc++) {
				ctx = mock_context(i915, "mock");
				if (!ctx)
					goto out;

				/* Take over list ownership from the mock device. */
				list_move(&ctx->link, &contexts);
			}

			err = create_vmas(i915, &objects, &contexts);
			if (err)
				goto out;

			if (igt_timeout(end_time,
					"%s timed out: after %lu objects in %lu contexts\n",
					__func__, no, nc))
				goto end;
		}

		/* Start each object round with a fresh set of contexts. */
		list_for_each_entry_safe(ctx, cn, &contexts, link) {
			list_del_init(&ctx->link);
			mock_context_close(ctx);
		}
	}

end:
	/* Final pass to lookup all created contexts */
	err = create_vmas(i915, &objects, &contexts);
out:
	list_for_each_entry_safe(ctx, cn, &contexts, link) {
		list_del_init(&ctx->link);
		mock_context_close(ctx);
	}

	list_for_each_entry_safe(obj, on, &objects, st_link)
		i915_gem_object_put(obj);
	return err;
}
210
782a3e9e
CW
/*
 * One entry in the igt_vma_pin1 test table: a (size, flags) pin request
 * plus the predicate that decides whether i915_vma_pin()'s result is the
 * expected outcome, and a human-readable description for error reports.
 */
struct pin_mode {
	u64 size;	/* requested pin size; 0 means "whole vma" */
	u64 flags;	/* PIN_* flags, possibly with an offset encoded */
	bool (*assert)(const struct i915_vma *,
		       const struct pin_mode *mode,
		       int result);	/* returns true if @result is expected */
	const char *string;	/* stringified parameters for diagnostics */
};
219
220static bool assert_pin_valid(const struct i915_vma *vma,
221 const struct pin_mode *mode,
222 int result)
223{
224 if (result)
225 return false;
226
227 if (i915_vma_misplaced(vma, mode->size, 0, mode->flags))
228 return false;
229
230 return true;
231}
232
782a3e9e
CW
/* Expectation for entries that must fail with -ENOSPC (GTT too small). */
__maybe_unused
static bool assert_pin_enospc(const struct i915_vma *vma,
			      const struct pin_mode *mode,
			      int result)
{
	return result == -ENOSPC;
}
240
/* Expectation for entries that must be rejected with -EINVAL. */
__maybe_unused
static bool assert_pin_einval(const struct i915_vma *vma,
			      const struct pin_mode *mode,
			      int result)
{
	return result == -EINVAL;
}
248
/*
 * Subtest: exercise all the weird and wonderful i915_vma_pin requests,
 * focusing on error handling of boundary conditions. A single-page object
 * is pinned with each entry of the mode table in turn; each entry's
 * assert() predicate decides whether the result (success, -EINVAL or
 * -ENOSPC) is correct, and successful pins are unbound again so every
 * iteration starts from a clean GGTT.
 */
static int igt_vma_pin1(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	const struct pin_mode modes[] = {
#define VALID(sz, fl) { .size = (sz), .flags = (fl), .assert = assert_pin_valid, .string = #sz ", " #fl ", (valid) " }
#define __INVALID(sz, fl, check, eval) { .size = (sz), .flags = (fl), .assert = (check), .string = #sz ", " #fl ", (invalid " #eval ")" }
#define INVALID(sz, fl) __INVALID(sz, fl, assert_pin_einval, EINVAL)
#define NOSPACE(sz, fl) __INVALID(sz, fl, assert_pin_enospc, ENOSPC)
		/* Whole-object pins with various bias offsets. */
		VALID(0, PIN_GLOBAL),
		VALID(0, PIN_GLOBAL | PIN_MAPPABLE),

		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 4096),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 8192),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
		VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),

		/* Fixed offsets: valid at the last page, invalid past the end. */
		VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
		INVALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | ggtt->mappable_end),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
		INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | ggtt->vm.total),
		INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | round_down(U64_MAX, PAGE_SIZE)),

		/* Explicit sizes up to, at, and beyond the GTT limits. */
		VALID(4096, PIN_GLOBAL),
		VALID(8192, PIN_GLOBAL),
		VALID(ggtt->mappable_end - 4096, PIN_GLOBAL | PIN_MAPPABLE),
		VALID(ggtt->mappable_end, PIN_GLOBAL | PIN_MAPPABLE),
		NOSPACE(ggtt->mappable_end + 4096, PIN_GLOBAL | PIN_MAPPABLE),
		VALID(ggtt->vm.total - 4096, PIN_GLOBAL),
		VALID(ggtt->vm.total, PIN_GLOBAL),
		NOSPACE(ggtt->vm.total + 4096, PIN_GLOBAL),
		NOSPACE(round_down(U64_MAX, PAGE_SIZE), PIN_GLOBAL),
		INVALID(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
		INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
		INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (round_down(U64_MAX, PAGE_SIZE) - 4096)),

		VALID(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),

#if !IS_ENABLED(CONFIG_DRM_I915_DEBUG_GEM)
		/* Misusing BIAS is a programming error (it is not controllable
		 * from userspace) so when debugging is enabled, it explodes.
		 * However, the tests are still quite interesting for checking
		 * variable start, end and size.
		 */
		NOSPACE(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | ggtt->mappable_end),
		NOSPACE(0, PIN_GLOBAL | PIN_OFFSET_BIAS | ggtt->vm.total),
		NOSPACE(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
		NOSPACE(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),
#endif
		{ },	/* terminator: assert == NULL ends the loop */
#undef NOSPACE
#undef INVALID
#undef __INVALID
#undef VALID
	}, *m;
	struct drm_i915_gem_object *obj;
	struct i915_vma *vma;
	int err = -EINVAL;

	/* The table entries assume an empty GTT to place vma into. */
	GEM_BUG_ON(!drm_mm_clean(&ggtt->vm.mm));

	obj = i915_gem_object_create_internal(ggtt->vm.i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	vma = checked_vma_instance(obj, &ggtt->vm, NULL);
	if (IS_ERR(vma))
		goto out;

	for (m = modes; m->assert; m++) {
		err = i915_vma_pin(vma, m->size, 0, m->flags);
		if (!m->assert(vma, m, err)) {
			pr_err("%s to pin single page into GGTT with mode[%d:%s]: size=%llx flags=%llx, err=%d\n",
			       m->assert == assert_pin_valid ? "Failed" : "Unexpectedly succeeded",
			       (int)(m - modes), m->string, m->size, m->flags,
			       err);
			/* Drop the unexpected pin before bailing out. */
			if (!err)
				i915_vma_unpin(vma);
			err = -EINVAL;
			goto out;
		}

		/* Undo a successful pin so the next mode sees a clean GTT. */
		if (!err) {
			i915_vma_unpin(vma);
			err = i915_vma_unbind(vma);
			if (err) {
				pr_err("Failed to unbind single page from GGTT, err=%d\n", err);
				goto out;
			}
		}
	}

	err = 0;
out:
	i915_gem_object_put(obj);
	return err;
}
350
a231bf64
CW
351static unsigned long rotated_index(const struct intel_rotation_info *r,
352 unsigned int n,
353 unsigned int x,
354 unsigned int y)
355{
356 return (r->plane[n].stride * (r->plane[n].height - y - 1) +
357 r->plane[n].offset + x);
358}
359
/*
 * Walk plane @n of a rotated vma's sg table in rotated order (column-major,
 * as built by the rotation code) and verify that every entry is exactly one
 * page long and points at the dma address of the source page computed by
 * rotated_index(). Returns the next unconsumed scatterlist entry (for the
 * following plane), or an ERR_PTR on mismatch.
 */
static struct scatterlist *
assert_rotated(struct drm_i915_gem_object *obj,
	       const struct intel_rotation_info *r, unsigned int n,
	       struct scatterlist *sg)
{
	unsigned int x, y;

	for (x = 0; x < r->plane[n].width; x++) {
		for (y = 0; y < r->plane[n].height; y++) {
			unsigned long src_idx;
			dma_addr_t src;

			if (!sg) {
				pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
				       n, x, y);
				return ERR_PTR(-EINVAL);
			}

			src_idx = rotated_index(r, n, x, y);
			src = i915_gem_object_get_dma_address(obj, src_idx);

			/* Rotated layouts are built strictly page-by-page. */
			if (sg_dma_len(sg) != PAGE_SIZE) {
				pr_err("Invalid sg.length, found %d, expected %lu for rotated page (%d, %d) [src index %lu]\n",
				       sg_dma_len(sg), PAGE_SIZE,
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			if (sg_dma_address(sg) != src) {
				pr_err("Invalid address for rotated page (%d, %d) [src index %lu]\n",
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			sg = sg_next(sg);
		}
	}

	return sg;
}
400
e2e394bf
VS
401static unsigned long remapped_index(const struct intel_remapped_info *r,
402 unsigned int n,
403 unsigned int x,
404 unsigned int y)
405{
406 return (r->plane[n].stride * y +
407 r->plane[n].offset + x);
408}
409
/*
 * Walk plane @n of a remapped vma's sg table in row-major order and verify
 * each page's dma address against remapped_index(). Unlike the rotated
 * case, remapped sg entries may be coalesced into multi-page chunks, so we
 * carve pages off the current entry (@left bytes remaining, @offset bytes
 * consumed) and only advance to the next entry once it is exhausted.
 * Returns the next unconsumed entry or an ERR_PTR on mismatch.
 */
static struct scatterlist *
assert_remapped(struct drm_i915_gem_object *obj,
		const struct intel_remapped_info *r, unsigned int n,
		struct scatterlist *sg)
{
	unsigned int x, y;
	unsigned int left = 0;
	unsigned int offset;

	for (y = 0; y < r->plane[n].height; y++) {
		for (x = 0; x < r->plane[n].width; x++) {
			unsigned long src_idx;
			dma_addr_t src;

			if (!sg) {
				pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
				       n, x, y);
				return ERR_PTR(-EINVAL);
			}
			/* Start consuming a fresh (possibly coalesced) entry. */
			if (!left) {
				offset = 0;
				left = sg_dma_len(sg);
			}

			src_idx = remapped_index(r, n, x, y);
			src = i915_gem_object_get_dma_address(obj, src_idx);

			/*
			 * The remainder must still be whole pages.
			 * NOTE(review): the message prints sg_dma_len(sg),
			 * not the failing @left remainder — confirm intent.
			 */
			if (left < PAGE_SIZE || left & (PAGE_SIZE-1)) {
				pr_err("Invalid sg.length, found %d, expected %lu for remapped page (%d, %d) [src index %lu]\n",
				       sg_dma_len(sg), PAGE_SIZE,
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			if (sg_dma_address(sg) + offset != src) {
				pr_err("Invalid address for remapped page (%d, %d) [src index %lu]\n",
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			left -= PAGE_SIZE;
			offset += PAGE_SIZE;

			if (!left)
				sg = sg_next(sg);
		}
	}

	return sg;
}
461
1a74fc0b
VS
462static unsigned int rotated_size(const struct intel_remapped_plane_info *a,
463 const struct intel_remapped_plane_info *b)
a231bf64
CW
464{
465 return a->width * a->height + b->width * b->height;
466}
467
e2e394bf 468static int igt_vma_rotate_remap(void *arg)
a231bf64 469{
c95e7ce3
CW
470 struct i915_ggtt *ggtt = arg;
471 struct i915_address_space *vm = &ggtt->vm;
a231bf64 472 struct drm_i915_gem_object *obj;
1a74fc0b 473 const struct intel_remapped_plane_info planes[] = {
a231bf64
CW
474 { .width = 1, .height = 1, .stride = 1 },
475 { .width = 2, .height = 2, .stride = 2 },
476 { .width = 4, .height = 4, .stride = 4 },
477 { .width = 8, .height = 8, .stride = 8 },
478
479 { .width = 3, .height = 5, .stride = 3 },
480 { .width = 3, .height = 5, .stride = 4 },
481 { .width = 3, .height = 5, .stride = 5 },
482
483 { .width = 5, .height = 3, .stride = 5 },
484 { .width = 5, .height = 3, .stride = 7 },
485 { .width = 5, .height = 3, .stride = 9 },
486
487 { .width = 4, .height = 6, .stride = 6 },
488 { .width = 6, .height = 4, .stride = 6 },
489 { }
490 }, *a, *b;
e2e394bf
VS
491 enum i915_ggtt_view_type types[] = {
492 I915_GGTT_VIEW_ROTATED,
493 I915_GGTT_VIEW_REMAPPED,
494 0,
495 }, *t;
a231bf64
CW
496 const unsigned int max_pages = 64;
497 int err = -ENOMEM;
498
499 /* Create VMA for many different combinations of planes and check
500 * that the page layout within the rotated VMA match our expectations.
501 */
502
c95e7ce3 503 obj = i915_gem_object_create_internal(vm->i915, max_pages * PAGE_SIZE);
a231bf64
CW
504 if (IS_ERR(obj))
505 goto out;
506
e2e394bf 507 for (t = types; *t; t++) {
a231bf64
CW
508 for (a = planes; a->width; a++) {
509 for (b = planes + ARRAY_SIZE(planes); b-- != planes; ) {
510 struct i915_ggtt_view view;
511 unsigned int n, max_offset;
512
513 max_offset = max(a->stride * a->height,
514 b->stride * b->height);
515 GEM_BUG_ON(max_offset > max_pages);
516 max_offset = max_pages - max_offset;
517
e2e394bf 518 view.type = *t;
a231bf64
CW
519 view.rotated.plane[0] = *a;
520 view.rotated.plane[1] = *b;
521
522 for_each_prime_number_from(view.rotated.plane[0].offset, 0, max_offset) {
523 for_each_prime_number_from(view.rotated.plane[1].offset, 0, max_offset) {
524 struct scatterlist *sg;
525 struct i915_vma *vma;
526
527 vma = checked_vma_instance(obj, vm, &view);
528 if (IS_ERR(vma)) {
529 err = PTR_ERR(vma);
530 goto out_object;
531 }
532
533 err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
534 if (err) {
535 pr_err("Failed to pin VMA, err=%d\n", err);
536 goto out_object;
537 }
538
e2e394bf
VS
539 if (view.type == I915_GGTT_VIEW_ROTATED &&
540 vma->size != rotated_size(a, b) * PAGE_SIZE) {
a231bf64
CW
541 pr_err("VMA is wrong size, expected %lu, found %llu\n",
542 PAGE_SIZE * rotated_size(a, b), vma->size);
543 err = -EINVAL;
544 goto out_object;
545 }
546
e2e394bf
VS
547 if (view.type == I915_GGTT_VIEW_REMAPPED &&
548 vma->size > rotated_size(a, b) * PAGE_SIZE) {
549 pr_err("VMA is wrong size, expected %lu, found %llu\n",
550 PAGE_SIZE * rotated_size(a, b), vma->size);
551 err = -EINVAL;
552 goto out_object;
553 }
554
555 if (vma->pages->nents > rotated_size(a, b)) {
a231bf64
CW
556 pr_err("sg table is wrong sizeo, expected %u, found %u nents\n",
557 rotated_size(a, b), vma->pages->nents);
558 err = -EINVAL;
559 goto out_object;
560 }
561
562 if (vma->node.size < vma->size) {
563 pr_err("VMA binding too small, expected %llu, found %llu\n",
564 vma->size, vma->node.size);
565 err = -EINVAL;
566 goto out_object;
567 }
568
569 if (vma->pages == obj->mm.pages) {
570 pr_err("VMA using unrotated object pages!\n");
571 err = -EINVAL;
572 goto out_object;
573 }
574
575 sg = vma->pages->sgl;
576 for (n = 0; n < ARRAY_SIZE(view.rotated.plane); n++) {
e2e394bf
VS
577 if (view.type == I915_GGTT_VIEW_ROTATED)
578 sg = assert_rotated(obj, &view.rotated, n, sg);
579 else
580 sg = assert_remapped(obj, &view.remapped, n, sg);
a231bf64 581 if (IS_ERR(sg)) {
e2e394bf
VS
582 pr_err("Inconsistent %s VMA pages for plane %d: [(%d, %d, %d, %d), (%d, %d, %d, %d)]\n",
583 view.type == I915_GGTT_VIEW_ROTATED ?
584 "rotated" : "remapped", n,
a231bf64
CW
585 view.rotated.plane[0].width,
586 view.rotated.plane[0].height,
587 view.rotated.plane[0].stride,
588 view.rotated.plane[0].offset,
589 view.rotated.plane[1].width,
590 view.rotated.plane[1].height,
591 view.rotated.plane[1].stride,
592 view.rotated.plane[1].offset);
593 err = -EINVAL;
594 goto out_object;
595 }
596 }
597
598 i915_vma_unpin(vma);
599 }
600 }
601 }
602 }
e2e394bf 603 }
a231bf64
CW
604
605out_object:
606 i915_gem_object_put(obj);
607out:
608 return err;
609}
610
af1f83a1
CW
/*
 * Verify that a partial vma's pages are exactly the object's pages in
 * [offset, offset + size): each dma address in the vma's sg table must
 * match the corresponding source page, and the table must not run past
 * @size pages. Returns true on success; logs the first mismatch.
 */
static bool assert_partial(struct drm_i915_gem_object *obj,
			   struct i915_vma *vma,
			   unsigned long offset,
			   unsigned long size)
{
	struct sgt_iter sgt;
	dma_addr_t dma;

	for_each_sgt_dma(dma, sgt, vma->pages) {
		dma_addr_t src;

		/* More sg entries than the requested page count. */
		if (!size) {
			pr_err("Partial scattergather list too long\n");
			return false;
		}

		src = i915_gem_object_get_dma_address(obj, offset);
		if (src != dma) {
			pr_err("DMA mismatch for partial page offset %lu\n",
			       offset);
			return false;
		}

		offset++;
		size--;
	}

	return true;
}
640
/*
 * Check the basic invariants of a pinned vma: its size matches @size, its
 * GTT node is large enough, and its page backing matches the requested
 * view — a non-NORMAL view must have its own page arrangement, while the
 * normal view must reuse the object's pages directly. @name labels the
 * test phase in diagnostics. Returns true if all checks pass.
 */
static bool assert_pin(struct i915_vma *vma,
		       struct i915_ggtt_view *view,
		       u64 size,
		       const char *name)
{
	bool ok = true;

	if (vma->size != size) {
		pr_err("(%s) VMA is wrong size, expected %llu, found %llu\n",
		       name, size, vma->size);
		ok = false;
	}

	if (vma->node.size < vma->size) {
		pr_err("(%s) VMA binding too small, expected %llu, found %llu\n",
		       name, vma->size, vma->node.size);
		ok = false;
	}

	if (view && view->type != I915_GGTT_VIEW_NORMAL) {
		/* A special view must be stored and backed independently. */
		if (memcmp(&vma->ggtt_view, view, sizeof(*view))) {
			pr_err("(%s) VMA mismatch upon creation!\n",
			       name);
			ok = false;
		}

		if (vma->pages == vma->obj->mm.pages) {
			pr_err("(%s) VMA using original object pages!\n",
			       name);
			ok = false;
		}
	} else {
		/* The normal view aliases the object's own pages. */
		if (vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) {
			pr_err("Not the normal ggtt view! Found %d\n",
			       vma->ggtt_view.type);
			ok = false;
		}

		if (vma->pages != vma->obj->mm.pages) {
			pr_err("VMA not using object pages!\n");
			ok = false;
		}
	}

	return ok;
}
687
/*
 * Subtest: create lots of different partial VMA for one object and check
 * that we are returned the same VMA when we later request the same range.
 * The first phase creates every (prime offset, prime size) partial view;
 * the second phase looks them all up again and must not grow obj->vma.list.
 */
static int igt_vma_partial(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	struct i915_address_space *vm = &ggtt->vm;
	const unsigned int npages = 1021; /* prime! */
	struct drm_i915_gem_object *obj;
	const struct phase {
		const char *name;
	} phases[] = {
		{ "create" },
		{ "lookup" },
		{ },
	}, *p;
	unsigned int sz, offset;
	struct i915_vma *vma;
	int err = -ENOMEM;

	obj = i915_gem_object_create_internal(vm->i915, npages * PAGE_SIZE);
	if (IS_ERR(obj))
		goto out;

	for (p = phases; p->name; p++) { /* exercise both create/lookup */
		unsigned int count, nvma;

		nvma = 0;
		for_each_prime_number_from(sz, 1, npages) {
			for_each_prime_number_from(offset, 0, npages - sz) {
				struct i915_ggtt_view view;

				view.type = I915_GGTT_VIEW_PARTIAL;
				view.partial.offset = offset;
				view.partial.size = sz;

				/* A full-span partial degenerates to NORMAL. */
				if (sz == npages)
					view.type = I915_GGTT_VIEW_NORMAL;

				vma = checked_vma_instance(obj, vm, &view);
				if (IS_ERR(vma)) {
					err = PTR_ERR(vma);
					goto out_object;
				}

				err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
				if (err)
					goto out_object;

				if (!assert_pin(vma, &view, sz*PAGE_SIZE, p->name)) {
					pr_err("(%s) Inconsistent partial pinning for (offset=%d, size=%d)\n",
					       p->name, offset, sz);
					err = -EINVAL;
					goto out_object;
				}

				if (!assert_partial(obj, vma, offset, sz)) {
					pr_err("(%s) Inconsistent partial pages for (offset=%d, size=%d)\n",
					       p->name, offset, sz);
					err = -EINVAL;
					goto out_object;
				}

				i915_vma_unpin(vma);
				nvma++;
			}
		}

		/* Every partial view must be tracked on the object... */
		count = 0;
		list_for_each_entry(vma, &obj->vma.list, obj_link)
			count++;
		if (count != nvma) {
			pr_err("(%s) All partial vma were not recorded on the obj->vma_list: found %u, expected %u\n",
			       p->name, count, nvma);
			err = -EINVAL;
			goto out_object;
		}

		/* Check that we did create the whole object mapping */
		vma = checked_vma_instance(obj, vm, NULL);
		if (IS_ERR(vma)) {
			err = PTR_ERR(vma);
			goto out_object;
		}

		err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
		if (err)
			goto out_object;

		if (!assert_pin(vma, NULL, obj->base.size, p->name)) {
			pr_err("(%s) inconsistent full pin\n", p->name);
			err = -EINVAL;
			goto out_object;
		}

		i915_vma_unpin(vma);

		/* ...and the full view was created by the sz==npages case
		 * above, so the list must not have grown.
		 */
		count = 0;
		list_for_each_entry(vma, &obj->vma.list, obj_link)
			count++;
		if (count != nvma) {
			pr_err("(%s) allocated an extra full vma!\n", p->name);
			err = -EINVAL;
			goto out_object;
		}
	}

out_object:
	i915_gem_object_put(obj);
out:
	return err;
}
801
e3c7a1c5
CW
/*
 * Entry point for the mock (no hardware) vma selftests: builds a mock
 * device and mock GGTT, runs the subtests under struct_mutex with the
 * GGTT as their argument, then tears everything down. Returns the first
 * subtest failure or 0.
 */
int i915_vma_mock_selftests(void)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_vma_create),
		SUBTEST(igt_vma_pin1),
		SUBTEST(igt_vma_rotate_remap),
		SUBTEST(igt_vma_partial),
	};
	struct drm_i915_private *i915;
	struct i915_ggtt *ggtt;
	int err;

	i915 = mock_gem_device();
	if (!i915)
		return -ENOMEM;

	ggtt = kmalloc(sizeof(*ggtt), GFP_KERNEL);
	if (!ggtt) {
		err = -ENOMEM;
		goto out_put;
	}
	mock_init_ggtt(i915, ggtt);

	mutex_lock(&i915->drm.struct_mutex);
	err = i915_subtests(tests, ggtt);
	mock_device_flush(i915);
	mutex_unlock(&i915->drm.struct_mutex);

	/* Objects released by the subtests are freed asynchronously. */
	i915_gem_drain_freed_objects(i915);

	mock_fini_ggtt(ggtt);
	kfree(ggtt);
out_put:
	drm_dev_put(&i915->drm);
	return err;
}
bb211c3d
VS
838
839static int igt_vma_remapped_gtt(void *arg)
840{
841 struct drm_i915_private *i915 = arg;
842 const struct intel_remapped_plane_info planes[] = {
843 { .width = 1, .height = 1, .stride = 1 },
844 { .width = 2, .height = 2, .stride = 2 },
845 { .width = 4, .height = 4, .stride = 4 },
846 { .width = 8, .height = 8, .stride = 8 },
847
848 { .width = 3, .height = 5, .stride = 3 },
849 { .width = 3, .height = 5, .stride = 4 },
850 { .width = 3, .height = 5, .stride = 5 },
851
852 { .width = 5, .height = 3, .stride = 5 },
853 { .width = 5, .height = 3, .stride = 7 },
854 { .width = 5, .height = 3, .stride = 9 },
855
856 { .width = 4, .height = 6, .stride = 6 },
857 { .width = 6, .height = 4, .stride = 6 },
858 { }
859 }, *p;
860 enum i915_ggtt_view_type types[] = {
861 I915_GGTT_VIEW_ROTATED,
862 I915_GGTT_VIEW_REMAPPED,
863 0,
864 }, *t;
865 struct drm_i915_gem_object *obj;
866 intel_wakeref_t wakeref;
867 int err = 0;
868
869 obj = i915_gem_object_create_internal(i915, 10 * 10 * PAGE_SIZE);
870 if (IS_ERR(obj))
871 return PTR_ERR(obj);
872
873 mutex_lock(&i915->drm.struct_mutex);
874
875 wakeref = intel_runtime_pm_get(i915);
876
877 for (t = types; *t; t++) {
878 for (p = planes; p->width; p++) {
879 struct i915_ggtt_view view = {
880 .type = *t,
881 .rotated.plane[0] = *p,
882 };
883 struct i915_vma *vma;
884 u32 __iomem *map;
885 unsigned int x, y;
886 int err;
887
888 err = i915_gem_object_set_to_gtt_domain(obj, true);
889 if (err)
890 goto out;
891
892 vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, PIN_MAPPABLE);
893 if (IS_ERR(vma)) {
894 err = PTR_ERR(vma);
895 goto out;
896 }
897
898 GEM_BUG_ON(vma->ggtt_view.type != *t);
899
900 map = i915_vma_pin_iomap(vma);
901 i915_vma_unpin(vma);
902 if (IS_ERR(map)) {
903 err = PTR_ERR(map);
904 goto out;
905 }
906
907 for (y = 0 ; y < p->height; y++) {
908 for (x = 0 ; x < p->width; x++) {
909 unsigned int offset;
910 u32 val = y << 16 | x;
911
912 if (*t == I915_GGTT_VIEW_ROTATED)
913 offset = (x * p->height + y) * PAGE_SIZE;
914 else
915 offset = (y * p->width + x) * PAGE_SIZE;
916
917 iowrite32(val, &map[offset / sizeof(*map)]);
918 }
919 }
920
921 i915_vma_unpin_iomap(vma);
922
923 vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, PIN_MAPPABLE);
924 if (IS_ERR(vma)) {
925 err = PTR_ERR(vma);
926 goto out;
927 }
928
929 GEM_BUG_ON(vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL);
930
931 map = i915_vma_pin_iomap(vma);
932 i915_vma_unpin(vma);
933 if (IS_ERR(map)) {
934 err = PTR_ERR(map);
935 goto out;
936 }
937
938 for (y = 0 ; y < p->height; y++) {
939 for (x = 0 ; x < p->width; x++) {
940 unsigned int offset, src_idx;
941 u32 exp = y << 16 | x;
942 u32 val;
943
944 if (*t == I915_GGTT_VIEW_ROTATED)
945 src_idx = rotated_index(&view.rotated, 0, x, y);
946 else
947 src_idx = remapped_index(&view.remapped, 0, x, y);
948 offset = src_idx * PAGE_SIZE;
949
950 val = ioread32(&map[offset / sizeof(*map)]);
951 if (val != exp) {
952 pr_err("%s VMA write test failed, expected 0x%x, found 0x%x\n",
953 *t == I915_GGTT_VIEW_ROTATED ? "Rotated" : "Remapped",
954 val, exp);
955 i915_vma_unpin_iomap(vma);
956 goto out;
957 }
958 }
959 }
960 i915_vma_unpin_iomap(vma);
961 }
962 }
963
964out:
965 intel_runtime_pm_put(i915, wakeref);
966 mutex_unlock(&i915->drm.struct_mutex);
967 i915_gem_object_put(obj);
968
969 return err;
970}
971
/*
 * Entry point for the live (real hardware) vma selftests; runs them with
 * the device itself as the subtest argument.
 */
int i915_vma_live_selftests(struct drm_i915_private *i915)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_vma_remapped_gtt),
	};

	return i915_subtests(tests, i915);
}