Commit | Line | Data |
---|---|---|
dd08ebf6 MB |
1 | // SPDX-License-Identifier: MIT |
2 | /* | |
3 | * Copyright © 2022 Intel Corporation | |
4 | */ | |
5 | ||
ea9f879d LDM |
6 | #include "xe_preempt_fence.h" |
7 | ||
dd08ebf6 MB |
8 | #include <linux/slab.h> |
9 | ||
c22a4ed0 | 10 | #include "xe_exec_queue.h" |
dd08ebf6 MB |
11 | #include "xe_vm.h" |
12 | ||
/*
 * Worker that completes a preempt fence: waits for the exec queue to be
 * suspended (unless suspend already failed when signaling was enabled),
 * signals the fence, kicks the VM rebind worker and finally drops the
 * exec queue reference taken in xe_preempt_fence_arm().
 */
static void preempt_fence_work_func(struct work_struct *w)
{
	/* Annotate everything up to dma_fence_end_signalling() as a
	 * fence-signaling critical section (lockdep cross-release checks).
	 */
	bool cookie = dma_fence_begin_signalling();
	struct xe_preempt_fence *pfence =
		container_of(w, typeof(*pfence), preempt_work);
	struct xe_exec_queue *q = pfence->q;

	if (pfence->error)
		/* Suspend failed in enable_signaling; propagate the error. */
		dma_fence_set_error(&pfence->base, pfence->error);
	else
		/* Block until the queue suspension has completed. */
		q->ops->suspend_wait(q);

	dma_fence_signal(&pfence->base);
	dma_fence_end_signalling(cookie);

	xe_vm_queue_rebind_worker(q->vm);

	/* Pairs with xe_exec_queue_get() in xe_preempt_fence_arm(). */
	xe_exec_queue_put(q);
}
32 | ||
/* dma_fence_ops::get_driver_name: driver name shown in traces/debugfs. */
static const char *
preempt_fence_get_driver_name(struct dma_fence *fence)
{
	static const char driver_name[] = "xe";

	return driver_name;
}
38 | ||
/* dma_fence_ops::get_timeline_name: all preempt fences share one timeline name. */
static const char *
preempt_fence_get_timeline_name(struct dma_fence *fence)
{
	static const char timeline_name[] = "preempt";

	return timeline_name;
}
44 | ||
/*
 * dma_fence_ops::enable_signaling - invoked once when a waiter needs the
 * fence to make progress.  Initiates suspension of the exec queue and
 * defers the (potentially sleeping) wait to the unbound workqueue; any
 * suspend error is stashed in pfence->error for the worker to propagate.
 */
static bool preempt_fence_enable_signaling(struct dma_fence *fence)
{
	struct xe_preempt_fence *pfence =
		container_of(fence, typeof(*pfence), base);
	struct xe_exec_queue *q = pfence->q;

	pfence->error = q->ops->suspend(q);
	queue_work(system_unbound_wq, &pfence->preempt_work);
	/* true: the fence will be signaled asynchronously by the worker. */
	return true;
}
55 | ||
/* Minimal fence ops; signaling is kicked off lazily via enable_signaling. */
static const struct dma_fence_ops preempt_fence_ops = {
	.get_driver_name = preempt_fence_get_driver_name,
	.get_timeline_name = preempt_fence_get_timeline_name,
	.enable_signaling = preempt_fence_enable_signaling,
};
61 | ||
62 | /** | |
63 | * xe_preempt_fence_alloc() - Allocate a preempt fence with minimal | |
64 | * initialization | |
65 | * | |
66 | * Allocate a preempt fence, and initialize its list head. | |
67 | * If the preempt_fence allocated has been armed with | |
68 | * xe_preempt_fence_arm(), it must be freed using dma_fence_put(). If not, | |
69 | * it must be freed using xe_preempt_fence_free(). | |
70 | * | |
71 | * Return: A struct xe_preempt_fence pointer used for calling into | |
72 | * xe_preempt_fence_arm() or xe_preempt_fence_free(). | |
73 | * An error pointer on error. | |
74 | */ | |
75 | struct xe_preempt_fence *xe_preempt_fence_alloc(void) | |
76 | { | |
77 | struct xe_preempt_fence *pfence; | |
78 | ||
79 | pfence = kmalloc(sizeof(*pfence), GFP_KERNEL); | |
80 | if (!pfence) | |
81 | return ERR_PTR(-ENOMEM); | |
82 | ||
83 | INIT_LIST_HEAD(&pfence->link); | |
84 | INIT_WORK(&pfence->preempt_work, preempt_fence_work_func); | |
85 | ||
86 | return pfence; | |
87 | } | |
88 | ||
/**
 * xe_preempt_fence_free() - Free a preempt fence allocated using
 * xe_preempt_fence_alloc().
 * @pfence: pointer obtained from xe_preempt_fence_alloc();
 *
 * Free a preempt fence that has not yet been armed. An armed fence must
 * instead be released with dma_fence_put().
 */
void xe_preempt_fence_free(struct xe_preempt_fence *pfence)
{
	/* Unlink first so any list the caller put @pfence on stays valid. */
	list_del(&pfence->link);
	kfree(pfence);
}
101 | ||
/**
 * xe_preempt_fence_arm() - Arm a preempt fence allocated using
 * xe_preempt_fence_alloc().
 * @pfence: The struct xe_preempt_fence pointer returned from
 *          xe_preempt_fence_alloc().
 * @q: The struct xe_exec_queue used for arming.
 * @context: The dma-fence context used for arming.
 * @seqno: The dma-fence seqno used for arming.
 *
 * Inserts the preempt fence into @context's timeline, takes @link off any
 * list, and registers the struct xe_exec_queue as the exec queue to be
 * preempted.
 *
 * Return: A pointer to a struct dma_fence embedded into the preempt fence.
 * This function doesn't error.
 */
struct dma_fence *
xe_preempt_fence_arm(struct xe_preempt_fence *pfence, struct xe_exec_queue *q,
		     u64 context, u32 seqno)
{
	list_del_init(&pfence->link);
	/* Reference dropped by preempt_fence_work_func() after signaling. */
	pfence->q = xe_exec_queue_get(q);
	dma_fence_init(&pfence->base, &preempt_fence_ops,
		       &q->compute.lock, context, seqno);

	return &pfence->base;
}
128 | ||
/**
 * xe_preempt_fence_create() - Helper to create and arm a preempt fence.
 * @q: The struct xe_exec_queue used for arming.
 * @context: The dma-fence context used for arming.
 * @seqno: The dma-fence seqno used for arming.
 *
 * Allocates and inserts the preempt fence into @context's timeline,
 * and registers @q as the struct xe_exec_queue to be preempted.
 *
 * Return: A pointer to the resulting struct dma_fence on success. An error
 * pointer on error. In particular if allocation fails it returns
 * ERR_PTR(-ENOMEM);
 */
struct dma_fence *
xe_preempt_fence_create(struct xe_exec_queue *q,
			u64 context, u32 seqno)
{
	struct xe_preempt_fence *pfence;

	pfence = xe_preempt_fence_alloc();
	if (IS_ERR(pfence))
		return ERR_CAST(pfence);

	return xe_preempt_fence_arm(pfence, q, context, seqno);
}
154 | ||
/**
 * xe_fence_is_xe_preempt() - Check whether a dma-fence is a Xe preempt fence.
 * @fence: The dma-fence to check.
 *
 * Return: true if @fence was initialized with &preempt_fence_ops,
 * false otherwise.
 */
bool xe_fence_is_xe_preempt(const struct dma_fence *fence)
{
	return fence->ops == &preempt_fence_ops;
}