/* drivers/gpu/drm/radeon/radeon_fb.c */
/*
 * Copyright © 2007 David Airlie
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *     David Airlie
 */
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/fb.h>

#include "drmP.h"
#include "drm.h"
#include "drm_crtc.h"
#include "drm_crtc_helper.h"
#include "radeon_drm.h"
#include "radeon.h"

#include "drm_fb_helper.h"

#include <linux/vga_switcheroo.h>

/* object hierarchy -
   this contains a helper + a radeon fb
   the helper contains a pointer to radeon framebuffer baseclass.
*/
struct radeon_fbdev {
	struct drm_fb_helper helper;
	struct radeon_framebuffer rfb;
	struct list_head fbdev_list;
	struct radeon_device *rdev;
};

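/*
 * fb_ops for the emulated fbdev console; mode handling is delegated to the
 * drm_fb_helper core and drawing to the unaccelerated cfb_* helpers.
 */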
static struct fb_ops radeonfb_ops = {
	.owner = THIS_MODULE,
	.fb_check_var = drm_fb_helper_check_var,
	.fb_set_par = drm_fb_helper_set_par,
	.fb_fillrect = cfb_fillrect,
	.fb_copyarea = cfb_copyarea,
	.fb_imageblit = cfb_imageblit,
	.fb_pan_display = drm_fb_helper_pan_display,
	.fb_blank = drm_fb_helper_blank,
	.fb_setcmap = drm_fb_helper_setcmap,
	.fb_debug_enter = drm_fb_helper_debug_enter,
	.fb_debug_leave = drm_fb_helper_debug_leave,
};

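/*
 * Pad a scanline width (in pixels) to the pitch granularity the display
 * hardware needs.  AVIVO chips and tiled surfaces use the coarser mask for
 * each pixel size.  For example, a 1366 pixel wide, 32 bpp mode on an AVIVO
 * chip uses a mask of 63 and is padded to 1408 pixels.
 */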
int radeon_align_pitch(struct radeon_device *rdev, int width, int bpp, bool tiled)
{
	int aligned = width;
	int align_large = (ASIC_IS_AVIVO(rdev)) || tiled;
	int pitch_mask = 0;

	switch (bpp / 8) {
	case 1:
		pitch_mask = align_large ? 255 : 127;
		break;
	case 2:
		pitch_mask = align_large ? 127 : 31;
		break;
	case 3:
	case 4:
		pitch_mask = align_large ? 63 : 15;
		break;
	}

	aligned += pitch_mask;
	aligned &= ~pitch_mask;
	return aligned;
}

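/*
 * Undo radeonfb_create_pinned_object(): unmap and unpin the buffer object
 * backing the console framebuffer and drop the GEM reference.
 */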
static void radeonfb_destroy_pinned_object(struct drm_gem_object *gobj)
{
	struct radeon_bo *rbo = gem_to_radeon_bo(gobj);
	int ret;

	ret = radeon_bo_reserve(rbo, false);
	if (likely(ret == 0)) {
		radeon_bo_kunmap(rbo);
		radeon_bo_unpin(rbo);
		radeon_bo_unreserve(rbo);
	}
	drm_gem_object_unreference_unlocked(gobj);
}

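/*
 * Allocate the VRAM buffer object backing the fbdev console: align pitch
 * and height to the hardware limits, apply tiling/byte-swap flags where
 * requested, then pin the object in VRAM and map it into the kernel.
 */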
static int radeonfb_create_pinned_object(struct radeon_fbdev *rfbdev,
					 struct drm_mode_fb_cmd2 *mode_cmd,
					 struct drm_gem_object **gobj_p)
{
	struct radeon_device *rdev = rfbdev->rdev;
	struct drm_gem_object *gobj = NULL;
	struct radeon_bo *rbo = NULL;
	bool fb_tiled = false; /* useful for testing */
	u32 tiling_flags = 0;
	int ret;
	int aligned_size, size;
	int height = mode_cmd->height;
	u32 bpp, depth;

	drm_fb_get_bpp_depth(mode_cmd->pixel_format, &depth, &bpp);

	/* need to align pitch with crtc limits */
	mode_cmd->pitches[0] = radeon_align_pitch(rdev, mode_cmd->width, bpp,
						  fb_tiled) * ((bpp + 1) / 8);

	if (rdev->family >= CHIP_R600)
		height = ALIGN(mode_cmd->height, 8);
	size = mode_cmd->pitches[0] * height;
	aligned_size = ALIGN(size, PAGE_SIZE);
	ret = radeon_gem_object_create(rdev, aligned_size, 0,
				       RADEON_GEM_DOMAIN_VRAM,
				       false, true,
				       &gobj);
	if (ret) {
		printk(KERN_ERR "failed to allocate framebuffer (%d)\n",
		       aligned_size);
		return -ENOMEM;
	}
	rbo = gem_to_radeon_bo(gobj);

	if (fb_tiled)
		tiling_flags = RADEON_TILING_MACRO;

#ifdef __BIG_ENDIAN
	switch (bpp) {
	case 32:
		tiling_flags |= RADEON_TILING_SWAP_32BIT;
		break;
	case 16:
		tiling_flags |= RADEON_TILING_SWAP_16BIT;
	default:
		break;
	}
#endif

	if (tiling_flags) {
		ret = radeon_bo_set_tiling_flags(rbo,
						 tiling_flags | RADEON_TILING_SURFACE,
						 mode_cmd->pitches[0]);
		if (ret)
			dev_err(rdev->dev, "FB failed to set tiling flags\n");
	}

	ret = radeon_bo_reserve(rbo, false);
	if (unlikely(ret != 0))
		goto out_unref;
	/* Only 27 bit offset for legacy CRTC */
	ret = radeon_bo_pin_restricted(rbo, RADEON_GEM_DOMAIN_VRAM,
				       ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27,
				       NULL);
	if (ret) {
		radeon_bo_unreserve(rbo);
		goto out_unref;
	}
	if (fb_tiled)
		radeon_bo_check_tiling(rbo, 0, 0);
	ret = radeon_bo_kmap(rbo, NULL);
	radeon_bo_unreserve(rbo);
	if (ret) {
		goto out_unref;
	}

	*gobj_p = gobj;
	return 0;
out_unref:
	radeonfb_destroy_pinned_object(gobj);
	*gobj_p = NULL;
	return ret;
}

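/*
 * Build the fbdev framebuffer: create and pin the backing buffer object,
 * wrap it in a radeon_framebuffer and fill out the fb_info (fix/var,
 * apertures, cmap) so fbcon can take over the console.
 */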
static int radeonfb_create(struct radeon_fbdev *rfbdev,
			   struct drm_fb_helper_surface_size *sizes)
{
	struct radeon_device *rdev = rfbdev->rdev;
	struct fb_info *info;
	struct drm_framebuffer *fb = NULL;
	struct drm_mode_fb_cmd2 mode_cmd;
	struct drm_gem_object *gobj = NULL;
	struct radeon_bo *rbo = NULL;
	struct device *device = &rdev->pdev->dev;
	int ret;
	unsigned long tmp;

	mode_cmd.width = sizes->surface_width;
	mode_cmd.height = sizes->surface_height;

	/* avivo can't scanout real 24bpp */
	if ((sizes->surface_bpp == 24) && ASIC_IS_AVIVO(rdev))
		sizes->surface_bpp = 32;

	mode_cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp,
							  sizes->surface_depth);

	ret = radeonfb_create_pinned_object(rfbdev, &mode_cmd, &gobj);
	if (ret) {
		DRM_ERROR("failed to create fbcon object %d\n", ret);
		return ret;
	}

	rbo = gem_to_radeon_bo(gobj);

	/* okay we have an object now allocate the framebuffer */
	info = framebuffer_alloc(0, device);
	if (info == NULL) {
		ret = -ENOMEM;
		goto out_unref;
	}

	info->par = rfbdev;

	ret = radeon_framebuffer_init(rdev->ddev, &rfbdev->rfb, &mode_cmd, gobj);
	if (ret) {
		DRM_ERROR("failed to initialise framebuffer %d\n", ret);
		goto out_unref;
	}

	fb = &rfbdev->rfb.base;

	/* setup helper */
	rfbdev->helper.fb = fb;
	rfbdev->helper.fbdev = info;

	memset_io(rbo->kptr, 0x0, radeon_bo_size(rbo));

	strcpy(info->fix.id, "radeondrmfb");

	drm_fb_helper_fill_fix(info, fb->pitches[0], fb->depth);

	info->flags = FBINFO_DEFAULT | FBINFO_CAN_FORCE_OUTPUT;
	info->fbops = &radeonfb_ops;

	tmp = radeon_bo_gpu_offset(rbo) - rdev->mc.vram_start;
	info->fix.smem_start = rdev->mc.aper_base + tmp;
	info->fix.smem_len = radeon_bo_size(rbo);
	info->screen_base = rbo->kptr;
	info->screen_size = radeon_bo_size(rbo);

	drm_fb_helper_fill_var(info, &rfbdev->helper, sizes->fb_width, sizes->fb_height);

	/* setup aperture base/size for vesafb takeover */
	info->apertures = alloc_apertures(1);
	if (!info->apertures) {
		ret = -ENOMEM;
		goto out_unref;
	}
	info->apertures->ranges[0].base = rdev->ddev->mode_config.fb_base;
	info->apertures->ranges[0].size = rdev->mc.aper_size;

	/* Use default scratch pixmap (info->pixmap.flags = FB_PIXMAP_SYSTEM) */

	if (info->screen_base == NULL) {
		ret = -ENOSPC;
		goto out_unref;
	}

	ret = fb_alloc_cmap(&info->cmap, 256, 0);
	if (ret) {
		ret = -ENOMEM;
		goto out_unref;
	}

	DRM_INFO("fb mappable at 0x%lX\n", info->fix.smem_start);
	DRM_INFO("vram aperture at 0x%lX\n", (unsigned long)rdev->mc.aper_base);
	DRM_INFO("size %lu\n", (unsigned long)radeon_bo_size(rbo));
	DRM_INFO("fb depth is %d\n", fb->depth);
	DRM_INFO(" pitch is %d\n", fb->pitches[0]);

	vga_switcheroo_client_fb_set(rdev->ddev->pdev, info);
	return 0;

out_unref:
	if (rbo) {

	}
	if (fb && ret) {
		drm_gem_object_unreference(gobj);
		drm_framebuffer_cleanup(fb);
		kfree(fb);
	}
	return ret;
}

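/*
 * .fb_probe callback for the drm_fb_helper: create the single fbdev
 * framebuffer on first use and report whether a new one was set up.
 */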
static int radeon_fb_find_or_create_single(struct drm_fb_helper *helper,
					   struct drm_fb_helper_surface_size *sizes)
{
	struct radeon_fbdev *rfbdev = (struct radeon_fbdev *)helper;
	int new_fb = 0;
	int ret;

	if (!helper->fb) {
		ret = radeonfb_create(rfbdev, sizes);
		if (ret)
			return ret;
		new_fb = 1;
	}
	return new_fb;
}

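/*
 * Remember the last non-empty entry of a comma separated option string as
 * the mode option; only the parsing lives here, nothing else in this file
 * consumes mode_option.
 */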
static char *mode_option;
static int radeon_parse_options(char *options)
{
	char *this_opt;

	if (!options || !*options)
		return 0;

	while ((this_opt = strsep(&options, ",")) != NULL) {
		if (!*this_opt)
			continue;
		mode_option = this_opt;
	}
	return 0;
}

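/* Output poll callback: forward hotplug events to the fbdev helper. */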
void radeon_fb_output_poll_changed(struct radeon_device *rdev)
{
	drm_fb_helper_hotplug_event(&rdev->mode_info.rfbdev->helper);
}

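/*
 * Tear down the fbdev emulation: unregister the fb_info, release the
 * pinned buffer object and clean up the helper and framebuffer state.
 */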
static int radeon_fbdev_destroy(struct drm_device *dev, struct radeon_fbdev *rfbdev)
{
	struct fb_info *info;
	struct radeon_framebuffer *rfb = &rfbdev->rfb;

	if (rfbdev->helper.fbdev) {
		info = rfbdev->helper.fbdev;

		unregister_framebuffer(info);
		if (info->cmap.len)
			fb_dealloc_cmap(&info->cmap);
		framebuffer_release(info);
	}

	if (rfb->obj) {
		radeonfb_destroy_pinned_object(rfb->obj);
		rfb->obj = NULL;
	}
	drm_fb_helper_fini(&rfbdev->helper);
	drm_framebuffer_cleanup(&rfb->base);

	return 0;
}

static struct drm_fb_helper_funcs radeon_fb_helper_funcs = {
	.gamma_set = radeon_crtc_fb_gamma_set,
	.gamma_get = radeon_crtc_fb_gamma_get,
	.fb_probe = radeon_fb_find_or_create_single,
};

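/*
 * Set up fbdev emulation at driver init: allocate the radeon_fbdev
 * wrapper, register it with the drm_fb_helper core and trigger the
 * initial console configuration (8 bpp on RN50 or low VRAM boards).
 */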
int radeon_fbdev_init(struct radeon_device *rdev)
{
	struct radeon_fbdev *rfbdev;
	int bpp_sel = 32;
	int ret;

	/* select 8 bpp console on RN50 or cards with 32MB or less of VRAM */
	if (ASIC_IS_RN50(rdev) || rdev->mc.real_vram_size <= (32*1024*1024))
		bpp_sel = 8;

	rfbdev = kzalloc(sizeof(struct radeon_fbdev), GFP_KERNEL);
	if (!rfbdev)
		return -ENOMEM;

	rfbdev->rdev = rdev;
	rdev->mode_info.rfbdev = rfbdev;
	rfbdev->helper.funcs = &radeon_fb_helper_funcs;

	ret = drm_fb_helper_init(rdev->ddev, &rfbdev->helper,
				 rdev->num_crtc,
				 RADEONFB_CONN_LIMIT);
	if (ret) {
		kfree(rfbdev);
		return ret;
	}

	drm_fb_helper_single_add_all_connectors(&rfbdev->helper);
	drm_fb_helper_initial_config(&rfbdev->helper, bpp_sel);
	return 0;
}

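/* Tear down fbdev emulation at driver unload. */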
void radeon_fbdev_fini(struct radeon_device *rdev)
{
	if (!rdev->mode_info.rfbdev)
		return;

	radeon_fbdev_destroy(rdev->ddev, rdev->mode_info.rfbdev);
	kfree(rdev->mode_info.rfbdev);
	rdev->mode_info.rfbdev = NULL;
}

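/* Propagate suspend/resume state to the fbdev console. */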
void radeon_fbdev_set_suspend(struct radeon_device *rdev, int state)
{
	fb_set_suspend(rdev->mode_info.rfbdev->helper.fbdev, state);
}

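/* Report how much VRAM the fbdev framebuffer object occupies. */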
int radeon_fbdev_total_size(struct radeon_device *rdev)
{
	struct radeon_bo *robj;
	int size = 0;

	robj = gem_to_radeon_bo(rdev->mode_info.rfbdev->rfb.obj);
	size += radeon_bo_size(robj);
	return size;
}

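/* Check whether a buffer object is the one backing the fbdev console. */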
bool radeon_fbdev_robj_is_fb(struct radeon_device *rdev, struct radeon_bo *robj)
{
	if (robj == gem_to_radeon_bo(rdev->mode_info.rfbdev->rfb.obj))
		return true;
	return false;
}