/*
 * dma.h - Blackfin DMA defines/structures/etc...
 *
 * Copyright 2004-2008 Analog Devices Inc.
 * Licensed under the GPL-2 or later.
 */

#ifndef _BLACKFIN_DMA_H_
#define _BLACKFIN_DMA_H_

#include <linux/interrupt.h>
#include <mach/dma.h>
#include <asm/atomic.h>
#include <asm/blackfin.h>
#include <asm/page.h>
#include <asm-generic/dma.h>

/* DMA_CONFIG Masks */
#define DMAEN		0x0001	/* DMA Channel Enable */
#define WNR		0x0002	/* Channel Direction (W/R*) */
#define WDSIZE_8	0x0000	/* Transfer Word Size = 8 */
#define WDSIZE_16	0x0004	/* Transfer Word Size = 16 */
#define WDSIZE_32	0x0008	/* Transfer Word Size = 32 */
#define DMA2D		0x0010	/* DMA Mode (2D/1D*) */
#define RESTART		0x0020	/* DMA Buffer Clear */
#define DI_SEL		0x0040	/* Data Interrupt Timing Select */
#define DI_EN		0x0080	/* Data Interrupt Enable */
#define NDSIZE_0	0x0000	/* Next Descriptor Size = 0 (Stop/Autobuffer) */
#define NDSIZE_1	0x0100	/* Next Descriptor Size = 1 */
#define NDSIZE_2	0x0200	/* Next Descriptor Size = 2 */
#define NDSIZE_3	0x0300	/* Next Descriptor Size = 3 */
#define NDSIZE_4	0x0400	/* Next Descriptor Size = 4 */
#define NDSIZE_5	0x0500	/* Next Descriptor Size = 5 */
#define NDSIZE_6	0x0600	/* Next Descriptor Size = 6 */
#define NDSIZE_7	0x0700	/* Next Descriptor Size = 7 */
#define NDSIZE_8	0x0800	/* Next Descriptor Size = 8 */
#define NDSIZE_9	0x0900	/* Next Descriptor Size = 9 */
#define NDSIZE		0x0f00	/* Next Descriptor Size */
#define DMAFLOW		0x7000	/* Flow Control */
#define DMAFLOW_STOP	0x0000	/* Stop Mode */
#define DMAFLOW_AUTO	0x1000	/* Autobuffer Mode */
#define DMAFLOW_ARRAY	0x4000	/* Descriptor Array Mode */
#define DMAFLOW_SMALL	0x6000	/* Small Model Descriptor List Mode */
#define DMAFLOW_LARGE	0x7000	/* Large Model Descriptor List Mode */
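
/*
 * Illustrative sketch (not part of the original header): the masks above are
 * OR'd together to form the value written to a channel's DMA_CONFIG register.
 * A hypothetical 16-bit, 1D, autobuffered transfer with an interrupt per
 * buffer could be described as:
 *
 *	unsigned short cfg = DMAEN | WNR | WDSIZE_16 | DI_EN | DMAFLOW_AUTO;
 *	set_dma_config(channel, cfg);
 *
 * set_bfin_dma_config() below builds the same kind of value from the
 * DATA_SIZE_x / DMA_FLOW_x style encodings.
 */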

/* DMA_IRQ_STATUS Masks */
#define DMA_DONE	0x0001	/* DMA Completion Interrupt Status */
#define DMA_ERR		0x0002	/* DMA Error Interrupt Status */
#define DFETCH		0x0004	/* DMA Descriptor Fetch Indicator */
#define DMA_RUN		0x0008	/* DMA Channel Running Indicator */

/*-------------------------
 * config register field values
 * (arguments to set_bfin_dma_config())
 *-------------------------*/
#define DATA_SIZE_8		0
#define DATA_SIZE_16		1
#define DATA_SIZE_32		2

#define DMA_FLOW_STOP		0
#define DMA_FLOW_AUTO		1
#define DMA_FLOW_ARRAY		4
#define DMA_FLOW_SMALL		6
#define DMA_FLOW_LARGE		7

#define DIMENSION_LINEAR	0
#define DIMENSION_2D		1

#define DIR_READ		0
#define DIR_WRITE		1

#define INTR_DISABLE		0
#define INTR_ON_BUF		2
#define INTR_ON_ROW		3

#define DMA_NOSYNC_KEEP_DMA_BUF	0
#define DMA_SYNC_RESTART	1

struct dmasg {
	void *next_desc_addr;
	unsigned long start_addr;
	unsigned short cfg;
	unsigned short x_count;
	short x_modify;
	unsigned short y_count;
	short y_modify;
} __attribute__((packed));
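
/*
 * Illustrative sketch (not from the original header): drivers typically fill
 * in one struct dmasg per buffer and link the elements through
 * next_desc_addr; the chain is later handed to the controller with
 * set_dma_sg() below.  buf, len and the exact cfg flags are assumptions for
 * the example.
 *
 *	static struct dmasg sg[2];
 *
 *	sg[0].next_desc_addr = &sg[1];
 *	sg[0].start_addr     = (unsigned long)buf;
 *	sg[0].cfg            = DMAEN | WDSIZE_16 | DMAFLOW_LARGE | NDSIZE_9;
 *	sg[0].x_count        = len / 2;
 *	sg[0].x_modify       = 2;
 *
 * sg[1] is filled in the same way for the next buffer; the final element of
 * a chain would normally use DMAFLOW_STOP in its cfg so the channel stops
 * after it completes.
 */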

struct dma_register {
	void *next_desc_ptr;		/* DMA Next Descriptor Pointer register */
	unsigned long start_addr;	/* DMA Start address register */

	unsigned short cfg;		/* DMA Configuration register */
	unsigned short dummy1;		/* padding */

	unsigned long reserved;

	unsigned short x_count;		/* DMA x_count register */
	unsigned short dummy2;

	short x_modify;			/* DMA x_modify register */
	unsigned short dummy3;

	unsigned short y_count;		/* DMA y_count register */
	unsigned short dummy4;

	short y_modify;			/* DMA y_modify register */
	unsigned short dummy5;

	void *curr_desc_ptr;		/* DMA Current Descriptor Pointer register */
	unsigned long curr_addr_ptr;	/* DMA Current Address Pointer register */
	unsigned short irq_status;	/* DMA irq status register */
	unsigned short dummy6;

	unsigned short peripheral_map;	/* DMA peripheral map register */
	unsigned short dummy7;

	unsigned short curr_x_count;	/* DMA Current x-count register */
	unsigned short dummy8;

	unsigned long reserved2;

	unsigned short curr_y_count;	/* DMA Current y-count register */
	unsigned short dummy9;

	unsigned long reserved3;
};

struct dma_channel {
	const char *device_id;
	atomic_t chan_status;
	volatile struct dma_register *regs;
	struct dmasg *sg;		/* large mode descriptor */
	unsigned int irq;
	void *data;
#ifdef CONFIG_PM
	unsigned short saved_peripheral_map;
#endif
};

#ifdef CONFIG_PM
int blackfin_dma_suspend(void);
void blackfin_dma_resume(void);
#endif

/*******************************************************************************
* DMA APIs
*******************************************************************************/
extern struct dma_channel dma_ch[MAX_DMA_CHANNELS];
extern struct dma_register *dma_io_base_addr[MAX_DMA_CHANNELS];
extern int channel2irq(unsigned int channel);

static inline void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
	dma_ch[channel].regs->start_addr = addr;
}
static inline void set_dma_next_desc_addr(unsigned int channel, void *addr)
{
	dma_ch[channel].regs->next_desc_ptr = addr;
}
static inline void set_dma_curr_desc_addr(unsigned int channel, void *addr)
{
	dma_ch[channel].regs->curr_desc_ptr = addr;
}
static inline void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
	dma_ch[channel].regs->x_count = x_count;
}
static inline void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
	dma_ch[channel].regs->y_count = y_count;
}
static inline void set_dma_x_modify(unsigned int channel, short x_modify)
{
	dma_ch[channel].regs->x_modify = x_modify;
}
static inline void set_dma_y_modify(unsigned int channel, short y_modify)
{
	dma_ch[channel].regs->y_modify = y_modify;
}
static inline void set_dma_config(unsigned int channel, unsigned short config)
{
	dma_ch[channel].regs->cfg = config;
}
static inline void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
	dma_ch[channel].regs->curr_addr_ptr = addr;
}

static inline unsigned short
set_bfin_dma_config(char direction, char flow_mode,
		    char intr_mode, char dma_mode, char width, char syncmode)
{
	return (direction << 1) | (width << 2) | (dma_mode << 4) |
		(intr_mode << 6) | (flow_mode << 12) | (syncmode << 5);
}
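
/*
 * Illustrative sketch (not from the original header): set_bfin_dma_config()
 * packs its arguments into the DMA_CONFIG bit positions (direction -> WNR,
 * width -> WDSIZE, dma_mode -> DMA2D, syncmode -> RESTART, intr_mode ->
 * DI_SEL/DI_EN, flow_mode -> DMAFLOW); DMAEN itself is set later by
 * enable_dma().  A hypothetical linear, 16-bit, stop-mode configuration with
 * an interrupt at the end of the buffer:
 *
 *	set_dma_config(channel,
 *		set_bfin_dma_config(DIR_WRITE, DMA_FLOW_STOP, INTR_ON_BUF,
 *				    DIMENSION_LINEAR, DATA_SIZE_16,
 *				    DMA_SYNC_RESTART));
 */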

static inline unsigned short get_dma_curr_irqstat(unsigned int channel)
{
	return dma_ch[channel].regs->irq_status;
}
static inline unsigned short get_dma_curr_xcount(unsigned int channel)
{
	return dma_ch[channel].regs->curr_x_count;
}
static inline unsigned short get_dma_curr_ycount(unsigned int channel)
{
	return dma_ch[channel].regs->curr_y_count;
}
static inline void *get_dma_next_desc_ptr(unsigned int channel)
{
	return dma_ch[channel].regs->next_desc_ptr;
}
static inline void *get_dma_curr_desc_ptr(unsigned int channel)
{
	return dma_ch[channel].regs->curr_desc_ptr;
}
static inline unsigned short get_dma_config(unsigned int channel)
{
	return dma_ch[channel].regs->cfg;
}
static inline unsigned long get_dma_curr_addr(unsigned int channel)
{
	return dma_ch[channel].regs->curr_addr_ptr;
}

static inline void set_dma_sg(unsigned int channel, struct dmasg *sg, int ndsize)
{
	/* Make sure the internal data buffers in the core are drained
	 * so that the DMA descriptors are completely written when the
	 * DMA engine goes to fetch them below.
	 */
	SSYNC();

	dma_ch[channel].regs->next_desc_ptr = sg;
	dma_ch[channel].regs->cfg =
		(dma_ch[channel].regs->cfg & ~NDSIZE) |
		((ndsize << 8) & NDSIZE);
}
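
/*
 * Illustrative sketch (not from the original header): handing a descriptor
 * chain such as the one sketched after struct dmasg above to the hardware.
 * The channel and the element count of 9 (a full large-model descriptor)
 * are assumptions for the example.
 *
 *	set_dma_sg(channel, sg, 9);
 *	enable_dma(channel);
 */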

static inline int dma_channel_active(unsigned int channel)
{
	return atomic_read(&dma_ch[channel].chan_status);
}

static inline void disable_dma(unsigned int channel)
{
	dma_ch[channel].regs->cfg &= ~DMAEN;
	SSYNC();
}
static inline void enable_dma(unsigned int channel)
{
	dma_ch[channel].regs->curr_x_count = 0;
	dma_ch[channel].regs->curr_y_count = 0;
	dma_ch[channel].regs->cfg |= DMAEN;
}
int set_dma_callback(unsigned int channel, irq_handler_t callback, void *data);

static inline void dma_disable_irq(unsigned int channel)
{
	disable_irq(dma_ch[channel].irq);
}
static inline void dma_disable_irq_nosync(unsigned int channel)
{
	disable_irq_nosync(dma_ch[channel].irq);
}
static inline void dma_enable_irq(unsigned int channel)
{
	enable_irq(dma_ch[channel].irq);
}
static inline void clear_dma_irqstat(unsigned int channel)
{
	dma_ch[channel].regs->irq_status = DMA_DONE | DMA_ERR;
}
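
/*
 * Illustrative sketch (not from the original header): a typical one-shot
 * transfer built only from the helpers declared in this file.  The channel,
 * buffer, length and handler names are assumptions, and the channel is
 * assumed to have been claimed through the usual request path beforehand.
 *
 *	set_dma_callback(channel, my_dma_handler, dev);
 *	set_dma_start_addr(channel, (unsigned long)buf);
 *	set_dma_x_count(channel, len / 2);
 *	set_dma_x_modify(channel, 2);
 *	set_dma_config(channel, cfg);	(cfg built as in the sketches above)
 *	enable_dma(channel);
 *
 * In the handler, once DMA_DONE shows up in get_dma_curr_irqstat():
 *
 *	clear_dma_irqstat(channel);
 *	disable_dma(channel);
 */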

void *dma_memcpy(void *dest, const void *src, size_t count);
void *safe_dma_memcpy(void *dest, const void *src, size_t count);
void blackfin_dma_early_init(void);
void early_dma_memcpy(void *dest, const void *src, size_t count);
void early_dma_memcpy_done(void);

#endif