Merge branch 'topic/xilinx' into for-linus
author Vinod Koul <vkoul@kernel.org>
Mon, 31 Dec 2018 14:02:32 +0000 (19:32 +0530)
committer Vinod Koul <vkoul@kernel.org>
Mon, 31 Dec 2018 14:02:32 +0000 (19:32 +0530)
drivers/dma/xilinx/xilinx_dma.c
drivers/dma/xilinx/zynqmp_dma.c

index c1244231259518d08dda4427c977fac160aee76b..02880963092f287d4e7f0113b0d9f6c8500b7cf8 100644
 /* AXI CDMA Specific Masks */
 #define XILINX_CDMA_CR_SGMODE          BIT(3)
 
+#define xilinx_prep_dma_addr_t(addr)   \
+       ((dma_addr_t)((u64)addr##_##msb << 32 | (addr)))
 /**
  * struct xilinx_vdma_desc_hw - Hardware Descriptor
  * @next_desc: Next Descriptor Pointer @0x00
@@ -887,6 +889,24 @@ static int xilinx_dma_alloc_chan_resources(struct dma_chan *dchan)
                                chan->id);
                        return -ENOMEM;
                }
+               /*
+                * For cyclic DMA mode the tail descriptor register must be
+                * programmed with a value that is not part of the BD chain,
+                * so allocate a descriptor segment during channel allocation
+                * for programming the tail descriptor.
+                */
+               chan->cyclic_seg_v = dma_zalloc_coherent(chan->dev,
+                                       sizeof(*chan->cyclic_seg_v),
+                                       &chan->cyclic_seg_p, GFP_KERNEL);
+               if (!chan->cyclic_seg_v) {
+                       dev_err(chan->dev,
+                               "unable to allocate desc segment for cyclic DMA\n");
+                       dma_free_coherent(chan->dev, sizeof(*chan->seg_v) *
+                               XILINX_DMA_NUM_DESCS, chan->seg_v,
+                               chan->seg_p);
+                       return -ENOMEM;
+               }
+               chan->cyclic_seg_v->phys = chan->cyclic_seg_p;
 
                for (i = 0; i < XILINX_DMA_NUM_DESCS; i++) {
                        chan->seg_v[i].hw.next_desc =
@@ -922,24 +942,6 @@ static int xilinx_dma_alloc_chan_resources(struct dma_chan *dchan)
                return -ENOMEM;
        }
 
-       if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
-               /*
-                * For cyclic DMA mode we need to program the tail Descriptor
-                * register with a value which is not a part of the BD chain
-                * so allocating a desc segment during channel allocation for
-                * programming tail descriptor.
-                */
-               chan->cyclic_seg_v = dma_zalloc_coherent(chan->dev,
-                                       sizeof(*chan->cyclic_seg_v),
-                                       &chan->cyclic_seg_p, GFP_KERNEL);
-               if (!chan->cyclic_seg_v) {
-                       dev_err(chan->dev,
-                               "unable to allocate desc segment for cyclic DMA\n");
-                       return -ENOMEM;
-               }
-               chan->cyclic_seg_v->phys = chan->cyclic_seg_p;
-       }
-
        dma_cookie_init(dchan);
 
        if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) {
@@ -1245,8 +1247,10 @@ static void xilinx_cdma_start_transfer(struct xilinx_dma_chan *chan)
 
                hw = &segment->hw;
 
-               xilinx_write(chan, XILINX_CDMA_REG_SRCADDR, hw->src_addr);
-               xilinx_write(chan, XILINX_CDMA_REG_DSTADDR, hw->dest_addr);
+               xilinx_write(chan, XILINX_CDMA_REG_SRCADDR,
+                            xilinx_prep_dma_addr_t(hw->src_addr));
+               xilinx_write(chan, XILINX_CDMA_REG_DSTADDR,
+                            xilinx_prep_dma_addr_t(hw->dest_addr));
 
                /* Start the transfer */
                dma_ctrl_write(chan, XILINX_DMA_REG_BTT,
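
The xilinx_prep_dma_addr_t() helper added above relies on preprocessor token pasting: passing hw->src_addr makes the macro reference the neighbouring hw->src_addr_msb field, and the two 32-bit halves are folded into a single dma_addr_t before being handed to xilinx_write(). Below is a minimal user-space sketch of that expansion; the stub typedefs and the trimmed-down descriptor struct are stand-ins for the kernel definitions, not part of the patch.

#include <stdint.h>
#include <stdio.h>

typedef uint64_t dma_addr_t;	/* stand-in for the kernel typedef */
typedef uint32_t u32;
typedef uint64_t u64;

#define xilinx_prep_dma_addr_t(addr)	\
	((dma_addr_t)((u64)addr##_##msb << 32 | (addr)))

/* Trimmed-down stand-in for struct xilinx_cdma_desc_hw */
struct cdma_desc_hw {
	u32 src_addr;		/* lower 32 bits of the source address */
	u32 src_addr_msb;	/* upper 32 bits of the source address */
};

int main(void)
{
	struct cdma_desc_hw hw = { .src_addr = 0x80001000, .src_addr_msb = 0x2 };

	/* Expands to ((u64)hw.src_addr_msb << 32 | (hw.src_addr)) */
	dma_addr_t full = xilinx_prep_dma_addr_t(hw.src_addr);

	printf("0x%llx\n", (unsigned long long)full);	/* prints 0x280001000 */
	return 0;
}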
index 6f26b59a7216c7f545066ff4c502a6b431a3943d..8db51750ce931731dea6dd6633544e64a074fc38 100644
@@ -163,7 +163,7 @@ struct zynqmp_dma_desc_ll {
        u32 ctrl;
        u64 nxtdscraddr;
        u64 rsvd;
-}; __aligned(64)
+};
 
 /**
  * struct zynqmp_dma_desc_sw - Per Transaction structure
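
In the zynqmp_dma.c hunk, the dropped __aligned(64) sat after the struct's terminating semicolon, so it could not attach to zynqmp_dma_desc_ll as presumably intended; removing the stray attribute simply cleans this up. The sketch below is illustrative only (it defines __aligned locally, mirroring the kernel macro) and shows where the attribute has to sit for it to take effect.

#include <stdio.h>

#define __aligned(x)	__attribute__((aligned(x)))	/* mirrors the kernel macro */

/* Attribute before the semicolon: applies to the struct type. */
struct demo_aligned {
	unsigned long long addr;
	unsigned int size;
} __aligned(64);

/* No attribute: the struct keeps its natural alignment. */
struct demo_plain {
	unsigned long long addr;
	unsigned int size;
};

int main(void)
{
	printf("demo_aligned alignment: %zu\n", _Alignof(struct demo_aligned));
	printf("demo_plain alignment:   %zu\n", _Alignof(struct demo_plain));
	return 0;
}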