dmaengine: add context parameter to prep_slave_sg and prep_dma_cyclic
Add context parameter to device_prep_slave_sg() and device_prep_dma_cyclic()
interfaces to allow passing client/target specific information associated
with the data transfer. Modify all affected DMA engine drivers.

Signed-off-by: Alexandre Bounine <alexandre.bounine@idt.com>
Acked-by: Linus Walleij <linus.walleij@linaro.org>
Acked-by: Felipe Balbi <balbi@ti.com>
Signed-off-by: Vinod Koul <vinod.koul@linux.intel.com>
commit 185ecb5f4f
parent 16052827d9
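Before the diff itself, a minimal client-side sketch of the widened interface. Everything here is an illustrative assumption, not code from this commit: struct my_rio_xfer_ctx, my_issue_write() and its field names are made up. It only shows how a client with target-specific information hands its own object to the channel's prep callback through the new context argument; clients that go through the inline dmaengine helpers are unaffected, since those now pass NULL (see the struct dma_device and inline-helper hunks at the end of the diff).

/*
 * Hypothetical client sketch (not part of this commit). The sixth argument
 * of device_prep_slave_sg() is the new per-transfer context pointer.
 */
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

struct my_rio_xfer_ctx {
	u16 destid;	/* example: target device ID */
	u64 rio_addr;	/* example: address on the target side */
};

static int my_issue_write(struct dma_chan *chan, struct scatterlist *sgl,
			  unsigned int sg_len, struct my_rio_xfer_ctx *ctx)
{
	struct dma_async_tx_descriptor *txd;

	/* Pass client/target specific data via the new context argument. */
	txd = chan->device->device_prep_slave_sg(chan, sgl, sg_len,
						 DMA_MEM_TO_DEV,
						 DMA_PREP_INTERRUPT, ctx);
	if (!txd)
		return -EIO;

	dmaengine_submit(txd);
	dma_async_issue_pending(chan);
	return 0;
}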
@@ -1313,7 +1313,7 @@ static struct dma_async_tx_descriptor *pl08x_prep_dma_memcpy(
 static struct dma_async_tx_descriptor *pl08x_prep_slave_sg(
 		struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_transfer_direction direction,
-		unsigned long flags)
+		unsigned long flags, void *context)
 {
 	struct pl08x_dma_chan *plchan = to_pl08x_chan(chan);
 	struct pl08x_driver_data *pl08x = plchan->host;
@@ -639,11 +639,12 @@ err_desc_get:
  * @sg_len: number of entries in @scatterlist
  * @direction: DMA direction
  * @flags: tx descriptor status flags
+ * @context: transaction context (ignored)
  */
 static struct dma_async_tx_descriptor *
 atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_transfer_direction direction,
-		unsigned long flags)
+		unsigned long flags, void *context)
 {
 	struct at_dma_chan *atchan = to_at_dma_chan(chan);
 	struct at_dma_slave *atslave = chan->private;
@@ -840,10 +841,12 @@ atc_dma_cyclic_fill_desc(struct at_dma_slave *atslave, struct at_desc *desc,
  * @buf_len: total number of bytes for the entire buffer
  * @period_len: number of bytes for each period
  * @direction: transfer direction, to or from device
+ * @context: transfer context (ignored)
  */
 static struct dma_async_tx_descriptor *
 atc_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
-		size_t period_len, enum dma_transfer_direction direction)
+		size_t period_len, enum dma_transfer_direction direction,
+		void *context)
 {
 	struct at_dma_chan *atchan = to_at_dma_chan(chan);
 	struct at_dma_slave *atslave = chan->private;
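The driver hunks that follow repeat the same mechanical pattern, so a single hedged skeleton is enough to summarise it; my_prep_slave_sg() and its body are illustrative placeholders, not taken from any driver touched here. Each callback simply grows the trailing void *context parameter, and drivers that have no use for it accept it and leave it untouched, which is why the kerneldoc updates mark it "(ignored)".

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/*
 * Illustrative skeleton only: shows the widened prototype. A real driver
 * keeps its existing descriptor-building code; nothing else changes when
 * the context argument is unused.
 */
static struct dma_async_tx_descriptor *my_prep_slave_sg(
		struct dma_chan *chan, struct scatterlist *sgl,
		unsigned int sg_len, enum dma_transfer_direction direction,
		unsigned long flags, void *context)
{
	/* context is accepted to match the callback type but unused here */
	if (!sgl || !sg_len)
		return NULL;
	if (direction != DMA_MEM_TO_DEV && direction != DMA_DEV_TO_MEM)
		return NULL;

	/* ... build and return the descriptor exactly as before ... */
	return NULL;
}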
@@ -1021,7 +1021,7 @@ coh901318_prep_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
 static struct dma_async_tx_descriptor *
 coh901318_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_transfer_direction direction,
-		unsigned long flags)
+		unsigned long flags, void *context)
 {
 	struct coh901318_chan *cohc = to_coh901318_chan(chan);
 	struct coh901318_lli *lli;
@@ -704,7 +704,7 @@ err_desc_get:
 static struct dma_async_tx_descriptor *
 dwc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_transfer_direction direction,
-		unsigned long flags)
+		unsigned long flags, void *context)
 {
 	struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
 	struct dw_dma_slave *dws = chan->private;
@@ -975,13 +975,14 @@ fail:
  * @sg_len: number of entries in @sgl
  * @dir: direction of tha DMA transfer
  * @flags: flags for the descriptor
+ * @context: operation context (ignored)
  *
  * Returns a valid DMA descriptor or %NULL in case of failure.
  */
 static struct dma_async_tx_descriptor *
 ep93xx_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_transfer_direction dir,
-		unsigned long flags)
+		unsigned long flags, void *context)
 {
 	struct ep93xx_dma_chan *edmac = to_ep93xx_dma_chan(chan);
 	struct ep93xx_dma_desc *desc, *first;
@@ -1048,6 +1049,7 @@ fail:
  * @buf_len: length of the buffer (in bytes)
  * @period_len: lenght of a single period
  * @dir: direction of the operation
+ * @context: operation context (ignored)
  *
  * Prepares a descriptor for cyclic DMA operation. This means that once the
  * descriptor is submitted, we will be submitting in a @period_len sized
@@ -1060,7 +1062,7 @@ fail:
 static struct dma_async_tx_descriptor *
 ep93xx_dma_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr,
 		size_t buf_len, size_t period_len,
-		enum dma_transfer_direction dir)
+		enum dma_transfer_direction dir, void *context)
 {
 	struct ep93xx_dma_chan *edmac = to_ep93xx_dma_chan(chan);
 	struct ep93xx_dma_desc *desc, *first;
@@ -759,6 +759,7 @@ fail:
  * @sg_len: number of entries in @scatterlist
  * @direction: DMA direction
  * @flags: DMAEngine flags
+ * @context: transaction context (ignored)
  *
  * Prepare a set of descriptors for a DMA_SLAVE transaction. Following the
  * DMA_SLAVE API, this gets the device-specific information from the
@@ -766,7 +767,8 @@ fail:
  */
 static struct dma_async_tx_descriptor *fsl_dma_prep_slave_sg(
 	struct dma_chan *dchan, struct scatterlist *sgl, unsigned int sg_len,
-	enum dma_transfer_direction direction, unsigned long flags)
+	enum dma_transfer_direction direction, unsigned long flags,
+	void *context)
 {
 	/*
 	 * This operation is not supported on the Freescale DMA controller
@@ -354,7 +354,7 @@ static void imxdma_free_chan_resources(struct dma_chan *chan)
 static struct dma_async_tx_descriptor *imxdma_prep_slave_sg(
 		struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_transfer_direction direction,
-		unsigned long flags)
+		unsigned long flags, void *context)
 {
 	struct imxdma_channel *imxdmac = to_imxdma_chan(chan);
 	struct scatterlist *sg;
@@ -405,7 +405,8 @@ static struct dma_async_tx_descriptor *imxdma_prep_slave_sg(
 
 static struct dma_async_tx_descriptor *imxdma_prep_dma_cyclic(
 		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
-		size_t period_len, enum dma_transfer_direction direction)
+		size_t period_len, enum dma_transfer_direction direction,
+		void *context)
 {
 	struct imxdma_channel *imxdmac = to_imxdma_chan(chan);
 	struct imxdma_engine *imxdma = imxdmac->imxdma;
@@ -903,7 +903,7 @@ static void sdma_free_chan_resources(struct dma_chan *chan)
 static struct dma_async_tx_descriptor *sdma_prep_slave_sg(
 		struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_transfer_direction direction,
-		unsigned long flags)
+		unsigned long flags, void *context)
 {
 	struct sdma_channel *sdmac = to_sdma_chan(chan);
 	struct sdma_engine *sdma = sdmac->sdma;
@@ -1001,7 +1001,8 @@ err_out:
 
 static struct dma_async_tx_descriptor *sdma_prep_dma_cyclic(
 		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
-		size_t period_len, enum dma_transfer_direction direction)
+		size_t period_len, enum dma_transfer_direction direction,
+		void *context)
 {
 	struct sdma_channel *sdmac = to_sdma_chan(chan);
 	struct sdma_engine *sdma = sdmac->sdma;
@@ -714,13 +714,14 @@ err_desc_get:
  * @sg_len: length of sg txn
  * @direction: DMA transfer dirtn
  * @flags: DMA flags
+ * @context: transfer context (ignored)
  *
  * Prepares LLI based periphral transfer
  */
 static struct dma_async_tx_descriptor *intel_mid_dma_prep_slave_sg(
 		struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_transfer_direction direction,
-		unsigned long flags)
+		unsigned long flags, void *context)
 {
 	struct intel_mid_dma_chan *midc = NULL;
 	struct intel_mid_dma_slave *mids = NULL;
@@ -1335,7 +1335,8 @@ static void ipu_gc_tasklet(unsigned long arg)
 /* Allocate and initialise a transfer descriptor. */
 static struct dma_async_tx_descriptor *idmac_prep_slave_sg(struct dma_chan *chan,
 		struct scatterlist *sgl, unsigned int sg_len,
-		enum dma_transfer_direction direction, unsigned long tx_flags)
+		enum dma_transfer_direction direction, unsigned long tx_flags,
+		void *context)
 {
 	struct idmac_channel *ichan = to_idmac_chan(chan);
 	struct idmac_tx_desc *desc = NULL;
@@ -340,7 +340,7 @@ static void mxs_dma_free_chan_resources(struct dma_chan *chan)
 static struct dma_async_tx_descriptor *mxs_dma_prep_slave_sg(
 		struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_transfer_direction direction,
-		unsigned long append)
+		unsigned long append, void *context)
 {
 	struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan);
 	struct mxs_dma_engine *mxs_dma = mxs_chan->mxs_dma;
@@ -435,7 +435,8 @@ err_out:
 
 static struct dma_async_tx_descriptor *mxs_dma_prep_dma_cyclic(
 		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
-		size_t period_len, enum dma_transfer_direction direction)
+		size_t period_len, enum dma_transfer_direction direction,
+		void *context)
 {
 	struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan);
 	struct mxs_dma_engine *mxs_dma = mxs_chan->mxs_dma;
@@ -587,7 +587,8 @@ static void pd_issue_pending(struct dma_chan *chan)
 
 static struct dma_async_tx_descriptor *pd_prep_slave_sg(struct dma_chan *chan,
 		struct scatterlist *sgl, unsigned int sg_len,
-		enum dma_transfer_direction direction, unsigned long flags)
+		enum dma_transfer_direction direction, unsigned long flags,
+		void *context)
 {
 	struct pch_dma_chan *pd_chan = to_pd_chan(chan);
 	struct pch_dma_slave *pd_slave = chan->private;
@@ -2685,7 +2685,8 @@ static inline int get_burst_len(struct dma_pl330_desc *desc, size_t len)
 
 static struct dma_async_tx_descriptor *pl330_prep_dma_cyclic(
 	struct dma_chan *chan, dma_addr_t dma_addr, size_t len,
-	size_t period_len, enum dma_transfer_direction direction)
+	size_t period_len, enum dma_transfer_direction direction,
+	void *context)
 {
 	struct dma_pl330_desc *desc;
 	struct dma_pl330_chan *pch = to_pchan(chan);
@@ -2775,7 +2776,7 @@ pl330_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dst,
 static struct dma_async_tx_descriptor *
 pl330_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_transfer_direction direction,
-		unsigned long flg)
+		unsigned long flg, void *context)
 {
 	struct dma_pl330_desc *first, *desc = NULL;
 	struct dma_pl330_chan *pch = to_pchan(chan);
@@ -669,7 +669,8 @@ static struct dma_async_tx_descriptor *sh_dmae_prep_memcpy(
 
 static struct dma_async_tx_descriptor *sh_dmae_prep_slave_sg(
 	struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len,
-	enum dma_transfer_direction direction, unsigned long flags)
+	enum dma_transfer_direction direction, unsigned long flags,
+	void *context)
 {
 	struct sh_dmae_slave *param;
 	struct sh_dmae_chan *sh_chan;
@@ -489,7 +489,7 @@ err_dir:
 static struct dma_async_tx_descriptor *
 sirfsoc_dma_prep_cyclic(struct dma_chan *chan, dma_addr_t addr,
 	size_t buf_len, size_t period_len,
-	enum dma_transfer_direction direction)
+	enum dma_transfer_direction direction, void *context)
 {
 	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
 	struct sirfsoc_dma_desc *sdesc = NULL;
@@ -2289,7 +2289,8 @@ static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan,
 		struct scatterlist *sgl,
 		unsigned int sg_len,
 		enum dma_transfer_direction direction,
-		unsigned long dma_flags)
+		unsigned long dma_flags,
+		void *context)
 {
 	if (direction != DMA_DEV_TO_MEM && direction != DMA_MEM_TO_DEV)
 		return NULL;
@@ -2300,7 +2301,7 @@ static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan,
 static struct dma_async_tx_descriptor *
 dma40_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr,
 		size_t buf_len, size_t period_len,
-		enum dma_transfer_direction direction)
+		enum dma_transfer_direction direction, void *context)
 {
 	unsigned int periods = buf_len / period_len;
 	struct dma_async_tx_descriptor *txd;
@@ -542,7 +542,8 @@ static void td_issue_pending(struct dma_chan *chan)
 
 static struct dma_async_tx_descriptor *td_prep_slave_sg(struct dma_chan *chan,
 	struct scatterlist *sgl, unsigned int sg_len,
-	enum dma_transfer_direction direction, unsigned long flags)
+	enum dma_transfer_direction direction, unsigned long flags,
+	void *context)
 {
 	struct timb_dma_chan *td_chan =
 		container_of(chan, struct timb_dma_chan, chan);
@@ -833,7 +833,7 @@ txx9dmac_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
 static struct dma_async_tx_descriptor *
 txx9dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_transfer_direction direction,
-		unsigned long flags)
+		unsigned long flags, void *context)
 {
 	struct txx9dmac_chan *dc = to_txx9dmac_chan(chan);
 	struct txx9dmac_dev *ddev = dc->ddev;
@@ -582,10 +582,11 @@ struct dma_device {
 	struct dma_async_tx_descriptor *(*device_prep_slave_sg)(
 		struct dma_chan *chan, struct scatterlist *sgl,
 		unsigned int sg_len, enum dma_transfer_direction direction,
-		unsigned long flags);
+		unsigned long flags, void *context);
 	struct dma_async_tx_descriptor *(*device_prep_dma_cyclic)(
 		struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
-		size_t period_len, enum dma_transfer_direction direction);
+		size_t period_len, enum dma_transfer_direction direction,
+		void *context);
 	struct dma_async_tx_descriptor *(*device_prep_interleaved_dma)(
 		struct dma_chan *chan, struct dma_interleaved_template *xt,
 		unsigned long flags);
@@ -619,7 +620,8 @@ static inline struct dma_async_tx_descriptor *dmaengine_prep_slave_single(
 	struct scatterlist sg;
 	sg_init_one(&sg, buf, len);
 
-	return chan->device->device_prep_slave_sg(chan, &sg, 1, dir, flags);
+	return chan->device->device_prep_slave_sg(chan, &sg, 1,
+						  dir, flags, NULL);
 }
 
 static inline struct dma_async_tx_descriptor *dmaengine_prep_slave_sg(
@@ -627,7 +629,7 @@ static inline struct dma_async_tx_descriptor *dmaengine_prep_slave_sg(
 	enum dma_transfer_direction dir, unsigned long flags)
 {
 	return chan->device->device_prep_slave_sg(chan, sgl, sg_len,
-						  dir, flags);
+						  dir, flags, NULL);
 }
 
 static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_cyclic(
@@ -635,7 +637,7 @@ static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_cyclic(
 	size_t period_len, enum dma_transfer_direction dir)
 {
 	return chan->device->device_prep_dma_cyclic(chan, buf_addr, buf_len,
-						     period_len, dir);
+						     period_len, dir, NULL);
 }
 
 static inline int dmaengine_terminate_all(struct dma_chan *chan)
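Existing slave clients that go through the inline wrappers shown above need no changes, because the wrappers now supply NULL for the new argument themselves. A small hedged example of such an unchanged caller follows; my_prep_rx() is a made-up name used purely for illustration.

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/*
 * Hypothetical caller, unchanged by this commit: the inline helper still
 * takes the same five arguments and internally passes context = NULL.
 */
static struct dma_async_tx_descriptor *
my_prep_rx(struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len)
{
	return dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_DEV_TO_MEM,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
}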