dmaengine: Pass flags via device_prep_dma_cyclic() callback
Change the parameter list of device_prep_dma_cyclic() so that DMA drivers can receive the flags coming from clients. This can be used during audio operation, for example, to disable all audio-related interrupts when DMA_PREP_INTERRUPT is cleared from the flags.

Signed-off-by: Peter Ujfalusi <peter.ujfalusi@ti.com>
Acked-by: Nicolas Ferre <nicolas.ferre@atmel.com>
Acked-by: Shawn Guo <shawn.guo@linaro.org>
Acked-by: Vinod Koul <vinod.koul@linux.intel.com>
Signed-off-by: Mark Brown <broonie@opensource.wolfsonmicro.com>
commit ec8b5e48c0
parent 2dcdf57093
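For context, here is a minimal, hypothetical client-side sketch (not part of this patch) of how a cyclic transfer might be prepared once drivers honour the new argument. The helper audio_prep_cyclic() and its parameters are made up for illustration; clearing DMA_PREP_INTERRUPT from the flags is the audio use case the commit message refers to.

#include <linux/dmaengine.h>

/*
 * Hypothetical client of the updated callback: prepare a cyclic (ring
 * buffer) transfer for audio playback and only request a per-period
 * interrupt when the caller actually needs one.
 */
static struct dma_async_tx_descriptor *
audio_prep_cyclic(struct dma_chan *chan, dma_addr_t buf, size_t buf_len,
		  size_t period_len, bool want_period_irq)
{
	unsigned long flags = want_period_irq ? DMA_PREP_INTERRUPT : 0;

	/* New prototype: flags now precedes the (still ignored) context. */
	return chan->device->device_prep_dma_cyclic(chan, buf, buf_len,
						    period_len, DMA_MEM_TO_DEV,
						    flags, NULL);
}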
@@ -841,12 +841,13 @@ atc_dma_cyclic_fill_desc(struct dma_chan *chan, struct at_desc *desc,
  * @buf_len: total number of bytes for the entire buffer
  * @period_len: number of bytes for each period
  * @direction: transfer direction, to or from device
+ * @flags: tx descriptor status flags
  * @context: transfer context (ignored)
  */
 static struct dma_async_tx_descriptor *
 atc_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
 		size_t period_len, enum dma_transfer_direction direction,
-		void *context)
+		unsigned long flags, void *context)
 {
 	struct at_dma_chan *atchan = to_at_dma_chan(chan);
 	struct at_dma_slave *atslave = chan->private;
@@ -1120,6 +1120,7 @@ fail:
  * @buf_len: length of the buffer (in bytes)
  * @period_len: lenght of a single period
  * @dir: direction of the operation
+ * @flags: tx descriptor status flags
  * @context: operation context (ignored)
  *
  * Prepares a descriptor for cyclic DMA operation. This means that once the
@@ -1133,7 +1134,8 @@ fail:
 static struct dma_async_tx_descriptor *
 ep93xx_dma_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr,
 			   size_t buf_len, size_t period_len,
-			   enum dma_transfer_direction dir, void *context)
+			   enum dma_transfer_direction dir, unsigned long flags,
+			   void *context)
 {
 	struct ep93xx_dma_chan *edmac = to_ep93xx_dma_chan(chan);
 	struct ep93xx_dma_desc *desc, *first;
@@ -801,7 +801,7 @@ static struct dma_async_tx_descriptor *imxdma_prep_slave_sg(
 static struct dma_async_tx_descriptor *imxdma_prep_dma_cyclic(
 		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
 		size_t period_len, enum dma_transfer_direction direction,
-		void *context)
+		unsigned long flags, void *context)
 {
 	struct imxdma_channel *imxdmac = to_imxdma_chan(chan);
 	struct imxdma_engine *imxdma = imxdmac->imxdma;
@@ -1012,7 +1012,7 @@ err_out:
 static struct dma_async_tx_descriptor *sdma_prep_dma_cyclic(
 		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
 		size_t period_len, enum dma_transfer_direction direction,
-		void *context)
+		unsigned long flags, void *context)
 {
 	struct sdma_channel *sdmac = to_sdma_chan(chan);
 	struct sdma_engine *sdma = sdmac->sdma;
@@ -358,7 +358,7 @@ struct mmp_tdma_desc *mmp_tdma_alloc_descriptor(struct mmp_tdma_chan *tdmac)
 static struct dma_async_tx_descriptor *mmp_tdma_prep_dma_cyclic(
 		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
 		size_t period_len, enum dma_transfer_direction direction,
-		void *context)
+		unsigned long flags, void *context)
 {
 	struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan);
 	struct mmp_tdma_desc *desc;
@@ -531,7 +531,7 @@ err_out:
 static struct dma_async_tx_descriptor *mxs_dma_prep_dma_cyclic(
 		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
 		size_t period_len, enum dma_transfer_direction direction,
-		void *context)
+		unsigned long flags, void *context)
 {
 	struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan);
 	struct mxs_dma_engine *mxs_dma = mxs_chan->mxs_dma;
@@ -366,7 +366,8 @@ static struct dma_async_tx_descriptor *omap_dma_prep_slave_sg(
 
 static struct dma_async_tx_descriptor *omap_dma_prep_dma_cyclic(
 	struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
-	size_t period_len, enum dma_transfer_direction dir, void *context)
+	size_t period_len, enum dma_transfer_direction dir, unsigned long flags,
+	void *context)
 {
 	struct omap_chan *c = to_omap_dma_chan(chan);
 	enum dma_slave_buswidth dev_width;
@@ -2683,7 +2683,7 @@ static inline int get_burst_len(struct dma_pl330_desc *desc, size_t len)
 static struct dma_async_tx_descriptor *pl330_prep_dma_cyclic(
 	struct dma_chan *chan, dma_addr_t dma_addr, size_t len,
 	size_t period_len, enum dma_transfer_direction direction,
-	void *context)
+	unsigned long flags, void *context)
 {
 	struct dma_pl330_desc *desc;
 	struct dma_pl330_chan *pch = to_pchan(chan);
@@ -614,7 +614,7 @@ static struct dma_async_tx_descriptor *sa11x0_dma_prep_slave_sg(
 
 static struct dma_async_tx_descriptor *sa11x0_dma_prep_dma_cyclic(
 	struct dma_chan *chan, dma_addr_t addr, size_t size, size_t period,
-	enum dma_transfer_direction dir, void *context)
+	enum dma_transfer_direction dir, unsigned long flags, void *context)
 {
 	struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan);
 	struct sa11x0_dma_desc *txd;
@@ -489,7 +489,7 @@ err_dir:
 static struct dma_async_tx_descriptor *
 sirfsoc_dma_prep_cyclic(struct dma_chan *chan, dma_addr_t addr,
 	size_t buf_len, size_t period_len,
-	enum dma_transfer_direction direction, void *context)
+	enum dma_transfer_direction direction, unsigned long flags, void *context)
 {
 	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
 	struct sirfsoc_dma_desc *sdesc = NULL;
@@ -2347,7 +2347,8 @@ static struct dma_async_tx_descriptor *d40_prep_slave_sg(struct dma_chan *chan,
 static struct dma_async_tx_descriptor *
 dma40_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr,
 		     size_t buf_len, size_t period_len,
-		     enum dma_transfer_direction direction, void *context)
+		     enum dma_transfer_direction direction, unsigned long flags,
+		     void *context)
 {
 	unsigned int periods = buf_len / period_len;
 	struct dma_async_tx_descriptor *txd;
@@ -990,7 +990,7 @@ static struct dma_async_tx_descriptor *tegra_dma_prep_slave_sg(
 struct dma_async_tx_descriptor *tegra_dma_prep_dma_cyclic(
 	struct dma_chan *dc, dma_addr_t buf_addr, size_t buf_len,
 	size_t period_len, enum dma_transfer_direction direction,
-	void *context)
+	unsigned long flags, void *context)
 {
 	struct tegra_dma_channel *tdc = to_tegra_dma_chan(dc);
 	struct tegra_dma_desc *dma_desc = NULL;
@@ -591,7 +591,7 @@ struct dma_device {
 	struct dma_async_tx_descriptor *(*device_prep_dma_cyclic)(
 		struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
 		size_t period_len, enum dma_transfer_direction direction,
-		void *context);
+		unsigned long flags, void *context);
 	struct dma_async_tx_descriptor *(*device_prep_interleaved_dma)(
 		struct dma_chan *chan, struct dma_interleaved_template *xt,
 		unsigned long flags);
@@ -656,7 +656,7 @@ static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_cyclic(
 		size_t period_len, enum dma_transfer_direction dir)
 {
 	return chan->device->device_prep_dma_cyclic(chan, buf_addr, buf_len,
-						period_len, dir, NULL);
+						period_len, dir, flags, NULL);
 }
 
 static inline int dmaengine_terminate_all(struct dma_chan *chan)
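As a complement, a minimal driver-side sketch (not taken from this patch) of how a prep_dma_cyclic implementation could consume the new flags argument. The names foo_cyclic_desc and foo_prep_dma_cyclic, and the omitted hardware programming, are assumptions for illustration only.

#include <linux/dmaengine.h>
#include <linux/slab.h>

/* Made-up descriptor type carrying the per-period interrupt decision. */
struct foo_cyclic_desc {
	struct dma_async_tx_descriptor txd;
	bool period_irq;
};

static struct dma_async_tx_descriptor *
foo_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
		    size_t period_len, enum dma_transfer_direction direction,
		    unsigned long flags, void *context)
{
	struct foo_cyclic_desc *d = kzalloc(sizeof(*d), GFP_NOWAIT);

	if (!d)
		return NULL;

	dma_async_tx_descriptor_init(&d->txd, chan);

	/* Only arm the per-period interrupt when the client asked for it. */
	d->period_irq = !!(flags & DMA_PREP_INTERRUPT);

	/*
	 * Programming buf_addr/buf_len/period_len/direction into the
	 * controller is hardware-specific and omitted here.
	 */
	return &d->txd;
}

With the check kept inside the prep callback, an audio client can suppress period interrupts simply by not setting DMA_PREP_INTERRUPT, without any driver-private API.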