Path: sound/soc/blackfin/bf5xx-sport.c
/*
 * File:         bf5xx_sport.c
 * Based on:
 * Author:       Roy Huang <[email protected]>
 *
 * Created:      Tue Sep 21 10:52:42 CEST 2004
 * Description:
 *               Blackfin SPORT Driver
 *
 * Copyright 2004-2007 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/gpio.h>
#include <linux/bug.h>
#include <asm/portmux.h>
#include <asm/dma.h>
#include <asm/blackfin.h>
#include <asm/cacheflush.h>

#include "bf5xx-sport.h"

/* delay between frame sync pulse and first data bit in multichannel mode */
#define FRAME_DELAY (1<<12)

/* note: multichannel is enabled in units of 8 channels,
 * but tdm_count is the number of channels, NOT channels/8 */
int sport_set_multichannel(struct sport_device *sport,
		int tdm_count, u32 mask, int packed)
{
	pr_debug("%s tdm_count=%d mask:0x%08x packed=%d\n", __func__,
			tdm_count, mask, packed);

	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	if (tdm_count & 0x7)
		return -EINVAL;

	if (tdm_count > 32)
		return -EINVAL; /* Only up to 32 channels are supported now */

	if (tdm_count) {
		sport->regs->mcmc1 = ((tdm_count>>3)-1) << 12;
		sport->regs->mcmc2 = FRAME_DELAY | MCMEN | \
				(packed ? (MCDTXPE|MCDRXPE) : 0);

		sport->regs->mtcs0 = mask;
		sport->regs->mrcs0 = mask;
		sport->regs->mtcs1 = 0;
		sport->regs->mrcs1 = 0;
		sport->regs->mtcs2 = 0;
		sport->regs->mrcs2 = 0;
		sport->regs->mtcs3 = 0;
		sport->regs->mrcs3 = 0;
	} else {
		sport->regs->mcmc1 = 0;
		sport->regs->mcmc2 = 0;

		sport->regs->mtcs0 = 0;
		sport->regs->mrcs0 = 0;
	}

	sport->regs->mtcs1 = 0; sport->regs->mtcs2 = 0; sport->regs->mtcs3 = 0;
	sport->regs->mrcs1 = 0; sport->regs->mrcs2 = 0; sport->regs->mrcs3 = 0;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_set_multichannel);
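
/*
 * Illustrative sketch (not part of the original driver): how a codec/machine
 * driver might enable an 8-slot TDM frame on slots 0-7 with packed DMA
 * buffers before the SPORT is started.  sport_handle is assumed to be the
 * struct sport_device * returned by sport_init().
 *
 *	u32 slot_mask = 0x000000ff;		// use timeslots 0..7
 *	int err = sport_set_multichannel(sport_handle, 8, slot_mask, 1);
 *	if (err)				// -EBUSY if the SPORT is already
 *		return err;			// enabled, -EINVAL for a bad count
 */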

int sport_config_rx(struct sport_device *sport, unsigned int rcr1,
		unsigned int rcr2, unsigned int clkdiv, unsigned int fsdiv)
{
	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	sport->regs->rcr1 = rcr1;
	sport->regs->rcr2 = rcr2;
	sport->regs->rclkdiv = clkdiv;
	sport->regs->rfsdiv = fsdiv;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_config_rx);

int sport_config_tx(struct sport_device *sport, unsigned int tcr1,
		unsigned int tcr2, unsigned int clkdiv, unsigned int fsdiv)
{
	if ((sport->regs->tcr1 & TSPEN) || (sport->regs->rcr1 & RSPEN))
		return -EBUSY;

	sport->regs->tcr1 = tcr1;
	sport->regs->tcr2 = tcr2;
	sport->regs->tclkdiv = clkdiv;
	sport->regs->tfsdiv = fsdiv;

	SSYNC();

	return 0;
}
EXPORT_SYMBOL(sport_config_tx);
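
/*
 * Illustrative sketch (not part of the original driver): deriving the clkdiv
 * and fsdiv arguments for sport_config_tx()/sport_config_rx() when the SPORT
 * generates its own clocks.  Per the Blackfin HRM the serial clock is
 * SCLK / (2 * (CLKDIV + 1)) and a frame sync is issued every FSDIV + 1 serial
 * clock cycles; the 48 kHz, 2 x 32-bit frame below is an assumption for the
 * example, and the integer division may need rounding for odd ratios.
 *
 *	unsigned int sclk   = get_sclk();		// Blackfin system clock
 *	unsigned int bclk   = 48000 * 64;		// 48 kHz x 64 bits/frame
 *	unsigned int clkdiv = sclk / (2 * bclk) - 1;
 *	unsigned int fsdiv  = 64 - 1;			// sync every 64 bit clocks
 *	// tcr1/tcr2 carry the usual mode bits (frame sync, clock edge,
 *	// word length) and are assumed to be set up elsewhere
 *	err = sport_config_tx(sport_handle, tcr1, tcr2, clkdiv, fsdiv);
 */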

static void setup_desc(struct dmasg *desc, void *buf, int fragcount,
		size_t fragsize, unsigned int cfg,
		unsigned int x_count, unsigned int ycount, size_t wdsize)
{
	int i;

	for (i = 0; i < fragcount; ++i) {
		desc[i].next_desc_addr = &(desc[i + 1]);
		desc[i].start_addr = (unsigned long)buf + i*fragsize;
		desc[i].cfg = cfg;
		desc[i].x_count = x_count;
		desc[i].x_modify = wdsize;
		desc[i].y_count = ycount;
		desc[i].y_modify = wdsize;
	}

	/* make circular */
	desc[fragcount-1].next_desc_addr = desc;

	pr_debug("setup desc: desc0=%p, next0=%p, desc1=%p,"
		"next1=%p\nx_count=%x,y_count=%x,addr=0x%lx,cfg=0x%x\n",
		desc, desc[0].next_desc_addr,
		desc+1, desc[1].next_desc_addr,
		desc[0].x_count, desc[0].y_count,
		desc[0].start_addr, desc[0].cfg);
}

static int sport_start(struct sport_device *sport)
{
	enable_dma(sport->dma_rx_chan);
	enable_dma(sport->dma_tx_chan);
	sport->regs->rcr1 |= RSPEN;
	sport->regs->tcr1 |= TSPEN;
	SSYNC();

	return 0;
}

static int sport_stop(struct sport_device *sport)
{
	sport->regs->tcr1 &= ~TSPEN;
	sport->regs->rcr1 &= ~RSPEN;
	SSYNC();

	disable_dma(sport->dma_rx_chan);
	disable_dma(sport->dma_tx_chan);
	return 0;
}

static inline int sport_hook_rx_dummy(struct sport_device *sport)
{
	struct dmasg *desc, temp_desc;
	unsigned long flags;

	BUG_ON(sport->dummy_rx_desc == NULL);
	BUG_ON(sport->curr_rx_desc == sport->dummy_rx_desc);

	/* The dummy buffer descriptor ring may have been damaged, relink it */
	sport->dummy_rx_desc->next_desc_addr = sport->dummy_rx_desc + 1;

	local_irq_save(flags);
	desc = get_dma_next_desc_ptr(sport->dma_rx_chan);
	/* Copy the descriptor which will be damaged to backup */
	temp_desc = *desc;
	desc->x_count = sport->dummy_count / 2;
	desc->y_count = 0;
	desc->next_desc_addr = sport->dummy_rx_desc;
	local_irq_restore(flags);
	/* Wait until the dummy buffer descriptor is hooked */
	while ((get_dma_curr_desc_ptr(sport->dma_rx_chan) -
			sizeof(struct dmasg)) != sport->dummy_rx_desc)
		continue;
	sport->curr_rx_desc = sport->dummy_rx_desc;
	/* Restore the damaged descriptor */
	*desc = temp_desc;

	return 0;
}

static inline int sport_rx_dma_start(struct sport_device *sport, int dummy)
{
	if (dummy) {
		sport->dummy_rx_desc->next_desc_addr = sport->dummy_rx_desc;
		sport->curr_rx_desc = sport->dummy_rx_desc;
	} else
		sport->curr_rx_desc = sport->dma_rx_desc;

	set_dma_next_desc_addr(sport->dma_rx_chan, sport->curr_rx_desc);
	set_dma_x_count(sport->dma_rx_chan, 0);
	set_dma_x_modify(sport->dma_rx_chan, 0);
	set_dma_config(sport->dma_rx_chan, (DMAFLOW_LARGE | NDSIZE_9 | \
			WDSIZE_32 | WNR));
	set_dma_curr_addr(sport->dma_rx_chan, sport->curr_rx_desc->start_addr);
	SSYNC();

	return 0;
}

static inline int sport_tx_dma_start(struct sport_device *sport, int dummy)
{
	if (dummy) {
		sport->dummy_tx_desc->next_desc_addr = sport->dummy_tx_desc;
		sport->curr_tx_desc = sport->dummy_tx_desc;
	} else
		sport->curr_tx_desc = sport->dma_tx_desc;

	set_dma_next_desc_addr(sport->dma_tx_chan, sport->curr_tx_desc);
	set_dma_x_count(sport->dma_tx_chan, 0);
	set_dma_x_modify(sport->dma_tx_chan, 0);
	set_dma_config(sport->dma_tx_chan,
			(DMAFLOW_LARGE | NDSIZE_9 | WDSIZE_32));
	set_dma_curr_addr(sport->dma_tx_chan, sport->curr_tx_desc->start_addr);
	SSYNC();

	return 0;
}

int sport_rx_start(struct sport_device *sport)
{
	unsigned long flags;
	pr_debug("%s enter\n", __func__);
	if (sport->rx_run)
		return -EBUSY;
	if (sport->tx_run) {
		/* tx is running, rx is not running */
		BUG_ON(sport->dma_rx_desc == NULL);
		BUG_ON(sport->curr_rx_desc != sport->dummy_rx_desc);
		local_irq_save(flags);
		while ((get_dma_curr_desc_ptr(sport->dma_rx_chan) -
			sizeof(struct dmasg)) != sport->dummy_rx_desc)
			continue;
		sport->dummy_rx_desc->next_desc_addr = sport->dma_rx_desc;
		local_irq_restore(flags);
		sport->curr_rx_desc = sport->dma_rx_desc;
	} else {
		sport_tx_dma_start(sport, 1);
		sport_rx_dma_start(sport, 0);
		sport_start(sport);
	}

	sport->rx_run = 1;

	return 0;
}
EXPORT_SYMBOL(sport_rx_start);

int sport_rx_stop(struct sport_device *sport)
{
	pr_debug("%s enter\n", __func__);

	if (!sport->rx_run)
		return 0;
	if (sport->tx_run) {
		/* TX dma is still running, hook the dummy buffer */
		sport_hook_rx_dummy(sport);
	} else {
		/* Both rx and tx dma will be stopped */
		sport_stop(sport);
		sport->curr_rx_desc = NULL;
		sport->curr_tx_desc = NULL;
	}

	sport->rx_run = 0;

	return 0;
}
EXPORT_SYMBOL(sport_rx_stop);

static inline int sport_hook_tx_dummy(struct sport_device *sport)
{
	struct dmasg *desc, temp_desc;
	unsigned long flags;

	BUG_ON(sport->dummy_tx_desc == NULL);
	BUG_ON(sport->curr_tx_desc == sport->dummy_tx_desc);

	sport->dummy_tx_desc->next_desc_addr = sport->dummy_tx_desc + 1;

	/* Shorten the time on last normal descriptor */
	local_irq_save(flags);
	desc = get_dma_next_desc_ptr(sport->dma_tx_chan);
	/* Store the descriptor which will be damaged */
	temp_desc = *desc;
	desc->x_count = sport->dummy_count / 2;
	desc->y_count = 0;
	desc->next_desc_addr = sport->dummy_tx_desc;
	local_irq_restore(flags);
	/* Wait until the dummy buffer descriptor is hooked */
	while ((get_dma_curr_desc_ptr(sport->dma_tx_chan) - \
			sizeof(struct dmasg)) != sport->dummy_tx_desc)
		continue;
	sport->curr_tx_desc = sport->dummy_tx_desc;
	/* Restore the damaged descriptor */
	*desc = temp_desc;

	return 0;
}

int sport_tx_start(struct sport_device *sport)
{
	unsigned long flags;
	pr_debug("%s: tx_run:%d, rx_run:%d\n", __func__,
			sport->tx_run, sport->rx_run);
	if (sport->tx_run)
		return -EBUSY;
	if (sport->rx_run) {
		BUG_ON(sport->dma_tx_desc == NULL);
		BUG_ON(sport->curr_tx_desc != sport->dummy_tx_desc);
		/* Hook the normal buffer descriptor */
		local_irq_save(flags);
		while ((get_dma_curr_desc_ptr(sport->dma_tx_chan) -
			sizeof(struct dmasg)) != sport->dummy_tx_desc)
			continue;
		sport->dummy_tx_desc->next_desc_addr = sport->dma_tx_desc;
		local_irq_restore(flags);
		sport->curr_tx_desc = sport->dma_tx_desc;
	} else {
		sport_tx_dma_start(sport, 0);
		/* Let rx dma run the dummy buffer */
		sport_rx_dma_start(sport, 1);
		sport_start(sport);
	}
	sport->tx_run = 1;
	return 0;
}
EXPORT_SYMBOL(sport_tx_start);

int sport_tx_stop(struct sport_device *sport)
{
	if (!sport->tx_run)
		return 0;
	if (sport->rx_run) {
		/* RX is still running, hook the dummy buffer */
		sport_hook_tx_dummy(sport);
	} else {
		/* Both rx and tx dma stopped */
		sport_stop(sport);
		sport->curr_rx_desc = NULL;
		sport->curr_tx_desc = NULL;
	}

	sport->tx_run = 0;

	return 0;
}
EXPORT_SYMBOL(sport_tx_stop);

static inline int compute_wdsize(size_t wdsize)
{
	switch (wdsize) {
	case 1:
		return WDSIZE_8;
	case 2:
		return WDSIZE_16;
	case 4:
	default:
		return WDSIZE_32;
	}
}
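
/*
 * Illustrative note (not part of the original driver): RX and TX share one
 * SPORT, so whichever direction starts first also kicks the other direction's
 * DMA on its dummy descriptor ring; the real ring is spliced in later by
 * sport_rx_start()/sport_tx_start() and spliced back out by the _stop()
 * helpers.  A full-duplex caller therefore just does, in either order:
 *
 *	sport_tx_start(sport_handle);	// starts SPORT, RX runs the dummy ring
 *	sport_rx_start(sport_handle);	// splices the real RX ring in
 *	...
 *	sport_rx_stop(sport_handle);	// RX back to dummy ring, TX keeps going
 *	sport_tx_stop(sport_handle);	// both directions stopped
 */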

int sport_config_rx_dma(struct sport_device *sport, void *buf,
		int fragcount, size_t fragsize)
{
	unsigned int x_count;
	unsigned int y_count;
	unsigned int cfg;
	dma_addr_t addr;

	pr_debug("%s buf:%p, frag:%d, fragsize:0x%lx\n", __func__, \
			buf, fragcount, fragsize);

	x_count = fragsize / sport->wdsize;
	y_count = 0;

	/* for fragments larger than 64k words we use 2d dma,
	 * so split the word count into two factors that are
	 * each less than 64k */
	if (x_count >= 0x10000) {
		int i, count = x_count;

		for (i = 16; i > 0; i--) {
			x_count = 1 << i;
			if ((count & (x_count - 1)) == 0) {
				y_count = count >> i;
				if (y_count < 0x10000)
					break;
			}
		}
		if (i == 0)
			return -EINVAL;
	}
	pr_debug("%s(x_count:0x%x, y_count:0x%x)\n", __func__,
			x_count, y_count);

	if (sport->dma_rx_desc)
		dma_free_coherent(NULL, sport->rx_desc_bytes,
				sport->dma_rx_desc, 0);

	/* Allocate a new descriptor ring as current one. */
	sport->dma_rx_desc = dma_alloc_coherent(NULL, \
			fragcount * sizeof(struct dmasg), &addr, 0);
	sport->rx_desc_bytes = fragcount * sizeof(struct dmasg);

	if (!sport->dma_rx_desc) {
		pr_err("Failed to allocate memory for rx desc\n");
		return -ENOMEM;
	}

	sport->rx_buf = buf;
	sport->rx_fragsize = fragsize;
	sport->rx_frags = fragcount;

	cfg = 0x7000 | DI_EN | compute_wdsize(sport->wdsize) | WNR | \
			(DESC_ELEMENT_COUNT << 8); /* large descriptor mode */

	if (y_count != 0)
		cfg |= DMA2D;

	setup_desc(sport->dma_rx_desc, buf, fragcount, fragsize,
			cfg|DMAEN, x_count, y_count, sport->wdsize);

	return 0;
}
EXPORT_SYMBOL(sport_config_rx_dma);
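
/*
 * Illustrative note (not part of the original driver): worked example of the
 * 2D split above.  With a (hypothetical) 1 MiB fragment and 16-bit words,
 * fragsize / wdsize = 0x80000 words, which exceeds the 64K-word 1D limit, so
 * the loop factors it as x_count = 0x10000 and y_count = 8 and DMA2D is set;
 * the DMA then walks 8 "rows" of 64K words per fragment.  Word counts that
 * cannot be written as (power of two) * (value < 64K), e.g. an odd count of
 * 64K or more, are rejected with -EINVAL.
 */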

int sport_config_tx_dma(struct sport_device *sport, void *buf, \
		int fragcount, size_t fragsize)
{
	unsigned int x_count;
	unsigned int y_count;
	unsigned int cfg;
	dma_addr_t addr;

	pr_debug("%s buf:%p, fragcount:%d, fragsize:0x%lx\n",
			__func__, buf, fragcount, fragsize);

	x_count = fragsize/sport->wdsize;
	y_count = 0;

	/* for fragments larger than 64k words we use 2d dma,
	 * so split the word count into two factors that are
	 * each less than 64k */
	if (x_count >= 0x10000) {
		int i, count = x_count;

		for (i = 16; i > 0; i--) {
			x_count = 1 << i;
			if ((count & (x_count - 1)) == 0) {
				y_count = count >> i;
				if (y_count < 0x10000)
					break;
			}
		}
		if (i == 0)
			return -EINVAL;
	}
	pr_debug("%s x_count:0x%x, y_count:0x%x\n", __func__,
			x_count, y_count);

	if (sport->dma_tx_desc) {
		dma_free_coherent(NULL, sport->tx_desc_bytes, \
				sport->dma_tx_desc, 0);
	}

	sport->dma_tx_desc = dma_alloc_coherent(NULL, \
			fragcount * sizeof(struct dmasg), &addr, 0);
	sport->tx_desc_bytes = fragcount * sizeof(struct dmasg);
	if (!sport->dma_tx_desc) {
		pr_err("Failed to allocate memory for tx desc\n");
		return -ENOMEM;
	}

	sport->tx_buf = buf;
	sport->tx_fragsize = fragsize;
	sport->tx_frags = fragcount;
	cfg = 0x7000 | DI_EN | compute_wdsize(sport->wdsize) | \
			(DESC_ELEMENT_COUNT << 8); /* large descriptor mode */

	if (y_count != 0)
		cfg |= DMA2D;

	setup_desc(sport->dma_tx_desc, buf, fragcount, fragsize,
			cfg|DMAEN, x_count, y_count, sport->wdsize);

	return 0;
}
EXPORT_SYMBOL(sport_config_tx_dma);

/* set up the dummy dma descriptor ring, which doesn't generate interrupts
 * and simply loops over the dummy buffer */
static int sport_config_rx_dummy(struct sport_device *sport)
{
	struct dmasg *desc;
	unsigned config;

	pr_debug("%s entered\n", __func__);
	if (L1_DATA_A_LENGTH)
		desc = l1_data_sram_zalloc(2 * sizeof(*desc));
	else {
		dma_addr_t addr;
		desc = dma_alloc_coherent(NULL, 2 * sizeof(*desc), &addr, 0);
		if (desc)
			memset(desc, 0, 2 * sizeof(*desc));
	}
	if (desc == NULL) {
		pr_err("Failed to allocate memory for dummy rx desc\n");
		return -ENOMEM;
	}
	sport->dummy_rx_desc = desc;
	desc->start_addr = (unsigned long)sport->dummy_buf;
	config = DMAFLOW_LARGE | NDSIZE_9 | compute_wdsize(sport->wdsize)
			| WNR | DMAEN;
	desc->cfg = config;
	desc->x_count = sport->dummy_count/sport->wdsize;
	desc->x_modify = sport->wdsize;
	desc->y_count = 0;
	desc->y_modify = 0;
	memcpy(desc+1, desc, sizeof(*desc));
	desc->next_desc_addr = desc + 1;
	desc[1].next_desc_addr = desc;
	return 0;
}

static int sport_config_tx_dummy(struct sport_device *sport)
{
	struct dmasg *desc;
	unsigned int config;

	pr_debug("%s entered\n", __func__);

	if (L1_DATA_A_LENGTH)
		desc = l1_data_sram_zalloc(2 * sizeof(*desc));
	else {
		dma_addr_t addr;
		desc = dma_alloc_coherent(NULL, 2 * sizeof(*desc), &addr, 0);
		if (desc)
			memset(desc, 0, 2 * sizeof(*desc));
	}
	if (!desc) {
		pr_err("Failed to allocate memory for dummy tx desc\n");
		return -ENOMEM;
	}
	sport->dummy_tx_desc = desc;
	desc->start_addr = (unsigned long)sport->dummy_buf + \
			sport->dummy_count;
	config = DMAFLOW_LARGE | NDSIZE_9 |
			compute_wdsize(sport->wdsize) | DMAEN;
	desc->cfg = config;
	desc->x_count = sport->dummy_count/sport->wdsize;
	desc->x_modify = sport->wdsize;
	desc->y_count = 0;
	desc->y_modify = 0;
	memcpy(desc+1, desc, sizeof(*desc));
	desc->next_desc_addr = desc + 1;
	desc[1].next_desc_addr = desc;
	return 0;
}

unsigned long sport_curr_offset_rx(struct sport_device *sport)
{
	unsigned long curr = get_dma_curr_addr(sport->dma_rx_chan);

	return (unsigned char *)curr - sport->rx_buf;
}
EXPORT_SYMBOL(sport_curr_offset_rx);

unsigned long sport_curr_offset_tx(struct sport_device *sport)
{
	unsigned long curr = get_dma_curr_addr(sport->dma_tx_chan);

	return (unsigned char *)curr - sport->tx_buf;
}
EXPORT_SYMBOL(sport_curr_offset_tx);

void sport_incfrag(struct sport_device *sport, int *frag, int tx)
{
	++(*frag);
	if (tx == 1 && *frag == sport->tx_frags)
		*frag = 0;

	if (tx == 0 && *frag == sport->rx_frags)
		*frag = 0;
}
EXPORT_SYMBOL(sport_incfrag);

void sport_decfrag(struct sport_device *sport, int *frag, int tx)
{
	--(*frag);
	if (tx == 1 && *frag == 0)
		*frag = sport->tx_frags;

	if (tx == 0 && *frag == 0)
		*frag = sport->rx_frags;
}
EXPORT_SYMBOL(sport_decfrag);

static int sport_check_status(struct sport_device *sport,
		unsigned int *sport_stat,
		unsigned int *rx_stat,
		unsigned int *tx_stat)
{
	int status = 0;

	if (sport_stat) {
		SSYNC();
		status = sport->regs->stat;
		if (status & (TOVF|TUVF|ROVF|RUVF))
			sport->regs->stat = (status & (TOVF|TUVF|ROVF|RUVF));
		SSYNC();
		*sport_stat = status;
	}

	if (rx_stat) {
		SSYNC();
		status = get_dma_curr_irqstat(sport->dma_rx_chan);
		if (status & (DMA_DONE|DMA_ERR))
			clear_dma_irqstat(sport->dma_rx_chan);
		SSYNC();
		*rx_stat = status;
	}

	if (tx_stat) {
		SSYNC();
		status = get_dma_curr_irqstat(sport->dma_tx_chan);
		if (status & (DMA_DONE|DMA_ERR))
			clear_dma_irqstat(sport->dma_tx_chan);
		SSYNC();
		*tx_stat = status;
	}

	return 0;
}

int sport_dump_stat(struct sport_device *sport, char *buf, size_t len)
{
	int ret;

	ret = snprintf(buf, len,
			"sts: 0x%04x\n"
			"rx dma %d sts: 0x%04x tx dma %d sts: 0x%04x\n",
			sport->regs->stat,
			sport->dma_rx_chan,
			get_dma_curr_irqstat(sport->dma_rx_chan),
			sport->dma_tx_chan,
			get_dma_curr_irqstat(sport->dma_tx_chan));
	buf += ret;
	len -= ret;

	ret += snprintf(buf, len,
			"curr_rx_desc:0x%p, curr_tx_desc:0x%p\n"
			"dma_rx_desc:0x%p, dma_tx_desc:0x%p\n"
			"dummy_rx_desc:0x%p, dummy_tx_desc:0x%p\n",
			sport->curr_rx_desc, sport->curr_tx_desc,
			sport->dma_rx_desc, sport->dma_tx_desc,
			sport->dummy_rx_desc, sport->dummy_tx_desc);

	return ret;
}
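
/*
 * Illustrative sketch (not part of the original driver): a PCM-style user of
 * the descriptor ring hands its audio buffer to the DMA layer once and then
 * polls the hardware position from its pointer callback.  buf, frags and
 * fragsize are assumed to come from the ALSA substream setup.
 *
 *	sport_config_rx_dma(sport_handle, buf, frags, fragsize);
 *	sport_rx_start(sport_handle);
 *	...
 *	// in the pointer callback: byte offset of the DMA head within buf
 *	unsigned long pos = sport_curr_offset_rx(sport_handle);
 */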

static irqreturn_t rx_handler(int irq, void *dev_id)
{
	unsigned int rx_stat;
	struct sport_device *sport = dev_id;

	pr_debug("%s enter\n", __func__);
	sport_check_status(sport, NULL, &rx_stat, NULL);
	if (!(rx_stat & DMA_DONE))
		pr_err("rx dma is already stopped\n");

	if (sport->rx_callback) {
		sport->rx_callback(sport->rx_data);
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}

static irqreturn_t tx_handler(int irq, void *dev_id)
{
	unsigned int tx_stat;
	struct sport_device *sport = dev_id;
	pr_debug("%s enter\n", __func__);
	sport_check_status(sport, NULL, NULL, &tx_stat);
	if (!(tx_stat & DMA_DONE)) {
		pr_err("tx dma is already stopped\n");
		return IRQ_HANDLED;
	}
	if (sport->tx_callback) {
		sport->tx_callback(sport->tx_data);
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}

static irqreturn_t err_handler(int irq, void *dev_id)
{
	unsigned int status = 0;
	struct sport_device *sport = dev_id;

	pr_debug("%s\n", __func__);
	if (sport_check_status(sport, &status, NULL, NULL)) {
		pr_err("error checking status ??");
		return IRQ_NONE;
	}

	if (status & (TOVF|TUVF|ROVF|RUVF)) {
		pr_info("sport status error:%s%s%s%s\n",
				status & TOVF ? " TOVF" : "",
				status & TUVF ? " TUVF" : "",
				status & ROVF ? " ROVF" : "",
				status & RUVF ? " RUVF" : "");
		if (status & TOVF || status & TUVF) {
			disable_dma(sport->dma_tx_chan);
			if (sport->tx_run)
				sport_tx_dma_start(sport, 0);
			else
				sport_tx_dma_start(sport, 1);
			enable_dma(sport->dma_tx_chan);
		} else {
			disable_dma(sport->dma_rx_chan);
			if (sport->rx_run)
				sport_rx_dma_start(sport, 0);
			else
				sport_rx_dma_start(sport, 1);
			enable_dma(sport->dma_rx_chan);
		}
	}
	status = sport->regs->stat;
	if (status & (TOVF|TUVF|ROVF|RUVF))
		sport->regs->stat = (status & (TOVF|TUVF|ROVF|RUVF));
	SSYNC();

	if (sport->err_callback)
		sport->err_callback(sport->err_data);

	return IRQ_HANDLED;
}

int sport_set_rx_callback(struct sport_device *sport,
		void (*rx_callback)(void *), void *rx_data)
{
	BUG_ON(rx_callback == NULL);
	sport->rx_callback = rx_callback;
	sport->rx_data = rx_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_rx_callback);

int sport_set_tx_callback(struct sport_device *sport,
		void (*tx_callback)(void *), void *tx_data)
{
	BUG_ON(tx_callback == NULL);
	sport->tx_callback = tx_callback;
	sport->tx_data = tx_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_tx_callback);

int sport_set_err_callback(struct sport_device *sport,
		void (*err_callback)(void *), void *err_data)
{
	BUG_ON(err_callback == NULL);
	sport->err_callback = err_callback;
	sport->err_data = err_data;

	return 0;
}
EXPORT_SYMBOL(sport_set_err_callback);
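
/*
 * Illustrative sketch (not part of the original driver): the callbacks are
 * invoked from the DMA/error interrupt handlers above, so they should only do
 * interrupt-safe work such as snd_pcm_period_elapsed().  The helper name and
 * context below are assumptions for the example.
 *
 *	static void my_rx_period_done(void *data)
 *	{
 *		struct snd_pcm_substream *substream = data;
 *
 *		snd_pcm_period_elapsed(substream);
 *	}
 *
 *	sport_set_rx_callback(sport_handle, my_rx_period_done, substream);
 */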

static int sport_config_pdev(struct platform_device *pdev, struct sport_param *param)
{
	/* Extract settings from platform data */
	struct device *dev = &pdev->dev;
	struct bfin_snd_platform_data *pdata = dev->platform_data;
	struct resource *res;

	param->num = pdev->id;

	if (!pdata) {
		dev_err(dev, "no platform_data\n");
		return -ENODEV;
	}
	param->pin_req = pdata->pin_req;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(dev, "no MEM resource\n");
		return -ENODEV;
	}
	param->regs = (struct sport_register *)res->start;

	/* first RX, then TX */
	res = platform_get_resource(pdev, IORESOURCE_DMA, 0);
	if (!res) {
		dev_err(dev, "no rx DMA resource\n");
		return -ENODEV;
	}
	param->dma_rx_chan = res->start;

	res = platform_get_resource(pdev, IORESOURCE_DMA, 1);
	if (!res) {
		dev_err(dev, "no tx DMA resource\n");
		return -ENODEV;
	}
	param->dma_tx_chan = res->start;

	res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res) {
		dev_err(dev, "no irq resource\n");
		return -ENODEV;
	}
	param->err_irq = res->start;

	return 0;
}

struct sport_device *sport_init(struct platform_device *pdev,
	unsigned int wdsize, unsigned int dummy_count, size_t priv_size)
{
	struct device *dev = &pdev->dev;
	struct sport_param param;
	struct sport_device *sport;
	int ret;

	dev_dbg(dev, "%s enter\n", __func__);

	param.wdsize = wdsize;
	param.dummy_count = dummy_count;
	BUG_ON(param.wdsize == 0 || param.dummy_count == 0);

	ret = sport_config_pdev(pdev, &param);
	if (ret)
		return NULL;

	if (peripheral_request_list(param.pin_req, "soc-audio")) {
		dev_err(dev, "requesting peripherals failed\n");
		return NULL;
	}

	sport = kzalloc(sizeof(*sport), GFP_KERNEL);
	if (!sport) {
		dev_err(dev, "failed to allocate for sport device\n");
		goto __init_err0;
	}

	sport->num = param.num;
	sport->dma_rx_chan = param.dma_rx_chan;
	sport->dma_tx_chan = param.dma_tx_chan;
	sport->err_irq = param.err_irq;
	sport->regs = param.regs;
	sport->pin_req = param.pin_req;

	if (request_dma(sport->dma_rx_chan, "SPORT RX Data") == -EBUSY) {
		dev_err(dev, "failed to request RX dma %d\n", sport->dma_rx_chan);
		goto __init_err1;
	}
	if (set_dma_callback(sport->dma_rx_chan, rx_handler, sport) != 0) {
		dev_err(dev, "failed to request RX irq %d\n", sport->dma_rx_chan);
		goto __init_err2;
	}

	if (request_dma(sport->dma_tx_chan, "SPORT TX Data") == -EBUSY) {
		dev_err(dev, "failed to request TX dma %d\n", sport->dma_tx_chan);
		goto __init_err2;
	}

	if (set_dma_callback(sport->dma_tx_chan, tx_handler, sport) != 0) {
		dev_err(dev, "failed to request TX irq %d\n", sport->dma_tx_chan);
		goto __init_err3;
	}

	if (request_irq(sport->err_irq, err_handler, IRQF_SHARED, "SPORT err",
			sport) < 0) {
		dev_err(dev, "failed to request err irq %d\n", sport->err_irq);
		goto __init_err3;
	}

	dev_info(dev, "dma rx:%d tx:%d, err irq:%d, regs:%p\n",
			sport->dma_rx_chan, sport->dma_tx_chan,
			sport->err_irq, sport->regs);

	sport->wdsize = param.wdsize;
	sport->dummy_count = param.dummy_count;

	sport->private_data = kzalloc(priv_size, GFP_KERNEL);
	if (!sport->private_data) {
		dev_err(dev, "could not alloc priv data %zu bytes\n", priv_size);
		goto __init_err4;
	}

	if (L1_DATA_A_LENGTH)
		sport->dummy_buf = l1_data_sram_zalloc(param.dummy_count * 2);
	else
		sport->dummy_buf = kzalloc(param.dummy_count * 2, GFP_KERNEL);
	if (sport->dummy_buf == NULL) {
		dev_err(dev, "failed to allocate dummy buffer\n");
		goto __error1;
	}

	ret = sport_config_rx_dummy(sport);
	if (ret) {
		dev_err(dev, "failed to config rx dummy ring\n");
		goto __error2;
	}
	ret = sport_config_tx_dummy(sport);
	if (ret) {
		dev_err(dev, "failed to config tx dummy ring\n");
		goto __error3;
	}

	platform_set_drvdata(pdev, sport);

	return sport;
__error3:
	if (L1_DATA_A_LENGTH)
		l1_data_sram_free(sport->dummy_rx_desc);
	else
		dma_free_coherent(NULL, 2*sizeof(struct dmasg),
				sport->dummy_rx_desc, 0);
__error2:
	if (L1_DATA_A_LENGTH)
		l1_data_sram_free(sport->dummy_buf);
	else
		kfree(sport->dummy_buf);
__error1:
	kfree(sport->private_data);
__init_err4:
	free_irq(sport->err_irq, sport);
__init_err3:
	free_dma(sport->dma_tx_chan);
__init_err2:
	free_dma(sport->dma_rx_chan);
__init_err1:
	kfree(sport);
__init_err0:
	peripheral_free_list(param.pin_req);
	return NULL;
}
EXPORT_SYMBOL(sport_init);
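
/*
 * Illustrative sketch (not part of the original driver): the order in which a
 * platform DAI/machine driver typically brings a SPORT instance up from its
 * probe and tears it down again.  The 16-bit word size, dummy-buffer size and
 * struct my_priv are assumptions for the example.
 *
 *	sport_handle = sport_init(pdev, 2, 2 * sizeof(u16),
 *				  sizeof(struct my_priv));
 *	if (!sport_handle)
 *		return -ENODEV;
 *	sport_set_rx_callback(sport_handle, my_rx_period_done, rx_ctx);
 *	sport_set_tx_callback(sport_handle, my_tx_period_done, tx_ctx);
 *	sport_set_err_callback(sport_handle, my_err_handler, sport_handle);
 *	// ... then sport_config_rx()/sport_config_tx(),
 *	//     sport_config_*_dma() and sport_*_start() as streams open ...
 *
 *	// on remove:
 *	sport_done(sport_handle);
 */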

void sport_done(struct sport_device *sport)
{
	if (sport == NULL)
		return;

	sport_stop(sport);
	if (sport->dma_rx_desc)
		dma_free_coherent(NULL, sport->rx_desc_bytes,
				sport->dma_rx_desc, 0);
	if (sport->dma_tx_desc)
		dma_free_coherent(NULL, sport->tx_desc_bytes,
				sport->dma_tx_desc, 0);

#if L1_DATA_A_LENGTH != 0
	l1_data_sram_free(sport->dummy_rx_desc);
	l1_data_sram_free(sport->dummy_tx_desc);
	l1_data_sram_free(sport->dummy_buf);
#else
	dma_free_coherent(NULL, 2*sizeof(struct dmasg),
			sport->dummy_rx_desc, 0);
	dma_free_coherent(NULL, 2*sizeof(struct dmasg),
			sport->dummy_tx_desc, 0);
	kfree(sport->dummy_buf);
#endif
	free_dma(sport->dma_rx_chan);
	free_dma(sport->dma_tx_chan);
	free_irq(sport->err_irq, sport);

	kfree(sport->private_data);
	peripheral_free_list(sport->pin_req);
	kfree(sport);
}
EXPORT_SYMBOL(sport_done);

/*
 * Only used to transfer a few words by one-shot (non-descriptor) DMA while
 * the descriptor rings are not running; the sport controller must be
 * configured but not yet enabled.  Multichannel mode does not work with
 * this path.  Used by the AC97 driver to read and write codec registers.
 */
int sport_send_and_recv(struct sport_device *sport, u8 *out_data, \
		u8 *in_data, int len)
{
	unsigned short dma_config;
	unsigned short status;
	unsigned long flags;
	unsigned long wait = 0;

	pr_debug("%s enter, out_data:%p, in_data:%p len:%d\n", \
			__func__, out_data, in_data, len);
	pr_debug("tcr1:0x%04x, tcr2:0x%04x, tclkdiv:0x%04x, tfsdiv:0x%04x\n"
			"mcmc1:0x%04x, mcmc2:0x%04x\n",
			sport->regs->tcr1, sport->regs->tcr2,
			sport->regs->tclkdiv, sport->regs->tfsdiv,
			sport->regs->mcmc1, sport->regs->mcmc2);
	flush_dcache_range((unsigned)out_data, (unsigned)(out_data + len));

	/* Enable tx dma */
	dma_config = (RESTART | WDSIZE_16 | DI_EN);
	set_dma_start_addr(sport->dma_tx_chan, (unsigned long)out_data);
	set_dma_x_count(sport->dma_tx_chan, len/2);
	set_dma_x_modify(sport->dma_tx_chan, 2);
	set_dma_config(sport->dma_tx_chan, dma_config);
	enable_dma(sport->dma_tx_chan);

	if (in_data != NULL) {
		invalidate_dcache_range((unsigned)in_data, \
				(unsigned)(in_data + len));
		/* Enable rx dma */
		dma_config = (RESTART | WDSIZE_16 | WNR | DI_EN);
		set_dma_start_addr(sport->dma_rx_chan, (unsigned long)in_data);
		set_dma_x_count(sport->dma_rx_chan, len/2);
		set_dma_x_modify(sport->dma_rx_chan, 2);
		set_dma_config(sport->dma_rx_chan, dma_config);
		enable_dma(sport->dma_rx_chan);
	}

	local_irq_save(flags);
	sport->regs->tcr1 |= TSPEN;
	sport->regs->rcr1 |= RSPEN;
	SSYNC();

	status = get_dma_curr_irqstat(sport->dma_tx_chan);
	while (status & DMA_RUN) {
		udelay(1);
		status = get_dma_curr_irqstat(sport->dma_tx_chan);
		pr_debug("DMA status:0x%04x\n", status);
		if (wait++ > 100)
			goto __over;
	}
	status = sport->regs->stat;
	wait = 0;

	while (!(status & TXHRE)) {
		pr_debug("sport status:0x%04x\n", status);
		udelay(1);
		status = *(unsigned short *)&sport->regs->stat;
		if (wait++ > 1000)
			goto __over;
	}
	/* Wait for the last byte sent out */
	udelay(20);
	pr_debug("sport status:0x%04x\n", status);

__over:
	sport->regs->tcr1 &= ~TSPEN;
	sport->regs->rcr1 &= ~RSPEN;
	SSYNC();
	disable_dma(sport->dma_tx_chan);
	/* Clear the status */
	clear_dma_irqstat(sport->dma_tx_chan);
	if (in_data != NULL) {
		disable_dma(sport->dma_rx_chan);
		clear_dma_irqstat(sport->dma_rx_chan);
	}
	SSYNC();
	local_irq_restore(flags);

	return 0;
}
EXPORT_SYMBOL(sport_send_and_recv);
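
/*
 * Illustrative sketch (not part of the original driver): an AC97-style codec
 * register write using the one-shot path above.  The 16-slot frame layout and
 * tag value are assumptions for the example; the exact bit packing of the
 * address and data slots lives in the codec driver's AC97 frame definition.
 *
 *	u16 frame[16] = { 0 };		// one outgoing AC97 frame, 16-bit slots
 *	frame[0] = 0xe000;		// tag: frame, addr and data slots valid
 *	frame[1] = ...;			// slot 1: register address (per AC97 spec)
 *	frame[2] = ...;			// slot 2: register data
 *	sport_send_and_recv(sport_handle, (u8 *)frame, NULL, sizeof(frame));
 */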

MODULE_AUTHOR("Roy Huang");
MODULE_DESCRIPTION("SPORT driver for ADI Blackfin");
MODULE_LICENSE("GPL");