Path: arch/mn10300/include/asm/dma-mapping.h
/* DMA mapping routines for the MN10300 arch
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells ([email protected])
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef _ASM_DMA_MAPPING_H
#define _ASM_DMA_MAPPING_H

#include <linux/mm.h>
#include <linux/scatterlist.h>

#include <asm/cache.h>
#include <asm/io.h>

/*
 * See Documentation/DMA-API.txt for the description of how the
 * following DMA API should work.
 */

extern void *dma_alloc_coherent(struct device *dev, size_t size,
				dma_addr_t *dma_handle, int flag);

extern void dma_free_coherent(struct device *dev, size_t size,
			      void *vaddr, dma_addr_t dma_handle);

#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent((d), (s), (h), (f))
#define dma_free_noncoherent(d, s, v, h)  dma_free_coherent((d), (s), (v), (h))

static inline
dma_addr_t dma_map_single(struct device *dev, void *ptr, size_t size,
			  enum dma_data_direction direction)
{
	BUG_ON(direction == DMA_NONE);
	mn10300_dcache_flush_inv();
	return virt_to_bus(ptr);
}

static inline
void dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size,
		      enum dma_data_direction direction)
{
	BUG_ON(direction == DMA_NONE);
}

static inline
int dma_map_sg(struct device *dev, struct scatterlist *sglist, int nents,
	       enum dma_data_direction direction)
{
	struct scatterlist *sg;
	int i;

	BUG_ON(!valid_dma_direction(direction));
	WARN_ON(nents == 0 || sglist[0].length == 0);

	for_each_sg(sglist, sg, nents, i) {
		BUG_ON(!sg_page(sg));

		sg->dma_address = sg_phys(sg);
	}

	mn10300_dcache_flush_inv();
	return nents;
}

static inline
void dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries,
		  enum dma_data_direction direction)
{
	BUG_ON(!valid_dma_direction(direction));
}

static inline
dma_addr_t dma_map_page(struct device *dev, struct page *page,
			unsigned long offset, size_t size,
			enum dma_data_direction direction)
{
	BUG_ON(direction == DMA_NONE);
	return page_to_bus(page) + offset;
}

static inline
void dma_unmap_page(struct device *dev, dma_addr_t dma_address, size_t size,
		    enum dma_data_direction direction)
{
	BUG_ON(direction == DMA_NONE);
}

static inline
void dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle,
			     size_t size, enum dma_data_direction direction)
{
}

static inline
void dma_sync_single_for_device(struct device *dev, dma_addr_t dma_handle,
				size_t size, enum dma_data_direction direction)
{
	mn10300_dcache_flush_inv();
}

static inline
void dma_sync_single_range_for_cpu(struct device *dev, dma_addr_t dma_handle,
				   unsigned long offset, size_t size,
				   enum dma_data_direction direction)
{
}

static inline void
dma_sync_single_range_for_device(struct device *dev, dma_addr_t dma_handle,
				 unsigned long offset, size_t size,
				 enum dma_data_direction direction)
{
	mn10300_dcache_flush_inv();
}

static inline
void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
			 int nelems, enum dma_data_direction direction)
{
}

static inline
void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
			    int nelems, enum dma_data_direction direction)
{
	mn10300_dcache_flush_inv();
}

static inline
int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	return 0;
}

static inline
int dma_supported(struct device *dev, u64 mask)
{
	/*
	 * we fall back to GFP_DMA when the mask isn't all 1s, so we can't
	 * guarantee allocations that must be within a tighter range than
	 * GFP_DMA
	 */
	if (mask < 0x00ffffff)
		return 0;
	return 1;
}

static inline
int dma_set_mask(struct device *dev, u64 mask)
{
	if (!dev->dma_mask || !dma_supported(dev, mask))
		return -EIO;

	*dev->dma_mask = mask;
	return 0;
}

static inline
void dma_cache_sync(void *vaddr, size_t size,
		    enum dma_data_direction direction)
{
	mn10300_dcache_flush_inv();
}

#endif
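For reference, a minimal usage sketch follows. It is not part of the header: it only illustrates how a driver built against this streaming-DMA API might map a buffer for a device-bound transfer. The function name example_start_tx and the dev/buf/len parameters are hypothetical; only the dma_* calls come from the header above.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

/* Hypothetical driver helper: map "buf" for a device-bound (TX) transfer. */
static int example_start_tx(struct device *dev, void *buf, size_t len)
{
	dma_addr_t handle;

	/*
	 * On MN10300 this flushes and invalidates the data cache and
	 * returns the bus address of the buffer.
	 */
	handle = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, handle))
		return -ENOMEM;

	/* ... hand "handle" to the device and start the transfer ... */

	/* The unmap is a no-op in this header, but portable drivers still call it. */
	dma_unmap_single(dev, handle, len, DMA_TO_DEVICE);
	return 0;
}

The pattern mirrors the implementation above: the map and *_for_device sync operations flush and invalidate the entire data cache via mn10300_dcache_flush_inv(), while the unmap and *_for_cpu operations are empty.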