Path: blob/master/arch/unicore32/include/asm/dma-mapping.h
/*
 * linux/arch/unicore32/include/asm/dma-mapping.h
 *
 * Code specific to PKUnity SoC and UniCore ISA
 *
 * Copyright (C) 2001-2010 GUAN Xue-tao
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __UNICORE_DMA_MAPPING_H__
#define __UNICORE_DMA_MAPPING_H__

#ifdef __KERNEL__

#include <linux/mm_types.h>
#include <linux/scatterlist.h>
#include <linux/swiotlb.h>

#include <asm-generic/dma-coherent.h>

#include <asm/memory.h>
#include <asm/cacheflush.h>

extern struct dma_map_ops swiotlb_dma_map_ops;

static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
	return &swiotlb_dma_map_ops;
}

static inline int dma_supported(struct device *dev, u64 mask)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	if (unlikely(dma_ops == NULL))
		return 0;

	return dma_ops->dma_supported(dev, mask);
}

static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	if (dma_ops->mapping_error)
		return dma_ops->mapping_error(dev, dma_addr);

	return 0;
}

#include <asm-generic/dma-mapping-common.h>

static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
{
	if (dev && dev->dma_mask)
		return addr + size - 1 <= *dev->dma_mask;

	return 1;
}

static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
{
	return paddr;
}

static inline phys_addr_t dma_to_phys(struct device *dev, dma_addr_t daddr)
{
	return daddr;
}

static inline void dma_mark_clean(void *addr, size_t size) {}

static inline int dma_set_mask(struct device *dev, u64 dma_mask)
{
	if (!dev->dma_mask || !dma_supported(dev, dma_mask))
		return -EIO;

	*dev->dma_mask = dma_mask;

	return 0;
}

static inline void *dma_alloc_coherent(struct device *dev, size_t size,
				       dma_addr_t *dma_handle, gfp_t flag)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	return dma_ops->alloc_coherent(dev, size, dma_handle, flag);
}

static inline void dma_free_coherent(struct device *dev, size_t size,
				     void *cpu_addr, dma_addr_t dma_handle)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	dma_ops->free_coherent(dev, size, cpu_addr, dma_handle);
}

#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)

static inline void dma_cache_sync(struct device *dev, void *vaddr,
		size_t size, enum dma_data_direction direction)
{
	unsigned long start = (unsigned long)vaddr;
	unsigned long end = start + size;

	switch (direction) {
	case DMA_NONE:
		BUG();
	case DMA_FROM_DEVICE:
	case DMA_BIDIRECTIONAL:	/* writeback and invalidate */
		__cpuc_dma_flush_range(start, end);
		break;
	case DMA_TO_DEVICE:	/* writeback only */
		__cpuc_dma_clean_range(start, end);
		break;
	}
}

#endif /* __KERNEL__ */
#endif
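
A minimal usage sketch, not part of the header: it shows how a hypothetical PKUnity platform driver might call the helpers defined above (dma_set_mask, dma_alloc_coherent, dma_free_coherent), which this architecture routes through swiotlb_dma_map_ops. The probe function name, device pointer, and buffer size are illustrative assumptions, not taken from the kernel source.

/* Hypothetical caller of the helpers declared in asm/dma-mapping.h. */
#include <linux/dma-mapping.h>
#include <linux/gfp.h>

static int example_probe(struct device *dev)	/* hypothetical probe hook */
{
	dma_addr_t handle;
	void *buf;

	/* Restrict the device to 32-bit DMA addresses; fails with -EIO
	 * if dma_supported() rejects the mask. */
	if (dma_set_mask(dev, DMA_BIT_MASK(32)))
		return -EIO;

	/* Coherent allocation is delegated to swiotlb_dma_map_ops. */
	buf = dma_alloc_coherent(dev, PAGE_SIZE, &handle, GFP_KERNEL);
	if (!buf)
		return -ENOMEM;

	/* ... program the device with the bus address in 'handle' ... */

	dma_free_coherent(dev, PAGE_SIZE, buf, handle);
	return 0;
}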