/*
 * Copyright 2010 Tilera Corporation. All Rights Reserved.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, version 2.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 * NON INFRINGEMENT. See the GNU General Public License for
 * more details.
 */

#ifndef _ASM_TILE_DMA_MAPPING_H
#define _ASM_TILE_DMA_MAPPING_H

#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/cache.h>
#include <linux/cache.h>
#include <linux/io.h>

#ifdef __tilegx__
#define ARCH_HAS_DMA_GET_REQUIRED_MASK
#endif

/* DMA operation tables: the generic tile ops plus the tilegx PCI variants. */
extern const struct dma_map_ops *tile_dma_map_ops;
extern const struct dma_map_ops *gx_pci_dma_map_ops;
extern const struct dma_map_ops *gx_legacy_pci_dma_map_ops;
extern const struct dma_map_ops *gx_hybrid_pci_dma_map_ops;

/* Every bus falls back to the generic tile DMA operations. */
static inline const struct dma_map_ops *get_arch_dma_ops(struct bus_type *bus)
{
        return tile_dma_map_ops;
}

/*
 * A device may carry an offset between CPU physical addresses and the
 * addresses it issues on the bus; it is kept in dev->archdata.dma_offset
 * and managed through these two accessors.
 */
static inline dma_addr_t get_dma_offset(struct device *dev)
{
        return dev->archdata.dma_offset;
}

static inline void set_dma_offset(struct device *dev, dma_addr_t off)
{
        dev->archdata.dma_offset = off;
}
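/*
 * Illustrative sketch only (not part of this header): one plausible use of
 * the accessors above is for bus setup code to record a device's bus-window
 * offset and for a mapping routine to add it when forming a bus address.
 * The helper name below and the way the offset is applied are assumptions
 * for illustration, not the actual tile implementation.
 *
 *      static void example_setup_dma_window(struct device *dev,
 *                                           dma_addr_t bus_offset)
 *      {
 *              set_dma_offset(dev, bus_offset);
 *      }
 *
 *      // e.g. inside a hypothetical ->map_page() implementation:
 *      // dma_addr_t bus_addr = get_dma_offset(dev) + page_to_phys(page) + offset;
 */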

/*
 * At this layer the CPU physical address and the DMA address are the
 * same; the conversions below are the identity in both directions.
 */
static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
{
        return paddr;
}

static inline phys_addr_t dma_to_phys(struct device *dev, dma_addr_t daddr)
{
        return daddr;
}

static inline void dma_mark_clean(void *addr, size_t size) {}

/*
 * A buffer is DMA-capable for a device only if the device has a DMA mask
 * and the buffer's last byte still falls within that mask.
 */
static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
{
        if (!dev->dma_mask)
                return 0;

        return addr + size - 1 <= *dev->dma_mask;
}

/* The architecture provides its own dma_set_mask() implementation. */
#define HAVE_ARCH_DMA_SET_MASK 1
int dma_set_mask(struct device *dev, u64 mask);
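/*
 * Illustrative usage (hypothetical driver code, not part of this header):
 * a driver would normally ask for the widest mask it can use and fall back
 * to 32 bits if dma_set_mask() rejects the request.
 *
 *      if (dma_set_mask(&pdev->dev, DMA_BIT_MASK(64)) &&
 *          dma_set_mask(&pdev->dev, DMA_BIT_MASK(32)))
 *              return -EIO;
 */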

/*
 * dma_alloc_noncoherent() is #defined to return coherent memory,
 * so there's no need to do any flushing here.
 */
static inline void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
                                  enum dma_data_direction direction)
{
}

#endif /* _ASM_TILE_DMA_MAPPING_H */