2019-06-04 16:11:33 +08:00
|
|
|
// SPDX-License-Identifier: GPL-2.0-only
|
2010-02-22 19:16:08 +08:00
|
|
|
/*
|
|
|
|
* Microblaze support for cache consistent memory.
|
|
|
|
* Copyright (C) 2010 Michal Simek <monstr@monstr.eu>
|
|
|
|
* Copyright (C) 2010 PetaLogix
|
|
|
|
* Copyright (C) 2005 John Williams <jwilliams@itee.uq.edu.au>
|
|
|
|
*/
|
|
|
|
|
|
|
|
#include <linux/kernel.h>
|
|
|
|
#include <linux/string.h>
|
|
|
|
#include <linux/types.h>
|
|
|
|
#include <linux/mm.h>
|
|
|
|
#include <linux/init.h>
|
2018-07-19 20:54:39 +08:00
|
|
|
#include <linux/dma-noncoherent.h>
|
2010-02-22 19:16:08 +08:00
|
|
|
#include <asm/cpuinfo.h>
|
2019-08-14 22:03:48 +08:00
|
|
|
#include <asm/cacheflush.h>
|
2010-02-22 19:16:08 +08:00
|
|
|
|
2019-08-14 22:03:47 +08:00
|
|
|
/*
 * Prepare the memory backing @page for coherent DMA use by writing
 * back any dirty data-cache lines covering its first @size bytes.
 */
void arch_dma_prep_coherent(struct page *page, size_t size)
{
	phys_addr_t start = page_to_phys(page);

	flush_dcache_range(start, start + size);
}
|
2010-04-10 23:34:06 +08:00
|
|
|
|
|
|
|
#ifndef CONFIG_MMU
|
2010-02-22 19:16:08 +08:00
|
|
|
/*
|
2019-08-14 22:03:47 +08:00
|
|
|
* Consistent memory allocators. Used for DMA devices that want to share
|
|
|
|
* uncached memory with the processor core. My crufty no-MMU approach is
|
|
|
|
* simple. In the HW platform we can optionally mirror the DDR up above the
|
|
|
|
* processor cacheable region. So, memory accessed in this mirror region will
|
|
|
|
* not be cached. It's alloced from the same pool as normal memory, but the
|
|
|
|
* handle we return is shifted up into the uncached region. This will no doubt
|
|
|
|
* cause big problems if memory allocated here is not also freed properly. -- JW
|
|
|
|
*
|
|
|
|
 * I have to use dcache values because I can't rely on RAM size:
|
2010-02-22 19:16:08 +08:00
|
|
|
*/
|
2019-08-14 22:03:47 +08:00
|
|
|
#ifdef CONFIG_XILINX_UNCACHED_SHADOW
|
|
|
|
#define UNCACHED_SHADOW_MASK (cpuinfo.dcache_high - cpuinfo.dcache_base + 1)
|
2010-04-10 23:34:06 +08:00
|
|
|
#else
|
2019-08-14 22:03:47 +08:00
|
|
|
#define UNCACHED_SHADOW_MASK 0
|
|
|
|
#endif /* CONFIG_XILINX_UNCACHED_SHADOW */
|
2010-04-10 23:34:06 +08:00
|
|
|
|
2019-08-14 22:03:47 +08:00
|
|
|
void *uncached_kernel_address(void *ptr)
|
2014-12-03 23:07:28 +08:00
|
|
|
{
|
2019-08-14 22:03:47 +08:00
|
|
|
unsigned long addr = (unsigned long)ptr;
|
2014-12-03 23:07:28 +08:00
|
|
|
|
2019-08-14 22:03:47 +08:00
|
|
|
addr |= UNCACHED_SHADOW_MASK;
|
|
|
|
if (addr > cpuinfo.dcache_base && addr < cpuinfo.dcache_high)
|
|
|
|
pr_warn("ERROR: Your cache coherent area is CACHED!!!\n");
|
|
|
|
return (void *)addr;
|
2014-12-03 23:07:28 +08:00
|
|
|
}
|
|
|
|
|
2019-08-14 22:03:47 +08:00
|
|
|
void *cached_kernel_address(void *ptr)
|
2010-02-22 19:16:08 +08:00
|
|
|
{
|
2019-08-14 22:03:47 +08:00
|
|
|
unsigned long addr = (unsigned long)ptr;
|
2010-04-10 23:34:06 +08:00
|
|
|
|
2019-08-14 22:03:47 +08:00
|
|
|
return (void *)(addr & ~UNCACHED_SHADOW_MASK);
|
2010-02-22 19:16:08 +08:00
|
|
|
}
|
2019-08-14 22:03:47 +08:00
|
|
|
#endif /* CONFIG_MMU */
|