5#ifndef ZEPHYR_INCLUDE_ARCH_XTENSA_CACHE_H_
6#define ZEPHYR_INCLUDE_ARCH_XTENSA_CACHE_H_
8#include <xtensa/config/core-isa.h>
12#include <xtensa/hal.h>
18#define Z_DCACHE_MAX (XCHAL_DCACHE_SIZE / XCHAL_DCACHE_WAYS)
21BUILD_ASSERT(Z_IS_POW2(XCHAL_DCACHE_LINESIZE));
22BUILD_ASSERT(Z_IS_POW2(Z_DCACHE_MAX));
25#if defined(CONFIG_DCACHE) || defined(__DOXYGEN__)
31 size_t step = XCHAL_DCACHE_LINESIZE;
33 size_t last =
ROUND_UP(((
long)addr) + bytes, step);
36 for (line = first; bytes && line < last; line += step) {
37 __asm__
volatile(
"dhwb %0, 0" ::
"r"(line));
47 size_t step = XCHAL_DCACHE_LINESIZE;
49 size_t last =
ROUND_UP(((
long)addr) + bytes, step);
52 for (line = first; bytes && line < last; line += step) {
53 __asm__
volatile(
"dhwbi %0, 0" ::
"r"(line));
63 size_t step = XCHAL_DCACHE_LINESIZE;
65 size_t last =
ROUND_UP(((
long)addr) + bytes, step);
68 for (line = first; bytes && line < last; line += step) {
69 __asm__
volatile(
"dhi %0, 0" ::
"r"(line));
79 size_t step = XCHAL_DCACHE_LINESIZE;
82 for (line = 0; line < XCHAL_DCACHE_SIZE; line += step) {
83 __asm__
volatile(
"dii %0, 0" ::
"r"(line));
93 size_t step = XCHAL_DCACHE_LINESIZE;
96 for (line = 0; line < XCHAL_DCACHE_SIZE; line += step) {
97 __asm__
volatile(
"diwb %0, 0" ::
"r"(line));
107 size_t step = XCHAL_DCACHE_LINESIZE;
110 for (line = 0; line < XCHAL_DCACHE_SIZE; line += step) {
111 __asm__
volatile(
"diwbi %0, 0" ::
"r"(line));
131#if defined(CONFIG_ICACHE) || defined(__DOXYGEN__)
149 xthal_icache_all_invalidate();
170 xthal_icache_region_invalidate(addr, size);
195#if defined(CONFIG_CACHE_DOUBLEMAP)
211 size_t addr = (
size_t) ptr;
213 return (addr >> 29) == CONFIG_XTENSA_CACHED_REGION;
231 size_t addr = (
size_t) ptr;
233 return (addr >> 29) == CONFIG_XTENSA_UNCACHED_REGION;
243 uint32_t rxor = (rto ^ rfrom) << 29;
246 if (Z_IS_POW2(rxor)) {
247 if ((rxor & rto) == 0) {
253 return (addr & ~(7U << 29)) | rto;
279 return (__sparse_force
void __sparse_cache *)z_xtrpoflip((
uint32_t) ptr,
280 CONFIG_XTENSA_CACHED_REGION,
281 CONFIG_XTENSA_UNCACHED_REGION);
304 return (
void *)z_xtrpoflip((__sparse_force
uint32_t)ptr,
305 CONFIG_XTENSA_UNCACHED_REGION,
306 CONFIG_XTENSA_CACHED_REGION);
#define ALWAYS_INLINE
Definition: common.h:129
void arch_dcache_disable(void)
Disable the d-cache.
void arch_icache_disable(void)
Disable the i-cache.
void * arch_cache_uncached_ptr_get(void *ptr)
int arch_dcache_invd_range(void *addr, size_t size)
Invalidate an address range in the d-cache.
int arch_icache_flush_and_invd_all(void)
Flush and Invalidate the i-cache.
int arch_icache_flush_all(void)
Flush the i-cache.
int arch_icache_invd_all(void)
Invalidate the i-cache.
int arch_dcache_flush_range(void *addr, size_t size)
Flush an address range in the d-cache.
size_t arch_icache_line_size_get(void)
Get the i-cache line size.
int arch_icache_flush_range(void *addr, size_t size)
Flush an address range in the i-cache.
int arch_icache_invd_range(void *addr, size_t size)
Invalidate an address range in the i-cache.
int arch_dcache_flush_all(void)
Flush the d-cache.
bool arch_cache_is_ptr_cached(void *ptr)
int arch_icache_flush_and_invd_range(void *addr, size_t size)
Flush and Invalidate an address range in the i-cache.
bool arch_cache_is_ptr_uncached(void *ptr)
void * arch_cache_cached_ptr_get(void *ptr)
int arch_dcache_flush_and_invd_all(void)
Flush and Invalidate the d-cache.
int arch_dcache_invd_all(void)
Invalidate the d-cache.
void arch_icache_enable(void)
Enable the i-cache.
int arch_dcache_flush_and_invd_range(void *addr, size_t size)
Flush and Invalidate an address range in the d-cache.
void arch_dcache_enable(void)
Enable the d-cache.
#define ROUND_UP(x, align)
Value of x rounded up to the next multiple of align.
Definition: util.h:306
#define ROUND_DOWN(x, align)
Value of x rounded down to the previous multiple of align.
Definition: util.h:313
#define ENOTSUP
Unsupported value.
Definition: errno.h:115
Size of off_t must be equal to or less than size of size_t
Definition: retained_mem.h:28
__UINT32_TYPE__ uint32_t
Definition: stdint.h:90