Changes in / [5265eea4:0328987] in mainline


Ignore:
Files:
12 edited

Legend:

Unmodified
Added
Removed
  • boot/arch/arm32/src/asm.S

    r5265eea4 r0328987  
    7575        bic     r4, r4, #(1 << CP15_C1_DC)
    7676
    77         # Disable I-cache and Branch predictors.
 77        # Disable I-cache and Branch predictors.
    7878        bic     r4, r4, #(1 << CP15_C1_IC)
    79 #ifdef PROCESSOR_ARCH_armv6
    8079        bic     r4, r4, #(1 << CP15_C1_BP)
    81 #endif
    8280       
    8381        mcr     p15, 0, r4, c1, c0, 0
    8482#endif
     83
     84
    8585       
    86         # Wait for the operations to complete
     86        #Wait for the operations to complete
    8787#ifdef PROCESSOR_ARCH_armv7_a
    8888        dsb
    8989#else
    90         # cp15 dsb, r4 is ignored (should be zero)
     90        #cp15 dsb, r4 is ignored (should be zero)
    9191        mov r4, #0
    9292        mcr p15, 0, r4, c7, c10, 4
     
    9898        nop
    9999
    100         # Wait for the operations to complete
     100        #Wait for the operations to complete
    101101#ifdef PROCESSOR_ARCH_armv7_a
    102102        isb
    103103        nop
    104 #elif defined(PROCESSOR_ARCH_armv6)
     104#else
    105105        # cp15 isb
    106106        mcr p15, 0, r4, c7, c5, 4
  • boot/arch/arm32/src/main.c

    r5265eea4 r0328987  
    4747#include <errno.h>
    4848#include <inflate.h>
    49 #include <arch/cp15.h>
    5049
    5150#define TOP2ADDR(top)  (((void *) PA2KA(BOOT_OFFSET)) + (top))
     
    5655static inline void clean_dcache_poc(void *address, size_t size)
    5756{
    58         const uintptr_t addr = (uintptr_t) address;
    59 
    60 #if !defined(PROCESSOR_ARCH_armv7_a)
    61         bool sep;
    62         if (MIDR_read() != CTR_read()) {
    63                 sep = (CTR_read() & CTR_SEP_FLAG) == CTR_SEP_FLAG;
    64         } else {
    65                 printf("Unknown cache type.\n");
    66                 halt();
    67         }
    68 #endif
    69 
    70         for (uintptr_t a = ALIGN_DOWN(addr, CP15_C7_MVA_ALIGN); a < addr + size;
    71             a += CP15_C7_MVA_ALIGN) {
    72 #if defined(PROCESSOR_ARCH_armv7_a)
    73                 DCCMVAC_write(a);
    74 #else
    75                 if (sep)
    76                         DCCMVA_write(a);
    77                 else
    78                         CCMVA_write(a);
    79 #endif
     57        const uintptr_t addr = (uintptr_t)address;
     58        for (uintptr_t a = addr; a < addr + size; a += 4) {
     59                /* DCCMVAC - clean by address to the point of coherence */
     60                asm volatile ("mcr p15, 0, %[a], c7, c10, 1\n" :: [a]"r"(a) : );
    8061        }
    8162}
  • boot/arch/arm32/src/mm.c

    r5265eea4 r0328987  
    143143        pte->should_be_zero_1 = 0;
    144144        pte->access_permission_0 = PTE_AP_USER_NO_KERNEL_RW;
    145 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
     145#ifdef PROCESSOR_ARCH_armv7_a
    146146        /*
    147147         * Keeps this setting in sync with memory type attributes in:
     
    152152        pte->tex = section_cacheable(frame) ? 5 : 0;
    153153        pte->cacheable = section_cacheable(frame) ? 0 : 0;
    154         pte->bufferable = section_cacheable(frame) ? 1 : 1;
     154        pte->bufferable = section_cacheable(frame) ? 1 : 0;
    155155#else
    156         pte->bufferable = section_cacheable(frame);
     156        pte->bufferable = 1;
    157157        pte->cacheable = section_cacheable(frame);
    158158        pte->tex = 0;
     
    189189         */
    190190        uint32_t val = (uint32_t)boot_pt & TTBR_ADDR_MASK;
    191 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
    192         // FIXME: TTBR_RGN_WBWA_CACHE is unpredictable on ARMv6
    193191        val |= TTBR_RGN_WBWA_CACHE | TTBR_C_FLAG;
    194 #endif
    195192        TTBR0_write(val);
    196193}
  • kernel/arch/arm32/include/arch/atomic.h

    r5265eea4 r0328987  
    7878 *
    7979 */
    80 NO_TRACE static inline void atomic_dec(atomic_t *val)
    81 {
     80NO_TRACE static inline void atomic_dec(atomic_t *val) {
    8281        atomic_add(val, -1);
    8382}
  • kernel/arch/arm32/include/arch/barrier.h

    r5265eea4 r0328987  
    3838
    3939#ifdef KERNEL
    40 #include <arch/cache.h>
    4140#include <arch/cp15.h>
    42 #include <align.h>
    4341#else
    4442#include <libarch/cp15.h>
     
    7371 * CP15 implementation is mandatory only for armv6+.
    7472 */
    75 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
    7673#define memory_barrier()  CP15DMB_write(0)
    77 #else
    78 #define memory_barrier()  CP15DSB_write(0)
    79 #endif
    80 #define read_barrier()    CP15DSB_write(0)
     74#define read_barrier()    CP15DSB_write(0)
    8175#define write_barrier()   read_barrier()
    82 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
    8376#define inst_barrier()    CP15ISB_write(0)
    84 #else
    85 #define inst_barrier()
    86 #endif
    8777#else
    8878/* Older manuals mention syscalls as a way to implement cache coherency and
     
    113103
    114104#if defined PROCESSOR_ARCH_armv7_a | defined PROCESSOR_ARCH_armv6 | defined KERNEL
     105/* Available on all supported arms,
     106 * invalidates entire ICache so the written value does not matter. */
    115107//TODO might be PL1 only on armv5-
    116108#define smc_coherence(a) \
    117109do { \
    118         dcache_clean_mva_pou(ALIGN_DOWN((uintptr_t) a, CP15_C7_MVA_ALIGN)); \
     110        DCCMVAU_write((uint32_t)(a));  /* Flush changed memory */\
    119111        write_barrier();               /* Wait for completion */\
    120         icache_invalidate();\
     112        ICIALLU_write(0);              /* Flush ICache */\
    121113        inst_barrier();                /* Wait for Inst refetch */\
    122114} while (0)
     
    125117#define smc_coherence_block(a, l) \
    126118do { \
    127         for (uintptr_t addr = (uintptr_t) a; addr < (uintptr_t) a + l; \
    128             addr += CP15_C7_MVA_ALIGN) \
     119        for (uintptr_t addr = (uintptr_t)a; addr < (uintptr_t)a + l; addr += 4)\
    129120                smc_coherence(addr); \
    130121} while (0)
  • kernel/arch/arm32/include/arch/cache.h

    r5265eea4 r0328987  
    3737#define KERN_arm32_CACHE_H_
    3838
    39 #include <typedefs.h>
    40 
    4139unsigned dcache_levels(void);
    4240
     
    4543void cpu_dcache_flush(void);
    4644void cpu_dcache_flush_invalidate(void);
    47 extern void icache_invalidate(void);
    48 extern void dcache_invalidate(void);
    49 extern void dcache_clean_mva_pou(uintptr_t);
     45void icache_invalidate(void);
    5046
    5147#endif
  • kernel/arch/arm32/include/arch/cp15.h

    r5265eea4 r0328987  
    118118};
    119119CONTROL_REG_GEN_READ(CTR, c0, 0, c0, 1);
    120 
    121 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
    122120CONTROL_REG_GEN_READ(TCMR, c0, 0, c0, 2);
    123 
    124 enum {
    125         TLBTR_SEP_FLAG = 1,
    126 };
    127 
    128121CONTROL_REG_GEN_READ(TLBTR, c0, 0, c0, 3);
    129 #endif
    130 
    131 #if defined(PROCESSOR_ARCH_armv7_a)
    132122CONTROL_REG_GEN_READ(MPIDR, c0, 0, c0, 5);
    133123CONTROL_REG_GEN_READ(REVIDR, c0, 0, c0, 6);
    134 #endif
    135124
    136125enum {
     
    320309enum {
    321310        TTBR_ADDR_MASK = 0xffffff80,
    322 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
    323311        TTBR_NOS_FLAG = 1 << 5,
    324312        TTBR_RGN_MASK = 0x3 << 3,
     
    329317        TTBR_S_FLAG = 1 << 1,
    330318        TTBR_C_FLAG = 1 << 0,
    331 #endif
    332319};
    333320CONTROL_REG_GEN_READ(TTBR0, c2, 0, c0, 0);
    334321CONTROL_REG_GEN_WRITE(TTBR0, c2, 0, c0, 0);
    335 
    336 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
    337322CONTROL_REG_GEN_READ(TTBR1, c2, 0, c0, 1);
    338323CONTROL_REG_GEN_WRITE(TTBR1, c2, 0, c0, 1);
    339324CONTROL_REG_GEN_READ(TTBCR, c2, 0, c0, 2);
    340325CONTROL_REG_GEN_WRITE(TTBCR, c2, 0, c0, 2);
    341 #endif
    342 
    343 #if defined(PROCESSOR_ARCH_armv7)
     326
    344327CONTROL_REG_GEN_READ(HTCR, c2, 4, c0, 2);
    345328CONTROL_REG_GEN_WRITE(HTCR, c2, 4, c0, 2);
     
    356339CONTROL_REG_GEN_READ(VTTBRH, c2, 0, c2, 6);
    357340CONTROL_REG_GEN_WRITE(VTTBRH, c2, 0, c2, 6);
    358 #endif
    359341
    360342CONTROL_REG_GEN_READ(DACR, c3, 0, c0, 0);
     
    391373CONTROL_REG_GEN_WRITE(HPFAR, c6, 4, c0, 4);
    392374
    393 /*
    394  * Cache maintenance, address translation and other
    395  */
    396 
    397 #if defined(PROCESSOR_cortex_a8)
    398 #define CP15_C7_MVA_ALIGN       64
    399 #elif defined(PROCESSOR_arm1176)
    400 #define CP15_C7_MVA_ALIGN       32
    401 #elif defined(PROCESSOR_arm926ej_s)
    402 #define CP15_C7_MVA_ALIGN       32
    403 #elif defined(PROCESSOR_arm920t)
    404 #define CP15_C7_MVA_ALIGN       32
    405 #else
    406 #error Unknown MVA alignment
    407 #endif
    408 
    409 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
     375/* Cache maintenance, address translation and other */
     376CONTROL_REG_GEN_WRITE(WFI, c7, 0, c0, 4); /* armv6 only */
     377CONTROL_REG_GEN_WRITE(ICIALLLUIS, c7, 0, c1, 0);
     378CONTROL_REG_GEN_WRITE(BPIALLIS, c7, 0, c1, 6);
     379CONTROL_REG_GEN_READ(PAR, c7, 0, c4, 0);
     380CONTROL_REG_GEN_WRITE(PAR, c7, 0, c4, 0);
     381CONTROL_REG_GEN_READ(PARH, c7, 0, c7, 0);   /* PAE */
     382CONTROL_REG_GEN_WRITE(PARH, c7, 0, c7, 0);   /* PAE */
     383CONTROL_REG_GEN_WRITE(ICIALLU, c7, 0, c5, 0);
     384CONTROL_REG_GEN_WRITE(ICIMVAU, c7, 0, c5, 1);
    410385CONTROL_REG_GEN_WRITE(CP15ISB, c7, 0, c5, 4);
    411386CONTROL_REG_GEN_WRITE(BPIALL, c7, 0, c5, 6);
    412387CONTROL_REG_GEN_WRITE(BPIMVA, c7, 0, c5, 7);
    413 #endif
    414 
    415 #if !defined(PROCESSOR_arm920t)
     388
     389CONTROL_REG_GEN_WRITE(DCIMVAC, c7, 0, c6, 1);
    416390CONTROL_REG_GEN_WRITE(DCISW, c7, 0, c6, 2);
    417 #endif
    418 
    419 #if defined(PROCESSOR_arm920t) || !defined(PROCESSOR_ARCH_armv4)
     391
     392CONTROL_REG_GEN_WRITE(ATS1CPR, c7, 0, c8, 0);
     393CONTROL_REG_GEN_WRITE(ATS1CPW, c7, 0, c8, 1);
     394CONTROL_REG_GEN_WRITE(ATS1CUR, c7, 0, c8, 2);
     395CONTROL_REG_GEN_WRITE(ATS1CUW, c7, 0, c8, 3);
     396CONTROL_REG_GEN_WRITE(ATS12NSOPR, c7, 0, c8, 4);
     397CONTROL_REG_GEN_WRITE(ATS12NSOPW, c7, 0, c8, 5);
     398CONTROL_REG_GEN_WRITE(ATS12NSOUR, c7, 0, c8, 6);
     399CONTROL_REG_GEN_WRITE(ATS12NSOUW, c7, 0, c8, 7);
     400
     401
     402CONTROL_REG_GEN_WRITE(DCCMVAC, c7, 0, c10, 1);
    420403CONTROL_REG_GEN_WRITE(DCCSW, c7, 0, c10, 2);
    421 #endif
    422 
    423404CONTROL_REG_GEN_WRITE(CP15DSB, c7, 0, c10, 4);
    424 
    425 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
    426405CONTROL_REG_GEN_WRITE(CP15DMB, c7, 0, c10, 5);
    427 #endif
    428 
    429 #if defined(PROCESSOR_arm920t) || !defined(PROCESSOR_ARCH_armv4)
     406CONTROL_REG_GEN_WRITE(DCCMVAU, c7, 0, c11, 1);
     407
     408CONTROL_REG_GEN_WRITE(PFI, c7, 0, c11, 1); /* armv6 only */
     409
     410CONTROL_REG_GEN_WRITE(DCCIMVAC, c7, 0, c14, 1);
    430411CONTROL_REG_GEN_WRITE(DCCISW, c7, 0, c14, 2);
    431 #endif
    432 
    433 #if defined(PROCESSOR_ARCH_armv7_a)
    434 CONTROL_REG_GEN_WRITE(ICIALLLUIS, c7, 0, c1, 0);
    435 CONTROL_REG_GEN_WRITE(BPIALLIS, c7, 0, c1, 6);
    436 CONTROL_REG_GEN_READ(PAR, c7, 0, c4, 0); /* Security Extensions */
    437 CONTROL_REG_GEN_WRITE(PAR, c7, 0, c4, 0); /* Security Extensions */
    438 CONTROL_REG_GEN_WRITE(ICIALLU, c7, 0, c5, 0);
    439 CONTROL_REG_GEN_WRITE(ICIMVAU, c7, 0, c5, 1);
    440 CONTROL_REG_GEN_WRITE(DCIMVAC, c7, 0, c6, 1);
    441 CONTROL_REG_GEN_READ(PARH, c7, 0, c7, 0); /* PAE */
    442 CONTROL_REG_GEN_WRITE(PARH, c7, 0, c7, 0); /* PAE */
    443 CONTROL_REG_GEN_WRITE(ATS1CPR, c7, 0, c8, 0); /* Security Extensions */
    444 CONTROL_REG_GEN_WRITE(ATS1CPW, c7, 0, c8, 1); /* Security Extensions */
    445 CONTROL_REG_GEN_WRITE(ATS1CUR, c7, 0, c8, 2); /* Security Extensions */
    446 CONTROL_REG_GEN_WRITE(ATS1CUW, c7, 0, c8, 3); /* Security Extensions */
    447 CONTROL_REG_GEN_WRITE(ATS12NSOPR, c7, 0, c8, 4); /* Security Extensions */
    448 CONTROL_REG_GEN_WRITE(ATS12NSOPW, c7, 0, c8, 5); /* Security Extensions */
    449 CONTROL_REG_GEN_WRITE(ATS12NSOUR, c7, 0, c8, 6); /* Security Extensions */
    450 CONTROL_REG_GEN_WRITE(ATS12NSOUW, c7, 0, c8, 7); /* Security Extensions */
    451 CONTROL_REG_GEN_WRITE(ATS1HR, c7, 4, c8, 0); /* Virtualization Extensions */
    452 CONTROL_REG_GEN_WRITE(ATS1HW, c7, 4, c8, 1); /* Virtualization Extensions */
    453 CONTROL_REG_GEN_WRITE(DCCMVAC, c7, 0, c10, 1);
    454 CONTROL_REG_GEN_WRITE(DCCMVAU, c7, 0, c11, 1);
    455 CONTROL_REG_GEN_WRITE(DCCIMVAC, c7, 0, c14, 1);
    456 #else
    457 
    458 #if defined(PROCESSOR_arm920t) || !defined(PROCESSOR_ARCH_armv4)
    459 CONTROL_REG_GEN_WRITE(WFI, c7, 0, c0, 4);
    460 #endif
    461 
    462 CONTROL_REG_GEN_WRITE(ICIALL, c7, 0, c5, 0);
    463 CONTROL_REG_GEN_WRITE(ICIMVA, c7, 0, c5, 1);
    464 
    465 #if !defined(PROCESSOR_ARCH_armv4)
    466 CONTROL_REG_GEN_WRITE(ICISW, c7, 0, c5, 2);
    467 #endif
    468 
    469 CONTROL_REG_GEN_WRITE(DCIALL, c7, 0, c6, 0);
    470 CONTROL_REG_GEN_WRITE(DCIMVA, c7, 0, c6, 1);
    471 CONTROL_REG_GEN_WRITE(CIALL, c7, 0, c7, 0);
    472 CONTROL_REG_GEN_WRITE(CIMVA, c7, 0, c7, 1);
    473 
    474 #if !defined(PROCESSOR_ARCH_armv4)
    475 CONTROL_REG_GEN_WRITE(CISW, c7, 0, c7, 2);
    476 #endif
    477 
    478 #if defined(PROCESSOR_ARCH_armv4) || defined(PROCESSOR_ARCH_armv6)
    479 CONTROL_REG_GEN_WRITE(DCCALL, c7, 0, c10, 0);
    480 #endif
    481 
    482 CONTROL_REG_GEN_WRITE(DCCMVA, c7, 0, c10, 1);
    483 
    484 #if defined(PROCESSOR_ARCH_armv4) || defined(PROCESSOR_ARCH_armv6)
    485 CONTROL_REG_GEN_WRITE(CCALL, c7, 0, c11, 0);
    486 #endif
    487 
    488 CONTROL_REG_GEN_WRITE(CCMVA, c7, 0, c11, 1);
    489 
    490 #if !defined(PROCESSOR_ARCH_armv4)
    491 CONTROL_REG_GEN_WRITE(CCSW, c7, 0, c11, 2);
    492 #endif
    493 
    494 #if defined(PROCESSOR_arm920t) || !defined(PROCESSOR_ARCH_armv4)
    495 CONTROL_REG_GEN_WRITE(PFIMVA, c7, 0, c13, 1);
    496 #endif
    497 
    498 #if defined(PROCESSOR_ARCH_armv4) || defined(PROCESSOR_ARCH_armv6)
    499 CONTROL_REG_GEN_WRITE(DCCIALL, c7, 0, c14, 0);
    500 #endif
    501 
    502 CONTROL_REG_GEN_WRITE(DCCIMVA, c7, 0, c14, 1);
    503 
    504 #if defined(PROCESSOR_ARCH_armv4) || defined(PROCESSOR_ARCH_armv6)
    505 CONTROL_REG_GEN_WRITE(CCIALL, c7, 0, c15, 0);
    506 #endif
    507 
    508 CONTROL_REG_GEN_WRITE(CCIMVA, c7, 0, c15, 1);
    509 
    510 #if defined(PROCESSOR_ARCH_armv5) || defined(PROCESSOR_ARCH_armv6)
    511 CONTROL_REG_GEN_WRITE(CCISW, c7, 0, c15, 2);
    512 #endif
    513 
    514 #endif
     412
     413CONTROL_REG_GEN_WRITE(ATS1HR, c7, 4, c8, 0);
     414CONTROL_REG_GEN_WRITE(ATS1HW, c7, 4, c8, 1);
    515415
    516416/* TLB maintenance */
    517 #if defined(PROCESSOR_ARCH_armv7_a)
    518417CONTROL_REG_GEN_WRITE(TLBIALLIS, c8, 0, c3, 0); /* Inner shareable */
    519418CONTROL_REG_GEN_WRITE(TLBIMVAIS, c8, 0, c3, 1); /* Inner shareable */
    520419CONTROL_REG_GEN_WRITE(TLBIASIDIS, c8, 0, c3, 2); /* Inner shareable */
    521420CONTROL_REG_GEN_WRITE(TLBIMVAAIS, c8, 0, c3, 3); /* Inner shareable */
    522 #endif
    523421
    524422CONTROL_REG_GEN_WRITE(ITLBIALL, c8, 0, c5, 0);
    525423CONTROL_REG_GEN_WRITE(ITLBIMVA, c8, 0, c5, 1);
    526 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
    527424CONTROL_REG_GEN_WRITE(ITLBIASID, c8, 0, c5, 2);
    528 #endif
    529425
    530426CONTROL_REG_GEN_WRITE(DTLBIALL, c8, 0, c6, 0);
    531427CONTROL_REG_GEN_WRITE(DTLBIMVA, c8, 0, c6, 1);
    532 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
    533428CONTROL_REG_GEN_WRITE(DTLBIASID, c8, 0, c6, 2);
    534 #endif
    535429
    536430CONTROL_REG_GEN_WRITE(TLBIALL, c8, 0, c7, 0);
    537431CONTROL_REG_GEN_WRITE(TLBIMVA, c8, 0, c7, 1);
    538 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
    539432CONTROL_REG_GEN_WRITE(TLBIASID, c8, 0, c7, 2);
    540 #endif
    541 #if defined(PROCESSOR_ARCH_armv7_a)
    542433CONTROL_REG_GEN_WRITE(TLBIMVAA, c8, 0, c7, 3);
    543 #endif
    544 
    545 #if defined(PROCESSOR_ARCH_armv7_a)
     434
    546435CONTROL_REG_GEN_WRITE(TLBIALLHIS, c8, 4, c3, 0); /* Inner shareable */
    547436CONTROL_REG_GEN_WRITE(TLBIMVAHIS, c8, 4, c3, 1); /* Inner shareable */
    548437CONTROL_REG_GEN_WRITE(TLBIALLNSNHIS, c8, 4, c3, 4); /* Inner shareable */
    549 #endif
    550 
    551 #if defined(PROCESSOR_ARCH_armv7_a)
     438
    552439CONTROL_REG_GEN_WRITE(TLBIALLH, c8, 4, c7, 0);
    553440CONTROL_REG_GEN_WRITE(TLBIMVAH, c8, 4, c7, 1);
    554441CONTROL_REG_GEN_WRITE(TLBIALLNSNHS, c8, 4, c7, 4);
    555 #endif
    556442
    557443/* c9 are performance monitoring registers */
  • kernel/arch/arm32/include/arch/mm/page.h

    r5265eea4 r0328987  
    154154{
    155155        uint32_t val = (uint32_t)pt & TTBR_ADDR_MASK;
    156 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
    157         // FIXME: TTBR_RGN_WBWA_CACHE is unpredictable on ARMv6
    158156        val |= TTBR_RGN_WBWA_CACHE | TTBR_C_FLAG;
    159 #endif
    160157        TTBR0_write(val);
    161158}
  • kernel/arch/arm32/include/arch/mm/page_armv4.h

    r5265eea4 r0328987  
    123123do { \
    124124        for (unsigned i = 0; i < count; ++i) \
    125                 dcache_clean_mva_pou((uintptr_t)(pt + i)); \
     125                DCCMVAU_write((uintptr_t)(pt + i)); \
    126126        read_barrier(); \
    127127} while (0)
  • kernel/arch/arm32/include/arch/mm/page_armv6.h

    r5265eea4 r0328987  
    156156do { \
    157157        for (unsigned i = 0; i < count; ++i) \
    158                 dcache_clean_mva_pou((uintptr_t)(pt + i)); \
     158                DCCMVAU_write((uintptr_t)(pt + i)); \
    159159        read_barrier(); \
    160160} while (0)
     161
    161162
    162163/** Returns level 0 page table entry flags.
     
    257258        if (flags & PAGE_CACHEABLE) {
    258259                /*
    259                  * Outer and inner write-back, write-allocate memory,
    260                  * see ch. B3.8.2 (p. B3-1358) of ARM Architecture reference
    261                  * manual.
    262                  *
     260                 * Write-through, write-allocate memory, see ch. B3.8.2
     261                 * (p. B3-1358) of ARM Architecture reference manual.
    263262                 * Make sure the memory type is correct, and in sync with:
    264263                 * init_boot_pt (boot/arch/arm32/src/mm.c)
     
    279278        }
    280279       
     280#if defined(PROCESSOR_ARCH_armv6)
     281        /* FIXME: this disables caches */
     282        p->shareable = 1;
     283#else
    281284        /* Shareable is ignored for devices (non-cacheable),
    282285         * turn it off for normal memory. */
    283286        p->shareable = 0;
     287#endif
    284288       
    285289        p->non_global = !(flags & PAGE_GLOBAL);
  • kernel/arch/arm32/src/cpu/cpu.c

    r5265eea4 r0328987  
    130130{
    131131        uint32_t control_reg = SCTLR_read();
    132 
    133         dcache_invalidate();
    134         read_barrier();
    135 
     132       
    136133        /* Turn off tex remap, RAZ/WI prior to armv7 */
    137134        control_reg &= ~SCTLR_TEX_REMAP_EN_FLAG;
     
    325322void icache_invalidate(void)
    326323{
    327 #if defined(PROCESSOR_ARCH_armv7_a)
    328324        ICIALLU_write(0);
    329 #else
    330         ICIALL_write(0);
    331 #endif
    332 }
    333 
    334 #if !defined(PROCESSOR_ARCH_armv7_a)
    335 static bool cache_is_unified(void)
    336 {
    337         if (MIDR_read() != CTR_read()) {
    338                 /* We have the CTR register */
    339                 return (CTR_read() & CTR_SEP_FLAG) != CTR_SEP_FLAG;
    340         } else {
    341                 panic("Unknown cache type");
    342         }
    343 }
    344 #endif
    345 
    346 void dcache_invalidate(void)
    347 {
    348 #if defined(PROCESSOR_ARCH_armv7_a)
    349         dcache_flush_invalidate();
    350 #else
    351         if (cache_is_unified())
    352                 CIALL_write(0);
    353         else
    354                 DCIALL_write(0);
    355 #endif
    356 }
    357 
    358 void dcache_clean_mva_pou(uintptr_t mva)
    359 {
    360 #if defined(PROCESSOR_ARCH_armv7_a)
    361         DCCMVAU_write(mva);
    362 #else
    363         if (cache_is_unified())
    364                 CCMVA_write(mva);
    365         else
    366                 DCCMVA_write(mva);
    367 #endif
    368325}
    369326
  • kernel/arch/arm32/src/mm/tlb.c

    r5265eea4 r0328987  
    7979static inline void invalidate_page(uintptr_t page)
    8080{
    81 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a)
    82         if (TLBTR_read() & TLBTR_SEP_FLAG) {
    83                 ITLBIMVA_write(page);
    84                 DTLBIMVA_write(page);
    85         } else {
    86                 TLBIMVA_write(page);
    87         }
    88 #elif defined(PROCESSOR_arm920t)
    89         ITLBIMVA_write(page);
    90         DTLBIMVA_write(page);
    91 #elif defined(PROCESSOR_arm926ej_s)
     81        //TODO: What about TLBIMVAA?
    9282        TLBIMVA_write(page);
    93 #else
    94 #error Unknown TLB type
    95 #endif
    96 
    9783        /*
    9884         * "A TLB maintenance operation is only guaranteed to be complete after
Note: See TracChangeset for help on using the changeset viewer.