[BACK]Return to cpufunc_asm.S CVS log [TXT][DIR] Up to [cvs.NetBSD.org] / src / sys / arch / arm / arm

Annotation of src/sys/arch/arm/arm/cpufunc_asm.S, Revision 1.8

1.8     ! chris       1: /*     $NetBSD: cpufunc_asm.S,v 1.7 2001/09/05 16:14:49 matt Exp $     */
1.1       bjh21       2:
                      3: /*
1.5       matt        4:  * xscale support code Copyright (c) 2001 Matt Thomas
1.3       chris       5:  * arm7tdmi support code Copyright (c) 2001 John Fremlin
1.1       bjh21       6:  * arm8 support code Copyright (c) 1997 ARM Limited
                      7:  * arm8 support code Copyright (c) 1997 Causality Limited
                      8:  * Copyright (c) 1997,1998 Mark Brinicombe.
                      9:  * Copyright (c) 1997 Causality Limited
                     10:  * All rights reserved.
                     11:  *
                     12:  * Redistribution and use in source and binary forms, with or without
                     13:  * modification, are permitted provided that the following conditions
                     14:  * are met:
                     15:  * 1. Redistributions of source code must retain the above copyright
                     16:  *    notice, this list of conditions and the following disclaimer.
                     17:  * 2. Redistributions in binary form must reproduce the above copyright
                     18:  *    notice, this list of conditions and the following disclaimer in the
                     19:  *    documentation and/or other materials provided with the distribution.
                     20:  * 3. All advertising materials mentioning features or use of this software
                     21:  *    must display the following acknowledgement:
                     22:  *     This product includes software developed by Causality Limited.
                     23:  * 4. The name of Causality Limited may not be used to endorse or promote
                     24:  *    products derived from this software without specific prior written
                     25:  *    permission.
                     26:  *
                     27:  * THIS SOFTWARE IS PROVIDED BY CAUSALITY LIMITED ``AS IS'' AND ANY EXPRESS
                     28:  * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
                     29:  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
                     30:  * DISCLAIMED. IN NO EVENT SHALL CAUSALITY LIMITED BE LIABLE FOR ANY DIRECT,
                     31:  * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
                     32:  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
                     33:  * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
                     34:  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
                     35:  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
                     36:  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
                     37:  * SUCH DAMAGE.
                     38:  *
                     39:  * RiscBSD kernel project
                     40:  *
                     41:  * cpufunc.S
                     42:  *
                     43:  * Assembly functions for CPU / MMU / TLB specific operations
                     44:  *
                     45:  * Created      : 30/01/97
                     46:  */
                     47:
                     48: #include <machine/cpu.h>
                     49: #include <machine/asm.h>
                     50:
/* Symbolic register aliases: sp/lr/pc map onto r13/r14/r15 per the ARM convention. */
                      51: sp     .req    r13
                      52: lr     .req    r14
                      53: pc     .req    r15
                      54:
/* All code below goes in the text section; .align 0 = byte alignment (no padding). */
                      55:        .text
                      56:        .align  0
                     57:
/* cpufunc_nullop: do-nothing CPU function; returns immediately to the caller. */
                      58: ENTRY(cpufunc_nullop)
                      59:        mov     pc, lr
                     60:
                     61: /*
                     62:  * Generic functions to read the internal coprocessor registers
                     63:  *
                     64:  * Currently these registers are :
                     65:  *  c0 - CPU ID
                     66:  *  c5 - Fault status
                     67:  *  c6 - Fault address
                     68:  *
                     69:  */
                     70:
/* cpufunc_id: return the CPU ID register (CP15 c0) in r0. */
                      71: ENTRY(cpufunc_id)
                      72:        mrc     15, 0, r0, c0, c0, 0
                      73:        mov     pc, lr
                      74:
/* cpu_get_control: return the CPU control register (CP15 c1) in r0. */
                      75: ENTRY(cpu_get_control)
                      76:        mrc     15, 0, r0, c1, c0, 0
                      77:        mov     pc, lr
                      78:
/* cpufunc_faultstatus: return the fault status register (CP15 c5) in r0. */
                      79: ENTRY(cpufunc_faultstatus)
                      80:        mrc     15, 0, r0, c5, c0, 0
                      81:        mov     pc, lr
                      82:
/* cpufunc_faultaddress: return the fault address register (CP15 c6) in r0. */
                      83: ENTRY(cpufunc_faultaddress)
                      84:        mrc     15, 0, r0, c6, c0, 0
                      85:        mov     pc, lr
                     86:
                     87:
                     88: /*
                     89:  * Generic functions to write the internal coprocessor registers
                     90:  *
                     91:  *
                     92:  * Currently these registers are
                     93:  *  c1 - CPU Control
                     94:  *  c3 - Domain Access Control
                     95:  *
                     96:  * All other registers are CPU architecture specific
                     97:  */
                     98:
                     99: /*ENTRY(cpufunc_control)
                    100:        mcr     15, 0, r0, c1, c0, 0
                    101:        mov     pc, lr*/
                    102:
/* cpufunc_domains: write r0 into the domain access control register (CP15 c3). */
                     103: ENTRY(cpufunc_domains)
                     104:        mcr     15, 0, r0, c3, c0, 0
                     105:        mov     pc, lr
                    106:
                    107: /*
                    108:  * Generic functions to read/modify/write the internal coprocessor registers
                    109:  *
                    110:  *
                    111:  * Currently these registers are
                    112:  *  c1 - CPU Control
                    113:  *
                    114:  * All other registers are CPU architecture specific
                    115:  */
                    116:
/*
 * cpufunc_control(clear, bic_xor):
 *   r0 = mask of bits to clear, r1 = mask XORed into the result.
 *   Read-modify-write of the CPU control register (CP15 c1); the write is
 *   skipped when the value is unchanged.  Returns the previous value in r0.
 */
                     117: ENTRY(cpufunc_control)
                     118:        mrc     15, 0, r3, c1, c0, 0    /* Read the control register */
                     119:        bic     r2, r3, r0              /* Clear bits */
                     120:        eor     r2, r2, r1              /* XOR bits */
                     121:
                     122:        teq     r2, r3                  /* Only write if there is a change */
                     123:        mcrne   15, 0, r2, c1, c0, 0    /* Write new control register */
                     124:        mov     r0, r3                  /* Return old value */
                     125:        mov     pc, lr
                    126:
/*
 * arm3_control(clear, xor): same read-modify-write contract as cpufunc_control
 * above (r0 = clear mask, r1 = XOR mask, old value returned in r0), but the
 * ARM3 keeps its control register in CP15 c2 instead of c1.
 */
1.2       bjh21     127: #ifdef CPU_ARM3
                     128:        /* The ARM3 has its control register in a different place. */
                     129: ENTRY(arm3_control)
                     130:        mrc     15, 0, r3, c2, c0, 0    /* Read the control register */
                     131:        bic     r2, r3, r0              /* Clear bits */
                     132:        eor     r2, r2, r1              /* XOR bits */
                     133:
                     134:        teq     r2, r3                  /* Only write if there is a change */
                     135:        mcrne   15, 0, r2, c2, c0, 0    /* Write new control register */
                     136:        mov     r0, r3                  /* Return old value */
                     137:        mov     pc, lr
                     138: #endif
                    139:
/*
 * arm8_clock_config(clear, xor):
 *   Read-modify-write of the ARM8 clock/test register (CP15 c15).
 *   r0 = bits to clear, r1 = bits to XOR in; returns the old value in r0.
 *   Dynamic clocking (bit 0) and the L bit (bit 4) are forced off while the
 *   new value is being written; the NOPs give the clock switch time to settle
 *   before the final value is committed.  (Exact settle requirement is per
 *   the ARM810 data sheet -- NOTE(review): not verifiable from this file.)
 */
1.1       bjh21     140: #ifdef CPU_ARM8
                     141: ENTRY(arm8_clock_config)
                     142:        mrc     15, 0, r3, c15, c0, 0   /* Read the clock register */
                     143:        bic     r2, r3, #0x11           /* turn off dynamic clocking
                     144:                                           and clear L bit */
                     145:        mcr     15, 0, r2, c15, c0, 0   /* Write clock register */
                     146:
                     147:        bic     r2, r3, r0              /* Clear bits */
                     148:        eor     r2, r2, r1              /* XOR bits */
                     149:        bic     r2, r2, #0x10           /* clear the L bit */
                     150:
                     151:        bic     r1, r2, #0x01           /* still keep dynamic clocking off */
                     152:        mcr     15, 0, r1, c15, c0, 0   /* Write clock register */
                     153:        mov     r0, r0                  /* NOP */
                     154:        mov     r0, r0                  /* NOP */
                     155:        mov     r0, r0                  /* NOP */
                     156:        mov     r0, r0                  /* NOP */
                     157:        mcr     15, 0, r2, c15, c0, 0   /* Write clock register */
                     158:        mov     r0, r3                  /* Return old value */
                     159:        mov     pc, lr
                     160: #endif /* CPU_ARM8 */
                    161:
                    162: /*
                    163:  * Functions to set the MMU Translation Table Base register
                    164:  */
                    165:
/*
 * arm67_setttb(ttb): install a new translation table base (r0) on ARM6/ARM7.
 * The virtually-addressed cache is flushed before and after the TTB write,
 * and the TLB is flushed, since all cached translations become stale.
 */
                     166: #if defined(CPU_ARM6) || defined(CPU_ARM7)
                     167: ENTRY(arm67_setttb)
                     168:
                     169:        /*
                     170:         * We need to flush the cache as it uses virtual addresses that
                     171:         * are about to change
                     172:         */
                     173:         mcr     15, 0, r0, c7, c0, 0
                     174:
                     175:        /* Write the TTB */
                     176:        mcr     15, 0, r0, c2, c0, 0
                     177:
                     178:        /* If we have updated the TTB we must flush the TLB */
                     179:         mcr     15, 0, r0, c5, c0, 0
                     180:
                     181:        /* For good measure we will flush the IDC as well */
                     182:         mcr     15, 0, r0, c7, c0, 0
                     183:
                     184:        /* Make sure that pipeline is emptied */
                     185:         mov     r0, r0
                     186:         mov     r0, r0
                     187:
                     188:        mov     pc, lr
                     189: #endif /* CPU_ARM6 || CPU_ARM7 */
                    190:
/*
 * arm7tdmi_setttb(ttb): install a new translation table base (r0) on ARM7TDMI.
 * r0/lr are stashed in r1/r2 because the helper calls below clobber them;
 * caches are flushed before and after the TTB write and the TLB is flushed.
 */
1.3       chris     191: #ifdef CPU_ARM7TDMI
                     192:
                     193: ENTRY(arm7tdmi_setttb)
                     194:        mov     r1,r0 /* store the ttb in a safe place */
                     195:        mov     r2,lr /* ditto with lr */
                     196:
                     197:        bl      _C_LABEL(arm7tdmi_cache_flushID)
                     198:
                     199:        /* Write the TTB */
                     200:        mcr     p15, 0, r1, c2, c0, 0
                     201:
                     202:        /* If we have updated the TTB we must flush the TLB */
                     203:        bl      _C_LABEL(arm7tdmi_tlb_flushID)
                     204:        /* For good measure we will flush the IDC as well */
                     205:        bl      _C_LABEL(arm7tdmi_cache_flushID)
                     206:
                     207:        mov     pc, r2
                     208: #endif /* CPU_ARM7TDMI */
                    209:
/*
 * arm8_setttb(ttb): install a new translation table base (r0) on ARM8.
 * IRQs/FIQs are masked (cpsr saved in r3, restored at the end) so the
 * cache clean + TTB switch sequence is not interrupted; the I+D cache is
 * cleaned and flushed around the write and the TLB is invalidated.
 */
1.1       bjh21     210: #ifdef CPU_ARM8
                     211: ENTRY(arm8_setttb)
                     212:        /* We need to clean and flush the cache as it uses virtual
                     213:         * addresses that are about to change
                     214:         */
                     215:        mrs     r3, cpsr_all
                     216:        orr     r1, r3, #(I32_bit | F32_bit)
                     217:        msr     cpsr_all , r1
                     218:
                     219:        stmfd   sp!, {r0-r3, lr}
                     220:        bl      _C_LABEL(arm8_cache_cleanID)
                     221:        ldmfd   sp!, {r0-r3, lr}
                     222:        mcr     15, 0, r0, c7, c7, 0            /* flush I+D cache */
                     223:
                     224:        /* Write the TTB */
                     225:        mcr     15, 0, r0, c2, c0, 0
                     226:
                     227:        /* If we have updated the TTB we must flush the TLB */
                     228:        mcr     15, 0, r0, c8, c7, 0
                     229:
                     230:        /* For good measure we will flush the IDC as well */
                     231:        mcr     15, 0, r0, c7, c7, 0
                     232:
                     233:        /* Make sure that pipeline is emptied */
                     234:        mov     r0, r0
                     235:        mov     r0, r0
                     236:        msr     cpsr_all , r3
                     237:
                     238:        mov     pc, lr
                     239: #endif /* CPU_ARM8 */
                    240:
                    241:
/*
 * Literal pool entry holding the address of the C variable
 * block_userspace_access; loaded by sa110_setttb/xscale_setttb below when
 * CACHE_CLEAN_BLOCK_INTR is not defined.
 */
1.5       matt      242: #if defined(CPU_SA110) || defined(CPU_XSCALE)
1.1       bjh21     243: Lblock_userspace_access:
                     244:        .word   _C_LABEL(block_userspace_access)
1.5       matt      245: #endif
1.1       bjh21     246:
/*
 * sa110_setttb(ttb): install a new translation table base (r0) on SA-110.
 * While the cache is cleaned, either interrupts are masked
 * (CACHE_CLEAN_BLOCK_INTR; cpsr saved in r3) or the C flag
 * block_userspace_access is set (old value kept in r2, restored at the end).
 * The D-cache is cleaned via sa110_cache_cleanID, the I-cache/BTB and write
 * buffer are flushed, then the TTB is written and the TLB invalidated.
 */
1.5       matt      247: #if defined(CPU_SA110)
1.1       bjh21     248: ENTRY(sa110_setttb)
                     249:        /* We need to flush the cache as it uses virtual addresses that are about to change */
                     250: #ifdef CACHE_CLEAN_BLOCK_INTR
                     251:        mrs     r3, cpsr_all
                     252:        orr     r1, r3, #(I32_bit | F32_bit)
                     253:        msr     cpsr_all , r1
                     254: #else
                     255:        ldr     r3, Lblock_userspace_access
                     256:        ldr     r2, [r3]
                     257:        orr     r1, r2, #1
                     258:        str     r1, [r3]
                     259: #endif
                     260:        stmfd   sp!, {r0-r3, lr}
                     261:        bl      _C_LABEL(sa110_cache_cleanID)
                     262:        ldmfd   sp!, {r0-r3, lr}
1.5       matt      263:        mcr     15, 0, r0, c7, c5, 0    /* invalidate icache & BTB */
                     264:        mcr     15, 0, r0, c7, c10, 4   /* drain write (& fill) buffer */
1.1       bjh21     265:
                     266:        /* Write the TTB */
1.5       matt      267:        mcr     15, 0, r0, c2, c0, 0    /* set translation table base */
1.1       bjh21     268:
                     269:        /* If we have updated the TTB we must flush the TLB */
1.5       matt      270:         mcr     15, 0, r0, c8, c7, 0   /* invalidate I&D TLB */
1.1       bjh21     271:
                     272:        /* The cleanID above means we only need to flush the I cache here */
1.5       matt      273:         mcr     15, 0, r0, c7, c5, 0   /* invalidate icache & BTB */
1.1       bjh21     274:
                     275:        /* Make sure that pipeline is emptied */
                     276:         mov     r0, r0
                     277:         mov     r0, r0
                     278: #ifdef CACHE_CLEAN_BLOCK_INTR
                     279:        msr     cpsr_all, r3
                     280: #else
                     281:        str     r2, [r3]
                     282: #endif
                     283:        mov     pc, lr
                     284: #endif /* CPU_SA110 */
                    285:
/*
 * xscale_setttb(ttb): install a new translation table base (r0) on XScale.
 * Same structure as sa110_setttb above (mask interrupts or set
 * block_userspace_access while cleaning, then write TTB and flush TLB),
 * but the pipeline drain uses the XScale-documented CP15-read +
 * "sub pc, pc, #4" sequence instead of plain NOPs.
 */
1.5       matt      286: #if defined(CPU_XSCALE)
                     287: ENTRY(xscale_setttb)
                     288:        /* We need to flush the cache as it uses virtual addresses that are about to change */
                     289: #ifdef CACHE_CLEAN_BLOCK_INTR
                     290:        mrs     r3, cpsr_all
                     291:        orr     r1, r3, #(I32_bit | F32_bit)
                     292:        msr     cpsr_all , r1
                     293: #else
                     294:        ldr     r3, Lblock_userspace_access
                     295:        ldr     r2, [r3]
                     296:        orr     r1, r2, #1
                     297:        str     r1, [r3]
                     298: #endif
                     299:        stmfd   sp!, {r0-r3, lr}
                     300:        bl      _C_LABEL(xscale_cache_cleanID)
                     301:        ldmfd   sp!, {r0-r3, lr}
                     302:        mcr     15, 0, r0, c7, c5, 0    /* invalidate icache & BTB */
                     303:        mcr     15, 0, r0, c7, c10, 4   /* drain write (& fill) buffer */
                     304:
                     305:        /* Write the TTB */
                     306:        mcr     15, 0, r0, c2, c0, 0    /* set translation table base */
                     307:
                     308:        /* If we have updated the TTB we must flush the TLB */
                     309:         mcr     15, 0, r0, c8, c7, 0   /* invalidate I&D TLB */
                     310:
                     311:        /* The cleanID above means we only need to flush the I cache here */
                     312:         mcr     15, 0, r0, c7, c5, 0   /* invalidate icache & BTB */
                     313:
                     314:        /* Make sure that pipeline is emptied */
1.6       matt      315:        mrc     15, 0, r0, c2, c0, 0    /* read some register in CP15 */
                     316:         mov    r0, r0                  /* for the read to complete */
                     317:         sub    pc, pc, #4              /* branch to next instruction */
                     318:                                        /*  (flush the instruction pipeline) */
1.5       matt      319: #ifdef CACHE_CLEAN_BLOCK_INTR
                     320:        msr     cpsr_all, r3
                     321: #else
                     322:        str     r2, [r3]
                     323: #endif
                     324:        mov     pc, lr
                     325: #endif /* CPU_XSCALE */
                    326:
1.1       bjh21     327: /*
                    328:  * TLB functions
                    329:  */
                    330:
/* arm67_tlb_flush: invalidate the entire TLB (CP15 c5 write) on ARM6/ARM7. */
                     331: #if defined(CPU_ARM6) || defined(CPU_ARM7)
                     332: ENTRY(arm67_tlb_flush)
                     333:        mcr     15, 0, r0, c5, c0, 0
                     334:        mov     pc, lr
                     335:
/* arm67_tlb_purge: purge a TLB entry (CP15 c6 write, address in r0). */
                     336: ENTRY(arm67_tlb_purge)
                     337:        mcr     15, 0, r0, c6, c0, 0
                     338:        mov     pc, lr
                     339: #endif /* CPU_ARM6 || CPU_ARM7 */
                    340:
/* arm7tdmi_tlb_flushID: invalidate the whole I+D TLB (r0 is clobbered). */
1.3       chris     341: #ifdef CPU_ARM7TDMI
                     342: ENTRY(arm7tdmi_tlb_flushID)
                     343:        mov     r0,#0
                     344:        mcr     p15, 0, r0, c8, c7, 0
                     345:        mov     pc,lr
                     346:
/* arm7tdmi_tlb_flushID_SE: invalidate a single I+D TLB entry (address in r0). */
                     347: ENTRY(arm7tdmi_tlb_flushID_SE)
                     348:        mcr     p15, 0, r0, c8, c7, 1
                     349:        mov     pc,lr
                     350: #endif
/* arm8_tlb_flushID: invalidate the whole I+D TLB on ARM8. */
1.1       bjh21     351: #ifdef CPU_ARM8
                     352: ENTRY(arm8_tlb_flushID)
                     353:        mcr     15, 0, r0, c8, c7, 0            /* flush I+D tlb */
                     354:        mov     pc, lr
                     355:
/* arm8_tlb_flushID_SE: invalidate a single I+D TLB entry (address in r0). */
                     356: ENTRY(arm8_tlb_flushID_SE)
                     357:        mcr     15, 0, r0, c8, c7, 1            /* flush I+D tlb single entry */
                     358:        mov     pc, lr
                     359: #endif /* CPU_ARM8 */
                    360:
/*
 * SA-110 / XScale TLB operations.  The xscale_* names are aliases
 * (ENTRY_NP) falling through into the shared sa110_* bodies where the
 * instruction sequence is identical; the *_SE (single-entry) variants
 * differ because the SA-110 cannot invalidate a single I-TLB entry while
 * the XScale can (and also invalidates its branch target buffer).
 */
1.5       matt      361: #if defined(CPU_SA110) || defined(CPU_XSCALE)
1.1       bjh21     363: ENTRY(sa110_tlb_flushID)
                     364:        mcr     15, 0, r0, c8, c7, 0            /* flush I+D tlb */
                     365:        mov     pc, lr
                     366:
1.5       matt      367: #if defined(CPU_SA110)
1.1       bjh21     368: ENTRY(sa110_tlb_flushID_SE)
                     369:        mcr     15, 0, r0, c8, c6, 1            /* flush D tlb single entry */
                     370:        mcr     15, 0, r0, c8, c5, 0            /* flush I tlb */
                     371:        mov     pc, lr
1.5       matt      372: #endif /* CPU_SA110 */
                     373:
                     374: #if defined(CPU_XSCALE)
                     375: ENTRY(xscale_tlb_flushID_SE)
                     376:        mcr     15, 0, r0, c8, c6, 1            /* flush D tlb single entry */
                     377:        mcr     15, 0, r0, c8, c5, 1            /* flush I tlb single entry */
1.6       matt      378:        mcr     15, 0, r0, c7, c5, 6            /* inv. branch target buffer */
1.5       matt      379:        mov     pc, lr
                     380: #endif /* CPU_XSCALE */
1.1       bjh21     381:
1.5       matt      382: ENTRY_NP(xscale_tlb_flushI)
1.1       bjh21     383: ENTRY(sa110_tlb_flushI)
                     384:        mcr     15, 0, r0, c8, c5, 0            /* flush I tlb */
                     385:        mov     pc, lr
                     386:
1.5       matt      387: ENTRY_NP(xscale_tlb_flushD)
1.1       bjh21     388: ENTRY(sa110_tlb_flushD)
                     389:        mcr     15, 0, r0, c8, c6, 0            /* flush D tlb */
                     390:        mov     pc, lr
                     391:
1.5       matt      392: ENTRY_NP(xscale_tlb_flushD_SE)
1.1       bjh21     393: ENTRY(sa110_tlb_flushD_SE)
                     394:        mcr     15, 0, r0, c8, c6, 1            /* flush D tlb single entry */
                     395:        mov     pc, lr
1.5       matt      396: #endif /* CPU_SA110 || CPU_XSCALE */
1.1       bjh21     397:
                    398: /*
                    399:  * Cache functions
                    400:  */
1.2       bjh21     401:
/* arm3_cache_flush: flush the ARM3 cache (its cache control lives in CP15 c1). */
                     402: #if defined(CPU_ARM3)
                     403: ENTRY(arm3_cache_flush)
                     404:        mcr     15, 0, r0, c1, c0, 0
                     405:        mov     pc, lr
                     406: #endif /* CPU_ARM3 */
1.1       bjh21     407:
/* arm67_cache_flush: flush the ID cache on ARM6/ARM7 (CP15 c7 write). */
                     408: #if defined(CPU_ARM6) || defined(CPU_ARM7)
                     409: ENTRY(arm67_cache_flush)
                     410:        mcr     15, 0, r0, c7, c0, 0
                     411:        mov     pc, lr
                     412: #endif /* CPU_ARM6 || CPU_ARM7 */
                    413:
/*
 * arm7tdmi_cache_flushID: flush the whole I+D cache (CP15 c7,c7).
 * r0 is clobbered; two NOPs drain the pipeline after the flush.
 */
1.3       chris     414: #ifdef CPU_ARM7TDMI
                     415: ENTRY(arm7tdmi_cache_flushID)
                     416:        mov     r0, #0
                     417:
                     418:        mcr     p15, 0, r0, c7, c7, 0
                     419:        /* Make sure that the pipeline is emptied */
                     420:        mov     r0, r0
                     421:        mov     r0, r0
                     422:
                     423:        mov     pc,lr
                     424: #endif
                    425:
/* arm8_cache_flushID: invalidate the entire I+D cache. */
1.1       bjh21     426: #ifdef CPU_ARM8
                     427: ENTRY(arm8_cache_flushID)
                     428:        mcr     15, 0, r0, c7, c7, 0            /* flush I+D cache */
                     429:        mov     pc, lr
                     430:
/* arm8_cache_flushID_E: invalidate a single I+D cache entry (address in r0). */
                     431: ENTRY(arm8_cache_flushID_E)
                     432:        mcr     15, 0, r0, c7, c7, 1            /* flush I+D single entry */
                     433:        mov     pc, lr
                    434:
/*
 * arm8_cache_cleanID: clean (write back) the entire I+D cache by walking
 * addresses.  The unrolled inner loop cleans 16 entries 0x10 apart; the
 * outer loop advances r0 by 0x04000000 and terminates when the 32-bit add
 * wraps r0 back to zero (64 iterations).  Clobbers r0, r2, flags.
 * NOTE(review): the 0x10/0x04000000 strides presumably match the ARM810
 * cache line/segment geometry -- confirm against the ARM810 data sheet.
 */
                     435: ENTRY(arm8_cache_cleanID)
                     436:        mov     r0, #0x00000000
                     437:
                     438: Larm8_cache_cleanID_loop:
                     439:        mov     r2, r0
                     440:        mcr     15, 0, r2, c7, c11, 1
                     441:        add     r2, r2, #0x10
                     442:        mcr     15, 0, r2, c7, c11, 1
                     443:        add     r2, r2, #0x10
                     444:        mcr     15, 0, r2, c7, c11, 1
                     445:        add     r2, r2, #0x10
                     446:        mcr     15, 0, r2, c7, c11, 1
                     447:        add     r2, r2, #0x10
                     448:        mcr     15, 0, r2, c7, c11, 1
                     449:        add     r2, r2, #0x10
                     450:        mcr     15, 0, r2, c7, c11, 1
                     451:        add     r2, r2, #0x10
                     452:        mcr     15, 0, r2, c7, c11, 1
                     453:        add     r2, r2, #0x10
                     454:        mcr     15, 0, r2, c7, c11, 1
                     455:        add     r2, r2, #0x10
                     456:        mcr     15, 0, r2, c7, c11, 1
                     457:        add     r2, r2, #0x10
                     458:        mcr     15, 0, r2, c7, c11, 1
                     459:        add     r2, r2, #0x10
                     460:        mcr     15, 0, r2, c7, c11, 1
                     461:        add     r2, r2, #0x10
                     462:        mcr     15, 0, r2, c7, c11, 1
                     463:        add     r2, r2, #0x10
                     464:        mcr     15, 0, r2, c7, c11, 1
                     465:        add     r2, r2, #0x10
                     466:        mcr     15, 0, r2, c7, c11, 1
                     467:        add     r2, r2, #0x10
                     468:        mcr     15, 0, r2, c7, c11, 1
                     469:        add     r2, r2, #0x10
                     470:        mcr     15, 0, r2, c7, c11, 1
                     471:
                     472:        adds    r0, r0, #0x04000000
                     473:        bne     Larm8_cache_cleanID_loop
                     474:
                     475:        mov     pc, lr
                     476:
/* arm8_cache_cleanID_E: clean a single I+D cache entry (address in r0). */
                     477: ENTRY(arm8_cache_cleanID_E)
                     478:        mcr     15, 0, r0, c7, c11, 1           /* clean ID single entry */
                     479:        mov     pc, lr
                    480:
/*
 * arm8_cache_purgeID: clean AND invalidate the entire I+D cache.
 * Works around ARM810 bug 3 (see comment below) by issuing separate
 * clean (c7,c11,1) and invalidate (c7,c7,1) operations per entry instead
 * of the combined op.  Interrupts are masked for the duration (cpsr saved
 * in r3).  Loop structure is the same as arm8_cache_cleanID above:
 * 16 entries 0x10 apart per iteration, 64 outer iterations until r0 wraps.
 * Clobbers r0, r2, flags.
 */
                     481: ENTRY(arm8_cache_purgeID)
                     482:        /*
                     483:         * ARM810 bug 3
                     484:         *
                     485:         * Clean and invalidate entry will not invalidate the entry
                     486:         * if the line was already clean. (mcr 15, 0, rd, c7, 15, 1)
                     487:         *
                     488:         * Instead of using the clean and invalidate entry operation
                     489:         * use a separate clean and invalidate entry operations.
                     490:         * i.e.
                     491:         * mcr 15, 0, rd, c7, 11, 1
                     492:         * mcr 15, 0, rd, c7, 7, 1
                     493:         */
                     494:
                     495:        mov     r0, #0x00000000
                     496:
                     497:        mrs     r3, cpsr_all
                     498:        orr     r2, r3, #(I32_bit | F32_bit)
                     499:        msr     cpsr_all , r2
                     500:
                     501: Larm8_cache_purgeID_loop:
                     502:        mov     r2, r0
                     503:        mcr     15, 0, r2, c7, c11, 1
                     504:        mcr     15, 0, r2, c7, c7, 1
                     505:        add     r2, r2, #0x10
                     506:        mcr     15, 0, r2, c7, c11, 1
                     507:        mcr     15, 0, r2, c7, c7, 1
                     508:        add     r2, r2, #0x10
                     509:        mcr     15, 0, r2, c7, c11, 1
                     510:        mcr     15, 0, r2, c7, c7, 1
                     511:        add     r2, r2, #0x10
                     512:        mcr     15, 0, r2, c7, c11, 1
                     513:        mcr     15, 0, r2, c7, c7, 1
                     514:        add     r2, r2, #0x10
                     515:        mcr     15, 0, r2, c7, c11, 1
                     516:        mcr     15, 0, r2, c7, c7, 1
                     517:        add     r2, r2, #0x10
                     518:        mcr     15, 0, r2, c7, c11, 1
                     519:        mcr     15, 0, r2, c7, c7, 1
                     520:        add     r2, r2, #0x10
                     521:        mcr     15, 0, r2, c7, c11, 1
                     522:        mcr     15, 0, r2, c7, c7, 1
                     523:        add     r2, r2, #0x10
                     524:        mcr     15, 0, r2, c7, c11, 1
                     525:        mcr     15, 0, r2, c7, c7, 1
                     526:        add     r2, r2, #0x10
                     527:        mcr     15, 0, r2, c7, c11, 1
                     528:        mcr     15, 0, r2, c7, c7, 1
                     529:        add     r2, r2, #0x10
                     530:        mcr     15, 0, r2, c7, c11, 1
                     531:        mcr     15, 0, r2, c7, c7, 1
                     532:        add     r2, r2, #0x10
                     533:        mcr     15, 0, r2, c7, c11, 1
                     534:        mcr     15, 0, r2, c7, c7, 1
                     535:        add     r2, r2, #0x10
                     536:        mcr     15, 0, r2, c7, c11, 1
                     537:        mcr     15, 0, r2, c7, c7, 1
                     538:        add     r2, r2, #0x10
                     539:        mcr     15, 0, r2, c7, c11, 1
                     540:        mcr     15, 0, r2, c7, c7, 1
                     541:        add     r2, r2, #0x10
                     542:        mcr     15, 0, r2, c7, c11, 1
                     543:        mcr     15, 0, r2, c7, c7, 1
                     544:        add     r2, r2, #0x10
                     545:        mcr     15, 0, r2, c7, c11, 1
                     546:        mcr     15, 0, r2, c7, c7, 1
                     547:        add     r2, r2, #0x10
                     548:        mcr     15, 0, r2, c7, c11, 1
                     549:        mcr     15, 0, r2, c7, c7, 1
                     550:
                     551:        adds    r0, r0, #0x04000000
                     552:        bne     Larm8_cache_purgeID_loop
                     553:
                     554:        msr     cpsr_all, r3
                     555:        mov     pc, lr
                    556:
/*
 * arm8_cache_purgeID_E: clean and invalidate a single I+D cache entry
 * (address in r0), using the separate clean/invalidate sequence required
 * by ARM810 bug 3.  Interrupts are masked around the two-op sequence so
 * it executes atomically (cpsr saved in r3).
 */
                     557: ENTRY(arm8_cache_purgeID_E)
                     558:        /*
                     559:         * ARM810 bug 3
                     560:         *
                     561:         * Clean and invalidate entry will not invalidate the entry
                     562:         * if the line was already clean. (mcr 15, 0, rd, c7, 15, 1)
                     563:         *
                     564:         * Instead of using the clean and invalidate entry operation
                     565:         * use a separate clean and invalidate entry operations.
                     566:         * i.e.
                     567:         * mcr 15, 0, rd, c7, 11, 1
                     568:         * mcr 15, 0, rd, c7, 7, 1
                     569:         */
                     570:        mrs     r3, cpsr_all
                     571:        orr     r2, r3, #(I32_bit | F32_bit)
                     572:        msr     cpsr_all , r2
                     573:        mcr     15, 0, r0, c7, c11, 1           /* clean ID single entry */
                     574:        mcr     15, 0, r0, c7, c7, 1            /* flush ID single entry */
                     575:        msr     cpsr_all , r3
                     576:        mov     pc, lr
                     577: #endif /* CPU_ARM8 */
                    578:
#if defined(CPU_SA110) || defined(CPU_XSCALE)
ENTRY_NP(xscale_cache_flushID)
ENTRY(sa110_cache_flushID)
	/* Invalidate the entire I+D cache (no clean/write-back). */
	mcr	15, 0, r0, c7, c7, 0		/* flush I+D cache */
	mov	pc, lr
                    584:
1.5       matt      585: ENTRY_NP(xscale_cache_flushI)
1.1       bjh21     586: ENTRY(sa110_cache_flushI)
                    587:        mcr     15, 0, r0, c7, c5, 0            /* flush I cache */
                    588:        mov     pc, lr
                    589:
1.5       matt      590: ENTRY_NP(xscale_cache_flushD)
1.1       bjh21     591: ENTRY(sa110_cache_flushD)
                    592:        mcr     15, 0, r0, c7, c6, 0            /* flush D cache */
                    593:        mov     pc, lr
                    594:
#if defined(CPU_XSCALE)
ENTRY(xscale_cache_flushI_SE)
	/*
	 * Invalidate a single I cache line (r0 = address); the branch
	 * target buffer is invalidated explicitly since the single-entry
	 * operation does not cover it.
	 */
	mcr	15, 0, r0, c7, c5, 1		/* flush I cache single entry */
	mcr	15, 0, r0, c7, c5, 6		/* inv. branch target buffer */
	mov	pc, lr
#endif /* CPU_XSCALE */
                    601:
ENTRY_NP(xscale_cache_flushD_SE)
ENTRY(sa110_cache_flushD_SE)
	/* Invalidate a single D cache line (r0 = address). */
	mcr	15, 0, r0, c7, c6, 1		/* flush D cache single entry */
	mov	pc, lr
                    606:
ENTRY_NP(xscale_cache_cleanD_E)
ENTRY(sa110_cache_cleanD_E)
	/* Clean (write back, do not invalidate) one D cache line (r0 = address). */
	mcr	15, 0, r0, c7, c10, 1		/* clean D cache entry */
	mov	pc, lr
#endif /* CPU_SA110 || CPU_XSCALE */
1.1       bjh21     612:
#ifdef CPU_SA110
/*
 * Information for SA110 cache clean/purge functions
 *
 * The address of the blocks of memory to use
 * The size of the block of memory to use
 *
 * These are patchable from C (they are globals); the defaults below
 * are a placeholder address and a 32KB size.
 */

	.data
	.global _C_LABEL(sa110_cache_clean_addr)
_C_LABEL(sa110_cache_clean_addr):
	.word	0xf0000000
	.global _C_LABEL(sa110_cache_clean_size)
_C_LABEL(sa110_cache_clean_size):
	.word	0x00008000

	.text
	/* Literal-pool pointers so the .text routines can reach the words above. */
Lsa110_cache_clean_addr:
	.word	_C_LABEL(sa110_cache_clean_addr)
Lsa110_cache_clean_size:
	.word	_C_LABEL(sa110_cache_clean_size)
                    634:
ENTRY(sa110_cache_cleanID)
ENTRY(sa110_cache_cleanD)
	/*
	 * Clean the entire D cache by reading through the reserved clean
	 * area, forcing every dirty line to be written back and evicted.
	 */
#ifdef CACHE_CLEAN_BLOCK_INTR
	mrs	r3, cpsr_all			/* block IRQ+FIQ while cleaning */
	orr	r0, r3, #(I32_bit | F32_bit)
	msr	cpsr_all , r0
#else
	ldr	r3, Lblock_userspace_access	/* block user-space access instead */
	ldr	ip, [r3]
	orr	r0, ip, #1
	str	r0, [r3]
#endif
	ldr	r2, Lsa110_cache_clean_addr
	ldmia	r2, {r0, r1}			/* r0 = clean area addr, r1 = size */
#ifdef DOUBLE_CACHE_CLEAN_BANK
	eor	r0, r0, r1			/* alternate bank (addr ^ size) for next call */
	str	r0, [r2]
#endif

Lsa110_cache_cleanD_loop:
	ldr	r2, [r0], #32			/* touch one word per 32-byte line */
	subs	r1, r1, #32
	bne	Lsa110_cache_cleanD_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
#ifdef CACHE_CLEAN_BLOCK_INTR
	msr	cpsr_all , r3			/* restore interrupt state */
#else
	str	ip, [r3]			/* restore user-space access flag */
#endif
	mov	pc, lr
                    666:
ENTRY(sa110_cache_purgeID)
	/*
	 * Purge the I+D cache: read through the reserved clean area to
	 * write back and displace every D line, then invalidate the
	 * I cache.
	 */
#ifdef CACHE_CLEAN_BLOCK_INTR
	mrs	r3, cpsr_all			/* block IRQ+FIQ while cleaning */
	orr	r0, r3, #(I32_bit | F32_bit)
	msr	cpsr_all , r0
#else
	ldr	r3, Lblock_userspace_access	/* block user-space access instead */
	ldr	ip, [r3]
	orr	r0, ip, #1
	str	r0, [r3]
#endif
	ldr	r2, Lsa110_cache_clean_addr
	ldmia	r2, {r0, r1}			/* r0 = clean area addr, r1 = size */
#ifdef DOUBLE_CACHE_CLEAN_BANK
	eor	r0, r0, r1			/* alternate bank (addr ^ size) for next call */
	str	r0, [r2]
#endif

Lsa110_cache_purgeID_loop:
	ldr	r2, [r0], #32			/* touch one word per 32-byte line */
	subs	r1, r1, #32
	bne	Lsa110_cache_purgeID_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mcr	15, 0, r0, c7, c5, 0	/* flush I cache (D flushed above) */
#ifdef CACHE_CLEAN_BLOCK_INTR
	msr	cpsr_all , r3			/* restore interrupt state */
#else
	str	ip, [r3]			/* restore user-space access flag */
#endif
	mov	pc, lr
                    698:
ENTRY(sa110_cache_purgeD)
	/*
	 * Purge the D cache alone: same clean-area walk as purgeID,
	 * but without the final I cache invalidate.
	 */
#ifdef CACHE_CLEAN_BLOCK_INTR
	mrs	r3, cpsr_all			/* block IRQ+FIQ while cleaning */
	orr	r0, r3, #(I32_bit | F32_bit)
	msr	cpsr_all , r0
#else
	ldr	r3, Lblock_userspace_access	/* block user-space access instead */
	ldr	ip, [r3]
	orr	r0, ip, #1
	str	r0, [r3]
#endif
	ldr	r2, Lsa110_cache_clean_addr
	ldmia	r2, {r0, r1}			/* r0 = clean area addr, r1 = size */
#ifdef DOUBLE_CACHE_CLEAN_BANK
	eor	r0, r0, r1			/* alternate bank (addr ^ size) for next call */
	str	r0, [r2]
#endif

Lsa110_cache_purgeD_loop:
	ldr	r2, [r0], #32			/* touch one word per 32-byte line */
	subs	r1, r1, #32
	bne	Lsa110_cache_purgeD_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
#ifdef CACHE_CLEAN_BLOCK_INTR
	msr	cpsr_all , r3			/* restore interrupt state */
#else
	str	ip, [r3]			/* restore user-space access flag */
#endif
	mov	pc, lr

#endif /* CPU_SA110 */
                    731:
#ifdef CPU_XSCALE
/*
 * Information for XScale cache clean/purge functions
 *
 * The address of the blocks of memory to use
 * The size of the block of memory to use
 *
 * These are patchable from C (they are globals); the defaults below
 * are a placeholder address and a 32KB size.
 */

	.data
	.global _C_LABEL(xscale_cache_clean_addr)
_C_LABEL(xscale_cache_clean_addr):
	.word	0xf0000000
	.global _C_LABEL(xscale_cache_clean_size)
_C_LABEL(xscale_cache_clean_size):
	.word	0x00008000

	.text
	/* Literal-pool pointers so the .text routines can reach the words above. */
Lxscale_cache_clean_addr:
	.word	_C_LABEL(xscale_cache_clean_addr)
Lxscale_cache_clean_size:
	.word	_C_LABEL(xscale_cache_clean_size)
                    753:
ENTRY_NP(xscale_cache_syncI)
ENTRY_NP(xscale_cache_purgeID)
	/* syncI/purgeID additionally invalidate the I cache first */
	mcr	15, 0, r0, c7, c5, 0	/* flush I cache (D cleaned below) */
ENTRY_NP(xscale_cache_cleanID)
ENTRY_NP(xscale_cache_purgeD)
ENTRY(xscale_cache_cleanD)
	/*
	 * Clean the D cache by walking the reserved clean area from the
	 * top downwards with the line-allocate operation, displacing
	 * every dirty line.
	 */
#ifdef CACHE_CLEAN_BLOCK_INTR
	mrs	r3, cpsr_all			/* block IRQ+FIQ while cleaning */
	orr	r0, r3, #(I32_bit | F32_bit)
	msr	cpsr_all , r0
#else
	ldr	r3, Lblock_userspace_access	/* block user-space access instead */
	ldr	ip, [r3]
	orr	r0, ip, #1
	str	r0, [r3]
#endif
	ldr	r2, Lxscale_cache_clean_addr
	ldmia	r2, {r0, r1}			/* r0 = clean area addr, r1 = size */
	add	r0, r0, r1			/* start at the top, work down */

Lxscale_cache_cleanD_loop:
	subs	r0, r0, #32
	mcr	15, 0, r0, c7, c2, 5		/* allocate cache line */
	subs	r1, r1, #32
	bne	Lxscale_cache_cleanD_loop

/*
 * It's expected that we only use the minidata cache for kernel
 * addresses, so there is no need to purge it on context switch
 */
#ifdef CACHE_CLEAN_MINIDATA
/*
 * Clean mini-data-cache
 */
	mov	r1, #64				/* read 64 x 32-byte lines (2KB) */
Lxscale_cache_cleanD_loop2:
	ldr	r3, [r0], #32
	subs	r1, r1, #1
	bne	Lxscale_cache_cleanD_loop2
#endif

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */

#ifdef CACHE_CLEAN_BLOCK_INTR
	msr	cpsr_all , r3			/* restore interrupt state */
#else
	str	ip, [r3]			/* restore user-space access flag */
#endif
	mov	pc, lr

#endif /* CPU_XSCALE */
                    805:
#if defined(CPU_SA110)
ENTRY(sa110_cache_purgeID_E)
	/* Purge one I+D entry (r0 = address): clean the D line, drain,
	 * invalidate the whole I cache, then invalidate the D line. */
	mcr	15, 0, r0, c7, c10, 1		/* clean dcache entry */
	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mcr	15, 0, r0, c7, c5, 0		/* flush I cache */
	mcr	15, 0, r0, c7, c6, 1		/* flush D cache single entry */
	mov	pc, lr
#endif /* CPU_SA110 */
                    814:
#if defined(CPU_XSCALE)
ENTRY(xscale_cache_purgeID_E)
	/* Purge one I+D entry (r0 = address); single-entry I flush needs
	 * an explicit branch-target-buffer invalidate on XScale. */
	mcr	15, 0, r0, c7, c10, 1		/* clean dcache entry */
	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mcr	15, 0, r0, c7, c5, 1		/* flush I cache single entry */
	mcr	15, 0, r0, c7, c5, 6		/* inv. branch target buffer */
	mcr	15, 0, r0, c7, c6, 1		/* flush D cache single entry */
	mov	pc, lr
#endif /* CPU_XSCALE */
1.1       bjh21     824:
#if defined(CPU_SA110) || defined(CPU_XSCALE)
ENTRY_NP(xscale_cache_purgeD_E)
ENTRY(sa110_cache_purgeD_E)
	/* Purge one D entry (r0 = address): clean, drain, invalidate. */
	mcr	15, 0, r0, c7, c10, 1		/* clean dcache entry */
	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mcr	15, 0, r0, c7, c6, 1		/* flush D cache single entry */
	mov	pc, lr
#endif /* CPU_SA110 || CPU_XSCALE */
1.1       bjh21     833:
                    834: /*
                    835:  * Other functions
                    836:  */
                    837:
#if defined(CPU_SA110) || defined(CPU_XSCALE)
ENTRY(sa110_drain_writebuf)
	/* Drain the write buffer (built for both SA110 and XScale configs). */
	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mov	pc, lr
#endif /* CPU_SA110 || CPU_XSCALE */
                    843:
                    844: /*
                    845:  * Soft functions
                    846:  */
                    847:
#ifdef CPU_SA110
ENTRY(sa110_cache_syncI)
	/*
	 * Synchronise the I cache with the D cache: clean D by reading
	 * through the reserved area, drain, then invalidate the I cache.
	 */
#ifdef CACHE_CLEAN_BLOCK_INTR
	mrs	r3, cpsr_all			/* block IRQ+FIQ while cleaning */
	orr	r0, r3, #(I32_bit | F32_bit)
	msr	cpsr_all , r0
#else
	ldr	r3, Lblock_userspace_access	/* block user-space access instead */
	ldr	ip, [r3]
	orr	r0, ip, #1
	str	r0, [r3]
#endif
	ldr	r2, Lsa110_cache_clean_addr
	ldmia	r2, {r0, r1}			/* r0 = clean area addr, r1 = size */
#ifdef DOUBLE_CACHE_CLEAN_BANK
	eor	r0, r0, r1			/* alternate bank (addr ^ size) for next call */
	str	r0, [r2]
#endif

Lsa110_cache_syncI_loop:
	ldr	r2, [r0], #32			/* touch one word per 32-byte line */
	subs	r1, r1, #32
	bne	Lsa110_cache_syncI_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mcr	15, 0, r0, c7, c5, 0		/* flush I cache */
#ifdef CACHE_CLEAN_BLOCK_INTR
	msr	cpsr_all , r3			/* restore interrupt state */
#else
	str	ip, [r3]			/* restore user-space access flag */
#endif
	mov	pc, lr
                    880:
ENTRY(sa110_cache_cleanID_rng)
ENTRY(sa110_cache_cleanD_rng)
	/*
	 * Clean a D cache range: r0 = start, r1 = length.
	 * Ranges of 16KB (0x4000) or more fall back to a full clean.
	 */
	cmp	r1, #0x4000
	bcs	_C_LABEL(sa110_cache_cleanID)

	and	r2, r0, #0x1f			/* align start down to a 32-byte line */
	add	r1, r1, r2			/* grow length by the alignment slack */
	bic	r0, r0, #0x1f

sa110_cache_cleanD_rng_loop:
	mcr	15, 0, r0, c7, c10, 1		/* clean D cache entry */
	add	r0, r0, #32
	subs	r1, r1, #32
	bpl	sa110_cache_cleanD_rng_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mov	pc, lr
                    898:
ENTRY(sa110_cache_purgeID_rng)
	/*
	 * Purge an I+D cache range: r0 = start, r1 = length.
	 * Ranges of 16KB (0x4000) or more fall back to a full purge.
	 */
	cmp	r1, #0x4000
	bcs	_C_LABEL(sa110_cache_purgeID)

	and	r2, r0, #0x1f			/* align start down to a 32-byte line */
	add	r1, r1, r2			/* grow length by the alignment slack */
	bic	r0, r0, #0x1f

sa110_cache_purgeID_rng_loop:
	mcr	15, 0, r0, c7, c10, 1		/* clean D cache entry */
	mcr	15, 0, r0, c7, c6, 1		/* flush D cache single entry */
	add	r0, r0, #32
	subs	r1, r1, #32
	bpl	sa110_cache_purgeID_rng_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mcr	15, 0, r0, c7, c5, 0		/* flush I cache */

	mov	pc, lr
                    918:
ENTRY(sa110_cache_purgeD_rng)
	/*
	 * Purge a D cache range: r0 = start, r1 = length.
	 * Ranges of 16KB (0x4000) or more fall back to a full purge.
	 */
	cmp	r1, #0x4000
	bcs	_C_LABEL(sa110_cache_purgeD)

	and	r2, r0, #0x1f			/* align start down to a 32-byte line */
	add	r1, r1, r2			/* grow length by the alignment slack */
	bic	r0, r0, #0x1f

sa110_cache_purgeD_rng_loop:
	mcr	15, 0, r0, c7, c10, 1		/* clean D cache entry */
	mcr	15, 0, r0, c7, c6, 1		/* flush D cache single entry */
	add	r0, r0, #32
	subs	r1, r1, #32
	bpl	sa110_cache_purgeD_rng_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mov	pc, lr
                    936:
ENTRY(sa110_cache_syncI_rng)
	/*
	 * Sync the I cache for a range: clean D lines, drain, then
	 * invalidate the whole I cache.  r0 = start, r1 = length.
	 * Ranges of 16KB (0x4000) or more fall back to a full sync.
	 */
	cmp	r1, #0x4000
	bcs	_C_LABEL(sa110_cache_syncI)

	and	r2, r0, #0x1f			/* align start down to a 32-byte line */
	add	r1, r1, r2			/* grow length by the alignment slack */
	bic	r0, r0, #0x1f

sa110_cache_syncI_rng_loop:
	mcr	15, 0, r0, c7, c10, 1		/* clean D cache entry */
	add	r0, r0, #32
	subs	r1, r1, #32
	bpl	sa110_cache_syncI_rng_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mcr	15, 0, r0, c7, c5, 0		/* flush I cache */

	mov	pc, lr
#endif /* CPU_SA110 */
                    956:
#ifdef CPU_XSCALE
/*
 * xscale_cache_syncI is identical to xscale_cache_purgeID
 * (a shared ENTRY_NP above), so this standalone copy is disabled.
 */
#if 0
ENTRY(xscale_cache_syncI)
#ifdef CACHE_CLEAN_BLOCK_INTR
	mrs	r3, cpsr_all
	orr	r0, r3, #(I32_bit | F32_bit)
	msr	cpsr_all , r0
#else
	ldr	r3, Lblock_userspace_access
	ldr	ip, [r3]
	orr	r0, ip, #1
	str	r0, [r3]
#endif
	ldr	r2, Lxscale_cache_clean_addr
	ldmia	r2, {r0, r1}
	eor	r0, r0, r1
	str	r0, [r2]

Lxscale_cache_syncI_loop:
	ldr	r2, [r0], #32
	subs	r1, r1, #32
	bne	Lxscale_cache_syncI_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mcr	15, 0, r0, c7, c5, 0		/* flush I cache */
#ifdef CACHE_CLEAN_BLOCK_INTR
	msr	cpsr_all , r3
#else
	str	ip, [r3]
#endif
	mov	pc, lr
#endif
                    992:
ENTRY(xscale_cache_cleanID_rng)
ENTRY(xscale_cache_cleanD_rng)
	/*
	 * Clean a D cache range: r0 = start, r1 = length.
	 * Ranges of 16KB (0x4000) or more fall back to a full clean.
	 */
	cmp	r1, #0x4000
	bcs	_C_LABEL(xscale_cache_cleanID)

	and	r2, r0, #0x1f			/* align start down to a 32-byte line */
	add	r1, r1, r2			/* grow length by the alignment slack */
	bic	r0, r0, #0x1f

xscale_cache_cleanD_rng_loop:
	mcr	15, 0, r0, c7, c10, 1		/* clean D cache entry */
	add	r0, r0, #32
	subs	r1, r1, #32
	bpl	xscale_cache_cleanD_rng_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mov	pc, lr
                   1010:
ENTRY(xscale_cache_purgeID_rng)
	/*
	 * Purge an I+D cache range: r0 = start, r1 = length.
	 * Ranges of 16KB (0x4000) or more fall back to a full purge.
	 * The BTB is invalidated alongside each single-entry I flush.
	 */
	cmp	r1, #0x4000
	bcs	_C_LABEL(xscale_cache_purgeID)

	and	r2, r0, #0x1f			/* align start down to a 32-byte line */
	add	r1, r1, r2			/* grow length by the alignment slack */
	bic	r0, r0, #0x1f

xscale_cache_purgeID_rng_loop:
	mcr	15, 0, r0, c7, c10, 1		/* clean D cache entry */
	mcr	15, 0, r0, c7, c6, 1		/* flush D cache single entry */
	mcr	15, 0, r0, c7, c5, 1		/* flush I cache single entry */
	mcr	15, 0, r0, c7, c5, 6		/* inv. branch target buffer */
	add	r0, r0, #32
	subs	r1, r1, #32
	bpl	xscale_cache_purgeID_rng_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */

	mov	pc, lr
                   1031:
ENTRY(xscale_cache_purgeD_rng)
	/*
	 * Purge a D cache range: r0 = start, r1 = length.
	 * Ranges of 16KB (0x4000) or more fall back to a full purge.
	 */
	cmp	r1, #0x4000
	bcs	_C_LABEL(xscale_cache_purgeD)

	and	r2, r0, #0x1f			/* align start down to a 32-byte line */
	add	r1, r1, r2			/* grow length by the alignment slack */
	bic	r0, r0, #0x1f

xscale_cache_purgeD_rng_loop:
	mcr	15, 0, r0, c7, c10, 1		/* clean D cache entry */
	mcr	15, 0, r0, c7, c6, 1		/* flush D cache single entry */
	add	r0, r0, #32
	subs	r1, r1, #32
	bpl	xscale_cache_purgeD_rng_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */
	mov	pc, lr
                   1049:
ENTRY(xscale_cache_syncI_rng)
	/*
	 * Sync the I cache for a range: clean each D line, flush the
	 * corresponding I line and the BTB.  r0 = start, r1 = length.
	 * Ranges of 16KB (0x4000) or more fall back to a full sync.
	 */
	cmp	r1, #0x4000
	bcs	_C_LABEL(xscale_cache_syncI)

	and	r2, r0, #0x1f			/* align start down to a 32-byte line */
	add	r1, r1, r2			/* grow length by the alignment slack */
	bic	r0, r0, #0x1f

xscale_cache_syncI_rng_loop:
	mcr	15, 0, r0, c7, c10, 1		/* clean D cache entry */
	mcr	15, 0, r0, c7, c5, 1		/* flush I cache single entry */
	mcr	15, 0, r0, c7, c5, 6		/* inv. branch target buffer */
	add	r0, r0, #32
	subs	r1, r1, #32
	bpl	xscale_cache_syncI_rng_loop

	mcr	15, 0, r0, c7, c10, 4		/* drain write buffer */

	mov	pc, lr
#endif /* CPU_XSCALE */
                   1070:
1.1       bjh21    1071: /*
                   1072:  * *_context_switch()
                   1073:  *
                   1074:  * These are CPU specific parts of the context switcher cpu_switch()
                   1075:  * These functions actually perform the TTB reload.
                   1076:  *
                   1077:  * NOTE: Special calling convention
                   1078:  *       r1, r4-r13 must be preserved
                   1079:  */
                   1080:
#if defined(CPU_ARM6) || defined(CPU_ARM7)
ENTRY(arm67_context_switch)
	/* Switch the memory to the new process */
	/* NOTE: special calling convention - r1, r4-r13 must be preserved */

	/* For good measure we will flush the IDC as well */
	mcr	15, 0, r0, c7, c0, 0		/* flush cache */

	/* Write the TTB */
	mcr	15, 0, r0, c2, c0, 0

	/* If we have updated the TTB we must flush the TLB */
	mcr	15, 0, r0, c5, c0, 0

	/* For good measure we will flush the IDC as well */
/*	mcr	15, 0, r0, c7, c0, 0*/

	/* Make sure that pipeline is emptied */
	mov	r0, r0
	mov	r0, r0
	mov	pc, lr
#endif
                   1102:
#ifdef CPU_ARM7TDMI
ENTRY(arm7tdmi_context_switch)
	/* A context switch on ARM7TDMI is just a TTB reload. */
	b arm7tdmi_setttb
#endif
1.1       bjh21    1107: #ifdef CPU_ARM8
                   1108: ENTRY(arm8_context_switch)
                   1109:        /* Switch the memory to the new process */
                   1110:
                   1111:        /* For good measure we will flush the IDC as well */
                   1112:        mcr     15, 0, r0, c7, c7, 0            /* flush i+d cache */
                   1113:
                   1114:        /* Write the TTB */
                   1115:        mcr     15, 0, r0, c2, c0, 0
                   1116:
                   1117:        /* If we have updated the TTB we must flush the TLB */
                   1118:        mcr     15, 0, r0, c8, c7, 0            /* flush the i+d tlb */
                   1119:
                   1120:        /* For good measure we will flush the IDC as well */
                   1121: /*     mcr     15, 0, r0, c7, c7, 0*/          /* flush the i+d cache */
                   1122:
                   1123:        /* Make sure that pipeline is emptied */
                   1124:        mov     r0, r0
                   1125:        mov     r0, r0
                   1126:        mov     pc, lr
                   1127: #endif /* CPU_ARM8 */
                   1128:
#if defined(CPU_SA110)
ENTRY(sa110_context_switch)
	/* Switch the memory to the new process */
	/* NOTE: special calling convention - r1, r4-r13 must be preserved */

	/*
	 * CF_CACHE_PURGE_ID will ALWAYS be called prior to this
	 * Thus the data cache will contain only kernel data
	 * and the instruction cache will contain only kernel code
	 * and all the kernel mappings shared by all processes.
	 */

	/* Write the TTB */
	mcr	15, 0, r0, c2, c0, 0

	/* If we have updated the TTB we must flush the TLB */
	mcr	15, 0, r0, c8, c7, 0		/* flush the i+d tlb */

	/* Make sure that pipeline is emptied */
	mov	r0, r0
	mov	r0, r0
	mov	pc, lr
#endif /* CPU_SA110 */
                   1151:
#if defined(CPU_XSCALE)
ENTRY(xscale_context_switch)
	/* Switch the memory to the new process */
	/* NOTE: special calling convention - r1, r4-r13 must be preserved */

	/*
	 * CF_CACHE_PURGE_ID will ALWAYS be called prior to this
	 * Thus the data cache will contain only kernel data
	 * and the instruction cache will contain only kernel code
	 * and all the kernel mappings shared by all processes.
	 */

	/* Write the TTB */
	mcr	15, 0, r0, c2, c0, 0

	/* If we have updated the TTB we must flush the TLB */
	mcr	15, 0, r0, c8, c7, 0		/* flush the i+d tlb */

	/* Make sure that pipeline is emptied */
	mrc	15, 0, r0, c2, c0, 0	/* read some register in CP15 */
	mov	r0, r0			/* for the read to complete */
	sub	pc, pc, #4		/* branch to next instruction */

	/* return */
	mov	pc, lr
#endif /* CPU_XSCALE */
1.1       bjh21    1177:
                   1178: /*
                   1179:  * other potentially useful software functions are:
                   1180:  *  clean D cache entry and flush I cache entry
                   1181:  *   for the moment use cache_purgeID_E
                   1182:  */
1.4       bjh21    1183:
                   1184: /* Random odd functions */
                   1185:
                   1186: /*
                   1187:  * Function to get the offset of a stored program counter from the
                   1188:  * instruction doing the store.  This offset is defined to be the same
                   1189:  * for all STRs and STMs on a given implementation.  Code based on
                   1190:  * section 2.4.3 of the ARM ARM (2nd Ed.), with modifications to work
                   1191:  * in 26-bit modes as well.
                   1192:  */
ENTRY(get_pc_str_offset)
	/*
	 * Returns (in r0) the difference between the value a "str pc"
	 * actually stores and the address of the store instruction
	 * itself; see the comment block above for background.
	 */
	mov	ip, sp
	stmfd	sp!, {ip, fp, lr, pc}		/* build a standard APCS frame */
	sub	fp, ip, #4
	sub	sp, sp, #4			/* one scratch word on the stack */
	mov	r1, pc		/* R1 = addr of following STR */
	mov	r0, r0
	str	pc, [sp]	/* [SP] = . + offset */
	ldr	r0, [sp]
	sub	r0, r0, r1			/* r0 = stored value - STR address */
	ldmdb	fp, {sp, fp, pc}		/* restore frame and return */

CVSweb <webmaster@jp.NetBSD.org>