Diffstat (limited to 'arch/arm/mm')
-rw-r--r--  arch/arm/mm/cache-v6.S  |  1
-rw-r--r--  arch/arm/mm/cache-v7.S  |  2
-rw-r--r--  arch/arm/mm/context.c   | 17
-rw-r--r--  arch/arm/mm/init.c      | 15
-rw-r--r--  arch/arm/mm/mmu.c       |  9
-rw-r--r--  arch/arm/mm/proc-v6.S   |  4
-rw-r--r--  arch/arm/mm/proc-v7.S   | 14
7 files changed, 41 insertions, 21 deletions
diff --git a/arch/arm/mm/cache-v6.S b/arch/arm/mm/cache-v6.S
index c96fa1b..73b4a8b 100644
--- a/arch/arm/mm/cache-v6.S
+++ b/arch/arm/mm/cache-v6.S
@@ -176,6 +176,7 @@ ENDPROC(v6_coherent_kern_range)
*/
ENTRY(v6_flush_kern_dcache_area)
add r1, r0, r1
+ bic r0, r0, #D_CACHE_LINE_SIZE - 1
1:
#ifdef HARVARD_CACHE
mcr p15, 0, r0, c7, c14, 1 @ clean & invalidate D line
diff --git a/arch/arm/mm/cache-v7.S b/arch/arm/mm/cache-v7.S
index dc18d81..d32f02b 100644
--- a/arch/arm/mm/cache-v7.S
+++ b/arch/arm/mm/cache-v7.S
@@ -221,6 +221,8 @@ ENDPROC(v7_coherent_user_range)
ENTRY(v7_flush_kern_dcache_area)
dcache_line_size r2, r3
add r1, r0, r1
+ sub r3, r2, #1
+ bic r0, r0, r3
1:
mcr p15, 0, r0, c7, c14, 1 @ clean & invalidate D line / unified line
add r0, r0, r2
diff --git a/arch/arm/mm/context.c b/arch/arm/mm/context.c
index b0ee9ba..8bfae96 100644
--- a/arch/arm/mm/context.c
+++ b/arch/arm/mm/context.c
@@ -24,9 +24,7 @@ DEFINE_PER_CPU(struct mm_struct *, current_mm);
/*
* We fork()ed a process, and we need a new context for the child
- * to run in. We reserve version 0 for initial tasks so we will
- * always allocate an ASID. The ASID 0 is reserved for the TTBR
- * register changing sequence.
+ * to run in.
*/
void __init_new_context(struct task_struct *tsk, struct mm_struct *mm)
{
@@ -36,8 +34,11 @@ void __init_new_context(struct task_struct *tsk, struct mm_struct *mm)
static void flush_context(void)
{
- /* set the reserved ASID before flushing the TLB */
- asm("mcr p15, 0, %0, c13, c0, 1\n" : : "r" (0));
+ u32 ttb;
+ /* Copy TTBR1 into TTBR0 */
+ asm volatile("mrc p15, 0, %0, c2, c0, 1\n"
+ "mcr p15, 0, %0, c2, c0, 0"
+ : "=r" (ttb));
isb();
local_flush_tlb_all();
if (icache_is_vivt_asid_tagged()) {
@@ -93,7 +94,7 @@ static void reset_context(void *info)
return;
smp_rmb();
- asid = cpu_last_asid + cpu + 1;
+ asid = cpu_last_asid + cpu;
flush_context();
set_mm_context(mm, asid);
@@ -143,13 +144,13 @@ void __new_context(struct mm_struct *mm)
* to start a new version and flush the TLB.
*/
if (unlikely((asid & ~ASID_MASK) == 0)) {
- asid = cpu_last_asid + smp_processor_id() + 1;
+ asid = cpu_last_asid + smp_processor_id();
flush_context();
#ifdef CONFIG_SMP
smp_wmb();
smp_call_function(reset_context, NULL, 1);
#endif
- cpu_last_asid += NR_CPUS;
+ cpu_last_asid += NR_CPUS - 1;
}
set_mm_context(mm, asid);
diff --git a/arch/arm/mm/init.c b/arch/arm/mm/init.c
index 3f17ea1..2c2cce9 100644
--- a/arch/arm/mm/init.c
+++ b/arch/arm/mm/init.c
@@ -15,12 +15,14 @@
#include <linux/mman.h>
#include <linux/nodemask.h>
#include <linux/initrd.h>
+#include <linux/of_fdt.h>
#include <linux/highmem.h>
#include <linux/gfp.h>
#include <linux/memblock.h>
#include <linux/sort.h>
#include <asm/mach-types.h>
+#include <asm/prom.h>
#include <asm/sections.h>
#include <asm/setup.h>
#include <asm/sizes.h>
@@ -71,6 +73,14 @@ static int __init parse_tag_initrd2(const struct tag *tag)
__tagtable(ATAG_INITRD2, parse_tag_initrd2);
+#ifdef CONFIG_OF_FLATTREE
+void __init early_init_dt_setup_initrd_arch(unsigned long start, unsigned long end)
+{
+ phys_initrd_start = start;
+ phys_initrd_size = end - start;
+}
+#endif /* CONFIG_OF_FLATTREE */
+
/*
* This keeps memory configuration data used by a couple memory
* initialization functions, as well as show_mem() for the skipping
@@ -273,13 +283,15 @@ static void __init arm_bootmem_free(unsigned long min, unsigned long max_low,
free_area_init_node(0, zone_size, min, zhole_size);
}
-#ifndef CONFIG_SPARSEMEM
+#ifdef CONFIG_HAVE_ARCH_PFN_VALID
int pfn_valid(unsigned long pfn)
{
return memblock_is_memory(pfn << PAGE_SHIFT);
}
EXPORT_SYMBOL(pfn_valid);
+#endif
+#ifndef CONFIG_SPARSEMEM
static void arm_memory_present(void)
{
}
@@ -334,6 +346,7 @@ void __init arm_memblock_init(struct meminfo *mi, struct machine_desc *mdesc)
#endif
arm_mm_memblock_reserve();
+ arm_dt_memblock_reserve();
/* reserve any platform specific memblock areas */
if (mdesc->reserve)
diff --git a/arch/arm/mm/mmu.c b/arch/arm/mm/mmu.c
index 08a9236..9d9e736 100644
--- a/arch/arm/mm/mmu.c
+++ b/arch/arm/mm/mmu.c
@@ -763,15 +763,12 @@ static void __init sanity_check_meminfo(void)
{
int i, j, highmem = 0;
- lowmem_limit = __pa(vmalloc_min - 1) + 1;
- memblock_set_current_limit(lowmem_limit);
-
for (i = 0, j = 0; i < meminfo.nr_banks; i++) {
struct membank *bank = &meminfo.bank[j];
*bank = meminfo.bank[i];
#ifdef CONFIG_HIGHMEM
- if (__va(bank->start) > vmalloc_min ||
+ if (__va(bank->start) >= vmalloc_min ||
__va(bank->start) < (void *)PAGE_OFFSET)
highmem = 1;
@@ -829,6 +826,9 @@ static void __init sanity_check_meminfo(void)
bank->size = newsize;
}
#endif
+ if (!bank->highmem && bank->start + bank->size > lowmem_limit)
+ lowmem_limit = bank->start + bank->size;
+
j++;
}
#ifdef CONFIG_HIGHMEM
@@ -852,6 +852,7 @@ static void __init sanity_check_meminfo(void)
}
#endif
meminfo.nr_banks = j;
+ memblock_set_current_limit(lowmem_limit);
}
static inline void prepare_page_table(void)
diff --git a/arch/arm/mm/proc-v6.S b/arch/arm/mm/proc-v6.S
index ab17cc0..1d2b845 100644
--- a/arch/arm/mm/proc-v6.S
+++ b/arch/arm/mm/proc-v6.S
@@ -213,7 +213,9 @@ __v6_setup:
mcr p15, 0, r0, c2, c0, 2 @ TTB control register
ALT_SMP(orr r4, r4, #TTB_FLAGS_SMP)
ALT_UP(orr r4, r4, #TTB_FLAGS_UP)
- mcr p15, 0, r4, c2, c0, 1 @ load TTB1
+ ALT_SMP(orr r8, r8, #TTB_FLAGS_SMP)
+ ALT_UP(orr r8, r8, #TTB_FLAGS_UP)
+ mcr p15, 0, r8, c2, c0, 1 @ load TTB1
#endif /* CONFIG_MMU */
adr r5, v6_crval
ldmia r5, {r5, r6}
diff --git a/arch/arm/mm/proc-v7.S b/arch/arm/mm/proc-v7.S
index babfba0..b3b566e 100644
--- a/arch/arm/mm/proc-v7.S
+++ b/arch/arm/mm/proc-v7.S
@@ -108,18 +108,16 @@ ENTRY(cpu_v7_switch_mm)
#ifdef CONFIG_ARM_ERRATA_430973
mcr p15, 0, r2, c7, c5, 6 @ flush BTAC/BTB
#endif
-#ifdef CONFIG_ARM_ERRATA_754322
- dsb
-#endif
- mcr p15, 0, r2, c13, c0, 1 @ set reserved context ID
- isb
-1: mcr p15, 0, r0, c2, c0, 0 @ set TTB 0
+ mrc p15, 0, r2, c2, c0, 1 @ load TTB 1
+ mcr p15, 0, r2, c2, c0, 0 @ into TTB 0
isb
#ifdef CONFIG_ARM_ERRATA_754322
dsb
#endif
mcr p15, 0, r1, c13, c0, 1 @ set context ID
isb
+ mcr p15, 0, r0, c2, c0, 0 @ set TTB 0
+ isb
#endif
mov pc, lr
ENDPROC(cpu_v7_switch_mm)
@@ -368,7 +366,9 @@ __v7_setup:
mcr p15, 0, r10, c2, c0, 2 @ TTB control register
ALT_SMP(orr r4, r4, #TTB_FLAGS_SMP)
ALT_UP(orr r4, r4, #TTB_FLAGS_UP)
- mcr p15, 0, r4, c2, c0, 1 @ load TTB1
+ ALT_SMP(orr r8, r8, #TTB_FLAGS_SMP)
+ ALT_UP(orr r8, r8, #TTB_FLAGS_UP)
+ mcr p15, 0, r8, c2, c0, 1 @ load TTB1
ldr r5, =PRRR @ PRRR
ldr r6, =NMRR @ NMRR
mcr p15, 0, r5, c10, c2, 0 @ write PRRR