Index: kernel/arch/ia64/src/mm/tlb.c
===================================================================
--- kernel/arch/ia64/src/mm/tlb.c	(revision 5bda2f3ec5d347e6cd33782ee9f7583f6d3aeda4)
+++ kernel/arch/ia64/src/mm/tlb.c	(revision 3052ff4d8cfe91bfe73946c0e1785fe4cfe44b07)
@@ -573,4 +573,16 @@
 void alternate_data_tlb_fault(uint64_t vector, istate_t *istate)
 {
+	if (istate->cr_isr.sp) {
+		/* Speculative load. Defer the exception
+		   until a more clever approach can be used.
+		   
+		   Currently if we try to find the mapping
+		   for the speculative load while in the kernel,
+		   we might introduce a livelock because of
+		   the possibly invalid values of the address. */
+		istate->cr_ipsr.ed = true;
+		return;
+	}
+	
 	uintptr_t va = istate->cr_ifa;  /* faulting address */
 	
Index: kernel/arch/ia64/src/start.S
===================================================================
--- kernel/arch/ia64/src/start.S	(revision 5bda2f3ec5d347e6cd33782ee9f7583f6d3aeda4)
+++ kernel/arch/ia64/src/start.S	(revision 3052ff4d8cfe91bfe73946c0e1785fe4cfe44b07)
@@ -125,4 +125,11 @@
 	movl r10 = (KERNEL_TRANSLATION_FW)
 	itr.d dtr[r7] = r10
+	
+	# Initialize DCR
+	
+	movl r10 = (DCR_DP_MASK | DCR_DK_MASK | DCR_DX_MASK | DCR_DR_MASK | DCR_DA_MASK | DCR_DD_MASK | DCR_LC_MASK)
+	mov r9 = cr.dcr
+	or r10 = r10, r9
+	mov cr.dcr = r10
 	
 	# Initialize PSR
