Fix zero-sized bss handling

pull/84/head
Andre Richter 4 years ago
parent b30bc518c4
commit 098e19ecc4

@ -48,7 +48,7 @@ diff -uNr 01_wait_forever/src/_arch/aarch64/cpu.S 02_runtime_init/src/_arch/aarc
diff -uNr 01_wait_forever/src/bsp/raspberrypi/link.ld 02_runtime_init/src/bsp/raspberrypi/link.ld
--- 01_wait_forever/src/bsp/raspberrypi/link.ld
+++ 02_runtime_init/src/bsp/raspberrypi/link.ld
@@ -13,5 +13,24 @@
@@ -13,5 +13,27 @@
*(.text._start) *(.text*)
}
@ -68,6 +68,9 @@ diff -uNr 01_wait_forever/src/bsp/raspberrypi/link.ld 02_runtime_init/src/bsp/ra
+ __bss_start = .;
+ *(.bss*);
+ . = ALIGN(8);
+
+ /* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
+ . += 8;
+ __bss_end_inclusive = . - 8;
+ }
+
@ -77,7 +80,7 @@ diff -uNr 01_wait_forever/src/bsp/raspberrypi/link.ld 02_runtime_init/src/bsp/ra
diff -uNr 01_wait_forever/src/bsp/raspberrypi/memory.rs 02_runtime_init/src/bsp/raspberrypi/memory.rs
--- 01_wait_forever/src/bsp/raspberrypi/memory.rs
+++ 02_runtime_init/src/bsp/raspberrypi/memory.rs
@@ -0,0 +1,31 @@
@@ -0,0 +1,37 @@
+// SPDX-License-Identifier: MIT OR Apache-2.0
+//
+// Copyright (c) 2018-2020 Andre Richter <andre.o.richter@gmail.com>
@ -107,7 +110,13 @@ diff -uNr 01_wait_forever/src/bsp/raspberrypi/memory.rs 02_runtime_init/src/bsp/
+/// - Values are provided by the linker script and must be trusted as-is.
+/// - The linker-provided addresses must be u64 aligned.
+pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
+ unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
+ let range;
+ unsafe {
+ range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
+ }
+ assert!(!range.is_empty());
+
+ range
+}
diff -uNr 01_wait_forever/src/bsp/raspberrypi.rs 02_runtime_init/src/bsp/raspberrypi.rs
@ -150,7 +159,7 @@ diff -uNr 01_wait_forever/src/main.rs 02_runtime_init/src/main.rs
diff -uNr 01_wait_forever/src/memory.rs 02_runtime_init/src/memory.rs
--- 01_wait_forever/src/memory.rs
+++ 02_runtime_init/src/memory.rs
@@ -0,0 +1,34 @@
@@ -0,0 +1,30 @@
+// SPDX-License-Identifier: MIT OR Apache-2.0
+//
+// Copyright (c) 2018-2020 Andre Richter <andre.o.richter@gmail.com>
@ -176,13 +185,9 @@ diff -uNr 01_wait_forever/src/memory.rs 02_runtime_init/src/memory.rs
+ let mut ptr = *range.start();
+ let end_inclusive = *range.end();
+
+ loop {
+ while ptr <= end_inclusive {
+ core::ptr::write_volatile(ptr, T::from(0));
+ ptr = ptr.offset(1);
+
+ if ptr > end_inclusive {
+ break;
+ }
+ }
+}

@ -29,6 +29,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}
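
The 8-byte fill above is the heart of the fix. Below is a minimal, standalone sketch (not part of the commit) that models the linker arithmetic with hypothetical addresses: because of the fill, __bss_end_inclusive always lands on the fill word, so __bss_start <= __bss_end_inclusive holds even when .bss is empty.

// Sketch only: models `. += 8; __bss_end_inclusive = . - 8;` with plain integers.
fn bss_symbols(bss_start: usize, bss_size_bytes: usize) -> (usize, usize) {
    let mut cursor = bss_start + bss_size_bytes; // '.' after *(.bss*) and ALIGN(8)
    cursor += 8;                                 // the fill: `. += 8;`
    let bss_end_inclusive = cursor - 8;          // `__bss_end_inclusive = . - 8;`
    (bss_start, bss_end_inclusive)
}

fn main() {
    // Zero-sized .bss: the inclusive range still contains exactly one u64 (the fill word).
    let (start, end) = bss_symbols(0x8_0000, 0);
    assert_eq!(start, end);

    // 16 bytes of .bss: the inclusive end is the fill word right after it, so the
    // zeroing loop clears both .bss words plus the harmless fill word.
    let (start, end) = bss_symbols(0x8_0000, 16);
    assert_eq!(end - start, 16);
}

Without the fill, a zero-sized .bss would leave __bss_end_inclusive at __bss_start - 8, i.e. a reversed range.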

@ -27,5 +27,11 @@ extern "Rust" {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}
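
For context, here is a host-side illustration (not from the commit) of the RangeInclusive<*mut u64> that bss_range_inclusive() returns, using a local buffer instead of the linker symbols; it demonstrates the emptiness check the new assert! relies on.

use core::ops::RangeInclusive;

fn main() {
    let mut buf = [0xAAu64; 4];
    let start = buf.as_mut_ptr();
    // The inclusive end points at the last element, mirroring __bss_end_inclusive.
    let end_inclusive = unsafe { start.add(buf.len() - 1) };

    let range: RangeInclusive<*mut u64> = RangeInclusive::new(start, end_inclusive);
    assert!(!range.is_empty()); // the condition bss_range_inclusive() now asserts

    // A reversed range (end below start) is what an unpatched zero-sized .bss produced.
    let reversed = RangeInclusive::new(end_inclusive, start);
    assert!(reversed.is_empty());
}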

@ -23,12 +23,8 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}
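
The rewritten loop from the diff, wrapped in a small host-side harness (the harness and buffer are illustrative, not part of the tutorial), shows that the while guard zeroes every element up to and including the inclusive end.

use core::ops::RangeInclusive;

/// Body as in the diff: zero one element per step while the cursor has not
/// passed the inclusive end.
unsafe fn zero_volatile<T>(range: RangeInclusive<*mut T>)
where
    T: From<u8>,
{
    let mut ptr = *range.start();
    let end_inclusive = *range.end();

    while ptr <= end_inclusive {
        core::ptr::write_volatile(ptr, T::from(0));
        ptr = ptr.offset(1);
    }
}

fn main() {
    let mut buf = [0xFFu64; 3];
    let start = buf.as_mut_ptr();
    let end_inclusive = unsafe { start.add(buf.len() - 1) };

    unsafe { zero_volatile(RangeInclusive::new(start, end_inclusive)) };
    assert_eq!(buf, [0, 0, 0]);
}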

@ -29,6 +29,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -27,5 +27,11 @@ extern "Rust" {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -23,12 +23,8 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}

@ -29,6 +29,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -43,5 +43,11 @@ pub fn boot_core_stack_end() -> usize {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -23,12 +23,8 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}

@ -29,6 +29,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -43,5 +43,11 @@ pub fn boot_core_stack_end() -> usize {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -23,12 +23,8 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}

@ -29,6 +29,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -66,5 +66,11 @@ pub fn boot_core_stack_end() -> usize {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -23,12 +23,8 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}

@ -310,7 +310,7 @@ diff -uNr 06_drivers_gpio_uart/src/bsp/raspberrypi/link.ld 07_uart_chainloader/s
}
.rodata :
@@ -32,5 +35,16 @@
@@ -35,5 +38,16 @@
__bss_end_inclusive = . - 8;
}
@ -357,7 +357,7 @@ diff -uNr 06_drivers_gpio_uart/src/bsp/raspberrypi/memory.rs 07_uart_chainloader
/// Physical devices.
#[cfg(feature = "bsp_rpi3")]
@@ -59,12 +64,34 @@
@@ -59,13 +64,35 @@
map::BOOT_CORE_STACK_END
}
@ -392,8 +392,9 @@ diff -uNr 06_drivers_gpio_uart/src/bsp/raspberrypi/memory.rs 07_uart_chainloader
/// - The linker-provided addresses must be u64 aligned.
-pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
+pub fn relocated_bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
}
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
diff -uNr 06_drivers_gpio_uart/src/console.rs 07_uart_chainloader/src/console.rs
--- 06_drivers_gpio_uart/src/console.rs
@ -531,7 +532,7 @@ diff -uNr 06_drivers_gpio_uart/src/main.rs 07_uart_chainloader/src/main.rs
diff -uNr 06_drivers_gpio_uart/src/relocate.rs 07_uart_chainloader/src/relocate.rs
--- 06_drivers_gpio_uart/src/relocate.rs
+++ 07_uart_chainloader/src/relocate.rs
@@ -0,0 +1,55 @@
@@ -0,0 +1,51 @@
+// SPDX-License-Identifier: MIT OR Apache-2.0
+//
+// Copyright (c) 2018-2020 Andre Richter <andre.o.richter@gmail.com>
@ -561,17 +562,13 @@ diff -uNr 06_drivers_gpio_uart/src/relocate.rs 07_uart_chainloader/src/relocate.
+ let mut current_binary_start_addr = bsp::memory::board_default_load_addr();
+
+ // Copy the whole binary.
+ loop {
+ while relocated_binary_start_addr <= relocated_binary_end_addr_inclusive {
+ core::ptr::write_volatile(
+ relocated_binary_start_addr,
+ core::ptr::read_volatile(current_binary_start_addr),
+ );
+ relocated_binary_start_addr = relocated_binary_start_addr.offset(1);
+ current_binary_start_addr = current_binary_start_addr.offset(1);
+
+ if relocated_binary_start_addr > relocated_binary_end_addr_inclusive {
+ break;
+ }
+ }
+
+ // The following function calls form a hack to achieve an "absolute jump" to

@ -32,6 +32,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -93,5 +93,11 @@ pub fn relocated_runtime_init_addr() -> *const u64 {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn relocated_bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -23,12 +23,8 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}

@ -27,17 +27,13 @@ pub unsafe fn relocate_self() -> ! {
let mut current_binary_start_addr = bsp::memory::board_default_load_addr();
// Copy the whole binary.
loop {
while relocated_binary_start_addr <= relocated_binary_end_addr_inclusive {
core::ptr::write_volatile(
relocated_binary_start_addr,
core::ptr::read_volatile(current_binary_start_addr),
);
relocated_binary_start_addr = relocated_binary_start_addr.offset(1);
current_binary_start_addr = current_binary_start_addr.offset(1);
if relocated_binary_start_addr > relocated_binary_end_addr_inclusive {
break;
}
}
// The following function calls form a hack to achieve an "absolute jump" to
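
The chainloader's copy loop gets the same restructuring. A standalone model follows; two local buffers stand in for the load and link addresses, so this is a sketch rather than the chainloader itself.

fn main() {
    let src = [1u64, 2, 3, 4];
    let mut dst = [0u64; 4];

    let mut dst_ptr = dst.as_mut_ptr();
    let dst_end_inclusive = unsafe { dst_ptr.add(dst.len() - 1) };
    let mut src_ptr = src.as_ptr();

    // Mirrors relocate_self(): copy while the destination cursor has not passed
    // the inclusive end, advancing both cursors one element per iteration.
    unsafe {
        while dst_ptr <= dst_end_inclusive {
            core::ptr::write_volatile(dst_ptr, core::ptr::read_volatile(src_ptr));
            dst_ptr = dst_ptr.offset(1);
            src_ptr = src_ptr.offset(1);
        }
    }

    assert_eq!(dst, src);
}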

@ -301,7 +301,7 @@ diff -uNr 07_uart_chainloader/src/bsp/raspberrypi/link.ld 08_timestamps/src/bsp/
}
.rodata :
@@ -35,16 +32,5 @@
@@ -38,16 +35,5 @@
__bss_end_inclusive = . - 8;
}
@ -348,7 +348,7 @@ diff -uNr 07_uart_chainloader/src/bsp/raspberrypi/memory.rs 08_timestamps/src/bs
/// Physical devices.
#[cfg(feature = "bsp_rpi3")]
@@ -64,34 +59,12 @@
@@ -64,35 +59,13 @@
map::BOOT_CORE_STACK_END
}
@ -383,8 +383,9 @@ diff -uNr 07_uart_chainloader/src/bsp/raspberrypi/memory.rs 08_timestamps/src/bs
/// - The linker-provided addresses must be u64 aligned.
-pub fn relocated_bss_range_inclusive() -> RangeInclusive<*mut u64> {
+pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
}
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
diff -uNr 07_uart_chainloader/src/main.rs 08_timestamps/src/main.rs
--- 07_uart_chainloader/src/main.rs
@ -586,7 +587,7 @@ diff -uNr 07_uart_chainloader/src/print.rs 08_timestamps/src/print.rs
diff -uNr 07_uart_chainloader/src/relocate.rs 08_timestamps/src/relocate.rs
--- 07_uart_chainloader/src/relocate.rs
+++ 08_timestamps/src/relocate.rs
@@ -1,55 +0,0 @@
@@ -1,51 +0,0 @@
-// SPDX-License-Identifier: MIT OR Apache-2.0
-//
-// Copyright (c) 2018-2020 Andre Richter <andre.o.richter@gmail.com>
@ -616,17 +617,13 @@ diff -uNr 07_uart_chainloader/src/relocate.rs 08_timestamps/src/relocate.rs
- let mut current_binary_start_addr = bsp::memory::board_default_load_addr();
-
- // Copy the whole binary.
- loop {
- while relocated_binary_start_addr <= relocated_binary_end_addr_inclusive {
- core::ptr::write_volatile(
- relocated_binary_start_addr,
- core::ptr::read_volatile(current_binary_start_addr),
- );
- relocated_binary_start_addr = relocated_binary_start_addr.offset(1);
- current_binary_start_addr = current_binary_start_addr.offset(1);
-
- if relocated_binary_start_addr > relocated_binary_end_addr_inclusive {
- break;
- }
- }
-
- // The following function calls form a hack to achieve an "absolute jump" to

@ -29,6 +29,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -66,5 +66,11 @@ pub fn boot_core_stack_end() -> usize {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -23,12 +23,8 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}

@ -29,6 +29,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -66,5 +66,11 @@ pub fn boot_core_stack_end() -> usize {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -23,12 +23,8 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}

@ -29,6 +29,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -66,5 +66,11 @@ pub fn boot_core_stack_end() -> usize {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -23,12 +23,8 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}

@ -32,6 +32,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -98,5 +98,11 @@ pub fn boot_core_stack_end() -> usize {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -25,12 +25,8 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}

@ -37,6 +37,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -98,5 +98,11 @@ pub fn boot_core_stack_end() -> usize {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -25,12 +25,8 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}

@ -1512,8 +1512,8 @@ diff -uNr 12_exceptions_part1_groundwork/src/memory/mmu.rs 13_integrated_testing
diff -uNr 12_exceptions_part1_groundwork/src/memory.rs 13_integrated_testing/src/memory.rs
--- 12_exceptions_part1_groundwork/src/memory.rs
+++ 13_integrated_testing/src/memory.rs
@@ -34,3 +34,40 @@
}
@@ -30,3 +30,40 @@
ptr = ptr.offset(1);
}
}
+

@ -37,6 +37,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -98,5 +98,11 @@ pub fn boot_core_stack_end() -> usize {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -25,13 +25,9 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}

@ -37,6 +37,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -102,5 +102,11 @@ pub fn boot_core_stack_end() -> usize {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -25,13 +25,9 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}

@ -1453,8 +1453,8 @@ diff -uNr 14_exceptions_part2_peripheral_IRQs/src/bsp/raspberrypi/driver.rs 15_v
diff -uNr 14_exceptions_part2_peripheral_IRQs/src/bsp/raspberrypi/link.ld 15_virtual_mem_part2_mmio_remap/src/bsp/raspberrypi/link.ld
--- 14_exceptions_part2_peripheral_IRQs/src/bsp/raspberrypi/link.ld
+++ 15_virtual_mem_part2_mmio_remap/src/bsp/raspberrypi/link.ld
@@ -39,6 +39,11 @@
. = ALIGN(8);
@@ -42,6 +42,11 @@
. += 8;
__bss_end_inclusive = . - 8;
}
+ . = ALIGN(65536);

@ -37,6 +37,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}
. = ALIGN(65536);

@ -184,5 +184,11 @@ pub fn phys_boot_core_stack_end() -> Address<Physical> {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -25,13 +25,9 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}


@ -29,6 +29,9 @@ SECTIONS
__bss_start = .;
*(.bss*);
. = ALIGN(8);
/* Fill for the bss == 0 case, so that __bss_start <= __bss_end_inclusive holds */
. += 8;
__bss_end_inclusive = . - 8;
}

@ -66,5 +66,11 @@ pub fn boot_core_stack_end() -> usize {
/// - Values are provided by the linker script and must be trusted as-is.
/// - The linker-provided addresses must be u64 aligned.
pub fn bss_range_inclusive() -> RangeInclusive<*mut u64> {
unsafe { RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get()) }
let range;
unsafe {
range = RangeInclusive::new(__bss_start.get(), __bss_end_inclusive.get());
}
assert!(!range.is_empty());
range
}

@ -23,12 +23,8 @@ where
let mut ptr = *range.start();
let end_inclusive = *range.end();
loop {
while ptr <= end_inclusive {
core::ptr::write_volatile(ptr, T::from(0));
ptr = ptr.offset(1);
if ptr > end_inclusive {
break;
}
}
}
