* "Low" memory variables
****************************************************************/
-extern u8 _datalow_seg, datalow_base[];
-#define SEG_LOW ((u32)&_datalow_seg)
+extern u8 _zonelow_seg, zonelow_base[];
+#define SEG_LOW ((u32)&_zonelow_seg)
#if MODESEGMENT
#define GET_LOW(var) GET_FARVAR(SEG_LOW, (var))
#define SET_LOW(var, val) SET_FARVAR(SEG_LOW, (var), (val))
-#define LOWFLAT2LOW(var) ((typeof(var))((void*)(var) - (u32)datalow_base))
+#define LOWFLAT2LOW(var) ((typeof(var))((void*)(var) - (u32)zonelow_base))
#else
#define GET_LOW(var) (var)
#define SET_LOW(var, val) do { (var) = (val); } while (0)
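// Hypothetical usage sketch (not part of the patch): a variable declared as
//   u8 ExampleFlag VARLOW;
// would be read with GET_LOW(ExampleFlag) and written with
// SET_LOW(ExampleFlag, 1).  When MODESEGMENT is set that expands to a far
// access through SEG_LOW; in 32-bit flat builds it is a plain access.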
u32 newend = ALIGN(RomEnd + size, OPTION_ROM_ALIGN) + OPROM_HEADER_RESERVE;
if (newend > (u32)RomBase->allocend)
return NULL;
- if (newend < (u32)datalow_base + OPROM_HEADER_RESERVE)
- newend = (u32)datalow_base + OPROM_HEADER_RESERVE;
+ if (newend < (u32)zonelow_base + OPROM_HEADER_RESERVE)
+ newend = (u32)zonelow_base + OPROM_HEADER_RESERVE;
RomBase->data = RomBase->dataend = (void*)newend;
return (void*)RomEnd;
}
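// The clamp above appears to keep the option-ROM reservation (RomBase->data)
// from dropping below the bottom of ZoneLow, which now starts at
// zonelow_base + OPROM_HEADER_RESERVE.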
// Populate other regions
addSpace(&ZoneTmpLow, (void*)BUILD_STACK_ADDR, (void*)BUILD_EBDA_MINIMUM);
addSpace(&ZoneFSeg, BiosTableSpace, &BiosTableSpace[CONFIG_MAX_BIOSTABLE]);
- extern u8 final_datalow_start[];
- addSpace(&ZoneLow, datalow_base + OPROM_HEADER_RESERVE, final_datalow_start);
+ extern u8 final_varlow_start[];
+ addSpace(&ZoneLow, zonelow_base + OPROM_HEADER_RESERVE, final_varlow_start);
RomBase = findLast(&ZoneLow);
if (highram) {
addSpace(&ZoneHigh, (void*)highram
         , (void*)highram + BUILD_MAX_HIGHTABLE);
}
addSpace(&ZoneTmpLow, (void *)low_pmm, (void *)low_pmm + low_pmm_size);
addSpace(&ZoneFSeg, BiosTableSpace, &BiosTableSpace[CONFIG_MAX_BIOSTABLE]);
- extern u8 final_datalow_start[];
- addSpace(&ZoneLow, datalow_base + OPROM_HEADER_RESERVE, final_datalow_start);
+ extern u8 final_varlow_start[];
+ addSpace(&ZoneLow, zonelow_base + OPROM_HEADER_RESERVE, final_varlow_start);
RomBase = findLast(&ZoneLow);
}
}
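// In both code paths above, ZoneLow now appears to span from
// zonelow_base + OPROM_HEADER_RESERVE up to final_varlow_start, so the free
// low-memory area sits just below the spot reserved for the relocated
// low-memory variables.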
// Move low-memory initial variable content to new location.
- extern u8 datalow_start[], datalow_end[], final_datalow_start[];
- memmove(final_datalow_start, datalow_start, datalow_end - datalow_start);
+ extern u8 varlow_start[], varlow_end[], final_varlow_start[];
+ memmove(final_varlow_start, varlow_start, varlow_end - varlow_start);
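// varlow_start/varlow_end bracket the link-time copy of the low variables
// inside the flat image; the memmove() above copies their initial values to
// final_varlow_start, where they will live at runtime.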
// Add space free'd during relocation in f-segment to ZoneFSeg
extern u8 code32init_end[];
, E820_RESERVED);
// Init extra stack
- StackPos = (void*)(&ExtraStack[BUILD_EXTRA_STACK_SIZE] - datalow_base);
+ StackPos = (void*)(&ExtraStack[BUILD_EXTRA_STACK_SIZE] - zonelow_base);
}
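// StackPos appears to be stored as an offset from zonelow_base so that 16-bit
// code can reach the extra stack through the _zonelow_seg segment (see the
// %ds/%ss setup in the assembly below).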
void
extern u32 _reloc_rel_start[], _reloc_rel_end[];
extern u32 _reloc_init_start[], _reloc_init_end[];
extern u8 code32init_start[], code32init_end[];
- extern u32 _reloc_datalow_start[], _reloc_datalow_end[];
- extern u8 datalow_start[], datalow_end[], final_datalow_start[];
+ extern u32 _reloc_varlow_start[], _reloc_varlow_end[];
+ extern u8 varlow_start[], varlow_end[], final_varlow_start[];
// Allocate space for init code.
u32 initsize = code32init_end - code32init_start;
// Copy code and update relocs (init absolute, init relative, and runtime)
dprintf(1, "Relocating low data from %p to %p (size %d)\n"
- , datalow_start, final_datalow_start, datalow_end - datalow_start);
- updateRelocs(code32flat_start, _reloc_datalow_start, _reloc_datalow_end
- , final_datalow_start - datalow_start);
+ , varlow_start, final_varlow_start, varlow_end - varlow_start);
+ updateRelocs(code32flat_start, _reloc_varlow_start, _reloc_varlow_end
+ , final_varlow_start - varlow_start);
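// updateRelocs() is given the relocation entries recorded for the varlow
// section and the distance it moves (final_varlow_start - varlow_start), so
// flat-mode pointers into the low variables presumably keep working after
// the copy.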
dprintf(1, "Relocating init from %p to %p (size %d)\n"
, code32init_start, codedest, initsize);
s32 delta = codedest - (void*)code32init_start;
cli
cld
// Use the ExtraStack in low mem.
- movl $_datalow_seg, %eax
+ movl $_zonelow_seg, %eax
movw %ax, %ds
movw %ax, %ss
movl $ExtraStack + BUILD_EXTRA_STACK_SIZE, %esp
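// %ss:%esp now presumably addresses the top of ExtraStack inside the
// low-variable window: _zonelow_seg is zonelow_base >> 4, and ExtraStack
// resolves to its offset from zonelow_base in segmented code.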
cld
pushw %ds // Set %ds:%eax to space on ExtraStack
pushl %eax
- movl $_datalow_seg, %eax
+ movl $_zonelow_seg, %eax
movl %eax, %ds
movl StackPos, %eax
subl $24, %eax
# define VAR16FIXED(addr) VAR16VISIBLE
# define VAR32SEG __section(".discard.var32seg." UNIQSEC)
# define VAR32FLATVISIBLE __section(".data.runtime." UNIQSEC) __VISIBLE
-# define VARLOW __section(".datalow." UNIQSEC) __VISIBLE
+# define VARLOW __section(".data.varlow." UNIQSEC) __VISIBLE
# define ASM16(code)
# define ASM32FLAT(code) __ASM(code)
# define ASSERT16() __force_link_error__only_in_16bit()
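// VARLOW above appears to drop each declaration into its own
// ".data.varlow.<unique>" section; layoutrom.py (below) then collects every
// ".data.varlow." section into the relocatable low-memory region.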
sections32flat = sec32flat_start = sec32flat_align = None
sections32init = sec32init_start = sec32init_align = None
sections32low = sec32low_start = sec32low_align = None
- datalow_base = final_sec32low_start = None
+ zonelow_base = final_sec32low_start = None
# Determine final memory addresses for sections
def doLayout(sections, genreloc):
sec32low_top = min(BUILD_BIOS_ADDR, li.sec32init_start)
final_sec32low_top = sec32low_top
relocdelta = final_sec32low_top - sec32low_top
- datalow_base = final_sec32low_top - 64*1024
- li.datalow_base = max(BUILD_ROM_START, alignpos(datalow_base, 2*1024))
+ zonelow_base = final_sec32low_top - 64*1024
+ li.zonelow_base = max(BUILD_ROM_START, alignpos(zonelow_base, 2*1024))
li.sec32low_start, li.sec32low_align = setSectionsStart(
li.sections32low, sec32low_top, 16
- , segoffset=li.datalow_base - relocdelta)
+ , segoffset=li.zonelow_base - relocdelta)
li.final_sec32low_start = li.sec32low_start + relocdelta
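# The segoffset above appears to give each low-memory symbol a second,
# segment-relative address measured from zonelow_base at its final location,
# which is what 16-bit code addressing it via _zonelow_seg needs.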
# Print statistics
def writeLinkerScripts(li, exportsyms, genreloc, out16, out32seg, out32flat):
# Write 16bit linker script
out = outXRefs(li.sections16, useseg=1) + """
- datalow_base = 0x%x ;
- _datalow_seg = 0x%x ;
+ zonelow_base = 0x%x ;
+ _zonelow_seg = 0x%x ;
code16_start = 0x%x ;
.text16 code16_start : {
%s
}
-""" % (li.datalow_base,
- li.datalow_base / 16,
+""" % (li.zonelow_base,
+ li.zonelow_base / 16,
li.sec16_start - BUILD_BIOS_ADDR,
outRelSections(li.sections16, 'code16_start', useseg=1))
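# _zonelow_seg is the real-mode segment form of zonelow_base (the address
# divided by 16); 16-bit code loads it into %ds/%ss to reach the low-variable
# window.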
outfile = open(out16, 'wb')
relocstr = (strRelocs("_reloc_abs", "code32init_start", absrelocs)
+ strRelocs("_reloc_rel", "code32init_start", relrelocs)
+ strRelocs("_reloc_init", "code32flat_start", initrelocs)
- + strRelocs("_reloc_datalow", "code32flat_start", lowrelocs))
+ + strRelocs("_reloc_varlow", "code32flat_start", lowrelocs))
numrelocs = len(absrelocs + relrelocs + initrelocs + lowrelocs)
sec32all_start -= numrelocs * 4
out = outXRefs(sections32all, exportsyms=exportsyms) + """
_reloc_min_align = 0x%x ;
- datalow_base = 0x%x ;
- final_datalow_start = 0x%x ;
+ zonelow_base = 0x%x ;
+ final_varlow_start = 0x%x ;
code32flat_start = 0x%x ;
.text code32flat_start : {
%s
- datalow_start = ABSOLUTE(.) ;
+ varlow_start = ABSOLUTE(.) ;
%s
- datalow_end = ABSOLUTE(.) ;
+ varlow_end = ABSOLUTE(.) ;
code32init_start = ABSOLUTE(.) ;
%s
code32init_end = ABSOLUTE(.) ;
code32flat_end = ABSOLUTE(.) ;
} :text
""" % (li.sec32init_align,
- li.datalow_base,
+ li.zonelow_base,
li.final_sec32low_start,
sec32all_start,
relocstr,
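# In the template above, varlow_start/varlow_end bracket the ".data.varlow."
# input sections in the flat image so the runtime knows how many bytes to
# memmove(), while final_varlow_start (filled from li.final_sec32low_start)
# is where that data will live after relocation.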
def findInit(sections):
# Recursively find and mark all "runtime" sections.
for section in sections:
- if ('.datalow.' in section.name or '.runtime.' in section.name
+ if ('.data.varlow.' in section.name or '.runtime.' in section.name
or '.export.' in section.name):
markRuntime(section, sections)
for section in sections:
findInit(sections)
# Note "low memory" parts
- for section in getSectionsPrefix(sections, '.datalow.'):
+ for section in getSectionsPrefix(sections, '.data.varlow.'):
section.category = '32low'
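# Tagging these sections '32low' appears to route them through the
# sections32low handling in doLayout() above, so everything declared with
# VARLOW ends up in the relocated low-memory window.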
# Determine the final memory locations of each kept section.