Allow loader to load 64-bit ELFv2 PowerPC kernels.

This commit is contained in:
Nathan Whitehorn 2015-12-01 17:01:27 +00:00
parent f19d421ac6
commit b931a53849
Notes: svn2git 2020-12-20 02:59:44 +00:00
svn path=/head/; revision=291598
3 changed files with 17 additions and 6 deletions

View File

@@ -77,8 +77,11 @@ ppc64_ofw_elf_exec(struct preloaded_file *fp)
}
e = (Elf_Ehdr *)&fmp->md_data;
/* Handle function descriptor */
entry = *(uint64_t *)e->e_entry;
/* Handle function descriptor for ELFv1 kernels */
if ((e->e_flags & 3) == 2)
entry = e->e_entry;
else
entry = *(uint64_t *)e->e_entry;
if ((error = md_load64(fp->f_args, &mdp, &dtbp)) != 0)
return (error);

View File

@@ -78,10 +78,15 @@ ppc64_elf_exec(struct preloaded_file *fp)
/* Figure out where to put it */
trampolinebase = archsw.arch_loadaddr(LOAD_RAW, NULL, 0);
/* Set up interesting values in function descriptor */
/* Set up loader trampoline */
trampoline = malloc(szkerneltramp);
memcpy(trampoline, &kerneltramp, szkerneltramp);
archsw.arch_copyout(e->e_entry + elf64_relocation_offset, &entry, 8);
/* Parse function descriptor for ELFv1 kernels */
if ((e->e_flags & 3) == 2)
entry = e->e_entry;
else
archsw.arch_copyout(e->e_entry + elf64_relocation_offset,
&entry, 8);
trampoline[2] = entry + elf64_relocation_offset;
trampoline[4] = 0; /* Phys. mem offset */
trampoline[5] = 0; /* OF entry point */

View File

@@ -75,8 +75,11 @@ ppc64_elf_exec(struct preloaded_file *fp)
}
e = (Elf_Ehdr *)&fmp->md_data;
/* Handle function descriptor */
entry = (void *)(uintptr_t)(*(uint64_t *)e->e_entry);
/* Handle function descriptor for ELFv1 kernels */
if ((e->e_flags & 3) == 2)
entry = e->e_entry;
else
entry = (void *)(uintptr_t)(*(uint64_t *)e->e_entry);
if ((error = md_load64(fp->f_args, &mdp)) != 0)
return (error);