elf_file.cc revision 6e8cce623b6e4fe0c9e4af605d675dd9d0338c38
1// Copyright 2014 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "elf_file.h"
6
7#include <stdlib.h>
8#include <sys/types.h>
9#include <unistd.h>
10#include <string>
11#include <vector>
12
13#include "debug.h"
14#include "elf_traits.h"
15#include "libelf.h"
16#include "packer.h"
17
18namespace relocation_packer {
19
// Stub identifier written to 'null out' packed data, "NULL".
// (0x4c4c554e reads as the bytes 'N','U','L','L' on a little-endian
// target, which Load() requires below.)
static const uint32_t kStubIdentifier = 0x4c4c554eu;

// Out-of-band dynamic tags used to indicate the offset and size of the
// android packed relocations section.  Values are taken from the start of
// the DT_LOOS range, reserved for OS-specific use.
static const ELF::Sword DT_ANDROID_REL_OFFSET = DT_LOOS;
static const ELF::Sword DT_ANDROID_REL_SIZE = DT_LOOS + 1;

// Alignment to preserve, in bytes.  This must be at least as large as the
// largest d_align and sh_addralign values found in the loaded file.
// Out of caution for RELRO page alignment, we preserve to a complete target
// page.  See http://www.airs.com/blog/archives/189.
static const size_t kPreserveAlignment = 4096;
33
34namespace {
35
36// Get section data.  Checks that the section has exactly one data entry,
37// so that the section size and the data size are the same.  True in
38// practice for all sections we resize when packing or unpacking.  Done
39// by ensuring that a call to elf_getdata(section, data) returns NULL as
40// the next data entry.
41Elf_Data* GetSectionData(Elf_Scn* section) {
42  Elf_Data* data = elf_getdata(section, NULL);
43  CHECK(data && elf_getdata(section, data) == NULL);
44  return data;
45}
46
47// Rewrite section data.  Allocates new data and makes it the data element's
48// buffer.  Relies on program exit to free allocated data.
49void RewriteSectionData(Elf_Data* data,
50                        const void* section_data,
51                        size_t size) {
52  CHECK(size == data->d_size);
53  uint8_t* area = new uint8_t[size];
54  memcpy(area, section_data, size);
55  data->d_buf = area;
56}
57
// Verbose ELF header logging.  Dumps the offset, size, and count fields of
// the main ELF header, one VLOG line per field, for debugging pack/unpack.
void VerboseLogElfHeader(const ELF::Ehdr* elf_header) {
  VLOG(1) << "e_phoff = " << elf_header->e_phoff;
  VLOG(1) << "e_shoff = " << elf_header->e_shoff;
  VLOG(1) << "e_ehsize = " << elf_header->e_ehsize;
  VLOG(1) << "e_phentsize = " << elf_header->e_phentsize;
  VLOG(1) << "e_phnum = " << elf_header->e_phnum;
  VLOG(1) << "e_shnum = " << elf_header->e_shnum;
  VLOG(1) << "e_shstrndx = " << elf_header->e_shstrndx;
}
68
69// Verbose ELF program header logging.
70void VerboseLogProgramHeader(size_t program_header_index,
71                             const ELF::Phdr* program_header) {
72  std::string type;
73  switch (program_header->p_type) {
74    case PT_NULL: type = "NULL"; break;
75    case PT_LOAD: type = "LOAD"; break;
76    case PT_DYNAMIC: type = "DYNAMIC"; break;
77    case PT_INTERP: type = "INTERP"; break;
78    case PT_NOTE: type = "NOTE"; break;
79    case PT_SHLIB: type = "SHLIB"; break;
80    case PT_PHDR: type = "PHDR"; break;
81    case PT_TLS: type = "TLS"; break;
82    default: type = "(OTHER)"; break;
83  }
84  VLOG(1) << "phdr " << program_header_index << " : " << type;
85  VLOG(1) << "  p_offset = " << program_header->p_offset;
86  VLOG(1) << "  p_vaddr = " << program_header->p_vaddr;
87  VLOG(1) << "  p_paddr = " << program_header->p_paddr;
88  VLOG(1) << "  p_filesz = " << program_header->p_filesz;
89  VLOG(1) << "  p_memsz = " << program_header->p_memsz;
90}
91
// Verbose ELF section header logging.  Dumps the address, offset, size,
// and alignment fields for the named section, one VLOG line per field.
void VerboseLogSectionHeader(const std::string& section_name,
                             const ELF::Shdr* section_header) {
  VLOG(1) << "section " << section_name;
  VLOG(1) << "  sh_addr = " << section_header->sh_addr;
  VLOG(1) << "  sh_offset = " << section_header->sh_offset;
  VLOG(1) << "  sh_size = " << section_header->sh_size;
  VLOG(1) << "  sh_addralign = " << section_header->sh_addralign;
}
101
// Verbose ELF section data logging.  Dumps the buffer pointer, offset,
// size, and alignment of one Elf_Data descriptor.
void VerboseLogSectionData(const Elf_Data* data) {
  VLOG(1) << "  data";
  VLOG(1) << "    d_buf = " << data->d_buf;
  VLOG(1) << "    d_off = " << data->d_off;
  VLOG(1) << "    d_size = " << data->d_size;
  VLOG(1) << "    d_align = " << data->d_align;
}
110
111}  // namespace
112
// Load the complete ELF file into a memory image in libelf, and identify
// the .rel.dyn or .rela.dyn, .dynamic, and .android.rel.dyn or
// .android.rela.dyn sections.  No-op if the ELF file has already been loaded.
// Returns false (with an ERROR log) on any validation failure; fatal
// structural problems abort via CHECK.  On success, stores the libelf
// handle and the located sections in member state.
bool ElfFile::Load() {
  if (elf_)
    return true;

  // Open read/write so that packing/unpacking edits can later be flushed
  // back through the same descriptor.
  Elf* elf = elf_begin(fd_, ELF_C_RDWR, NULL);
  CHECK(elf);

  if (elf_kind(elf) != ELF_K_ELF) {
    LOG(ERROR) << "File not in ELF format";
    return false;
  }

  ELF::Ehdr* elf_header = ELF::getehdr(elf);
  if (!elf_header) {
    LOG(ERROR) << "Failed to load ELF header: " << elf_errmsg(elf_errno());
    return false;
  }
  if (elf_header->e_machine != ELF::kMachine) {
    LOG(ERROR) << "ELF file architecture is not " << ELF::Machine();
    return false;
  }
  if (elf_header->e_type != ET_DYN) {
    // Packing only applies to shared objects, not executables or relocatable
    // objects.
    LOG(ERROR) << "ELF file is not a shared object";
    return false;
  }

  // Require that our endianness matches that of the target, and that both
  // are little-endian.  Safe for all current build/target combinations.
  const int endian = elf_header->e_ident[EI_DATA];
  CHECK(endian == ELFDATA2LSB);
  CHECK(__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__);

  // Also require that the file class is as expected.
  const int file_class = elf_header->e_ident[EI_CLASS];
  CHECK(file_class == ELF::kFileClass);

  VLOG(1) << "endian = " << endian << ", file class = " << file_class;
  VerboseLogElfHeader(elf_header);

  const ELF::Phdr* elf_program_header = ELF::getphdr(elf);
  CHECK(elf_program_header);

  // Locate the PT_DYNAMIC segment; exactly one must be present.
  const ELF::Phdr* dynamic_program_header = NULL;
  for (size_t i = 0; i < elf_header->e_phnum; ++i) {
    const ELF::Phdr* program_header = &elf_program_header[i];
    VerboseLogProgramHeader(i, program_header);

    if (program_header->p_type == PT_DYNAMIC) {
      CHECK(dynamic_program_header == NULL);
      dynamic_program_header = program_header;
    }
  }
  CHECK(dynamic_program_header != NULL);

  // Index of the section header string table, used to read section names.
  // NOTE(review): the elf_getshdrstrndx return value is unchecked; on
  // failure string_index would be uninitialized — confirm inputs are
  // always well-formed ELF at this point.
  size_t string_index;
  elf_getshdrstrndx(elf, &string_index);

  // Notes of the dynamic relocations, packed relocations, and .dynamic
  // sections.  Found while iterating sections, and later stored in class
  // attributes.
  Elf_Scn* found_relocations_section = NULL;
  Elf_Scn* found_android_relocations_section = NULL;
  Elf_Scn* found_dynamic_section = NULL;

  // Notes of relocation section types seen.  We require one or the other of
  // these; both is unsupported.
  bool has_rel_relocations = false;
  bool has_rela_relocations = false;

  // Flag set if we encounter any .debug* section.  We do not adjust any
  // offsets or addresses of any debug data, so if we find one of these then
  // the resulting output shared object should still run, but might not be
  // usable for debugging, disassembly, and so on.  Provides a warning if
  // this occurs.
  bool has_debug_section = false;

  Elf_Scn* section = NULL;
  while ((section = elf_nextscn(elf, section)) != NULL) {
    const ELF::Shdr* section_header = ELF::getshdr(section);
    std::string name = elf_strptr(elf, string_index, section_header->sh_name);
    VerboseLogSectionHeader(name, section_header);

    // Note relocation section types.
    if (section_header->sh_type == SHT_REL) {
      has_rel_relocations = true;
    }
    if (section_header->sh_type == SHT_RELA) {
      has_rela_relocations = true;
    }

    // Note special sections as we encounter them.
    if ((name == ".rel.dyn" || name == ".rela.dyn") &&
        section_header->sh_size > 0) {
      found_relocations_section = section;
    }
    if ((name == ".android.rel.dyn" || name == ".android.rela.dyn") &&
        section_header->sh_size > 0) {
      found_android_relocations_section = section;
    }
    // .dynamic is identified by matching the PT_DYNAMIC segment's file
    // offset rather than by name.
    if (section_header->sh_offset == dynamic_program_header->p_offset) {
      found_dynamic_section = section;
    }

    // If we find a section named .debug*, set the debug warning flag.
    if (std::string(name).find(".debug") == 0) {
      has_debug_section = true;
    }

    // Ensure we preserve alignment, repeated later for the data block(s).
    CHECK(section_header->sh_addralign <= kPreserveAlignment);

    Elf_Data* data = NULL;
    while ((data = elf_getdata(section, data)) != NULL) {
      CHECK(data->d_align <= kPreserveAlignment);
      VerboseLogSectionData(data);
    }
  }

  // Loading failed if we did not find the required special sections.
  if (!found_relocations_section) {
    LOG(ERROR) << "Missing or empty .rel.dyn or .rela.dyn section";
    return false;
  }
  if (!found_android_relocations_section) {
    LOG(ERROR) << "Missing or empty .android.rel.dyn or .android.rela.dyn "
               << "section (to fix, run with --help and follow the "
               << "pre-packing instructions)";
    return false;
  }
  if (!found_dynamic_section) {
    LOG(ERROR) << "Missing .dynamic section";
    return false;
  }

  // Loading failed if we could not identify the relocations type.
  if (!has_rel_relocations && !has_rela_relocations) {
    LOG(ERROR) << "No relocations sections found";
    return false;
  }
  if (has_rel_relocations && has_rela_relocations) {
    LOG(ERROR) << "Multiple relocations sections with different types found, "
               << "not currently supported";
    return false;
  }

  if (has_debug_section) {
    LOG(WARNING) << "Found .debug section(s), and ignored them";
  }

  // All checks passed; commit results to member state.
  elf_ = elf;
  relocations_section_ = found_relocations_section;
  dynamic_section_ = found_dynamic_section;
  android_relocations_section_ = found_android_relocations_section;
  relocations_type_ = has_rel_relocations ? REL : RELA;
  return true;
}
272
273namespace {
274
275// Helper for ResizeSection().  Adjust the main ELF header for the hole.
276void AdjustElfHeaderForHole(ELF::Ehdr* elf_header,
277                            ELF::Off hole_start,
278                            ssize_t hole_size) {
279  if (elf_header->e_phoff > hole_start) {
280    elf_header->e_phoff += hole_size;
281    VLOG(1) << "e_phoff adjusted to " << elf_header->e_phoff;
282  }
283  if (elf_header->e_shoff > hole_start) {
284    elf_header->e_shoff += hole_size;
285    VLOG(1) << "e_shoff adjusted to " << elf_header->e_shoff;
286  }
287}
288
289// Helper for ResizeSection().  Adjust all program headers for the hole.
290void AdjustProgramHeadersForHole(ELF::Phdr* elf_program_header,
291                                 size_t program_header_count,
292                                 ELF::Off hole_start,
293                                 ssize_t hole_size) {
294  for (size_t i = 0; i < program_header_count; ++i) {
295    ELF::Phdr* program_header = &elf_program_header[i];
296
297    if (program_header->p_offset > hole_start) {
298      // The hole start is past this segment, so adjust offsets and addrs.
299      program_header->p_offset += hole_size;
300      VLOG(1) << "phdr " << i
301              << " p_offset adjusted to "<< program_header->p_offset;
302
303      // Only adjust vaddr and paddr if this program header has them.
304      if (program_header->p_vaddr != 0) {
305        program_header->p_vaddr += hole_size;
306        VLOG(1) << "phdr " << i
307                << " p_vaddr adjusted to " << program_header->p_vaddr;
308      }
309      if (program_header->p_paddr != 0) {
310        program_header->p_paddr += hole_size;
311        VLOG(1) << "phdr " << i
312                << " p_paddr adjusted to " << program_header->p_paddr;
313      }
314    } else if (program_header->p_offset +
315               program_header->p_filesz > hole_start) {
316      // The hole start is within this segment, so adjust file and in-memory
317      // sizes, but leave offsets and addrs unchanged.
318      program_header->p_filesz += hole_size;
319      VLOG(1) << "phdr " << i
320              << " p_filesz adjusted to " << program_header->p_filesz;
321      program_header->p_memsz += hole_size;
322      VLOG(1) << "phdr " << i
323              << " p_memsz adjusted to " << program_header->p_memsz;
324    }
325  }
326}
327
328// Helper for ResizeSection().  Adjust all section headers for the hole.
329void AdjustSectionHeadersForHole(Elf* elf,
330                                 ELF::Off hole_start,
331                                 ssize_t hole_size) {
332  size_t string_index;
333  elf_getshdrstrndx(elf, &string_index);
334
335  Elf_Scn* section = NULL;
336  while ((section = elf_nextscn(elf, section)) != NULL) {
337    ELF::Shdr* section_header = ELF::getshdr(section);
338    std::string name = elf_strptr(elf, string_index, section_header->sh_name);
339
340    if (section_header->sh_offset > hole_start) {
341      section_header->sh_offset += hole_size;
342      VLOG(1) << "section " << name
343              << " sh_offset adjusted to " << section_header->sh_offset;
344      // Only adjust section addr if this section has one.
345      if (section_header->sh_addr != 0) {
346        section_header->sh_addr += hole_size;
347        VLOG(1) << "section " << name
348                << " sh_addr adjusted to " << section_header->sh_addr;
349      }
350    }
351  }
352}
353
// Helper for ResizeSection().  Adjust the .dynamic section for the hole.
// Shifts every offset-holding dynamic tag that points beyond the hole
// start, and (when resizing the dynamic relocations themselves) updates the
// relocation size and count tags.  Works on a copy of the section data,
// then writes it back via RewriteSectionData.
template <typename Rel>
void AdjustDynamicSectionForHole(Elf_Scn* dynamic_section,
                                 bool is_relocations_resize,
                                 ELF::Off hole_start,
                                 ssize_t hole_size) {
  Elf_Data* data = GetSectionData(dynamic_section);

  // Copy the raw section bytes into a vector of dynamic entries so we can
  // edit them without aliasing the libelf buffer.
  const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf);
  std::vector<ELF::Dyn> dynamics(
      dynamic_base,
      dynamic_base + data->d_size / sizeof(dynamics[0]));

  for (size_t i = 0; i < dynamics.size(); ++i) {
    ELF::Dyn* dynamic = &dynamics[i];
    const ELF::Sword tag = dynamic->d_tag;
    // Any tags that hold offsets are adjustment candidates.
    // (DT_ANDROID_REL_SIZE is a size, not an offset, so it is excluded.)
    const bool is_adjustable = (tag == DT_PLTGOT ||
                                tag == DT_HASH ||
                                tag == DT_STRTAB ||
                                tag == DT_SYMTAB ||
                                tag == DT_RELA ||
                                tag == DT_INIT ||
                                tag == DT_FINI ||
                                tag == DT_REL ||
                                tag == DT_JMPREL ||
                                tag == DT_INIT_ARRAY ||
                                tag == DT_FINI_ARRAY ||
                                tag == DT_ANDROID_REL_OFFSET);
    if (is_adjustable && dynamic->d_un.d_ptr > hole_start) {
      dynamic->d_un.d_ptr += hole_size;
      VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag
              << " d_ptr adjusted to " << dynamic->d_un.d_ptr;
    }

    // If we are specifically resizing dynamic relocations, we need to make
    // some added adjustments to tags that indicate the counts of relative
    // relocations in the shared object.
    if (!is_relocations_resize)
      continue;

    // DT_RELSZ or DT_RELASZ indicate the overall size of relocations.
    // Only one will be present.  Adjust by hole size.
    if (tag == DT_RELSZ || tag == DT_RELASZ) {
      dynamic->d_un.d_val += hole_size;
      VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag
              << " d_val adjusted to " << dynamic->d_un.d_val;
    }

    // DT_RELCOUNT or DT_RELACOUNT hold the count of relative relocations.
    // Only one will be present.  Packing reduces it to the alignment
    // padding, if any; unpacking restores it to its former value.  The
    // crazy linker does not use it, but we update it anyway.
    if (tag == DT_RELCOUNT || tag == DT_RELACOUNT) {
      // Cast sizeof to a signed type to avoid the division result being
      // promoted into an unsigned size_t.
      const ssize_t sizeof_rel = static_cast<ssize_t>(sizeof(Rel));
      dynamic->d_un.d_val += hole_size / sizeof_rel;
      VLOG(1) << "dynamic[" << i << "] " << dynamic->d_tag
              << " d_val adjusted to " << dynamic->d_un.d_val;
    }

    // DT_RELENT and DT_RELAENT don't change, but make sure they are what
    // we expect.  Only one will be present.
    if (tag == DT_RELENT || tag == DT_RELAENT) {
      CHECK(dynamic->d_un.d_val == sizeof(Rel));
    }
  }

  // Write the edited copy back over the section data.
  void* section_data = &dynamics[0];
  size_t bytes = dynamics.size() * sizeof(dynamics[0]);
  RewriteSectionData(data, section_data, bytes);
}
427
428// Helper for ResizeSection().  Adjust the .dynsym section for the hole.
429// We need to adjust the values for the symbols represented in it.
430void AdjustDynSymSectionForHole(Elf_Scn* dynsym_section,
431                                ELF::Off hole_start,
432                                ssize_t hole_size) {
433  Elf_Data* data = GetSectionData(dynsym_section);
434
435  const ELF::Sym* dynsym_base = reinterpret_cast<ELF::Sym*>(data->d_buf);
436  std::vector<ELF::Sym> dynsyms
437      (dynsym_base,
438       dynsym_base + data->d_size / sizeof(dynsyms[0]));
439
440  for (size_t i = 0; i < dynsyms.size(); ++i) {
441    ELF::Sym* dynsym = &dynsyms[i];
442    const int type = static_cast<int>(ELF_ST_TYPE(dynsym->st_info));
443    const bool is_adjustable = (type == STT_OBJECT ||
444                                type == STT_FUNC ||
445                                type == STT_SECTION ||
446                                type == STT_FILE ||
447                                type == STT_COMMON ||
448                                type == STT_TLS);
449    if (is_adjustable && dynsym->st_value > hole_start) {
450      dynsym->st_value += hole_size;
451      VLOG(1) << "dynsym[" << i << "] type=" << type
452              << " st_value adjusted to " << dynsym->st_value;
453    }
454  }
455
456  void* section_data = &dynsyms[0];
457  size_t bytes = dynsyms.size() * sizeof(dynsyms[0]);
458  RewriteSectionData(data, section_data, bytes);
459}
460
461// Helper for ResizeSection().  Adjust the plt relocations section for the
462// hole.  We need to adjust the offset of every relocation inside it that
463// falls beyond the hole start.
464template <typename Rel>
465void AdjustRelPltSectionForHole(Elf_Scn* relplt_section,
466                                ELF::Off hole_start,
467                                ssize_t hole_size) {
468  Elf_Data* data = GetSectionData(relplt_section);
469
470  const Rel* relplt_base = reinterpret_cast<Rel*>(data->d_buf);
471  std::vector<Rel> relplts(
472      relplt_base,
473      relplt_base + data->d_size / sizeof(relplts[0]));
474
475  for (size_t i = 0; i < relplts.size(); ++i) {
476    Rel* relplt = &relplts[i];
477    if (relplt->r_offset > hole_start) {
478      relplt->r_offset += hole_size;
479      VLOG(1) << "relplt[" << i
480              << "] r_offset adjusted to " << relplt->r_offset;
481    }
482  }
483
484  void* section_data = &relplts[0];
485  size_t bytes = relplts.size() * sizeof(relplts[0]);
486  RewriteSectionData(data, section_data, bytes);
487}
488
489// Helper for ResizeSection().  Adjust the .symtab section for the hole.
490// We want to adjust the value of every symbol in it that falls beyond
491// the hole start.
492void AdjustSymTabSectionForHole(Elf_Scn* symtab_section,
493                                ELF::Off hole_start,
494                                ssize_t hole_size) {
495  Elf_Data* data = GetSectionData(symtab_section);
496
497  const ELF::Sym* symtab_base = reinterpret_cast<ELF::Sym*>(data->d_buf);
498  std::vector<ELF::Sym> symtab(
499      symtab_base,
500      symtab_base + data->d_size / sizeof(symtab[0]));
501
502  for (size_t i = 0; i < symtab.size(); ++i) {
503    ELF::Sym* sym = &symtab[i];
504    if (sym->st_value > hole_start) {
505      sym->st_value += hole_size;
506      VLOG(1) << "symtab[" << i << "] value adjusted to " << sym->st_value;
507    }
508  }
509
510  void* section_data = &symtab[0];
511  size_t bytes = symtab.size() * sizeof(symtab[0]);
512  RewriteSectionData(data, section_data, bytes);
513}
514
515// Resize a section.  If the new size is larger than the current size, open
516// up a hole by increasing file offsets that come after the hole.  If smaller
517// than the current size, remove the hole by decreasing those offsets.
518template <typename Rel>
519void ResizeSection(Elf* elf, Elf_Scn* section, size_t new_size) {
520  ELF::Shdr* section_header = ELF::getshdr(section);
521  if (section_header->sh_size == new_size)
522    return;
523
524  // Note if we are resizing the real dyn relocations.  If yes, then we have
525  // to massage d_un.d_val in the dynamic section where d_tag is DT_RELSZ or
526  // DT_RELASZ and DT_RELCOUNT or DT_RELACOUNT.
527  size_t string_index;
528  elf_getshdrstrndx(elf, &string_index);
529  const std::string section_name =
530      elf_strptr(elf, string_index, section_header->sh_name);
531  const bool is_relocations_resize =
532      (section_name == ".rel.dyn" || section_name == ".rela.dyn");
533
534  // Require that the section size and the data size are the same.  True
535  // in practice for all sections we resize when packing or unpacking.
536  Elf_Data* data = GetSectionData(section);
537  CHECK(data->d_off == 0 && data->d_size == section_header->sh_size);
538
539  // Require that the section is not zero-length (that is, has allocated
540  // data that we can validly expand).
541  CHECK(data->d_size && data->d_buf);
542
543  const ELF::Off hole_start = section_header->sh_offset;
544  const ssize_t hole_size = new_size - data->d_size;
545
546  VLOG_IF(1, (hole_size > 0)) << "expand section size = " << data->d_size;
547  VLOG_IF(1, (hole_size < 0)) << "shrink section size = " << data->d_size;
548
549  // Resize the data and the section header.
550  data->d_size += hole_size;
551  section_header->sh_size += hole_size;
552
553  ELF::Ehdr* elf_header = ELF::getehdr(elf);
554  ELF::Phdr* elf_program_header = ELF::getphdr(elf);
555
556  // Add the hole size to all offsets in the ELF file that are after the
557  // start of the hole.  If the hole size is positive we are expanding the
558  // section to create a new hole; if negative, we are closing up a hole.
559
560  // Start with the main ELF header.
561  AdjustElfHeaderForHole(elf_header, hole_start, hole_size);
562
563  // Adjust all program headers.
564  AdjustProgramHeadersForHole(elf_program_header,
565                              elf_header->e_phnum,
566                              hole_start,
567                              hole_size);
568
569  // Adjust all section headers.
570  AdjustSectionHeadersForHole(elf, hole_start, hole_size);
571
572  // We use the dynamic program header entry to locate the dynamic section.
573  const ELF::Phdr* dynamic_program_header = NULL;
574
575  // Find the dynamic program header entry.
576  for (size_t i = 0; i < elf_header->e_phnum; ++i) {
577    ELF::Phdr* program_header = &elf_program_header[i];
578
579    if (program_header->p_type == PT_DYNAMIC) {
580      dynamic_program_header = program_header;
581    }
582  }
583  CHECK(dynamic_program_header);
584
585  // Sections requiring special attention, and the packed android
586  // relocations offset.
587  Elf_Scn* dynamic_section = NULL;
588  Elf_Scn* dynsym_section = NULL;
589  Elf_Scn* plt_relocations_section = NULL;
590  Elf_Scn* symtab_section = NULL;
591  ELF::Off android_relocations_offset = 0;
592
593  // Find these sections, and the packed android relocations offset.
594  section = NULL;
595  while ((section = elf_nextscn(elf, section)) != NULL) {
596    ELF::Shdr* section_header = ELF::getshdr(section);
597    std::string name = elf_strptr(elf, string_index, section_header->sh_name);
598
599    if (section_header->sh_offset == dynamic_program_header->p_offset) {
600      dynamic_section = section;
601    }
602    if (name == ".dynsym") {
603      dynsym_section = section;
604    }
605    if (name == ".rel.plt" || name == ".rela.plt") {
606      plt_relocations_section = section;
607    }
608    if (name == ".symtab") {
609      symtab_section = section;
610    }
611
612    // Note packed android relocations offset.
613    if (name == ".android.rel.dyn" || name == ".android.rela.dyn") {
614      android_relocations_offset = section_header->sh_offset;
615    }
616  }
617  CHECK(dynamic_section != NULL);
618  CHECK(dynsym_section != NULL);
619  CHECK(plt_relocations_section != NULL);
620  CHECK(android_relocations_offset != 0);
621
622  // Adjust the .dynamic section for the hole.  Because we have to edit the
623  // current contents of .dynamic we disallow resizing it.
624  CHECK(section != dynamic_section);
625  AdjustDynamicSectionForHole<Rel>(dynamic_section,
626                                   is_relocations_resize,
627                                   hole_start,
628                                   hole_size);
629
630  // Adjust the .dynsym section for the hole.
631  AdjustDynSymSectionForHole(dynsym_section, hole_start, hole_size);
632
633  // Adjust the plt relocations section for the hole.
634  AdjustRelPltSectionForHole<Rel>(plt_relocations_section,
635                                  hole_start,
636                                  hole_size);
637
638  // If present, adjust the .symtab section for the hole.  If the shared
639  // library was stripped then .symtab will be absent.
640  if (symtab_section)
641    AdjustSymTabSectionForHole(symtab_section, hole_start, hole_size);
642}
643
644// Find the first slot in a dynamics array with the given tag.  The array
645// always ends with a free (unused) element, and which we exclude from the
646// search.  Returns dynamics->size() if not found.
647size_t FindDynamicEntry(ELF::Sword tag,
648                        std::vector<ELF::Dyn>* dynamics) {
649  // Loop until the penultimate entry.  We exclude the end sentinel.
650  for (size_t i = 0; i < dynamics->size() - 1; ++i) {
651    if (dynamics->at(i).d_tag == tag)
652      return i;
653  }
654
655  // The tag was not found.
656  return dynamics->size();
657}
658
659// Replace the first free (unused) slot in a dynamics vector with the given
660// value.  The vector always ends with a free (unused) element, so the slot
661// found cannot be the last one in the vector.
662void AddDynamicEntry(const ELF::Dyn& dyn,
663                     std::vector<ELF::Dyn>* dynamics) {
664  const size_t slot = FindDynamicEntry(DT_NULL, dynamics);
665  if (slot == dynamics->size()) {
666    LOG(FATAL) << "No spare dynamic array slots found "
667               << "(to fix, increase gold's --spare-dynamic-tags value)";
668  }
669
670  // Replace this entry with the one supplied.
671  dynamics->at(slot) = dyn;
672  VLOG(1) << "dynamic[" << slot << "] overwritten with " << dyn.d_tag;
673}
674
675// Remove the element in the dynamics vector that matches the given tag with
676// unused slot data.  Shuffle the following elements up, and ensure that the
677// last is the null sentinel.
678void RemoveDynamicEntry(ELF::Sword tag,
679                        std::vector<ELF::Dyn>* dynamics) {
680  const size_t slot = FindDynamicEntry(tag, dynamics);
681  CHECK(slot != dynamics->size());
682
683  // Remove this entry by shuffling up everything that follows.
684  for (size_t i = slot; i < dynamics->size() - 1; ++i) {
685    dynamics->at(i) = dynamics->at(i + 1);
686    VLOG(1) << "dynamic[" << i
687            << "] overwritten with dynamic[" << i + 1 << "]";
688  }
689
690  // Ensure that the end sentinel is still present.
691  CHECK(dynamics->at(dynamics->size() - 1).d_tag == DT_NULL);
692}
693
// Adjust a relocation.  For a relocation without addend, we find its target
// in the section and adjust that.  For a relocation with addend, the target
// is the relocation addend, and the section data at the target is zero.
// Declared here; the two explicit specializations below implement the REL
// (adjust *target in section data) and RELA (adjust r_addend) strategies.
template <typename Rel>
void AdjustRelocation(ssize_t index,
                      ELF::Addr hole_start,
                      ssize_t hole_size,
                      Rel* relocation,
                      ELF::Off* target);
703
704template <>
705void AdjustRelocation<ELF::Rel>(ssize_t index,
706                                ELF::Addr hole_start,
707                                ssize_t hole_size,
708                                ELF::Rel* relocation,
709                                ELF::Off* target) {
710  // Adjust the target if after the hole start.
711  if (*target > hole_start) {
712    *target += hole_size;
713    VLOG(1) << "relocation[" << index << "] target adjusted to " << *target;
714  }
715}
716
717template <>
718void AdjustRelocation<ELF::Rela>(ssize_t index,
719                                 ELF::Addr hole_start,
720                                 ssize_t hole_size,
721                                 ELF::Rela* relocation,
722                                 ELF::Off* target) {
723  // The relocation's target is the addend.  Adjust if after the hole start.
724  if (relocation->r_addend > hole_start) {
725    relocation->r_addend += hole_size;
726    VLOG(1) << "relocation["
727            << index << "] addend adjusted to " << relocation->r_addend;
728  }
729}
730
// For relative relocations without addends, adjust the file data to which
// they refer.  For relative relocations with addends, adjust the addends.
// This translates data into the area it will occupy after the hole in
// the dynamic relocations is added or removed.
template <typename Rel>
void AdjustRelocationTargets(Elf* elf,
                             ELF::Off hole_start,
                             ssize_t hole_size,
                             std::vector<Rel>* relocations) {
  Elf_Scn* section = NULL;
  while ((section = elf_nextscn(elf, section)) != NULL) {
    const ELF::Shdr* section_header = ELF::getshdr(section);

    // Ignore sections that do not appear in a process memory image.
    if (section_header->sh_addr == 0)
      continue;

    Elf_Data* data = GetSectionData(section);

    // Ignore sections with no effective data.
    if (data->d_buf == NULL)
      continue;

    // Identify this section's start and end addresses.
    const ELF::Addr section_start = section_header->sh_addr;
    const ELF::Addr section_end = section_start + section_header->sh_size;

    // Create a copy of the section's data so we can adjust targets without
    // touching the live buffer until we know something changed.
    uint8_t* area = new uint8_t[data->d_size];
    memcpy(area, data->d_buf, data->d_size);

    for (size_t i = 0; i < relocations->size(); ++i) {
      Rel* relocation = &relocations->at(i);
      CHECK(ELF_R_TYPE(relocation->r_info) == ELF::kRelativeRelocationCode);

      // See if this relocation points into the current section.
      if (relocation->r_offset >= section_start &&
          relocation->r_offset < section_end) {
        // The relocation's target is what it points to in area.
        // For relocations without addend, this is what we adjust; for
        // relocations with addend, we leave this (it will be zero)
        // and instead adjust the addend.
        ELF::Addr byte_offset = relocation->r_offset - section_start;
        ELF::Off* target = reinterpret_cast<ELF::Off*>(area + byte_offset);
        AdjustRelocation<Rel>(i, hole_start, hole_size, relocation, target);
      }
    }

    // If we altered the data for this section, write it back.
    if (memcmp(area, data->d_buf, data->d_size)) {
      RewriteSectionData(data, area, data->d_size);
    }
    // Safe to free in all cases: RewriteSectionData copies into its own
    // freshly allocated buffer rather than keeping 'area'.
    delete [] area;
  }
}
786
// Pad relocations with a given number of null (NONE-type) relocations.
// Declared here and specialized below for ELF::Rel and ELF::Rela, because
// only Rela entries carry an r_addend field that needs initializing.
template <typename Rel>
void PadRelocations(size_t count, std::vector<Rel>* relocations);
790
791template <>
792void PadRelocations<ELF::Rel>(size_t count,
793                              std::vector<ELF::Rel>* relocations) {
794  ELF::Rel null_relocation;
795  null_relocation.r_offset = 0;
796  null_relocation.r_info = ELF_R_INFO(0, ELF::kNoRelocationCode);
797  std::vector<ELF::Rel> padding(count, null_relocation);
798  relocations->insert(relocations->end(), padding.begin(), padding.end());
799}
800
801template <>
802void PadRelocations<ELF::Rela>(size_t count,
803                               std::vector<ELF::Rela>* relocations) {
804  ELF::Rela null_relocation;
805  null_relocation.r_offset = 0;
806  null_relocation.r_info = ELF_R_INFO(0, ELF::kNoRelocationCode);
807  null_relocation.r_addend = 0;
808  std::vector<ELF::Rela> padding(count, null_relocation);
809  relocations->insert(relocations->end(), padding.begin(), padding.end());
810}
811
812// Adjust relocations so that the offset that they indicate will be correct
813// after the hole in the dynamic relocations is added or removed (in effect,
814// relocate the relocations).
815template <typename Rel>
816void AdjustRelocations(ELF::Off hole_start,
817                       ssize_t hole_size,
818                       std::vector<Rel>* relocations) {
819  for (size_t i = 0; i < relocations->size(); ++i) {
820    Rel* relocation = &relocations->at(i);
821    if (relocation->r_offset > hole_start) {
822      relocation->r_offset += hole_size;
823      VLOG(1) << "relocation[" << i
824              << "] offset adjusted to " << relocation->r_offset;
825    }
826  }
827}
828
829}  // namespace
830
831// Remove relative entries from dynamic relocations and write as packed
832// data into android packed relocations.
833bool ElfFile::PackRelocations() {
834  // Load the ELF file into libelf.
835  if (!Load()) {
836    LOG(ERROR) << "Failed to load as ELF";
837    return false;
838  }
839
840  // Retrieve the current dynamic relocations section data.
841  Elf_Data* data = GetSectionData(relocations_section_);
842
843  if (relocations_type_ == REL) {
844    // Convert data to a vector of relocations.
845    const ELF::Rel* relocations_base = reinterpret_cast<ELF::Rel*>(data->d_buf);
846    std::vector<ELF::Rel> relocations(
847        relocations_base,
848        relocations_base + data->d_size / sizeof(relocations[0]));
849
850    LOG(INFO) << "Relocations   : REL";
851    return PackTypedRelocations<ELF::Rel>(relocations, data);
852  }
853
854  if (relocations_type_ == RELA) {
855    // Convert data to a vector of relocations with addends.
856    const ELF::Rela* relocations_base =
857        reinterpret_cast<ELF::Rela*>(data->d_buf);
858    std::vector<ELF::Rela> relocations(
859        relocations_base,
860        relocations_base + data->d_size / sizeof(relocations[0]));
861
862    LOG(INFO) << "Relocations   : RELA";
863    return PackTypedRelocations<ELF::Rela>(relocations, data);
864  }
865
866  NOTREACHED();
867  return false;
868}
869
// Helper for PackRelocations().  Rel type is one of ELF::Rel or ELF::Rela.
//
// Splits |relocations| into relative and non-relative entries, packs the
// relative ones with RelocationPacker, rewrites the dynamic relocations
// section to hold only the non-relative entries (plus alignment padding),
// writes the packed bytes into the android packed relocations section, and
// records that section's offset and size with two out-of-band .dynamic
// tags.  |data| is the dynamic relocations section's data element.
// Returns false, leaving the file unflushed, if there is nothing to pack,
// alignment or padding cancels the benefit, or packing saves no space;
// returns true after flushing the rewritten file.
template <typename Rel>
bool ElfFile::PackTypedRelocations(const std::vector<Rel>& relocations,
                                   Elf_Data* data) {
  // Filter relocations into those that are relative and others.
  std::vector<Rel> relative_relocations;
  std::vector<Rel> other_relocations;

  for (size_t i = 0; i < relocations.size(); ++i) {
    const Rel& relocation = relocations[i];
    if (ELF_R_TYPE(relocation.r_info) == ELF::kRelativeRelocationCode) {
      // Relative relocations should reference no symbol (symbol index 0).
      CHECK(ELF_R_SYM(relocation.r_info) == 0);
      relative_relocations.push_back(relocation);
    } else {
      other_relocations.push_back(relocation);
    }
  }
  LOG(INFO) << "Relative      : " << relative_relocations.size() << " entries";
  LOG(INFO) << "Other         : " << other_relocations.size() << " entries";
  LOG(INFO) << "Total         : " << relocations.size() << " entries";

  // If no relative relocations then we have nothing packable.  Perhaps
  // the shared object has already been packed?
  if (relative_relocations.empty()) {
    LOG(ERROR) << "No relative relocations found (already packed?)";
    return false;
  }

  // Unless padding, pre-apply relative relocations to account for the
  // hole, and pre-adjust all relocation offsets accordingly.
  if (!is_padding_relocations_) {
    // Pre-calculate the size of the hole we will close up when we rewrite
    // dynamic relocations.  We have to adjust relocation addresses to
    // account for this.
    ELF::Shdr* section_header = ELF::getshdr(relocations_section_);
    const ELF::Off hole_start = section_header->sh_offset;
    ssize_t hole_size =
        relative_relocations.size() * sizeof(relative_relocations[0]);
    const ssize_t unaligned_hole_size = hole_size;

    // Adjust the actual hole size to preserve alignment.  We always adjust
    // by a whole number of NONE-type relocations.
    // This rounds hole_size down to the largest multiple of
    // kPreserveAlignment not greater than the unaligned size; it can
    // reach zero, handled below.
    while (hole_size % kPreserveAlignment)
      hole_size -= sizeof(relative_relocations[0]);
    LOG(INFO) << "Compaction    : " << hole_size << " bytes";

    // Adjusting for alignment may have removed any packing benefit.
    if (hole_size == 0) {
      LOG(INFO) << "Too few relative relocations to pack after alignment";
      return false;
    }

    // Find the padding needed in other_relocations to preserve alignment.
    // Ensure that we never completely empty the real relocations section.
    size_t padding_bytes = unaligned_hole_size - hole_size;
    if (padding_bytes == 0 && other_relocations.size() == 0) {
      do {
        padding_bytes += sizeof(relative_relocations[0]);
      } while (padding_bytes % kPreserveAlignment);
    }
    CHECK(padding_bytes % sizeof(other_relocations[0]) == 0);
    const size_t padding = padding_bytes / sizeof(other_relocations[0]);

    // Padding may have removed any packing benefit.
    if (padding >= relative_relocations.size()) {
      LOG(INFO) << "Too few relative relocations to pack after padding";
      return false;
    }

    // Add null relocations to other_relocations to preserve alignment.
    PadRelocations<Rel>(padding, &other_relocations);
    LOG(INFO) << "Alignment pad : " << padding << " relocations";

    // Apply relocations to all relative data to relocate it into the
    // area it will occupy once the hole in the dynamic relocations is removed.
    // Note the negative hole_size: the section is shrinking, so targets
    // and offsets move down.
    AdjustRelocationTargets<Rel>(
        elf_, hole_start, -hole_size, &relative_relocations);
    // Relocate the relocations.
    AdjustRelocations<Rel>(hole_start, -hole_size, &relative_relocations);
    AdjustRelocations<Rel>(hole_start, -hole_size, &other_relocations);
  } else {
    // If padding, add NONE-type relocations to other_relocations to make it
    // the same size as the original relocations we read in.  This makes
    // the ResizeSection() below a no-op.
    const size_t padding = relocations.size() - other_relocations.size();
    PadRelocations<Rel>(padding, &other_relocations);
  }

  // Pack relative relocations.
  const size_t initial_bytes =
      relative_relocations.size() * sizeof(relative_relocations[0]);
  LOG(INFO) << "Unpacked relative: " << initial_bytes << " bytes";
  std::vector<uint8_t> packed;
  RelocationPacker packer;
  packer.PackRelativeRelocations(relative_relocations, &packed);
  const void* packed_data = &packed[0];
  const size_t packed_bytes = packed.size() * sizeof(packed[0]);
  LOG(INFO) << "Packed   relative: " << packed_bytes << " bytes";

  // If we have insufficient relative relocations to form a run then
  // packing fails.
  if (packed.empty()) {
    LOG(INFO) << "Too few relative relocations to pack";
    return false;
  }

  // Run a loopback self-test as a check that packing is lossless.
  std::vector<Rel> unpacked;
  packer.UnpackRelativeRelocations(packed, &unpacked);
  CHECK(unpacked.size() == relative_relocations.size());
  CHECK(!memcmp(&unpacked[0],
                &relative_relocations[0],
                unpacked.size() * sizeof(unpacked[0])));

  // Make sure packing saved some space.
  if (packed_bytes >= initial_bytes) {
    LOG(INFO) << "Packing relative relocations saves no space";
    return false;
  }

  // Rewrite the current dynamic relocations section to be only the ARM
  // non-relative relocations, then shrink it to size.
  const void* section_data = &other_relocations[0];
  const size_t bytes = other_relocations.size() * sizeof(other_relocations[0]);
  ResizeSection<Rel>(elf_, relocations_section_, bytes);
  RewriteSectionData(data, section_data, bytes);

  // Rewrite the current packed android relocations section to hold the packed
  // relative relocations.
  data = GetSectionData(android_relocations_section_);
  ResizeSection<Rel>(elf_, android_relocations_section_, packed_bytes);
  RewriteSectionData(data, packed_data, packed_bytes);

  // Rewrite .dynamic to include two new tags describing the packed android
  // relocations.
  data = GetSectionData(dynamic_section_);
  const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf);
  std::vector<ELF::Dyn> dynamics(
      dynamic_base,
      dynamic_base + data->d_size / sizeof(dynamics[0]));
  // Use two of the spare slots to describe the packed section.
  // Re-read the section header after ResizeSection() so the recorded
  // offset and size reflect the resized layout.
  ELF::Shdr* section_header = ELF::getshdr(android_relocations_section_);
  const ELF::Dyn offset_dyn
      = {DT_ANDROID_REL_OFFSET, {section_header->sh_offset}};
  AddDynamicEntry(offset_dyn, &dynamics);
  const ELF::Dyn size_dyn
      = {DT_ANDROID_REL_SIZE, {section_header->sh_size}};
  AddDynamicEntry(size_dyn, &dynamics);
  const void* dynamics_data = &dynamics[0];
  const size_t dynamics_bytes = dynamics.size() * sizeof(dynamics[0]);
  RewriteSectionData(data, dynamics_data, dynamics_bytes);

  Flush();
  return true;
}
1025
1026// Find packed relative relocations in the packed android relocations
1027// section, unpack them, and rewrite the dynamic relocations section to
1028// contain unpacked data.
1029bool ElfFile::UnpackRelocations() {
1030  // Load the ELF file into libelf.
1031  if (!Load()) {
1032    LOG(ERROR) << "Failed to load as ELF";
1033    return false;
1034  }
1035
1036  // Retrieve the current packed android relocations section data.
1037  Elf_Data* data = GetSectionData(android_relocations_section_);
1038
1039  // Convert data to a vector of bytes.
1040  const uint8_t* packed_base = reinterpret_cast<uint8_t*>(data->d_buf);
1041  std::vector<uint8_t> packed(
1042      packed_base,
1043      packed_base + data->d_size / sizeof(packed[0]));
1044
1045  if (packed.size() > 3 &&
1046      packed[0] == 'A' &&
1047      packed[1] == 'P' &&
1048      packed[2] == 'R' &&
1049      packed[3] == '1') {
1050    // Signature is APR1, unpack relocations.
1051    CHECK(relocations_type_ == REL);
1052    LOG(INFO) << "Relocations   : REL";
1053    return UnpackTypedRelocations<ELF::Rel>(packed, data);
1054  }
1055
1056  if (packed.size() > 3 &&
1057      packed[0] == 'A' &&
1058      packed[1] == 'P' &&
1059      packed[2] == 'A' &&
1060      packed[3] == '1') {
1061    // Signature is APA1, unpack relocations with addends.
1062    CHECK(relocations_type_ == RELA);
1063    LOG(INFO) << "Relocations   : RELA";
1064    return UnpackTypedRelocations<ELF::Rela>(packed, data);
1065  }
1066
1067  LOG(ERROR) << "Packed relative relocations not found (not packed?)";
1068  return false;
1069}
1070
// Helper for UnpackRelocations().  Rel type is one of ELF::Rel or ELF::Rela.
//
// Re-materializes the relative relocations from |packed|, merges them back
// in front of the non-relative entries from the dynamic relocations
// section, rewrites that section to its pre-packed state, shrinks the
// android packed relocations section to a four-byte stub, and removes the
// two out-of-band .dynamic tags added by packing.  |data| arrives as the
// android packed relocations section's data element but is reassigned as
// each section is rewritten.  Always returns true after flushing.
template <typename Rel>
bool ElfFile::UnpackTypedRelocations(const std::vector<uint8_t>& packed,
                                     Elf_Data* data) {
  // Unpack the data to re-materialize the relative relocations.
  const size_t packed_bytes = packed.size() * sizeof(packed[0]);
  LOG(INFO) << "Packed   relative: " << packed_bytes << " bytes";
  std::vector<Rel> relative_relocations;
  RelocationPacker packer;
  packer.UnpackRelativeRelocations(packed, &relative_relocations);
  const size_t unpacked_bytes =
      relative_relocations.size() * sizeof(relative_relocations[0]);
  LOG(INFO) << "Unpacked relative: " << unpacked_bytes << " bytes";

  // Retrieve the current dynamic relocations section data.
  data = GetSectionData(relocations_section_);

  // Interpret data as relocations.
  const Rel* relocations_base = reinterpret_cast<Rel*>(data->d_buf);
  std::vector<Rel> relocations(
      relocations_base,
      relocations_base + data->d_size / sizeof(relocations[0]));

  std::vector<Rel> other_relocations;
  size_t padding = 0;

  // Filter relocations to locate any that are NONE-type.  These will occur
  // if padding was turned on for packing.
  for (size_t i = 0; i < relocations.size(); ++i) {
    const Rel& relocation = relocations[i];
    if (ELF_R_TYPE(relocation.r_info) != ELF::kNoRelocationCode) {
      other_relocations.push_back(relocation);
    } else {
      ++padding;
    }
  }
  LOG(INFO) << "Relative      : " << relative_relocations.size() << " entries";
  LOG(INFO) << "Other         : " << other_relocations.size() << " entries";

  // If we found the same number of null relocation entries in the dynamic
  // relocations section as we hold as unpacked relative relocations, then
  // this is a padded file.
  const bool is_padded = padding == relative_relocations.size();

  // Unless padded, pre-apply relative relocations to account for the
  // hole, and pre-adjust all relocation offsets accordingly.
  if (!is_padded) {
    // Pre-calculate the size of the hole we will open up when we rewrite
    // dynamic relocations.  We have to adjust relocation addresses to
    // account for this.
    ELF::Shdr* section_header = ELF::getshdr(relocations_section_);
    const ELF::Off hole_start = section_header->sh_offset;
    ssize_t hole_size =
        relative_relocations.size() * sizeof(relative_relocations[0]);

    // Adjust the hole size for the padding added to preserve alignment.
    hole_size -= padding * sizeof(other_relocations[0]);
    LOG(INFO) << "Expansion     : " << hole_size << " bytes";

    // Apply relocations to all relative data to relocate it into the
    // area it will occupy once the hole in dynamic relocations is opened.
    // Note the positive hole_size here, the mirror image of the negative
    // adjustment made when packing.
    AdjustRelocationTargets<Rel>(
        elf_, hole_start, hole_size, &relative_relocations);
    // Relocate the relocations.
    AdjustRelocations<Rel>(hole_start, hole_size, &relative_relocations);
    AdjustRelocations<Rel>(hole_start, hole_size, &other_relocations);
  }

  // Rewrite the current dynamic relocations section to be the relative
  // relocations followed by other relocations.  This is the usual order in
  // which we find them after linking, so this action will normally put the
  // entire dynamic relocations section back to its pre-split-and-packed state.
  relocations.assign(relative_relocations.begin(), relative_relocations.end());
  relocations.insert(relocations.end(),
                     other_relocations.begin(), other_relocations.end());
  const void* section_data = &relocations[0];
  const size_t bytes = relocations.size() * sizeof(relocations[0]);
  LOG(INFO) << "Total         : " << relocations.size() << " entries";
  ResizeSection<Rel>(elf_, relocations_section_, bytes);
  RewriteSectionData(data, section_data, bytes);

  // Nearly empty the current packed android relocations section.  Leaves a
  // four-byte stub so that some data remains allocated to the section.
  // This is a convenience which allows us to re-pack this file again without
  // having to remove the section and then add a new small one with objcopy.
  // The way we resize sections relies on there being some data in a section.
  data = GetSectionData(android_relocations_section_);
  ResizeSection<Rel>(
      elf_, android_relocations_section_, sizeof(kStubIdentifier));
  RewriteSectionData(data, &kStubIdentifier, sizeof(kStubIdentifier));

  // Rewrite .dynamic to remove two tags describing packed android relocations.
  data = GetSectionData(dynamic_section_);
  const ELF::Dyn* dynamic_base = reinterpret_cast<ELF::Dyn*>(data->d_buf);
  std::vector<ELF::Dyn> dynamics(
      dynamic_base,
      dynamic_base + data->d_size / sizeof(dynamics[0]));
  RemoveDynamicEntry(DT_ANDROID_REL_OFFSET, &dynamics);
  RemoveDynamicEntry(DT_ANDROID_REL_SIZE, &dynamics);
  const void* dynamics_data = &dynamics[0];
  const size_t dynamics_bytes = dynamics.size() * sizeof(dynamics[0]);
  RewriteSectionData(data, dynamics_data, dynamics_bytes);

  Flush();
  return true;
}
1177
1178// Flush rewritten shared object file data.
1179void ElfFile::Flush() {
1180  // Flag all ELF data held in memory as needing to be written back to the
1181  // file, and tell libelf that we have controlled the file layout.
1182  elf_flagelf(elf_, ELF_C_SET, ELF_F_DIRTY);
1183  elf_flagelf(elf_, ELF_C_SET, ELF_F_LAYOUT);
1184
1185  // Write ELF data back to disk.
1186  const off_t file_bytes = elf_update(elf_, ELF_C_WRITE);
1187  CHECK(file_bytes > 0);
1188  VLOG(1) << "elf_update returned: " << file_bytes;
1189
1190  // Clean up libelf, and truncate the output file to the number of bytes
1191  // written by elf_update().
1192  elf_end(elf_);
1193  elf_ = NULL;
1194  const int truncate = ftruncate(fd_, file_bytes);
1195  CHECK(truncate == 0);
1196}
1197
1198}  // namespace relocation_packer
1199