// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/debug/test_elf_image_builder.h"

#include <cstring>
#include <type_traits>
#include <utility>

#include "base/bits.h"
#include "base/check.h"
#include "base/notreached.h"
#include "build/build_config.h"

#if __SIZEOF_POINTER__ == 4
using Dyn = Elf32_Dyn;
using Nhdr = Elf32_Nhdr;
using Shdr = Elf32_Shdr;
#else
using Dyn = Elf64_Dyn;
using Nhdr = Elf64_Nhdr;
using Shdr = Elf64_Shdr;
#endif

namespace base {

namespace {
// Sizes/alignments to use in the ELF image.
static constexpr size_t kPageSize = 4096;
static constexpr size_t kPhdrAlign = 0x4;
static constexpr size_t kNoteAlign = 0x4;
static constexpr size_t kLoadAlign = 0x1000;
static constexpr size_t kDynamicAlign = 0x4;
}  // namespace

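// Properties of a PT_LOAD segment to be generated in the image.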
struct TestElfImageBuilder::LoadSegment {
  Word flags;
  Word size;
};

TestElfImage::TestElfImage(std::vector<uint8_t> buffer, const void* elf_start)
    : buffer_(std::move(buffer)), elf_start_(elf_start) {}

TestElfImage::~TestElfImage() = default;

TestElfImage::TestElfImage(TestElfImage&&) = default;

TestElfImage& TestElfImage::operator=(TestElfImage&&) = default;

TestElfImageBuilder::TestElfImageBuilder(MappingType mapping_type)
    : mapping_type_(mapping_type) {}

TestElfImageBuilder::~TestElfImageBuilder() = default;

TestElfImageBuilder& TestElfImageBuilder::AddLoadSegment(Word flags,
                                                         size_t size) {
  load_segments_.push_back({flags, size});
  return *this;
}

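// Builds the note contents in the ELF note format: an Nhdr header followed by
// the name (including its terminating null) and the descriptor, each padded to
// 4-byte alignment.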
TestElfImageBuilder& TestElfImageBuilder::AddNoteSegment(
    Word type,
    StringPiece name,
    span<const uint8_t> desc) {
  const size_t name_with_null_size = name.size() + 1;
  std::vector<uint8_t> buffer(sizeof(Nhdr) +
                                  bits::AlignUp(name_with_null_size, 4) +
                                  bits::AlignUp(desc.size(), 4),
                              '\0');
  uint8_t* loc = &buffer.front();
  Nhdr* nhdr = reinterpret_cast<Nhdr*>(loc);
  nhdr->n_namesz = name_with_null_size;
  nhdr->n_descsz = desc.size();
  nhdr->n_type = type;
  loc += sizeof(Nhdr);

  memcpy(loc, name.data(), name.size());
  *(loc + name.size()) = '\0';
  loc += bits::AlignUp(name_with_null_size, 4);

  memcpy(loc, &desc.front(), desc.size());
  loc += bits::AlignUp(desc.size(), 4);

  DCHECK_EQ(&buffer.front() + buffer.size(), loc);

  note_contents_.push_back(std::move(buffer));

  return *this;
}

TestElfImageBuilder& TestElfImageBuilder::AddSoName(StringPiece soname) {
  DCHECK(!soname_.has_value());
  soname_.emplace(soname);
  return *this;
}

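// Measured offsets of the portions of the image, plus its total size.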
struct TestElfImageBuilder::ImageMeasures {
  size_t phdrs_required;
  size_t note_start;
  size_t note_size;
  std::vector<size_t> load_segment_start;
  size_t dynamic_start;
  size_t strtab_start;
  size_t total_size;
};

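// Returns the virtual address to record in the image for file offset |offset|:
// the offset itself for relocatable mappings (plus the load bias, if any), or
// the actual memory address for non-relocatable mappings.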
Addr TestElfImageBuilder::GetVirtualAddressForOffset(
    Off offset,
    const uint8_t* elf_start) const {
  switch (mapping_type_) {
    case RELOCATABLE:
      return static_cast<Addr>(offset);

    case RELOCATABLE_WITH_BIAS:
      return static_cast<Addr>(offset + kLoadBias);

    case NON_RELOCATABLE:
      return reinterpret_cast<Addr>(elf_start + offset);
  }
}

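// Computes the start offsets of the image's components and its total size,
// prior to writing any of the contents.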
TestElfImageBuilder::ImageMeasures TestElfImageBuilder::MeasureSizesAndOffsets()
    const {
  ImageMeasures measures;

  measures.phdrs_required = 1 + load_segments_.size();
  if (!note_contents_.empty())
    ++measures.phdrs_required;
  if (soname_.has_value())
    ++measures.phdrs_required;

  // The current offset into the image, where the next bytes are to be written.
  // Starts after the ELF header.
  size_t offset = sizeof(Ehdr);

  // Add space for the program header table.
  offset = bits::AlignUp(offset, kPhdrAlign);
  offset += sizeof(Phdr) * measures.phdrs_required;

  // Add space for the notes.
  measures.note_start = offset;
  if (!note_contents_.empty())
    offset = bits::AlignUp(offset, kNoteAlign);
  for (const std::vector<uint8_t>& contents : note_contents_)
    offset += contents.size();
  measures.note_size = offset - measures.note_start;

  // Add space for the load segments.
  for (auto it = load_segments_.begin(); it != load_segments_.end(); ++it) {
    size_t size = 0;
    // The first non PT_PHDR program header is expected to be a PT_LOAD and
    // start at the already-aligned start of the ELF header.
    if (it == load_segments_.begin()) {
      size = offset + it->size;
      measures.load_segment_start.push_back(0);
    } else {
      offset = bits::AlignUp(offset, kLoadAlign);
      size = it->size;
      measures.load_segment_start.push_back(offset);
    }
    offset += it->size;
  }

  // Add space for the dynamic segment.
  measures.dynamic_start = bits::AlignUp(offset, kDynamicAlign);
  offset += sizeof(Dyn) * (soname_ ? 2 : 1);
  measures.strtab_start = offset;

  // Add space for the string table.
  ++offset;  // The first string table byte holds a null character.
  if (soname_)
    offset += soname_->size() + 1;

  measures.total_size = offset;

  return measures;
}

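// Writes the ELF image into a page-aligned buffer and returns it along with
// the address of the ELF header within that buffer.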
TestElfImage TestElfImageBuilder::Build() {
  ImageMeasures measures = MeasureSizesAndOffsets();

  // Write the ELF contents into |buffer|. Extends the buffer back to the 0
  // address in the case of load bias, so that the memory between the 0 address
  // and the image start is zero-initialized.
  const size_t load_bias =
      mapping_type_ == RELOCATABLE_WITH_BIAS ? kLoadBias : 0;
  std::vector<uint8_t> buffer(load_bias + (kPageSize - 1) + measures.total_size,
                              '\0');
  uint8_t* const elf_start =
      bits::AlignUp(&buffer.front() + load_bias, kPageSize);
  uint8_t* loc = elf_start;

  // Add the ELF header.
  loc = AppendHdr(CreateEhdr(measures.phdrs_required), loc);

  // Add the program header table.
  loc = bits::AlignUp(loc, kPhdrAlign);
  loc = AppendHdr(
      CreatePhdr(PT_PHDR, PF_R, kPhdrAlign, loc - elf_start,
                 GetVirtualAddressForOffset(loc - elf_start, elf_start),
                 sizeof(Phdr) * measures.phdrs_required),
      loc);
  for (size_t i = 0; i < load_segments_.size(); ++i) {
    const LoadSegment& load_segment = load_segments_[i];
    size_t size = load_segment.size;
    // The first non PT_PHDR program header is expected to be a PT_LOAD and
    // encompass all the preceding headers.
    if (i == 0)
      size += loc - elf_start;
    loc = AppendHdr(CreatePhdr(PT_LOAD, load_segment.flags, kLoadAlign,
                               measures.load_segment_start[i],
                               GetVirtualAddressForOffset(
                                   measures.load_segment_start[i], elf_start),
                               size),
                    loc);
  }
  if (measures.note_size != 0) {
    loc = AppendHdr(
        CreatePhdr(PT_NOTE, PF_R, kNoteAlign, measures.note_start,
                   GetVirtualAddressForOffset(measures.note_start, elf_start),
                   measures.note_size),
        loc);
  }
  if (soname_) {
    loc = AppendHdr(
        CreatePhdr(
            PT_DYNAMIC, PF_R | PF_W, kDynamicAlign, measures.dynamic_start,
            GetVirtualAddressForOffset(measures.dynamic_start, elf_start),
            sizeof(Dyn) * 2),
        loc);
  }

  // Add the notes.
  loc = bits::AlignUp(loc, kNoteAlign);
  for (const std::vector<uint8_t>& contents : note_contents_) {
    memcpy(loc, &contents.front(), contents.size());
    loc += contents.size();
  }

  // Add the load segments.
  for (auto it = load_segments_.begin(); it != load_segments_.end(); ++it) {
    if (it != load_segments_.begin())
      loc = bits::AlignUp(loc, kLoadAlign);
    memset(loc, 0, it->size);
    loc += it->size;
  }

  loc = bits::AlignUp(loc, kDynamicAlign);

  // Add the soname state.
  if (soname_) {
    // Add a DYNAMIC section for the soname.
    Dyn* soname_dyn = reinterpret_cast<Dyn*>(loc);
    soname_dyn->d_tag = DT_SONAME;
    soname_dyn->d_un.d_val = 1;  // One char into the string table.
    loc += sizeof(Dyn);
  }

  Dyn* strtab_dyn = reinterpret_cast<Dyn*>(loc);
  strtab_dyn->d_tag = DT_STRTAB;
#if defined(OS_FUCHSIA) || defined(OS_ANDROID)
  // Fuchsia and Android do not alter the strtab pointer on ELF load -- it's
  // expected to remain a 'virtual address'.
  strtab_dyn->d_un.d_ptr =
      GetVirtualAddressForOffset(measures.strtab_start, elf_start);
#else
  // Linux relocates this value on ELF load, so produce the pointer value after
  // relocation. That value will always be equal to the actual memory address.
  strtab_dyn->d_un.d_ptr =
      reinterpret_cast<uintptr_t>(elf_start + measures.strtab_start);
#endif
  loc += sizeof(Dyn);

  // Add a string table with one entry for the soname, if necessary.
  *loc++ = '\0';  // The first byte holds a null character.
  if (soname_) {
    memcpy(loc, soname_->data(), soname_->size());
    *(loc + soname_->size()) = '\0';
    loc += soname_->size() + 1;
  }

  // The offset past the end of the contents should be consistent with the size
  // measurement above.
  DCHECK_EQ(loc, elf_start + measures.total_size);

  return TestElfImage(std::move(buffer), elf_start);
}

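// Copies |hdr| into the image at |loc| and returns the location immediately
// past it.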
// static
template <typename T>
uint8_t* TestElfImageBuilder::AppendHdr(const T& hdr, uint8_t* loc) {
  static_assert(std::is_trivially_copyable<T>::value,
                "T should be a plain struct");
  memcpy(loc, &hdr, sizeof(T));
  return loc + sizeof(T);
}

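// Creates an ELF header for a little-endian ET_DYN ARM image with |phnum|
// program headers and no section headers.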
Ehdr TestElfImageBuilder::CreateEhdr(Half phnum) {
  Ehdr ehdr;
  ehdr.e_ident[EI_MAG0] = ELFMAG0;
  ehdr.e_ident[EI_MAG1] = ELFMAG1;
  ehdr.e_ident[EI_MAG2] = ELFMAG2;
  ehdr.e_ident[EI_MAG3] = ELFMAG3;
  ehdr.e_ident[EI_CLASS] = __SIZEOF_POINTER__ == 4 ? 1 : 2;  // ELFCLASS32/64.
  ehdr.e_ident[EI_DATA] = 1;  // Little endian.
  ehdr.e_ident[EI_VERSION] = 1;
  ehdr.e_ident[EI_OSABI] = 0x00;
  ehdr.e_ident[EI_ABIVERSION] = 0;
  ehdr.e_ident[EI_PAD] = 0;
  ehdr.e_type = ET_DYN;
  ehdr.e_machine = 0x28;  // ARM.
  ehdr.e_version = 1;
  ehdr.e_entry = 0;
  ehdr.e_phoff = sizeof(Ehdr);
  ehdr.e_shoff = 0;
  ehdr.e_flags = 0;
  ehdr.e_ehsize = sizeof(Ehdr);
  ehdr.e_phentsize = sizeof(Phdr);
  ehdr.e_phnum = phnum;
  ehdr.e_shentsize = sizeof(Shdr);
  ehdr.e_shnum = 0;
  ehdr.e_shstrndx = 0;

  return ehdr;
}

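// Creates a program header with identical file and memory sizes and no
// physical address.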
Phdr TestElfImageBuilder::CreatePhdr(Word type,
                                     Word flags,
                                     size_t align,
                                     Off offset,
                                     Addr vaddr,
                                     size_t size) {
  Phdr phdr;
  phdr.p_type = type;
  phdr.p_flags = flags;
  phdr.p_offset = offset;
  phdr.p_filesz = size;
  phdr.p_vaddr = vaddr;
  phdr.p_paddr = 0;
  phdr.p_memsz = phdr.p_filesz;
  phdr.p_align = align;

  return phdr;
}

}  // namespace base