Merge branch 'ziglang:master' into packed-structs-in-reader-writer

Jeff Anderson 2024-10-13 16:44:39 -07:00 committed by GitHub
commit decfa371b5
44 changed files with 1029 additions and 791 deletions


@ -32,20 +32,17 @@ pub fn build(b: *std.Build) !void {
const skip_install_langref = b.option(bool, "no-langref", "skip copying of langref to the installation prefix") orelse skip_install_lib_files;
const std_docs = b.option(bool, "std-docs", "include standard library autodocs") orelse false;
const no_bin = b.option(bool, "no-bin", "skip emitting compiler binary") orelse false;
const enable_tidy = b.option(bool, "enable-tidy", "Check langref output HTML validity") orelse false;
const enable_superhtml = b.option(bool, "enable-superhtml", "Check langref output HTML validity") orelse false;
const langref_file = generateLangRef(b);
const install_langref = b.addInstallFileWithDir(langref_file, .prefix, "doc/langref.html");
const check_langref = tidyCheck(b, langref_file);
if (enable_tidy) install_langref.step.dependOn(check_langref);
// Checking autodocs is disabled because tidy gives a false positive:
// line 304 column 9 - Warning: moved <style> tag to <head>! fix-style-tags: no to avoid.
// I noticed that `--show-warnings no` still incorrectly causes exit code 1.
// I was unable to find an alternative to tidy.
//const check_autodocs = tidyCheck(b, b.path("lib/docs/index.html"));
if (enable_tidy) {
const check_langref = superHtmlCheck(b, langref_file);
if (enable_superhtml) install_langref.step.dependOn(check_langref);
const check_autodocs = superHtmlCheck(b, b.path("lib/docs/index.html"));
if (enable_superhtml) {
test_step.dependOn(check_langref);
//test_step.dependOn(check_autodocs);
test_step.dependOn(check_autodocs);
}
if (!skip_install_langref) {
b.getInstallStep().dependOn(&install_langref.step);
@ -1358,11 +1355,11 @@ fn generateLangRef(b: *std.Build) std.Build.LazyPath {
return docgen_cmd.addOutputFileArg("langref.html");
}
fn tidyCheck(b: *std.Build, html_file: std.Build.LazyPath) *std.Build.Step {
const run_tidy = b.addSystemCommand(&.{
"tidy", "--drop-empty-elements", "no", "-qe",
fn superHtmlCheck(b: *std.Build, html_file: std.Build.LazyPath) *std.Build.Step {
const run_superhtml = b.addSystemCommand(&.{
"superhtml", "check",
});
run_tidy.addFileArg(html_file);
run_tidy.expectExitCode(0);
return &run_tidy.step;
run_superhtml.addFileArg(html_file);
run_superhtml.expectExitCode(0);
return &run_superhtml.step;
}
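
The superHtmlCheck helper above follows the usual std.Build pattern of wrapping an external tool in a run step. A minimal sketch of the same pattern, wired to its own named step instead of the install/test steps used in this change (the docs/index.html path and check-html step name are made up for illustration):

const std = @import("std");

pub fn build(b: *std.Build) void {
    // Hypothetical input file; the real build checks langref.html and lib/docs/index.html.
    const html = b.path("docs/index.html");
    const run = b.addSystemCommand(&.{ "superhtml", "check" });
    run.addFileArg(html);
    run.expectExitCode(0);
    // Expose the check as its own step so it can be run with `zig build check-html`.
    const check_step = b.step("check-html", "Validate generated HTML");
    check_step.dependOn(&run.step);
}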


@ -12,7 +12,7 @@ CACHE_BASENAME="zig+llvm+lld+clang-$TARGET-0.14.0-dev.1622+2ac543388"
PREFIX="$HOME/deps/$CACHE_BASENAME"
ZIG="$PREFIX/bin/zig"
export PATH="$HOME/deps/wasmtime-v10.0.2-$ARCH-linux:$PATH"
export PATH="$HOME/deps/wasmtime-v10.0.2-$ARCH-linux:$HOME/local/bin:$PATH"
# Make the `zig version` number consistent.
# This will affect the cmake command below.
@ -62,7 +62,7 @@ stage3-debug/bin/zig build test docs \
-Dtarget=native-native-musl \
--search-prefix "$PREFIX" \
--zig-lib-dir "$PWD/../lib" \
-Denable-tidy
-Denable-superhtml
# Ensure that updating the wasm binary from this commit will result in a viable build.
stage3-debug/bin/zig build update-zig1


@ -12,7 +12,7 @@ CACHE_BASENAME="zig+llvm+lld+clang-$TARGET-0.14.0-dev.1622+2ac543388"
PREFIX="$HOME/deps/$CACHE_BASENAME"
ZIG="$PREFIX/bin/zig"
export PATH="$HOME/deps/wasmtime-v10.0.2-$ARCH-linux:$PATH"
export PATH="$HOME/deps/wasmtime-v10.0.2-$ARCH-linux:$HOME/local/bin:$PATH"
# Make the `zig version` number consistent.
# This will affect the cmake command below.
@ -62,7 +62,7 @@ stage3-release/bin/zig build test docs \
-Dtarget=native-native-musl \
--search-prefix "$PREFIX" \
--zig-lib-dir "$PWD/../lib" \
-Denable-tidy
-Denable-superhtml
# Ensure that stage3 and stage4 are byte-for-byte identical.
stage3-release/bin/zig build \


@ -12,7 +12,7 @@ CACHE_BASENAME="zig+llvm+lld+clang-$TARGET-0.14.0-dev.1622+2ac543388"
PREFIX="$HOME/deps/$CACHE_BASENAME"
ZIG="$PREFIX/bin/zig"
export PATH="$HOME/deps/wasmtime-v10.0.2-$ARCH-linux:$HOME/deps/qemu-linux-x86_64-8.2.1/bin:$PATH"
export PATH="$HOME/deps/wasmtime-v10.0.2-$ARCH-linux:$HOME/deps/qemu-linux-x86_64-8.2.1/bin:$HOME/local/bin:$PATH"
# Make the `zig version` number consistent.
# This will affect the cmake command below.
@ -71,7 +71,7 @@ stage3-debug/bin/zig build test docs \
-Dtarget=native-native-musl \
--search-prefix "$PREFIX" \
--zig-lib-dir "$PWD/../lib" \
-Denable-tidy
-Denable-superhtml
# Ensure that updating the wasm binary from this commit will result in a viable build.
stage3-debug/bin/zig build update-zig1


@ -12,7 +12,7 @@ CACHE_BASENAME="zig+llvm+lld+clang-$TARGET-0.14.0-dev.1622+2ac543388"
PREFIX="$HOME/deps/$CACHE_BASENAME"
ZIG="$PREFIX/bin/zig"
export PATH="$HOME/deps/wasmtime-v10.0.2-$ARCH-linux:$HOME/deps/qemu-linux-x86_64-8.2.1/bin:$PATH"
export PATH="$HOME/deps/wasmtime-v10.0.2-$ARCH-linux:$HOME/deps/qemu-linux-x86_64-8.2.1/bin:$HOME/local/bin:$PATH"
# Make the `zig version` number consistent.
# This will affect the cmake command below.
@ -71,7 +71,7 @@ stage3-release/bin/zig build test docs \
-Dtarget=native-native-musl \
--search-prefix "$PREFIX" \
--zig-lib-dir "$PWD/../lib" \
-Denable-tidy
-Denable-superhtml
# Ensure that stage3 and stage4 are byte-for-byte identical.
stage3-release/bin/zig build \


@ -2190,6 +2190,7 @@ or
<li>An {#link|enum#} field uses exactly the bit width of its integer tag type.</li>
<li>A {#link|packed union#} field uses exactly the bit width of the union field with
the largest bit width.</li>
<li>Packed structs support equality operators.</li>
</ul>
<p>
This means that a {#syntax#}packed struct{#endsyntax#} can participate
@ -2240,6 +2241,12 @@ or
</p>
{#code|test_aligned_struct_fields.zig#}
<p>
Comparing packed structs for equality compares their backing integers,
and is supported only for the {#syntax#}=={#endsyntax#} and {#syntax#}!={#endsyntax#} operators.
</p>
{#code|test_packed_struct_equality.zig#}
<p>
Using packed structs with {#link|volatile#} is problematic, and may be a compile error in the future.
For details on this subscribe to


@ -0,0 +1,14 @@
const std = @import("std");
const expect = std.testing.expect;
test "packed struct equality" {
const S = packed struct {
a: u4,
b: u4,
};
const x: S = .{ .a = 1, .b = 2 };
const y: S = .{ .b = 2, .a = 1 };
try expect(x == y);
}
// test
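
To make the "comparison of the backing integer" wording above concrete, here is a small sketch (not part of this change) showing that the equality test is equivalent to bit-casting both values to their u8 backing integer and comparing those:

const std = @import("std");
const expect = std.testing.expect;

test "packed struct equality compares the backing integer" {
    const S = packed struct { a: u4, b: u4 };
    const x: S = .{ .a = 1, .b = 2 };
    const y: S = .{ .b = 2, .a = 1 };
    // Relies on the packed-struct `==` support introduced by this change.
    try expect(x == y);
    // Equivalent comparison of the u8 backing integers.
    try expect(@as(u8, @bitCast(x)) == @as(u8, @bitCast(y)));
}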


@ -832,7 +832,6 @@ fn ElfFile(comptime is_64: bool) type {
const Elf_Shdr = if (is_64) elf.Elf64_Shdr else elf.Elf32_Shdr;
const Elf_Chdr = if (is_64) elf.Elf64_Chdr else elf.Elf32_Chdr;
const Elf_Sym = if (is_64) elf.Elf64_Sym else elf.Elf32_Sym;
const Elf_Verdef = if (is_64) elf.Elf64_Verdef else elf.Elf32_Verdef;
const Elf_OffSize = if (is_64) elf.Elf64_Off else elf.Elf32_Off;
return struct {
@ -1179,11 +1178,11 @@ fn ElfFile(comptime is_64: bool) type {
const data = try allocator.alignedAlloc(u8, section_memory_align, src_data.len);
@memcpy(data, src_data);
const defs = @as([*]Elf_Verdef, @ptrCast(data))[0 .. @as(usize, @intCast(src.sh_size)) / @sizeOf(Elf_Verdef)];
for (defs) |*def| {
if (def.vd_ndx != elf.SHN_UNDEF)
def.vd_ndx = sections_update[src.sh_info].remap_idx;
}
const defs = @as([*]elf.Verdef, @ptrCast(data))[0 .. @as(usize, @intCast(src.sh_size)) / @sizeOf(elf.Verdef)];
for (defs) |*def| switch (def.ndx) {
.LOCAL, .GLOBAL => {},
else => def.ndx = @enumFromInt(sections_update[src.sh_info].remap_idx),
};
break :dst_data data;
},
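
The hunk above switches the version-definition index from a raw half-word plus 0x7fff masking to the typed elf.VER_NDX enum introduced later in this diff, so the reserved LOCAL/GLOBAL indices are matched by name and ordinary indices round-trip through @enumFromInt/@intFromEnum. A small sketch (not part of this change), assuming the std.elf definitions from this diff:

const std = @import("std");
const elf = std.elf;

test "VER_NDX round trip" {
    // An ordinary (non-reserved) version index.
    const ndx: elf.VER_NDX = @enumFromInt(7);
    try std.testing.expect(ndx != .LOCAL and ndx != .GLOBAL);
    try std.testing.expectEqual(@as(u16, 7), @intFromEnum(ndx));
}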


@ -150,6 +150,14 @@ pub const File = struct {
inode: fs.File.INode,
size: u64,
mtime: i128,
pub fn fromFs(fs_stat: fs.File.Stat) Stat {
return .{
.inode = fs_stat.inode,
.size = fs_stat.size,
.mtime = fs_stat.mtime,
};
}
};
pub fn deinit(self: *File, gpa: Allocator) void {


@ -374,7 +374,9 @@ pub fn canDetectLibC(self: Query) bool {
if (self.isNativeOs()) return true;
if (self.os_tag) |os| {
if (builtin.os.tag == .macos and os.isDarwin()) return true;
if (os == .linux and self.abi.isAndroid()) return true;
if (os == .linux) {
if (self.abi) |abi| if (abi.isAndroid()) return true;
}
}
return false;
}


@ -141,7 +141,7 @@ pub const ElfDynLib = struct {
strings: [*:0]u8,
syms: [*]elf.Sym,
hashtab: [*]posix.Elf_Symndx,
versym: ?[*]u16,
versym: ?[*]elf.Versym,
verdef: ?*elf.Verdef,
memory: []align(mem.page_size) u8,
@ -319,7 +319,7 @@ pub const ElfDynLib = struct {
var maybe_strings: ?[*:0]u8 = null;
var maybe_syms: ?[*]elf.Sym = null;
var maybe_hashtab: ?[*]posix.Elf_Symndx = null;
var maybe_versym: ?[*]u16 = null;
var maybe_versym: ?[*]elf.Versym = null;
var maybe_verdef: ?*elf.Verdef = null;
{
@ -327,11 +327,11 @@ pub const ElfDynLib = struct {
while (dynv[i] != 0) : (i += 2) {
const p = base + dynv[i + 1];
switch (dynv[i]) {
elf.DT_STRTAB => maybe_strings = @as([*:0]u8, @ptrFromInt(p)),
elf.DT_SYMTAB => maybe_syms = @as([*]elf.Sym, @ptrFromInt(p)),
elf.DT_HASH => maybe_hashtab = @as([*]posix.Elf_Symndx, @ptrFromInt(p)),
elf.DT_VERSYM => maybe_versym = @as([*]u16, @ptrFromInt(p)),
elf.DT_VERDEF => maybe_verdef = @as(*elf.Verdef, @ptrFromInt(p)),
elf.DT_STRTAB => maybe_strings = @ptrFromInt(p),
elf.DT_SYMTAB => maybe_syms = @ptrFromInt(p),
elf.DT_HASH => maybe_hashtab = @ptrFromInt(p),
elf.DT_VERSYM => maybe_versym = @ptrFromInt(p),
elf.DT_VERDEF => maybe_verdef = @ptrFromInt(p),
else => {},
}
}
@ -399,18 +399,16 @@ pub const ElfDynLib = struct {
}
};
fn checkver(def_arg: *elf.Verdef, vsym_arg: i32, vername: []const u8, strings: [*:0]u8) bool {
fn checkver(def_arg: *elf.Verdef, vsym_arg: elf.Versym, vername: []const u8, strings: [*:0]u8) bool {
var def = def_arg;
const vsym = @as(u32, @bitCast(vsym_arg)) & 0x7fff;
const vsym_index = vsym_arg.VERSION;
while (true) {
if (0 == (def.vd_flags & elf.VER_FLG_BASE) and (def.vd_ndx & 0x7fff) == vsym)
break;
if (def.vd_next == 0)
return false;
def = @as(*elf.Verdef, @ptrFromInt(@intFromPtr(def) + def.vd_next));
if (0 == (def.flags & elf.VER_FLG_BASE) and @intFromEnum(def.ndx) == vsym_index) break;
if (def.next == 0) return false;
def = @ptrFromInt(@intFromPtr(def) + def.next);
}
const aux = @as(*elf.Verdaux, @ptrFromInt(@intFromPtr(def) + def.vd_aux));
return mem.eql(u8, vername, mem.sliceTo(strings + aux.vda_name, 0));
const aux: *elf.Verdaux = @ptrFromInt(@intFromPtr(def) + def.aux);
return mem.eql(u8, vername, mem.sliceTo(strings + aux.name, 0));
}
test "ElfDynLib" {


@ -258,17 +258,26 @@ pub const DF_1_SINGLETON = 0x02000000;
pub const DF_1_STUB = 0x04000000;
pub const DF_1_PIE = 0x08000000;
pub const VERSYM_HIDDEN = 0x8000;
pub const VERSYM_VERSION = 0x7fff;
pub const Versym = packed struct(u16) {
VERSION: u15,
HIDDEN: bool,
/// Symbol is local
pub const VER_NDX_LOCAL = 0;
/// Symbol is global
pub const VER_NDX_GLOBAL = 1;
/// Beginning of reserved entries
pub const VER_NDX_LORESERVE = 0xff00;
/// Symbol is to be eliminated
pub const VER_NDX_ELIMINATE = 0xff01;
pub const LOCAL: Versym = @bitCast(@intFromEnum(VER_NDX.LOCAL));
pub const GLOBAL: Versym = @bitCast(@intFromEnum(VER_NDX.GLOBAL));
};
pub const VER_NDX = enum(u16) {
/// Symbol is local
LOCAL = 0,
/// Symbol is global
GLOBAL = 1,
/// Beginning of reserved entries
LORESERVE = 0xff00,
/// Symbol is to be eliminated
ELIMINATE = 0xff01,
UNSPECIFIED = 0xffff,
_,
};
/// Version definition of the file itself
pub const VER_FLG_BASE = 1;
@ -698,12 +707,9 @@ pub const EI_PAD = 9;
pub const EI_NIDENT = 16;
pub const Elf32_Half = u16;
pub const Elf64_Half = u16;
pub const Elf32_Word = u32;
pub const Elf32_Sword = i32;
pub const Elf64_Word = u32;
pub const Elf64_Sword = i32;
pub const Half = u16;
pub const Word = u32;
pub const Sword = i32;
pub const Elf32_Xword = u64;
pub const Elf32_Sxword = i64;
pub const Elf64_Xword = u64;
@ -714,53 +720,51 @@ pub const Elf32_Off = u32;
pub const Elf64_Off = u64;
pub const Elf32_Section = u16;
pub const Elf64_Section = u16;
pub const Elf32_Versym = Elf32_Half;
pub const Elf64_Versym = Elf64_Half;
pub const Elf32_Ehdr = extern struct {
e_ident: [EI_NIDENT]u8,
e_type: ET,
e_machine: EM,
e_version: Elf32_Word,
e_version: Word,
e_entry: Elf32_Addr,
e_phoff: Elf32_Off,
e_shoff: Elf32_Off,
e_flags: Elf32_Word,
e_ehsize: Elf32_Half,
e_phentsize: Elf32_Half,
e_phnum: Elf32_Half,
e_shentsize: Elf32_Half,
e_shnum: Elf32_Half,
e_shstrndx: Elf32_Half,
e_flags: Word,
e_ehsize: Half,
e_phentsize: Half,
e_phnum: Half,
e_shentsize: Half,
e_shnum: Half,
e_shstrndx: Half,
};
pub const Elf64_Ehdr = extern struct {
e_ident: [EI_NIDENT]u8,
e_type: ET,
e_machine: EM,
e_version: Elf64_Word,
e_version: Word,
e_entry: Elf64_Addr,
e_phoff: Elf64_Off,
e_shoff: Elf64_Off,
e_flags: Elf64_Word,
e_ehsize: Elf64_Half,
e_phentsize: Elf64_Half,
e_phnum: Elf64_Half,
e_shentsize: Elf64_Half,
e_shnum: Elf64_Half,
e_shstrndx: Elf64_Half,
e_flags: Word,
e_ehsize: Half,
e_phentsize: Half,
e_phnum: Half,
e_shentsize: Half,
e_shnum: Half,
e_shstrndx: Half,
};
pub const Elf32_Phdr = extern struct {
p_type: Elf32_Word,
p_type: Word,
p_offset: Elf32_Off,
p_vaddr: Elf32_Addr,
p_paddr: Elf32_Addr,
p_filesz: Elf32_Word,
p_memsz: Elf32_Word,
p_flags: Elf32_Word,
p_align: Elf32_Word,
p_filesz: Word,
p_memsz: Word,
p_flags: Word,
p_align: Word,
};
pub const Elf64_Phdr = extern struct {
p_type: Elf64_Word,
p_flags: Elf64_Word,
p_type: Word,
p_flags: Word,
p_offset: Elf64_Off,
p_vaddr: Elf64_Addr,
p_paddr: Elf64_Addr,
@ -769,44 +773,44 @@ pub const Elf64_Phdr = extern struct {
p_align: Elf64_Xword,
};
pub const Elf32_Shdr = extern struct {
sh_name: Elf32_Word,
sh_type: Elf32_Word,
sh_flags: Elf32_Word,
sh_name: Word,
sh_type: Word,
sh_flags: Word,
sh_addr: Elf32_Addr,
sh_offset: Elf32_Off,
sh_size: Elf32_Word,
sh_link: Elf32_Word,
sh_info: Elf32_Word,
sh_addralign: Elf32_Word,
sh_entsize: Elf32_Word,
sh_size: Word,
sh_link: Word,
sh_info: Word,
sh_addralign: Word,
sh_entsize: Word,
};
pub const Elf64_Shdr = extern struct {
sh_name: Elf64_Word,
sh_type: Elf64_Word,
sh_name: Word,
sh_type: Word,
sh_flags: Elf64_Xword,
sh_addr: Elf64_Addr,
sh_offset: Elf64_Off,
sh_size: Elf64_Xword,
sh_link: Elf64_Word,
sh_info: Elf64_Word,
sh_link: Word,
sh_info: Word,
sh_addralign: Elf64_Xword,
sh_entsize: Elf64_Xword,
};
pub const Elf32_Chdr = extern struct {
ch_type: COMPRESS,
ch_size: Elf32_Word,
ch_addralign: Elf32_Word,
ch_size: Word,
ch_addralign: Word,
};
pub const Elf64_Chdr = extern struct {
ch_type: COMPRESS,
ch_reserved: Elf64_Word = 0,
ch_reserved: Word = 0,
ch_size: Elf64_Xword,
ch_addralign: Elf64_Xword,
};
pub const Elf32_Sym = extern struct {
st_name: Elf32_Word,
st_name: Word,
st_value: Elf32_Addr,
st_size: Elf32_Word,
st_size: Word,
st_info: u8,
st_other: u8,
st_shndx: Elf32_Section,
@ -819,7 +823,7 @@ pub const Elf32_Sym = extern struct {
}
};
pub const Elf64_Sym = extern struct {
st_name: Elf64_Word,
st_name: Word,
st_info: u8,
st_other: u8,
st_shndx: Elf64_Section,
@ -834,16 +838,16 @@ pub const Elf64_Sym = extern struct {
}
};
pub const Elf32_Syminfo = extern struct {
si_boundto: Elf32_Half,
si_flags: Elf32_Half,
si_boundto: Half,
si_flags: Half,
};
pub const Elf64_Syminfo = extern struct {
si_boundto: Elf64_Half,
si_flags: Elf64_Half,
si_boundto: Half,
si_flags: Half,
};
pub const Elf32_Rel = extern struct {
r_offset: Elf32_Addr,
r_info: Elf32_Word,
r_info: Word,
pub inline fn r_sym(self: @This()) u24 {
return @truncate(self.r_info >> 8);
@ -865,8 +869,8 @@ pub const Elf64_Rel = extern struct {
};
pub const Elf32_Rela = extern struct {
r_offset: Elf32_Addr,
r_info: Elf32_Word,
r_addend: Elf32_Sword,
r_info: Word,
r_addend: Sword,
pub inline fn r_sym(self: @This()) u24 {
return @truncate(self.r_info >> 8);
@ -887,69 +891,49 @@ pub const Elf64_Rela = extern struct {
return @truncate(self.r_info);
}
};
pub const Elf32_Relr = Elf32_Word;
pub const Elf32_Relr = Word;
pub const Elf64_Relr = Elf64_Xword;
pub const Elf32_Dyn = extern struct {
d_tag: Elf32_Sword,
d_tag: Sword,
d_val: Elf32_Addr,
};
pub const Elf64_Dyn = extern struct {
d_tag: Elf64_Sxword,
d_val: Elf64_Addr,
};
pub const Elf32_Verdef = extern struct {
vd_version: Elf32_Half,
vd_flags: Elf32_Half,
vd_ndx: Elf32_Half,
vd_cnt: Elf32_Half,
vd_hash: Elf32_Word,
vd_aux: Elf32_Word,
vd_next: Elf32_Word,
pub const Verdef = extern struct {
version: Half,
flags: Half,
ndx: VER_NDX,
cnt: Half,
hash: Word,
aux: Word,
next: Word,
};
pub const Elf64_Verdef = extern struct {
vd_version: Elf64_Half,
vd_flags: Elf64_Half,
vd_ndx: Elf64_Half,
vd_cnt: Elf64_Half,
vd_hash: Elf64_Word,
vd_aux: Elf64_Word,
vd_next: Elf64_Word,
};
pub const Elf32_Verdaux = extern struct {
vda_name: Elf32_Word,
vda_next: Elf32_Word,
};
pub const Elf64_Verdaux = extern struct {
vda_name: Elf64_Word,
vda_next: Elf64_Word,
pub const Verdaux = extern struct {
name: Word,
next: Word,
};
pub const Elf32_Verneed = extern struct {
vn_version: Elf32_Half,
vn_cnt: Elf32_Half,
vn_file: Elf32_Word,
vn_aux: Elf32_Word,
vn_next: Elf32_Word,
vn_version: Half,
vn_cnt: Half,
vn_file: Word,
vn_aux: Word,
vn_next: Word,
};
pub const Elf64_Verneed = extern struct {
vn_version: Elf64_Half,
vn_cnt: Elf64_Half,
vn_file: Elf64_Word,
vn_aux: Elf64_Word,
vn_next: Elf64_Word,
vn_version: Half,
vn_cnt: Half,
vn_file: Word,
vn_aux: Word,
vn_next: Word,
};
pub const Elf32_Vernaux = extern struct {
vna_hash: Elf32_Word,
vna_flags: Elf32_Half,
vna_other: Elf32_Half,
vna_name: Elf32_Word,
vna_next: Elf32_Word,
};
pub const Elf64_Vernaux = extern struct {
vna_hash: Elf64_Word,
vna_flags: Elf64_Half,
vna_other: Elf64_Half,
vna_name: Elf64_Word,
vna_next: Elf64_Word,
pub const Vernaux = extern struct {
hash: Word,
flags: Half,
other: Half,
name: Word,
next: Word,
};
pub const Elf32_auxv_t = extern struct {
a_type: u32,
@ -964,81 +948,81 @@ pub const Elf64_auxv_t = extern struct {
},
};
pub const Elf32_Nhdr = extern struct {
n_namesz: Elf32_Word,
n_descsz: Elf32_Word,
n_type: Elf32_Word,
n_namesz: Word,
n_descsz: Word,
n_type: Word,
};
pub const Elf64_Nhdr = extern struct {
n_namesz: Elf64_Word,
n_descsz: Elf64_Word,
n_type: Elf64_Word,
n_namesz: Word,
n_descsz: Word,
n_type: Word,
};
pub const Elf32_Move = extern struct {
m_value: Elf32_Xword,
m_info: Elf32_Word,
m_poffset: Elf32_Word,
m_repeat: Elf32_Half,
m_stride: Elf32_Half,
m_info: Word,
m_poffset: Word,
m_repeat: Half,
m_stride: Half,
};
pub const Elf64_Move = extern struct {
m_value: Elf64_Xword,
m_info: Elf64_Xword,
m_poffset: Elf64_Xword,
m_repeat: Elf64_Half,
m_stride: Elf64_Half,
m_repeat: Half,
m_stride: Half,
};
pub const Elf32_gptab = extern union {
gt_header: extern struct {
gt_current_g_value: Elf32_Word,
gt_unused: Elf32_Word,
gt_current_g_value: Word,
gt_unused: Word,
},
gt_entry: extern struct {
gt_g_value: Elf32_Word,
gt_bytes: Elf32_Word,
gt_g_value: Word,
gt_bytes: Word,
},
};
pub const Elf32_RegInfo = extern struct {
ri_gprmask: Elf32_Word,
ri_cprmask: [4]Elf32_Word,
ri_gp_value: Elf32_Sword,
ri_gprmask: Word,
ri_cprmask: [4]Word,
ri_gp_value: Sword,
};
pub const Elf_Options = extern struct {
kind: u8,
size: u8,
section: Elf32_Section,
info: Elf32_Word,
info: Word,
};
pub const Elf_Options_Hw = extern struct {
hwp_flags1: Elf32_Word,
hwp_flags2: Elf32_Word,
hwp_flags1: Word,
hwp_flags2: Word,
};
pub const Elf32_Lib = extern struct {
l_name: Elf32_Word,
l_time_stamp: Elf32_Word,
l_checksum: Elf32_Word,
l_version: Elf32_Word,
l_flags: Elf32_Word,
l_name: Word,
l_time_stamp: Word,
l_checksum: Word,
l_version: Word,
l_flags: Word,
};
pub const Elf64_Lib = extern struct {
l_name: Elf64_Word,
l_time_stamp: Elf64_Word,
l_checksum: Elf64_Word,
l_version: Elf64_Word,
l_flags: Elf64_Word,
l_name: Word,
l_time_stamp: Word,
l_checksum: Word,
l_version: Word,
l_flags: Word,
};
pub const Elf32_Conflict = Elf32_Addr;
pub const Elf_MIPS_ABIFlags_v0 = extern struct {
version: Elf32_Half,
version: Half,
isa_level: u8,
isa_rev: u8,
gpr_size: u8,
cpr1_size: u8,
cpr2_size: u8,
fp_abi: u8,
isa_ext: Elf32_Word,
ases: Elf32_Word,
flags1: Elf32_Word,
flags2: Elf32_Word,
isa_ext: Word,
ases: Word,
flags1: Word,
flags2: Word,
};
comptime {
@ -1102,22 +1086,11 @@ pub const Sym = switch (@sizeOf(usize)) {
8 => Elf64_Sym,
else => @compileError("expected pointer size of 32 or 64"),
};
pub const Verdef = switch (@sizeOf(usize)) {
4 => Elf32_Verdef,
8 => Elf64_Verdef,
else => @compileError("expected pointer size of 32 or 64"),
};
pub const Verdaux = switch (@sizeOf(usize)) {
4 => Elf32_Verdaux,
8 => Elf64_Verdaux,
else => @compileError("expected pointer size of 32 or 64"),
};
pub const Addr = switch (@sizeOf(usize)) {
4 => Elf32_Addr,
8 => Elf64_Addr,
else => @compileError("expected pointer size of 32 or 64"),
};
pub const Half = u16;
pub const OSABI = enum(u8) {
/// UNIX System V ABI


@ -34,10 +34,10 @@ pub fn lookup(vername: []const u8, name: []const u8) usize {
const dynv = maybe_dynv orelse return 0;
if (base == maxInt(usize)) return 0;
var maybe_strings: ?[*]u8 = null;
var maybe_strings: ?[*:0]u8 = null;
var maybe_syms: ?[*]elf.Sym = null;
var maybe_hashtab: ?[*]linux.Elf_Symndx = null;
var maybe_versym: ?[*]u16 = null;
var maybe_versym: ?[*]elf.Versym = null;
var maybe_verdef: ?*elf.Verdef = null;
{
@ -45,11 +45,11 @@ pub fn lookup(vername: []const u8, name: []const u8) usize {
while (dynv[i] != 0) : (i += 2) {
const p = base +% dynv[i + 1];
switch (dynv[i]) {
elf.DT_STRTAB => maybe_strings = @as([*]u8, @ptrFromInt(p)),
elf.DT_SYMTAB => maybe_syms = @as([*]elf.Sym, @ptrFromInt(p)),
elf.DT_HASH => maybe_hashtab = @as([*]linux.Elf_Symndx, @ptrFromInt(p)),
elf.DT_VERSYM => maybe_versym = @as([*]u16, @ptrFromInt(p)),
elf.DT_VERDEF => maybe_verdef = @as(*elf.Verdef, @ptrFromInt(p)),
elf.DT_STRTAB => maybe_strings = @ptrFromInt(p),
elf.DT_SYMTAB => maybe_syms = @ptrFromInt(p),
elf.DT_HASH => maybe_hashtab = @ptrFromInt(p),
elf.DT_VERSYM => maybe_versym = @ptrFromInt(p),
elf.DT_VERDEF => maybe_verdef = @ptrFromInt(p),
else => {},
}
}
@ -80,17 +80,14 @@ pub fn lookup(vername: []const u8, name: []const u8) usize {
return 0;
}
fn checkver(def_arg: *elf.Verdef, vsym_arg: i32, vername: []const u8, strings: [*]u8) bool {
fn checkver(def_arg: *elf.Verdef, vsym_arg: elf.Versym, vername: []const u8, strings: [*:0]u8) bool {
var def = def_arg;
const vsym = @as(u32, @bitCast(vsym_arg)) & 0x7fff;
const vsym_index = vsym_arg.VERSION;
while (true) {
if (0 == (def.vd_flags & elf.VER_FLG_BASE) and (def.vd_ndx & 0x7fff) == vsym)
break;
if (def.vd_next == 0)
return false;
def = @as(*elf.Verdef, @ptrFromInt(@intFromPtr(def) + def.vd_next));
if (0 == (def.flags & elf.VER_FLG_BASE) and @intFromEnum(def.ndx) == vsym_index) break;
if (def.next == 0) return false;
def = @ptrFromInt(@intFromPtr(def) + def.next);
}
const aux = @as(*elf.Verdaux, @ptrFromInt(@intFromPtr(def) + def.vd_aux));
const vda_name = @as([*:0]u8, @ptrCast(strings + aux.vda_name));
return mem.eql(u8, vername, mem.sliceTo(vda_name, 0));
const aux: *elf.Verdaux = @ptrFromInt(@intFromPtr(def) + def.aux);
return mem.eql(u8, vername, mem.sliceTo(strings + aux.name, 0));
}


@ -1002,6 +1002,7 @@ const CacheUse = union(CacheMode) {
pub const LinkObject = struct {
path: Path,
must_link: bool = false,
needed: bool = false,
// When the library is passed via a positional argument, it will be
// added as a full path. If it's `-l<lib>`, then just the basename.
//
@ -2561,6 +2562,7 @@ fn addNonIncrementalStuffToCacheManifest(
for (comp.objects) |obj| {
_ = try man.addFilePath(obj.path, null);
man.hash.add(obj.must_link);
man.hash.add(obj.needed);
man.hash.add(obj.loption);
}


@ -39,6 +39,7 @@ pub fn baseZigTypeTag(self: Type, mod: *Zcu) std.builtin.TypeId {
};
}
/// Asserts the type is resolved.
pub fn isSelfComparable(ty: Type, zcu: *const Zcu, is_equality_cmp: bool) bool {
return switch (ty.zigTypeTag(zcu)) {
.int,
@ -62,7 +63,6 @@ pub fn isSelfComparable(ty: Type, zcu: *const Zcu, is_equality_cmp: bool) bool {
.noreturn,
.array,
.@"struct",
.undefined,
.null,
.error_union,
@ -70,6 +70,7 @@ pub fn isSelfComparable(ty: Type, zcu: *const Zcu, is_equality_cmp: bool) bool {
.frame,
=> false,
.@"struct" => is_equality_cmp and ty.containerLayout(zcu) == .@"packed",
.pointer => !ty.isSlice(zcu) and (is_equality_cmp or ty.isCPtr(zcu)),
.optional => {
if (!is_equality_cmp) return false;


@ -5162,6 +5162,7 @@ fn airCmp(func: *Func, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void {
const bin_op = func.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
const pt = func.pt;
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
const result: MCValue = if (func.liveness.isUnused(inst)) .unreach else result: {
const lhs_ty = func.typeOf(bin_op.lhs);
@ -5173,6 +5174,7 @@ fn airCmp(func: *Func, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void {
.pointer,
.error_set,
.optional,
.@"struct",
=> {
const int_ty = switch (lhs_ty.zigTypeTag(zcu)) {
.@"enum" => lhs_ty.intTagType(zcu),
@ -5190,6 +5192,12 @@ fn airCmp(func: *Func, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void {
return func.fail("TODO riscv cmp non-pointer optionals", .{});
}
},
.@"struct" => blk: {
const struct_obj = ip.loadStructType(lhs_ty.toIntern());
assert(struct_obj.layout == .@"packed");
const backing_index = struct_obj.backingIntTypeUnordered(ip);
break :blk Type.fromInterned(backing_index);
},
else => unreachable,
};


@ -6032,6 +6032,7 @@ pub const FuncGen = struct {
const o = self.ng.object;
const pt = o.pt;
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
const scalar_ty = operand_ty.scalarType(zcu);
const int_ty = switch (scalar_ty.zigTypeTag(zcu)) {
.@"enum" => scalar_ty.intTagType(zcu),
@ -6110,6 +6111,12 @@ pub const FuncGen = struct {
return phi.toValue();
},
.float => return self.buildFloatCmp(fast, op, operand_ty, .{ lhs, rhs }),
.@"struct" => blk: {
const struct_obj = ip.loadStructType(scalar_ty.toIntern());
assert(struct_obj.layout == .@"packed");
const backing_index = struct_obj.backingIntTypeUnordered(ip);
break :blk Type.fromInterned(backing_index);
},
else => unreachable,
};
const is_signed = int_ty.isSignedInt(zcu);


@ -190,6 +190,7 @@ pub const Object = struct {
nav_index: InternPool.Nav.Index,
air: Air,
liveness: Liveness,
do_codegen: bool,
) !void {
const zcu = pt.zcu;
const gpa = zcu.gpa;
@ -214,7 +215,7 @@ pub const Object = struct {
};
defer nav_gen.deinit();
nav_gen.genNav() catch |err| switch (err) {
nav_gen.genNav(do_codegen) catch |err| switch (err) {
error.CodegenFail => {
try zcu.failed_codegen.put(gpa, nav_index, nav_gen.error_msg.?);
},
@ -239,7 +240,7 @@ pub const Object = struct {
) !void {
const nav = pt.zcu.funcInfo(func_index).owner_nav;
// TODO: Separate types for generating decls and functions?
try self.genNav(pt, nav, air, liveness);
try self.genNav(pt, nav, air, liveness, true);
}
pub fn updateNav(
@ -247,7 +248,7 @@ pub const Object = struct {
pt: Zcu.PerThread,
nav: InternPool.Nav.Index,
) !void {
try self.genNav(pt, nav, undefined, undefined);
try self.genNav(pt, nav, undefined, undefined, false);
}
/// Fetch or allocate a result id for nav index. This function also marks the nav as alive.
@ -2943,16 +2944,22 @@ const NavGen = struct {
try self.spv.declareEntryPoint(spv_decl_index, test_name, .Kernel);
}
fn genNav(self: *NavGen) !void {
fn genNav(self: *NavGen, do_codegen: bool) !void {
const pt = self.pt;
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
const spv_decl_index = try self.object.resolveNav(zcu, self.owner_nav);
const result_id = self.spv.declPtr(spv_decl_index).result_id;
const nav = ip.getNav(self.owner_nav);
const val = zcu.navValue(self.owner_nav);
const ty = val.typeOf(zcu);
if (!do_codegen and !ty.hasRuntimeBits(zcu)) {
return;
}
const spv_decl_index = try self.object.resolveNav(zcu, self.owner_nav);
const result_id = self.spv.declPtr(spv_decl_index).result_id;
switch (self.spv.declPtr(spv_decl_index).kind) {
.func => {
const fn_info = zcu.typeToFunc(ty).?;
@ -3343,7 +3350,9 @@ const NavGen = struct {
.store, .store_safe => return self.airStore(inst),
.br => return self.airBr(inst),
.repeat => return self.fail("TODO implement `repeat`", .{}),
// For now, just ignore this instruction. This effectively falls back on the old
// implementation; it doesn't change anything for us.
.repeat => return,
.breakpoint => return,
.cond_br => return self.airCondBr(inst),
.loop => return self.airLoop(inst),
@ -3356,7 +3365,7 @@ const NavGen = struct {
.dbg_stmt => return self.airDbgStmt(inst),
.dbg_inline_block => try self.airDbgInlineBlock(inst),
.dbg_var_ptr, .dbg_var_val => return self.airDbgVar(inst),
.dbg_var_ptr, .dbg_var_val, .dbg_arg_inline => return self.airDbgVar(inst),
.unwrap_errunion_err => try self.airErrUnionErr(inst),
.unwrap_errunion_payload => try self.airErrUnionPayload(inst),
@ -6535,10 +6544,6 @@ const NavGen = struct {
.id_ref_3 = params[0..n_params],
});
if (return_type == .noreturn_type) {
try self.func.body.emit(self.spv.gpa, .OpUnreachable, {});
}
if (self.liveness.isUnused(inst) or !Type.fromInterned(return_type).hasRuntimeBitsIgnoreComptime(zcu)) {
return null;
}


@ -935,10 +935,11 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) !voi
var ver_buf_i: u8 = 0;
while (ver_buf_i < versions_len) : (ver_buf_i += 1) {
// Example:
// .balign 4
// .globl _Exit_2_2_5
// .type _Exit_2_2_5, %function;
// .symver _Exit_2_2_5, _Exit@@GLIBC_2.2.5
// _Exit_2_2_5:
// _Exit_2_2_5: .long 0
const ver_index = versions_buffer[ver_buf_i];
const ver = metadata.all_versions[ver_index];
@ -957,12 +958,14 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) !voi
.{ sym_name, ver.major, ver.minor },
);
try stubs_asm.writer().print(
\\.balign {d}
\\.globl {s}
\\.type {s}, %function;
\\.symver {s}, {s}{s}GLIBC_{d}.{d}
\\{s}:
\\{s}: {s} 0
\\
, .{
target.ptrBitWidth() / 8,
sym_plus_ver,
sym_plus_ver,
sym_plus_ver,
@ -971,6 +974,7 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) !voi
ver.major,
ver.minor,
sym_plus_ver,
wordDirective(target),
});
} else {
const sym_plus_ver = if (want_default)
@ -982,12 +986,14 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) !voi
.{ sym_name, ver.major, ver.minor, ver.patch },
);
try stubs_asm.writer().print(
\\.balign {d}
\\.globl {s}
\\.type {s}, %function;
\\.symver {s}, {s}{s}GLIBC_{d}.{d}.{d}
\\{s}:
\\{s}: {s} 0
\\
, .{
target.ptrBitWidth() / 8,
sym_plus_ver,
sym_plus_ver,
sym_plus_ver,
@ -997,6 +1003,7 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) !voi
ver.minor,
ver.patch,
sym_plus_ver,
wordDirective(target),
});
}
}
@ -1024,10 +1031,14 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) !voi
// a strong reference.
if (std.mem.eql(u8, lib.name, "c")) {
try stubs_asm.writer().print(
\\.balign {d}
\\.globl _IO_stdin_used
\\{s} _IO_stdin_used
\\
, .{wordDirective(target)});
, .{
target.ptrBitWidth() / 8,
wordDirective(target),
});
}
const obj_inclusions_len = try inc_reader.readInt(u16, .little);
@ -1099,11 +1110,12 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) !voi
var ver_buf_i: u8 = 0;
while (ver_buf_i < versions_len) : (ver_buf_i += 1) {
// Example:
// .balign 4
// .globl environ_2_2_5
// .type environ_2_2_5, %object;
// .size environ_2_2_5, 4;
// .symver environ_2_2_5, environ@@GLIBC_2.2.5
// environ_2_2_5:
// environ_2_2_5: .fill 4, 1, 0
const ver_index = versions_buffer[ver_buf_i];
const ver = metadata.all_versions[ver_index];
@ -1122,13 +1134,15 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) !voi
.{ sym_name, ver.major, ver.minor },
);
try stubs_asm.writer().print(
\\.balign {d}
\\.globl {s}
\\.type {s}, %object;
\\.size {s}, {d};
\\.symver {s}, {s}{s}GLIBC_{d}.{d}
\\{s}:
\\{s}: .fill {d}, 1, 0
\\
, .{
target.ptrBitWidth() / 8,
sym_plus_ver,
sym_plus_ver,
sym_plus_ver,
@ -1139,6 +1153,7 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) !voi
ver.major,
ver.minor,
sym_plus_ver,
size,
});
} else {
const sym_plus_ver = if (want_default)
@ -1150,13 +1165,15 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) !voi
.{ sym_name, ver.major, ver.minor, ver.patch },
);
try stubs_asm.writer().print(
\\.balign {d}
\\.globl {s}
\\.type {s}, %object;
\\.size {s}, {d};
\\.symver {s}, {s}{s}GLIBC_{d}.{d}.{d}
\\{s}:
\\{s}: .fill {d}, 1, 0
\\
, .{
target.ptrBitWidth() / 8,
sym_plus_ver,
sym_plus_ver,
sym_plus_ver,
@ -1168,6 +1185,7 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) !voi
ver.minor,
ver.patch,
sym_plus_ver,
size,
});
}
}
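
The stub generation above now emits a pointer-sized placeholder (.long 0 or .quad 0 via wordDirective, and .fill size, 1, 0 for objects) plus a matching .balign, so the dummy symbols have real storage instead of being bare labels. The body of wordDirective is not shown in this hunk; a plausible sketch of such a helper (an assumption, not the actual implementation):

const std = @import("std");

/// Hypothetical sketch: pick a pointer-sized data directive for the target.
fn wordDirective(target: std.Target) []const u8 {
    return if (target.ptrBitWidth() == 64) ".quad" else ".long";
}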


@ -207,23 +207,19 @@ pub const Diags = struct {
pub fn addError(diags: *Diags, comptime format: []const u8, args: anytype) void {
@branchHint(.cold);
const gpa = diags.gpa;
const eu_main_msg = std.fmt.allocPrint(gpa, format, args);
diags.mutex.lock();
defer diags.mutex.unlock();
diags.msgs.ensureUnusedCapacity(gpa, 1) catch |err| switch (err) {
error.OutOfMemory => {
diags.flags.alloc_failure_occurred = true;
return;
},
addErrorLockedFallible(diags, eu_main_msg) catch |err| switch (err) {
error.OutOfMemory => diags.setAllocFailureLocked(),
};
const err_msg: Msg = .{
.msg = std.fmt.allocPrint(gpa, format, args) catch |err| switch (err) {
error.OutOfMemory => {
diags.flags.alloc_failure_occurred = true;
return;
},
},
};
diags.msgs.appendAssumeCapacity(err_msg);
}
fn addErrorLockedFallible(diags: *Diags, eu_main_msg: Allocator.Error![]u8) Allocator.Error!void {
const gpa = diags.gpa;
const main_msg = try eu_main_msg;
errdefer gpa.free(main_msg);
try diags.msgs.append(gpa, .{ .msg = main_msg });
}
pub fn addErrorWithNotes(diags: *Diags, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
@ -242,7 +238,7 @@ pub const Diags = struct {
const err = diags.msgs.addOneAssumeCapacity();
err.* = .{
.msg = undefined,
.notes = try gpa.alloc(Diags.Msg, note_count),
.notes = try gpa.alloc(Msg, note_count),
};
return .{
.diags = diags,
@ -250,34 +246,93 @@ pub const Diags = struct {
};
}
pub fn reportMissingLibraryError(
pub fn addMissingLibraryError(
diags: *Diags,
checked_paths: []const []const u8,
comptime format: []const u8,
args: anytype,
) error{OutOfMemory}!void {
) void {
@branchHint(.cold);
var err = try diags.addErrorWithNotes(checked_paths.len);
try err.addMsg(format, args);
for (checked_paths) |path| {
try err.addNote("tried {s}", .{path});
}
const gpa = diags.gpa;
const eu_main_msg = std.fmt.allocPrint(gpa, format, args);
diags.mutex.lock();
defer diags.mutex.unlock();
addMissingLibraryErrorLockedFallible(diags, checked_paths, eu_main_msg) catch |err| switch (err) {
error.OutOfMemory => diags.setAllocFailureLocked(),
};
}
pub fn reportParseError(
fn addMissingLibraryErrorLockedFallible(
diags: *Diags,
checked_paths: []const []const u8,
eu_main_msg: Allocator.Error![]u8,
) Allocator.Error!void {
const gpa = diags.gpa;
const main_msg = try eu_main_msg;
errdefer gpa.free(main_msg);
try diags.msgs.ensureUnusedCapacity(gpa, 1);
const notes = try gpa.alloc(Msg, checked_paths.len);
errdefer gpa.free(notes);
for (checked_paths, notes) |path, *note| {
note.* = .{ .msg = try std.fmt.allocPrint(gpa, "tried {s}", .{path}) };
}
diags.msgs.appendAssumeCapacity(.{
.msg = main_msg,
.notes = notes,
});
}
pub fn addParseError(
diags: *Diags,
path: Path,
comptime format: []const u8,
args: anytype,
) error{OutOfMemory}!void {
) void {
@branchHint(.cold);
var err = try diags.addErrorWithNotes(1);
try err.addMsg(format, args);
try err.addNote("while parsing {}", .{path});
const gpa = diags.gpa;
const eu_main_msg = std.fmt.allocPrint(gpa, format, args);
diags.mutex.lock();
defer diags.mutex.unlock();
addParseErrorLockedFallible(diags, path, eu_main_msg) catch |err| switch (err) {
error.OutOfMemory => diags.setAllocFailureLocked(),
};
}
fn addParseErrorLockedFallible(diags: *Diags, path: Path, m: Allocator.Error![]u8) Allocator.Error!void {
const gpa = diags.gpa;
const main_msg = try m;
errdefer gpa.free(main_msg);
try diags.msgs.ensureUnusedCapacity(gpa, 1);
const note = try std.fmt.allocPrint(gpa, "while parsing {}", .{path});
errdefer gpa.free(note);
const notes = try gpa.create([1]Msg);
errdefer gpa.destroy(notes);
notes.* = .{.{ .msg = note }};
diags.msgs.appendAssumeCapacity(.{
.msg = main_msg,
.notes = notes,
});
}
pub fn failParse(
diags: *Diags,
path: Path,
comptime format: []const u8,
args: anytype,
) error{LinkFailure} {
@branchHint(.cold);
addParseError(diags, path, format, args);
return error.LinkFailure;
}
pub fn setAllocFailure(diags: *Diags) void {
@branchHint(.cold);
diags.mutex.lock();
defer diags.mutex.unlock();
setAllocFailureLocked(diags);
}
fn setAllocFailureLocked(diags: *Diags) void {
log.debug("memory allocation failure", .{});
diags.flags.alloc_failure_occurred = true;
}
@ -727,7 +782,8 @@ pub const File = struct {
FailedToEmit,
FileSystem,
FilesOpenedWithWrongFlags,
/// Indicates an error will be present in `Compilation.link_errors`.
/// Deprecated. Use `LinkFailure` instead.
/// Formerly used to indicate an error will be present in `Compilation.link_errors`.
FlushFailure,
/// Indicates an error will be present in `Compilation.link_errors`.
LinkFailure,
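
The Diags refactor above follows one pattern throughout: format the message before taking the mutex, append it in a `*LockedFallible` helper, and downgrade error.OutOfMemory to the alloc_failure_occurred flag instead of propagating it. A self-contained miniature of that pattern (a sketch using the same std APIs, not the linker's actual type):

const std = @import("std");
const Allocator = std.mem.Allocator;

const MiniDiags = struct {
    gpa: Allocator,
    mutex: std.Thread.Mutex = .{},
    msgs: std.ArrayListUnmanaged([]u8) = .empty,
    alloc_failure_occurred: bool = false,

    fn addError(d: *MiniDiags, comptime format: []const u8, args: anytype) void {
        // Format outside the lock; hand the error union to the locked helper.
        const eu_msg = std.fmt.allocPrint(d.gpa, format, args);
        d.mutex.lock();
        defer d.mutex.unlock();
        addErrorLockedFallible(d, eu_msg) catch |err| switch (err) {
            error.OutOfMemory => d.alloc_failure_occurred = true,
        };
    }

    fn addErrorLockedFallible(d: *MiniDiags, eu_msg: Allocator.Error![]u8) Allocator.Error!void {
        const msg = try eu_msg;
        errdefer d.gpa.free(msg);
        try d.msgs.append(d.gpa, msg);
    }
};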


@ -46,7 +46,7 @@ file_handles: std.ArrayListUnmanaged(File.Handle) = .empty,
zig_object_index: ?File.Index = null,
linker_defined_index: ?File.Index = null,
objects: std.ArrayListUnmanaged(File.Index) = .empty,
shared_objects: std.ArrayListUnmanaged(File.Index) = .empty,
shared_objects: std.StringArrayHashMapUnmanaged(File.Index) = .empty,
/// List of all output sections and their associated metadata.
sections: std.MultiArrayList(Section) = .{},
@ -62,7 +62,7 @@ phdr_indexes: ProgramHeaderIndexes = .{},
section_indexes: SectionIndexes = .{},
page_size: u32,
default_sym_version: elf.Elf64_Versym,
default_sym_version: elf.Versym,
/// .shstrtab buffer
shstrtab: std.ArrayListUnmanaged(u8) = .empty,
@ -75,7 +75,7 @@ dynsym: DynsymSection = .{},
/// .dynstrtab buffer
dynstrtab: std.ArrayListUnmanaged(u8) = .empty,
/// Version symbol table. Only populated and emitted when linking dynamically.
versym: std.ArrayListUnmanaged(elf.Elf64_Versym) = .empty,
versym: std.ArrayListUnmanaged(elf.Versym) = .empty,
/// .verneed section
verneed: VerneedSection = .{},
/// .got section
@ -114,7 +114,7 @@ thunks: std.ArrayListUnmanaged(Thunk) = .empty,
merge_sections: std.ArrayListUnmanaged(Merge.Section) = .empty,
comment_merge_section_index: ?Merge.Section.Index = null,
first_eflags: ?elf.Elf64_Word = null,
first_eflags: ?elf.Word = null,
const SectionIndexes = struct {
copy_rel: ?u32 = null,
@ -265,10 +265,7 @@ pub fn createEmpty(
};
const is_dyn_lib = output_mode == .Lib and link_mode == .dynamic;
const default_sym_version: elf.Elf64_Versym = if (is_dyn_lib or comp.config.rdynamic)
elf.VER_NDX_GLOBAL
else
elf.VER_NDX_LOCAL;
const default_sym_version: elf.Versym = if (is_dyn_lib or comp.config.rdynamic) .GLOBAL else .LOCAL;
// If using LLD to link, this code should produce an object file so that it
// can be passed to LLD.
@ -794,58 +791,51 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
// --verbose-link
if (comp.verbose_link) try self.dumpArgv(comp);
if (self.zigObjectPtr()) |zig_object| try zig_object.flushModule(self, tid);
if (self.zigObjectPtr()) |zig_object| try zig_object.flush(self, tid);
if (self.base.isStaticLib()) return relocatable.flushStaticLib(self, comp, module_obj_path);
if (self.base.isObject()) return relocatable.flushObject(self, comp, module_obj_path);
const csu = try comp.getCrtPaths(arena);
// csu prelude
if (csu.crt0) |path| try parseObjectReportingFailure(self, path);
if (csu.crti) |path| try parseObjectReportingFailure(self, path);
if (csu.crtbegin) |path| try parseObjectReportingFailure(self, path);
if (csu.crt0) |path| parseObjectReportingFailure(self, path);
if (csu.crti) |path| parseObjectReportingFailure(self, path);
if (csu.crtbegin) |path| parseObjectReportingFailure(self, path);
for (comp.objects) |obj| {
if (obj.isObject()) {
try parseObjectReportingFailure(self, obj.path);
} else {
try parseLibraryReportingFailure(self, .{ .path = obj.path }, obj.must_link);
}
parseInputReportingFailure(self, obj.path, obj.needed, obj.must_link);
}
// This is a set of object files emitted by clang in a single `build-exe` invocation.
// For instance, the implicit `a.o` as compiled by `zig build-exe a.c` will end up
// in this set.
for (comp.c_object_table.keys()) |key| {
try parseObjectReportingFailure(self, key.status.success.object_path);
parseObjectReportingFailure(self, key.status.success.object_path);
}
if (module_obj_path) |path| try parseObjectReportingFailure(self, path);
if (module_obj_path) |path| parseObjectReportingFailure(self, path);
if (comp.config.any_sanitize_thread) try parseCrtFileReportingFailure(self, comp.tsan_lib.?);
if (comp.config.any_fuzz) try parseCrtFileReportingFailure(self, comp.fuzzer_lib.?);
if (comp.config.any_sanitize_thread) parseCrtFileReportingFailure(self, comp.tsan_lib.?);
if (comp.config.any_fuzz) parseCrtFileReportingFailure(self, comp.fuzzer_lib.?);
// libc
if (!comp.skip_linker_dependencies and !comp.config.link_libc) {
if (comp.libc_static_lib) |lib| try parseCrtFileReportingFailure(self, lib);
if (comp.libc_static_lib) |lib| parseCrtFileReportingFailure(self, lib);
}
for (comp.system_libs.values()) |lib_info| {
try self.parseLibraryReportingFailure(.{
.needed = lib_info.needed,
.path = lib_info.path.?,
}, false);
parseInputReportingFailure(self, lib_info.path.?, lib_info.needed, false);
}
// libc++ dep
if (comp.config.link_libcpp) {
try self.parseLibraryReportingFailure(.{ .path = comp.libcxxabi_static_lib.?.full_object_path }, false);
try self.parseLibraryReportingFailure(.{ .path = comp.libcxx_static_lib.?.full_object_path }, false);
parseInputReportingFailure(self, comp.libcxxabi_static_lib.?.full_object_path, false, false);
parseInputReportingFailure(self, comp.libcxx_static_lib.?.full_object_path, false, false);
}
// libunwind dep
if (comp.config.link_libunwind) {
try self.parseLibraryReportingFailure(.{ .path = comp.libunwind_static_lib.?.full_object_path }, false);
parseInputReportingFailure(self, comp.libunwind_static_lib.?.full_object_path, false, false);
}
// libc dep
@ -869,17 +859,16 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
if (try self.accessLibPath(arena, &test_path, &checked_paths, lc.crt_dir.?, lib_name, .static))
break :success;
try diags.reportMissingLibraryError(
diags.addMissingLibraryError(
checked_paths.items,
"missing system library: '{s}' was not found",
.{lib_name},
);
continue;
}
const resolved_path = Path.initCwd(try arena.dupe(u8, test_path.items));
try self.parseLibraryReportingFailure(.{ .path = resolved_path }, false);
parseInputReportingFailure(self, resolved_path, false, false);
}
} else if (target.isGnuLibC()) {
for (glibc.libs) |lib| {
@ -890,17 +879,15 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
const lib_path = Path.initCwd(try std.fmt.allocPrint(arena, "{s}{c}lib{s}.so.{d}", .{
comp.glibc_so_files.?.dir_path, fs.path.sep, lib.name, lib.sover,
}));
try self.parseLibraryReportingFailure(.{ .path = lib_path }, false);
parseInputReportingFailure(self, lib_path, false, false);
}
try self.parseLibraryReportingFailure(.{
.path = try comp.get_libc_crt_file(arena, "libc_nonshared.a"),
}, false);
parseInputReportingFailure(self, try comp.get_libc_crt_file(arena, "libc_nonshared.a"), false, false);
} else if (target.isMusl()) {
const path = try comp.get_libc_crt_file(arena, switch (link_mode) {
.static => "libc.a",
.dynamic => "libc.so",
});
try self.parseLibraryReportingFailure(.{ .path = path }, false);
parseInputReportingFailure(self, path, false, false);
} else {
diags.flags.missing_libc = true;
}
@ -912,35 +899,17 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
// to be after the shared libraries, so they are picked up from the shared
// libraries, not libcompiler_rt.
if (comp.compiler_rt_lib) |crt_file| {
try parseLibraryReportingFailure(self, .{ .path = crt_file.full_object_path }, false);
parseInputReportingFailure(self, crt_file.full_object_path, false, false);
} else if (comp.compiler_rt_obj) |crt_file| {
try parseObjectReportingFailure(self, crt_file.full_object_path);
parseObjectReportingFailure(self, crt_file.full_object_path);
}
// csu postlude
if (csu.crtend) |path| try parseObjectReportingFailure(self, path);
if (csu.crtn) |path| try parseObjectReportingFailure(self, path);
if (csu.crtend) |path| parseObjectReportingFailure(self, path);
if (csu.crtn) |path| parseObjectReportingFailure(self, path);
if (diags.hasErrors()) return error.FlushFailure;
// Dedup shared objects
{
var seen_dsos = std.StringHashMap(void).init(gpa);
defer seen_dsos.deinit();
try seen_dsos.ensureTotalCapacity(@as(u32, @intCast(self.shared_objects.items.len)));
var i: usize = 0;
while (i < self.shared_objects.items.len) {
const index = self.shared_objects.items[i];
const shared_object = self.file(index).?.shared_object;
const soname = shared_object.soname();
const gop = seen_dsos.getOrPutAssumeCapacity(soname);
if (gop.found_existing) {
_ = self.shared_objects.orderedRemove(i);
} else i += 1;
}
}
// If we haven't already, create a linker-generated input file comprising of
// linker-defined synthetic symbols only such as `_DYNAMIC`, etc.
if (self.linker_defined_index == null) {
@ -1372,42 +1341,51 @@ pub const ParseError = error{
UnknownFileType,
} || LdScript.Error || fs.Dir.AccessError || fs.File.SeekError || fs.File.OpenError || fs.File.ReadError;
fn parseCrtFileReportingFailure(self: *Elf, crt_file: Compilation.CrtFile) error{OutOfMemory}!void {
if (crt_file.isObject()) {
try parseObjectReportingFailure(self, crt_file.full_object_path);
} else {
try parseLibraryReportingFailure(self, .{ .path = crt_file.full_object_path }, false);
fn parseCrtFileReportingFailure(self: *Elf, crt_file: Compilation.CrtFile) void {
parseInputReportingFailure(self, crt_file.full_object_path, false, false);
}
pub fn parseInputReportingFailure(self: *Elf, path: Path, needed: bool, must_link: bool) void {
const gpa = self.base.comp.gpa;
const diags = &self.base.comp.link_diags;
const target = self.getTarget();
switch (Compilation.classifyFileExt(path.sub_path)) {
.object => parseObjectReportingFailure(self, path),
.shared_library => parseSharedObject(gpa, diags, .{
.path = path,
.needed = needed,
}, &self.shared_objects, &self.files, target) catch |err| switch (err) {
error.LinkFailure => return, // already reported
error.BadMagic, error.UnexpectedEndOfFile => {
// It could be a linker script.
self.parseLdScript(.{ .path = path, .needed = needed }) catch |err2| switch (err2) {
error.LinkFailure => return, // already reported
else => |e| diags.addParseError(path, "failed to parse linker script: {s}", .{@errorName(e)}),
};
},
else => |e| diags.addParseError(path, "failed to parse shared object: {s}", .{@errorName(e)}),
},
.static_library => parseArchive(self, path, must_link) catch |err| switch (err) {
error.LinkFailure => return, // already reported
else => |e| diags.addParseError(path, "failed to parse archive: {s}", .{@errorName(e)}),
},
.unknown => self.parseLdScript(.{ .path = path, .needed = needed }) catch |err| switch (err) {
error.LinkFailure => return, // already reported
else => |e| diags.addParseError(path, "failed to parse linker script: {s}", .{@errorName(e)}),
},
else => diags.addParseError(path, "unrecognized file type", .{}),
}
}
pub fn parseObjectReportingFailure(self: *Elf, path: Path) error{OutOfMemory}!void {
pub fn parseObjectReportingFailure(self: *Elf, path: Path) void {
const diags = &self.base.comp.link_diags;
self.parseObject(path) catch |err| switch (err) {
error.LinkFailure => return, // already reported
error.OutOfMemory => return error.OutOfMemory,
else => |e| try self.addParseError(path, "unable to parse object: {s}", .{@errorName(e)}),
else => |e| diags.addParseError(path, "unable to parse object: {s}", .{@errorName(e)}),
};
}
pub fn parseLibraryReportingFailure(self: *Elf, lib: SystemLib, must_link: bool) error{OutOfMemory}!void {
self.parseLibrary(lib, must_link) catch |err| switch (err) {
error.LinkFailure => return, // already reported
error.OutOfMemory => return error.OutOfMemory,
else => |e| try self.addParseError(lib.path, "unable to parse library: {s}", .{@errorName(e)}),
};
}
fn parseLibrary(self: *Elf, lib: SystemLib, must_link: bool) ParseError!void {
const tracy = trace(@src());
defer tracy.end();
if (try Archive.isArchive(lib.path)) {
try self.parseArchive(lib.path, must_link);
} else if (try SharedObject.isSharedObject(lib.path)) {
try self.parseSharedObject(lib);
} else {
try self.parseLdScript(lib);
}
}
fn parseObject(self: *Elf, path: Path) ParseError!void {
const tracy = trace(@src());
defer tracy.end();
@ -1457,28 +1435,80 @@ fn parseArchive(self: *Elf, path: Path, must_link: bool) ParseError!void {
}
}
fn parseSharedObject(self: *Elf, lib: SystemLib) ParseError!void {
fn parseSharedObject(
gpa: Allocator,
diags: *Diags,
lib: SystemLib,
shared_objects: *std.StringArrayHashMapUnmanaged(File.Index),
files: *std.MultiArrayList(File.Entry),
target: std.Target,
) !void {
const tracy = trace(@src());
defer tracy.end();
const gpa = self.base.comp.gpa;
const handle = try lib.path.root_dir.handle.openFile(lib.path.sub_path, .{});
defer handle.close();
const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
self.files.set(index, .{ .shared_object = .{
.path = .{
.root_dir = lib.path.root_dir,
.sub_path = try gpa.dupe(u8, lib.path.sub_path),
},
.index = index,
.needed = lib.needed,
.alive = lib.needed,
} });
try self.shared_objects.append(gpa, index);
const stat = Stat.fromFs(try handle.stat());
var header = try SharedObject.parseHeader(gpa, diags, lib.path, handle, stat, target);
defer header.deinit(gpa);
const shared_object = self.file(index).?.shared_object;
try shared_object.parse(self, handle);
const soname = header.soname() orelse lib.path.basename();
const gop = try shared_objects.getOrPut(gpa, soname);
if (gop.found_existing) {
header.deinit(gpa);
return;
}
errdefer _ = shared_objects.pop();
const index: File.Index = @intCast(try files.addOne(gpa));
errdefer _ = files.pop();
gop.value_ptr.* = index;
var parsed = try SharedObject.parse(gpa, &header, handle);
errdefer parsed.deinit(gpa);
const duped_path: Path = .{
.root_dir = lib.path.root_dir,
.sub_path = try gpa.dupe(u8, lib.path.sub_path),
};
errdefer gpa.free(duped_path.sub_path);
files.set(index, .{
.shared_object = .{
.parsed = parsed,
.path = duped_path,
.index = index,
.needed = lib.needed,
.alive = lib.needed,
.aliases = null,
.symbols = .empty,
.symbols_extra = .empty,
.symbols_resolver = .empty,
.output_symtab_ctx = .{},
},
});
const so = fileLookup(files.*, index).?.shared_object;
// TODO: save this work for later
const nsyms = parsed.symbols.len;
try so.symbols.ensureTotalCapacityPrecise(gpa, nsyms);
try so.symbols_extra.ensureTotalCapacityPrecise(gpa, nsyms * @typeInfo(Symbol.Extra).@"struct".fields.len);
try so.symbols_resolver.ensureTotalCapacityPrecise(gpa, nsyms);
so.symbols_resolver.appendNTimesAssumeCapacity(0, nsyms);
for (parsed.symtab, parsed.symbols, parsed.versyms, 0..) |esym, sym, versym, i| {
const out_sym_index = so.addSymbolAssumeCapacity();
const out_sym = &so.symbols.items[out_sym_index];
out_sym.value = @intCast(esym.st_value);
out_sym.name_offset = sym.mangled_name;
out_sym.ref = .{ .index = 0, .file = 0 };
out_sym.esym_index = @intCast(i);
out_sym.version_index = versym;
out_sym.extra_index = so.addSymbolExtraAssumeCapacity(.{});
}
}
fn parseLdScript(self: *Elf, lib: SystemLib) ParseError!void {
@ -1537,7 +1567,7 @@ fn parseLdScript(self: *Elf, lib: SystemLib) ParseError!void {
}
}
try diags.reportMissingLibraryError(
diags.addMissingLibraryError(
checked_paths.items,
"missing library dependency: GNU ld script '{}' requires '{s}', but file not found",
.{ @as(Path, lib.path), script_arg.path },
@ -1546,26 +1576,16 @@ fn parseLdScript(self: *Elf, lib: SystemLib) ParseError!void {
}
const full_path = Path.initCwd(test_path.items);
self.parseLibrary(.{
.needed = script_arg.needed,
.path = full_path,
}, false) catch |err| switch (err) {
error.LinkFailure => continue, // already reported
else => |e| try self.addParseError(
full_path,
"unexpected error: parsing library failed with error {s}",
.{@errorName(e)},
),
};
parseInputReportingFailure(self, full_path, script_arg.needed, false);
}
}
pub fn validateEFlags(self: *Elf, file_index: File.Index, e_flags: elf.Elf64_Word) !void {
pub fn validateEFlags(self: *Elf, file_index: File.Index, e_flags: elf.Word) !void {
if (self.first_eflags == null) {
self.first_eflags = e_flags;
return; // there isn't anything to conflict with yet
}
const self_eflags: *elf.Elf64_Word = &self.first_eflags.?;
const self_eflags: *elf.Word = &self.first_eflags.?;
switch (self.getTarget().cpu.arch) {
.riscv64 => {
@ -1641,11 +1661,14 @@ fn accessLibPath(
/// 5. Remove references to dead objects/shared objects
/// 6. Re-run symbol resolution on pruned objects and shared objects sets.
pub fn resolveSymbols(self: *Elf) !void {
// This function mutates `shared_objects`.
const shared_objects = &self.shared_objects;
// Resolve symbols in the ZigObject. For now, we assume that it's always live.
if (self.zigObjectPtr()) |zo| try zo.asFile().resolveSymbols(self);
// Resolve symbols on the set of all objects and shared objects (even if some are unneeded).
for (self.objects.items) |index| try self.file(index).?.resolveSymbols(self);
for (self.shared_objects.items) |index| try self.file(index).?.resolveSymbols(self);
for (shared_objects.values()) |index| try self.file(index).?.resolveSymbols(self);
if (self.linkerDefinedPtr()) |obj| try obj.asFile().resolveSymbols(self);
// Mark live objects.
@ -1662,11 +1685,14 @@ pub fn resolveSymbols(self: *Elf) !void {
_ = self.objects.orderedRemove(i);
} else i += 1;
}
// TODO This loop has 2 major flaws:
// 1. It is O(N^2) which is never allowed in the codebase.
// 2. It mutates shared_objects, which is a non-starter for incremental compilation.
i = 0;
while (i < self.shared_objects.items.len) {
const index = self.shared_objects.items[i];
while (i < shared_objects.values().len) {
const index = shared_objects.values()[i];
if (!self.file(index).?.isAlive()) {
_ = self.shared_objects.orderedRemove(i);
_ = shared_objects.orderedRemoveAt(i);
} else i += 1;
}
@ -1687,7 +1713,7 @@ pub fn resolveSymbols(self: *Elf) !void {
// Re-resolve the symbols.
if (self.zigObjectPtr()) |zo| try zo.asFile().resolveSymbols(self);
for (self.objects.items) |index| try self.file(index).?.resolveSymbols(self);
for (self.shared_objects.items) |index| try self.file(index).?.resolveSymbols(self);
for (shared_objects.values()) |index| try self.file(index).?.resolveSymbols(self);
if (self.linkerDefinedPtr()) |obj| try obj.asFile().resolveSymbols(self);
}
@ -1696,12 +1722,13 @@ pub fn resolveSymbols(self: *Elf) !void {
/// This routine will prune unneeded objects extracted from archives and
/// unneeded shared objects.
fn markLive(self: *Elf) void {
const shared_objects = self.shared_objects.values();
if (self.zigObjectPtr()) |zig_object| zig_object.asFile().markLive(self);
for (self.objects.items) |index| {
const file_ptr = self.file(index).?;
if (file_ptr.isAlive()) file_ptr.markLive(self);
}
for (self.shared_objects.items) |index| {
for (shared_objects) |index| {
const file_ptr = self.file(index).?;
if (file_ptr.isAlive()) file_ptr.markLive(self);
}
@ -1716,6 +1743,7 @@ pub fn markEhFrameAtomsDead(self: *Elf) void {
}
fn markImportsExports(self: *Elf) void {
const shared_objects = self.shared_objects.values();
if (self.zigObjectPtr()) |zo| {
zo.markImportsExports(self);
}
@ -1723,7 +1751,7 @@ fn markImportsExports(self: *Elf) void {
self.file(index).?.object.markImportsExports(self);
}
if (!self.isEffectivelyDynLib()) {
for (self.shared_objects.items) |index| {
for (shared_objects) |index| {
self.file(index).?.shared_object.markImportExports(self);
}
}
@ -1744,6 +1772,7 @@ fn claimUnresolved(self: *Elf) void {
/// alloc sections.
fn scanRelocs(self: *Elf) !void {
const gpa = self.base.comp.gpa;
const shared_objects = self.shared_objects.values();
var undefs = std.AutoArrayHashMap(SymbolResolver.Index, std.ArrayList(Ref)).init(gpa);
defer {
@ -1787,7 +1816,7 @@ fn scanRelocs(self: *Elf) !void {
for (self.objects.items) |index| {
try self.file(index).?.createSymbolIndirection(self);
}
for (self.shared_objects.items) |index| {
for (shared_objects) |index| {
try self.file(index).?.createSymbolIndirection(self);
}
if (self.linkerDefinedPtr()) |obj| {
@ -1905,10 +1934,10 @@ fn linkWithLLD(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: s
// our digest. If so, we can skip linking. Otherwise, we proceed with invoking LLD.
const id_symlink_basename = "lld.id";
var man: Cache.Manifest = undefined;
var man: std.Build.Cache.Manifest = undefined;
defer if (!self.base.disable_lld_caching) man.deinit();
var digest: [Cache.hex_digest_len]u8 = undefined;
var digest: [std.Build.Cache.hex_digest_len]u8 = undefined;
if (!self.base.disable_lld_caching) {
man = comp.cache_parent.obtain();
@ -1988,7 +2017,7 @@ fn linkWithLLD(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: s
digest = man.final();
var prev_digest_buf: [digest.len]u8 = undefined;
const prev_digest: []u8 = Cache.readSmallFile(
const prev_digest: []u8 = std.Build.Cache.readSmallFile(
directory.handle,
id_symlink_basename,
&prev_digest_buf,
@ -2442,7 +2471,7 @@ fn linkWithLLD(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: s
if (!self.base.disable_lld_caching) {
// Update the file with the digest. If it fails we can continue; it only
// means that the next invocation will have an unnecessary cache miss.
Cache.writeSmallFile(directory.handle, id_symlink_basename, &digest) catch |err| {
std.Build.Cache.writeSmallFile(directory.handle, id_symlink_basename, &digest) catch |err| {
log.warn("failed to save linking hash digest file: {s}", .{@errorName(err)});
};
// Again failure here only means an unnecessary cache miss.
@ -2899,6 +2928,7 @@ fn initSyntheticSections(self: *Elf) !void {
const comp = self.base.comp;
const target = self.getTarget();
const ptr_size = self.ptrWidthBytes();
const shared_objects = self.shared_objects.values();
const needs_eh_frame = blk: {
if (self.zigObjectPtr()) |zo|
@ -3023,7 +3053,7 @@ fn initSyntheticSections(self: *Elf) !void {
});
}
if (self.isEffectivelyDynLib() or self.shared_objects.items.len > 0 or comp.config.pie) {
if (self.isEffectivelyDynLib() or shared_objects.len > 0 or comp.config.pie) {
if (self.section_indexes.dynstrtab == null) {
self.section_indexes.dynstrtab = try self.addSection(.{
.name = try self.insertShString(".dynstr"),
@ -3072,7 +3102,7 @@ fn initSyntheticSections(self: *Elf) !void {
const needs_versions = for (self.dynsym.entries.items) |entry| {
const sym = self.symbol(entry.ref).?;
if (sym.flags.import and sym.version_index & elf.VERSYM_VERSION > elf.VER_NDX_GLOBAL) break true;
if (sym.flags.import and sym.version_index.VERSION > elf.Versym.GLOBAL.VERSION) break true;
} else false;
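
For context on the comparison above: the raw `Elf64_Versym` integer plus `VERSYM_VERSION`/`VERSYM_HIDDEN` masks is replaced by field access on a packed struct. A hedged sketch of the assumed layout (field names as used in this change, not copied from std): a 15-bit version index in the low bits with the hidden flag in bit 15.

const std = @import("std");

const Versym = packed struct(u16) {
    VERSION: u15,
    HIDDEN: bool,

    const LOCAL: Versym = .{ .VERSION = 0, .HIDDEN = false };
    const GLOBAL: Versym = .{ .VERSION = 1, .HIDDEN = false };
};

test "versym round-trips through u16" {
    const raw: u16 = 0x8003; // version index 3 with the hidden bit set
    const v: Versym = @bitCast(raw);
    try std.testing.expectEqual(@as(u15, 3), v.VERSION);
    try std.testing.expect(v.HIDDEN);
    try std.testing.expectEqual(raw, @as(u16, @bitCast(v)));
    try std.testing.expect(v.VERSION > Versym.GLOBAL.VERSION);
}
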
if (needs_versions) {
if (self.section_indexes.versym == null) {
@ -3080,8 +3110,8 @@ fn initSyntheticSections(self: *Elf) !void {
.name = try self.insertShString(".gnu.version"),
.flags = elf.SHF_ALLOC,
.type = elf.SHT_GNU_VERSYM,
.addralign = @alignOf(elf.Elf64_Versym),
.entsize = @sizeOf(elf.Elf64_Versym),
.addralign = @alignOf(elf.Versym),
.entsize = @sizeOf(elf.Versym),
});
}
if (self.section_indexes.verneed == null) {
@ -3259,7 +3289,9 @@ fn sortInitFini(self: *Elf) !void {
fn setDynamicSection(self: *Elf, rpaths: []const []const u8) !void {
if (self.section_indexes.dynamic == null) return;
for (self.shared_objects.items) |index| {
const shared_objects = self.shared_objects.values();
for (shared_objects) |index| {
const shared_object = self.file(index).?.shared_object;
if (!shared_object.alive) continue;
try self.dynamic.addNeeded(shared_object, self);
@ -3283,7 +3315,7 @@ fn setVersionSymtab(self: *Elf) !void {
const gpa = self.base.comp.gpa;
if (self.section_indexes.versym == null) return;
try self.versym.resize(gpa, self.dynsym.count());
self.versym.items[0] = elf.VER_NDX_LOCAL;
self.versym.items[0] = .LOCAL;
for (self.dynsym.entries.items, 1..) |entry, i| {
const sym = self.symbol(entry.ref).?;
self.versym.items[i] = sym.version_index;
@ -3653,7 +3685,7 @@ fn updateSectionSizes(self: *Elf) !void {
}
if (self.section_indexes.versym) |index| {
shdrs[index].sh_size = self.versym.items.len * @sizeOf(elf.Elf64_Versym);
shdrs[index].sh_size = self.versym.items.len * @sizeOf(elf.Versym);
}
if (self.section_indexes.verneed) |index| {
@ -4055,13 +4087,15 @@ pub fn updateSymtabSize(self: *Elf) !void {
var strsize: u32 = 0;
const gpa = self.base.comp.gpa;
const shared_objects = self.shared_objects.values();
var files = std.ArrayList(File.Index).init(gpa);
defer files.deinit();
try files.ensureTotalCapacityPrecise(self.objects.items.len + self.shared_objects.items.len + 2);
try files.ensureTotalCapacityPrecise(self.objects.items.len + shared_objects.len + 2);
if (self.zig_object_index) |index| files.appendAssumeCapacity(index);
for (self.objects.items) |index| files.appendAssumeCapacity(index);
for (self.shared_objects.items) |index| files.appendAssumeCapacity(index);
for (shared_objects) |index| files.appendAssumeCapacity(index);
if (self.linker_defined_index) |index| files.appendAssumeCapacity(index);
// Section symbols
@ -4284,6 +4318,8 @@ pub fn writeShStrtab(self: *Elf) !void {
pub fn writeSymtab(self: *Elf) !void {
const gpa = self.base.comp.gpa;
const shared_objects = self.shared_objects.values();
const slice = self.sections.slice();
const symtab_shdr = slice.items(.shdr)[self.section_indexes.symtab.?];
const strtab_shdr = slice.items(.shdr)[self.section_indexes.strtab.?];
@ -4335,7 +4371,7 @@ pub fn writeSymtab(self: *Elf) !void {
file_ptr.writeSymtab(self);
}
for (self.shared_objects.items) |index| {
for (shared_objects) |index| {
const file_ptr = self.file(index).?;
file_ptr.writeSymtab(self);
}
@ -4368,8 +4404,8 @@ pub fn writeSymtab(self: *Elf) !void {
.st_info = sym.st_info,
.st_other = sym.st_other,
.st_shndx = sym.st_shndx,
.st_value = @as(u32, @intCast(sym.st_value)),
.st_size = @as(u32, @intCast(sym.st_size)),
.st_value = @intCast(sym.st_value),
.st_size = @intCast(sym.st_size),
};
if (foreign_endian) mem.byteSwapAllFields(elf.Elf32_Sym, out);
}
@ -4925,18 +4961,6 @@ fn reportUnsupportedCpuArch(self: *Elf) error{OutOfMemory}!void {
});
}
pub fn addParseError(
self: *Elf,
path: Path,
comptime format: []const u8,
args: anytype,
) error{OutOfMemory}!void {
const diags = &self.base.comp.link_diags;
var err = try diags.addErrorWithNotes(1);
try err.addMsg(format, args);
try err.addNote("while parsing {}", .{path});
}
pub fn addFileError(
self: *Elf,
file_index: File.Index,
@ -4959,16 +4983,6 @@ pub fn failFile(
return error.LinkFailure;
}
pub fn failParse(
self: *Elf,
path: Path,
comptime format: []const u8,
args: anytype,
) error{ OutOfMemory, LinkFailure } {
try addParseError(self, path, format, args);
return error.LinkFailure;
}
const FormatShdrCtx = struct {
elf_file: *Elf,
shdr: elf.Elf64_Shdr,
@ -5113,6 +5127,8 @@ fn fmtDumpState(
_ = unused_fmt_string;
_ = options;
const shared_objects = self.shared_objects.values();
if (self.zigObjectPtr()) |zig_object| {
try writer.print("zig_object({d}) : {s}\n", .{ zig_object.index, zig_object.basename });
try writer.print("{}{}", .{
@ -5136,11 +5152,11 @@ fn fmtDumpState(
});
}
for (self.shared_objects.items) |index| {
for (shared_objects) |index| {
const shared_object = self.file(index).?.shared_object;
try writer.print("shared_object({d}) : ", .{index});
try writer.print("{}", .{shared_object.path});
try writer.print(" : needed({})", .{shared_object.needed});
try writer.print("shared_object({d}) : {} : needed({})", .{
index, shared_object.path, shared_object.needed,
});
if (!shared_object.alive) try writer.writeAll(" : [*]");
try writer.writeByte('\n');
try writer.print("{}\n", .{shared_object.fmtSymtab(self)});
@ -5204,10 +5220,7 @@ pub fn preadAllAlloc(allocator: Allocator, handle: fs.File, offset: u64, size: u
}
/// Binary search
pub fn bsearch(comptime T: type, haystack: []align(1) const T, predicate: anytype) usize {
if (!@hasDecl(@TypeOf(predicate), "predicate"))
@compileError("Predicate is required to define fn predicate(@This(), T) bool");
pub fn bsearch(comptime T: type, haystack: []const T, predicate: anytype) usize {
var min: usize = 0;
var max: usize = haystack.len;
while (min < max) {
@ -5223,10 +5236,7 @@ pub fn bsearch(comptime T: type, haystack: []align(1) const T, predicate: anytyp
}
/// Linear search
pub fn lsearch(comptime T: type, haystack: []align(1) const T, predicate: anytype) usize {
if (!@hasDecl(@TypeOf(predicate), "predicate"))
@compileError("Predicate is required to define fn predicate(@This(), T) bool");
pub fn lsearch(comptime T: type, haystack: []const T, predicate: anytype) usize {
var i: usize = 0;
while (i < haystack.len) : (i += 1) {
if (predicate.predicate(haystack[i])) break;
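
The removed `@hasDecl` checks documented the `predicate` convention: the caller passes a value exposing `fn predicate(self, T) bool`. A standalone sketch of that convention (the `return i` tail is assumed from the surrounding code, and `AtLeast` is illustrative):

const std = @import("std");

fn lsearch(comptime T: type, haystack: []const T, predicate: anytype) usize {
    var i: usize = 0;
    while (i < haystack.len) : (i += 1) {
        if (predicate.predicate(haystack[i])) break;
    }
    return i;
}

test lsearch {
    const AtLeast = struct {
        min: u32,
        pub fn predicate(self: @This(), value: u32) bool {
            return value >= self.min;
        }
    };
    const values = [_]u32{ 1, 3, 7, 9 };
    // Index of the first element >= min, or values.len if none matches.
    try std.testing.expectEqual(@as(usize, 2), lsearch(u32, &values, AtLeast{ .min = 5 }));
    try std.testing.expectEqual(@as(usize, 4), lsearch(u32, &values, AtLeast{ .min = 100 }));
}
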
@ -5569,6 +5579,11 @@ fn createThunks(elf_file: *Elf, atom_list: *AtomList) !void {
}
}
pub fn stringTableLookup(strtab: []const u8, off: u32) [:0]const u8 {
const slice = strtab[off..];
return slice[0..mem.indexOfScalar(u8, slice, 0).? :0];
}
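
A small usage sketch for the helper above (standalone copy, with an illustrative NUL-delimited string table):

const std = @import("std");

pub fn stringTableLookup(strtab: []const u8, off: u32) [:0]const u8 {
    const slice = strtab[off..];
    return slice[0..std.mem.indexOfScalar(u8, slice, 0).? :0];
}

test stringTableLookup {
    const strtab = "\x00libc.so.6\x00GLIBC_2.2.5\x00";
    try std.testing.expectEqualStrings("libc.so.6", stringTableLookup(strtab, 1));
    try std.testing.expectEqualStrings("GLIBC_2.2.5", stringTableLookup(strtab, 11));
}
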
const std = @import("std");
const build_options = @import("build_options");
const builtin = @import("builtin");
@ -5581,8 +5596,9 @@ const state_log = std.log.scoped(.link_state);
const math = std.math;
const mem = std.mem;
const Allocator = std.mem.Allocator;
const Cache = std.Build.Cache;
const Hash = std.hash.Wyhash;
const Path = std.Build.Cache.Path;
const Stat = std.Build.Cache.File.Stat;
const codegen = @import("../codegen.zig");
const dev = @import("../dev.zig");
@ -5601,10 +5617,10 @@ const Merge = @import("Elf/Merge.zig");
const Air = @import("../Air.zig");
const Archive = @import("Elf/Archive.zig");
const AtomList = @import("Elf/AtomList.zig");
const Path = Cache.Path;
const Compilation = @import("../Compilation.zig");
const ComdatGroupSection = synthetic_sections.ComdatGroupSection;
const CopyRelSection = synthetic_sections.CopyRelSection;
const Diags = @import("../link.zig").Diags;
const DynamicSection = synthetic_sections.DynamicSection;
const DynsymSection = synthetic_sections.DynsymSection;
const Dwarf = @import("Dwarf.zig");

View File

@ -1,15 +1,6 @@
objects: std.ArrayListUnmanaged(Object) = .empty,
strtab: std.ArrayListUnmanaged(u8) = .empty,
pub fn isArchive(path: Path) !bool {
const file = try path.root_dir.handle.openFile(path.sub_path, .{});
defer file.close();
const reader = file.reader();
const magic = reader.readBytesNoEof(elf.ARMAG.len) catch return false;
if (!mem.eql(u8, &magic, elf.ARMAG)) return false;
return true;
}
pub fn deinit(self: *Archive, allocator: Allocator) void {
self.objects.deinit(allocator);
self.strtab.deinit(allocator);
@ -18,6 +9,7 @@ pub fn deinit(self: *Archive, allocator: Allocator) void {
pub fn parse(self: *Archive, elf_file: *Elf, path: Path, handle_index: File.HandleIndex) !void {
const comp = elf_file.base.comp;
const gpa = comp.gpa;
const diags = &comp.link_diags;
const handle = elf_file.fileHandle(handle_index);
const size = (try handle.stat()).size;
@ -35,7 +27,7 @@ pub fn parse(self: *Archive, elf_file: *Elf, path: Path, handle_index: File.Hand
pos += @sizeOf(elf.ar_hdr);
if (!mem.eql(u8, &hdr.ar_fmag, elf.ARFMAG)) {
return elf_file.failParse(path, "invalid archive header delimiter: {s}", .{
return diags.failParse(path, "invalid archive header delimiter: {s}", .{
std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag),
});
}

View File

@ -592,6 +592,7 @@ fn reportUndefined(
const file_ptr = self.file(elf_file).?;
const rel_esym = switch (file_ptr) {
.zig_object => |x| x.symbol(rel.r_sym()).elfSym(elf_file),
.shared_object => |so| so.parsed.symtab[rel.r_sym()],
inline else => |x| x.symtab.items[rel.r_sym()],
};
const esym = sym.elfSym(elf_file);

View File

@ -21,6 +21,8 @@ pub const Error = error{
pub fn parse(scr: *LdScript, data: []const u8, elf_file: *Elf) Error!void {
const comp = elf_file.base.comp;
const gpa = comp.gpa;
const diags = &comp.link_diags;
var tokenizer = Tokenizer{ .source = data };
var tokens = std.ArrayList(Token).init(gpa);
defer tokens.deinit();
@ -37,7 +39,7 @@ pub fn parse(scr: *LdScript, data: []const u8, elf_file: *Elf) Error!void {
try line_col.append(.{ .line = line, .column = column });
switch (tok.id) {
.invalid => {
return elf_file.failParse(scr.path, "invalid token in LD script: '{s}' ({d}:{d})", .{
return diags.failParse(scr.path, "invalid token in LD script: '{s}' ({d}:{d})", .{
std.fmt.fmtSliceEscapeLower(tok.get(data)), line, column,
});
},
@ -61,7 +63,7 @@ pub fn parse(scr: *LdScript, data: []const u8, elf_file: *Elf) Error!void {
const last_token_id = parser.it.pos - 1;
const last_token = parser.it.get(last_token_id);
const lcol = line_col.items[last_token_id];
return elf_file.failParse(scr.path, "unexpected token in LD script: {s}: '{s}' ({d}:{d})", .{
return diags.failParse(scr.path, "unexpected token in LD script: {s}: '{s}' ({d}:{d})", .{
@tagName(last_token.id),
last_token.get(data),
lcol.line,

View File

@ -310,7 +310,7 @@ fn initSymbols(self: *Object, allocator: Allocator, elf_file: *Elf) !void {
sym_ptr.name_offset = sym.st_name;
sym_ptr.esym_index = @intCast(i);
sym_ptr.extra_index = self.addSymbolExtraAssumeCapacity(.{});
sym_ptr.version_index = if (i >= first_global) elf_file.default_sym_version else elf.VER_NDX_LOCAL;
sym_ptr.version_index = if (i >= first_global) elf_file.default_sym_version else .LOCAL;
sym_ptr.flags.weak = sym.st_bind() == elf.STB_WEAK;
if (sym.st_shndx != elf.SHN_ABS and sym.st_shndx != elf.SHN_COMMON) {
sym_ptr.ref = .{ .index = self.atoms_indexes.items[sym.st_shndx], .file = self.index };
@ -536,7 +536,7 @@ pub fn claimUnresolved(self: *Object, elf_file: *Elf) void {
sym.ref = .{ .index = 0, .file = 0 };
sym.esym_index = esym_index;
sym.file_index = self.index;
sym.version_index = if (is_import) elf.VER_NDX_LOCAL else elf_file.default_sym_version;
sym.version_index = if (is_import) .LOCAL else elf_file.default_sym_version;
sym.flags.import = is_import;
const idx = self.symbols_resolver.items[i];
@ -598,8 +598,9 @@ pub fn markImportsExports(self: *Object, elf_file: *Elf) void {
const ref = self.resolveSymbol(@intCast(idx), elf_file);
const sym = elf_file.symbol(ref) orelse continue;
const file = sym.file(elf_file).?;
if (sym.version_index == elf.VER_NDX_LOCAL) continue;
const vis = @as(elf.STV, @enumFromInt(sym.elfSym(elf_file).st_other));
// https://github.com/ziglang/zig/issues/21678
if (@as(u16, @bitCast(sym.version_index)) == @as(u16, @bitCast(elf.Versym.LOCAL))) continue;
const vis: elf.STV = @enumFromInt(sym.elfSym(elf_file).st_other);
if (vis == .HIDDEN) continue;
if (file == .shared_object and !sym.isAbs(elf_file)) {
sym.flags.import = true;

View File

@ -1,236 +1,317 @@
path: Path,
index: File.Index,
header: ?elf.Elf64_Ehdr = null,
shdrs: std.ArrayListUnmanaged(elf.Elf64_Shdr) = .empty,
parsed: Parsed,
symtab: std.ArrayListUnmanaged(elf.Elf64_Sym) = .empty,
strtab: std.ArrayListUnmanaged(u8) = .empty,
/// Version symtab contains version strings of the symbols if present.
versyms: std.ArrayListUnmanaged(elf.Elf64_Versym) = .empty,
verstrings: std.ArrayListUnmanaged(u32) = .empty,
symbols: std.ArrayListUnmanaged(Symbol),
symbols_extra: std.ArrayListUnmanaged(u32),
symbols_resolver: std.ArrayListUnmanaged(Elf.SymbolResolver.Index),
symbols: std.ArrayListUnmanaged(Symbol) = .empty,
symbols_extra: std.ArrayListUnmanaged(u32) = .empty,
symbols_resolver: std.ArrayListUnmanaged(Elf.SymbolResolver.Index) = .empty,
aliases: ?std.ArrayListUnmanaged(u32) = null,
dynamic_table: std.ArrayListUnmanaged(elf.Elf64_Dyn) = .empty,
aliases: ?std.ArrayListUnmanaged(u32),
needed: bool,
alive: bool,
output_symtab_ctx: Elf.SymtabCtx = .{},
output_symtab_ctx: Elf.SymtabCtx,
pub fn isSharedObject(path: Path) !bool {
const file = try path.root_dir.handle.openFile(path.sub_path, .{});
defer file.close();
const reader = file.reader();
const header = reader.readStruct(elf.Elf64_Ehdr) catch return false;
if (!mem.eql(u8, header.e_ident[0..4], "\x7fELF")) return false;
if (header.e_ident[elf.EI_VERSION] != 1) return false;
if (header.e_type != elf.ET.DYN) return false;
return true;
pub fn deinit(so: *SharedObject, gpa: Allocator) void {
gpa.free(so.path.sub_path);
so.parsed.deinit(gpa);
so.symbols.deinit(gpa);
so.symbols_extra.deinit(gpa);
so.symbols_resolver.deinit(gpa);
if (so.aliases) |*aliases| aliases.deinit(gpa);
so.* = undefined;
}
pub fn deinit(self: *SharedObject, allocator: Allocator) void {
allocator.free(self.path.sub_path);
self.shdrs.deinit(allocator);
self.symtab.deinit(allocator);
self.strtab.deinit(allocator);
self.versyms.deinit(allocator);
self.verstrings.deinit(allocator);
self.symbols.deinit(allocator);
self.symbols_extra.deinit(allocator);
self.symbols_resolver.deinit(allocator);
if (self.aliases) |*aliases| aliases.deinit(allocator);
self.dynamic_table.deinit(allocator);
}
pub const Header = struct {
dynamic_table: []const elf.Elf64_Dyn,
soname_index: ?u32,
verdefnum: ?u32,
pub fn parse(self: *SharedObject, elf_file: *Elf, handle: std.fs.File) !void {
const comp = elf_file.base.comp;
const gpa = comp.gpa;
const file_size = (try handle.stat()).size;
sections: []const elf.Elf64_Shdr,
dynsym_sect_index: ?u32,
versym_sect_index: ?u32,
verdef_sect_index: ?u32,
const header_buffer = try Elf.preadAllAlloc(gpa, handle, 0, @sizeOf(elf.Elf64_Ehdr));
defer gpa.free(header_buffer);
self.header = @as(*align(1) const elf.Elf64_Ehdr, @ptrCast(header_buffer)).*;
stat: Stat,
strtab: std.ArrayListUnmanaged(u8),
const em = elf_file.base.comp.root_mod.resolved_target.result.toElfMachine();
if (em != self.header.?.e_machine) {
return elf_file.failFile(self.index, "invalid ELF machine type: {s}", .{
@tagName(self.header.?.e_machine),
});
pub fn deinit(header: *Header, gpa: Allocator) void {
gpa.free(header.sections);
gpa.free(header.dynamic_table);
header.strtab.deinit(gpa);
header.* = undefined;
}
const shoff = std.math.cast(usize, self.header.?.e_shoff) orelse return error.Overflow;
const shnum = std.math.cast(usize, self.header.?.e_shnum) orelse return error.Overflow;
const shsize = shnum * @sizeOf(elf.Elf64_Shdr);
if (file_size < shoff or file_size < shoff + shsize) {
return elf_file.failFile(self.index, "corrupted header: section header table extends past the end of file", .{});
pub fn soname(header: Header) ?[]const u8 {
const i = header.soname_index orelse return null;
return Elf.stringTableLookup(header.strtab.items, i);
}
};
pub const Parsed = struct {
stat: Stat,
strtab: []const u8,
soname_index: ?u32,
sections: []const elf.Elf64_Shdr,
/// Nonlocal symbols only.
symtab: []const elf.Elf64_Sym,
/// Version symtab contains version strings of the symbols if present.
/// Nonlocal symbols only.
versyms: []const elf.Versym,
/// Nonlocal symbols only.
symbols: []const Parsed.Symbol,
verstrings: []const u32,
const Symbol = struct {
mangled_name: u32,
};
pub fn deinit(p: *Parsed, gpa: Allocator) void {
gpa.free(p.strtab);
gpa.free(p.symtab);
gpa.free(p.versyms);
gpa.free(p.symbols);
gpa.free(p.verstrings);
p.* = undefined;
}
const shdrs_buffer = try Elf.preadAllAlloc(gpa, handle, shoff, shsize);
defer gpa.free(shdrs_buffer);
const shdrs = @as([*]align(1) const elf.Elf64_Shdr, @ptrCast(shdrs_buffer.ptr))[0..shnum];
try self.shdrs.appendUnalignedSlice(gpa, shdrs);
pub fn versionString(p: Parsed, index: elf.Versym) [:0]const u8 {
return versionStringLookup(p.strtab, p.verstrings, index);
}
pub fn soname(p: Parsed) ?[]const u8 {
const i = p.soname_index orelse return null;
return Elf.stringTableLookup(p.strtab, i);
}
};
pub fn parseHeader(
gpa: Allocator,
diags: *Diags,
file_path: Path,
fs_file: std.fs.File,
stat: Stat,
target: std.Target,
) !Header {
var ehdr: elf.Elf64_Ehdr = undefined;
{
const buf = mem.asBytes(&ehdr);
const amt = try fs_file.preadAll(buf, 0);
if (amt != buf.len) return error.UnexpectedEndOfFile;
}
if (!mem.eql(u8, ehdr.e_ident[0..4], "\x7fELF")) return error.BadMagic;
if (ehdr.e_ident[elf.EI_VERSION] != 1) return error.BadElfVersion;
if (ehdr.e_type != elf.ET.DYN) return error.NotSharedObject;
if (target.toElfMachine() != ehdr.e_machine)
return diags.failParse(file_path, "invalid ELF machine type: {s}", .{@tagName(ehdr.e_machine)});
const shoff = std.math.cast(usize, ehdr.e_shoff) orelse return error.Overflow;
const shnum = std.math.cast(u32, ehdr.e_shnum) orelse return error.Overflow;
const sections = try gpa.alloc(elf.Elf64_Shdr, shnum);
errdefer gpa.free(sections);
{
const buf = mem.sliceAsBytes(sections);
const amt = try fs_file.preadAll(buf, shoff);
if (amt != buf.len) return error.UnexpectedEndOfFile;
}
var dynsym_sect_index: ?u32 = null;
var dynamic_sect_index: ?u32 = null;
var versym_sect_index: ?u32 = null;
var verdef_sect_index: ?u32 = null;
for (self.shdrs.items, 0..) |shdr, i| {
if (shdr.sh_type != elf.SHT_NOBITS) {
if (file_size < shdr.sh_offset or file_size < shdr.sh_offset + shdr.sh_size) {
return elf_file.failFile(self.index, "corrupted section header", .{});
}
}
for (sections, 0..) |shdr, i_usize| {
const i: u32 = @intCast(i_usize);
switch (shdr.sh_type) {
elf.SHT_DYNSYM => dynsym_sect_index = @intCast(i),
elf.SHT_DYNAMIC => dynamic_sect_index = @intCast(i),
elf.SHT_GNU_VERSYM => versym_sect_index = @intCast(i),
elf.SHT_GNU_VERDEF => verdef_sect_index = @intCast(i),
else => {},
elf.SHT_DYNSYM => dynsym_sect_index = i,
elf.SHT_DYNAMIC => dynamic_sect_index = i,
elf.SHT_GNU_VERSYM => versym_sect_index = i,
elf.SHT_GNU_VERDEF => verdef_sect_index = i,
else => continue,
}
}
if (dynamic_sect_index) |index| {
const shdr = self.shdrs.items[index];
const raw = try Elf.preadAllAlloc(gpa, handle, shdr.sh_offset, shdr.sh_size);
defer gpa.free(raw);
const num = @divExact(raw.len, @sizeOf(elf.Elf64_Dyn));
const dyntab = @as([*]align(1) const elf.Elf64_Dyn, @ptrCast(raw.ptr))[0..num];
try self.dynamic_table.appendUnalignedSlice(gpa, dyntab);
const dynamic_table: []elf.Elf64_Dyn = if (dynamic_sect_index) |index| dt: {
const shdr = sections[index];
const n = std.math.cast(usize, shdr.sh_size / @sizeOf(elf.Elf64_Dyn)) orelse return error.Overflow;
const dynamic_table = try gpa.alloc(elf.Elf64_Dyn, n);
errdefer gpa.free(dynamic_table);
const buf = mem.sliceAsBytes(dynamic_table);
const amt = try fs_file.preadAll(buf, shdr.sh_offset);
if (amt != buf.len) return error.UnexpectedEndOfFile;
break :dt dynamic_table;
} else &.{};
errdefer gpa.free(dynamic_table);
var strtab: std.ArrayListUnmanaged(u8) = .empty;
errdefer strtab.deinit(gpa);
if (dynsym_sect_index) |index| {
const dynsym_shdr = sections[index];
if (dynsym_shdr.sh_link >= sections.len) return error.BadStringTableIndex;
const strtab_shdr = sections[dynsym_shdr.sh_link];
const n = std.math.cast(usize, strtab_shdr.sh_size) orelse return error.Overflow;
const buf = try strtab.addManyAsSlice(gpa, n);
const amt = try fs_file.preadAll(buf, strtab_shdr.sh_offset);
if (amt != buf.len) return error.UnexpectedEndOfFile;
}
const symtab = if (dynsym_sect_index) |index| blk: {
const shdr = self.shdrs.items[index];
const buffer = try Elf.preadAllAlloc(gpa, handle, shdr.sh_offset, shdr.sh_size);
const nsyms = @divExact(buffer.len, @sizeOf(elf.Elf64_Sym));
break :blk @as([*]align(1) const elf.Elf64_Sym, @ptrCast(buffer.ptr))[0..nsyms];
} else &[0]elf.Elf64_Sym{};
var soname_index: ?u32 = null;
var verdefnum: ?u32 = null;
for (dynamic_table) |entry| switch (entry.d_tag) {
elf.DT_SONAME => {
if (entry.d_val >= strtab.items.len) return error.BadSonameIndex;
soname_index = @intCast(entry.d_val);
},
elf.DT_VERDEFNUM => {
verdefnum = @intCast(entry.d_val);
},
else => continue,
};
return .{
.dynamic_table = dynamic_table,
.soname_index = soname_index,
.verdefnum = verdefnum,
.sections = sections,
.dynsym_sect_index = dynsym_sect_index,
.versym_sect_index = versym_sect_index,
.verdef_sect_index = verdef_sect_index,
.strtab = strtab,
.stat = stat,
};
}
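
The header reads above repeat one pattern: `preadAll` exactly the byte size of a fixed-layout struct at a known offset and treat a short read as corruption. A self-contained sketch of that pattern (the `Ident` type and file contents are illustrative, not the linker's structs):

const std = @import("std");

fn preadStruct(comptime T: type, file: std.fs.File, offset: u64) !T {
    var value: T = undefined;
    const buf = std.mem.asBytes(&value);
    const amt = try file.preadAll(buf, offset);
    if (amt != buf.len) return error.UnexpectedEndOfFile;
    return value;
}

test preadStruct {
    var tmp = std.testing.tmpDir(.{});
    defer tmp.cleanup();
    const file = try tmp.dir.createFile("fake.so", .{ .read = true });
    defer file.close();

    var bytes = [_]u8{0} ** 16;
    @memcpy(bytes[0..4], "\x7fELF");
    try file.writeAll(&bytes);

    const Ident = extern struct { magic: [4]u8, rest: [12]u8 };
    const ident = try preadStruct(Ident, file, 0);
    try std.testing.expect(std.mem.eql(u8, &ident.magic, "\x7fELF"));
}
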
pub fn parse(
gpa: Allocator,
/// Moves resources from header. Caller may unconditionally deinit.
header: *Header,
fs_file: std.fs.File,
) !Parsed {
const symtab = if (header.dynsym_sect_index) |index| st: {
const shdr = header.sections[index];
const n = std.math.cast(usize, shdr.sh_size / @sizeOf(elf.Elf64_Sym)) orelse return error.Overflow;
const symtab = try gpa.alloc(elf.Elf64_Sym, n);
errdefer gpa.free(symtab);
const buf = mem.sliceAsBytes(symtab);
const amt = try fs_file.preadAll(buf, shdr.sh_offset);
if (amt != buf.len) return error.UnexpectedEndOfFile;
break :st symtab;
} else &.{};
defer gpa.free(symtab);
const strtab = if (dynsym_sect_index) |index| blk: {
const symtab_shdr = self.shdrs.items[index];
const shdr = self.shdrs.items[symtab_shdr.sh_link];
const buffer = try Elf.preadAllAlloc(gpa, handle, shdr.sh_offset, shdr.sh_size);
break :blk buffer;
} else &[0]u8{};
defer gpa.free(strtab);
var verstrings: std.ArrayListUnmanaged(u32) = .empty;
defer verstrings.deinit(gpa);
try self.parseVersions(elf_file, handle, .{
.symtab = symtab,
.verdef_sect_index = verdef_sect_index,
.versym_sect_index = versym_sect_index,
});
try self.initSymbols(elf_file, .{
.symtab = symtab,
.strtab = strtab,
});
}
fn parseVersions(self: *SharedObject, elf_file: *Elf, handle: std.fs.File, opts: struct {
symtab: []align(1) const elf.Elf64_Sym,
verdef_sect_index: ?u32,
versym_sect_index: ?u32,
}) !void {
const comp = elf_file.base.comp;
const gpa = comp.gpa;
try self.verstrings.resize(gpa, 2);
self.verstrings.items[elf.VER_NDX_LOCAL] = 0;
self.verstrings.items[elf.VER_NDX_GLOBAL] = 0;
if (opts.verdef_sect_index) |shndx| {
const shdr = self.shdrs.items[shndx];
const verdefs = try Elf.preadAllAlloc(gpa, handle, shdr.sh_offset, shdr.sh_size);
if (header.verdef_sect_index) |shndx| {
const shdr = header.sections[shndx];
const verdefs = try Elf.preadAllAlloc(gpa, fs_file, shdr.sh_offset, shdr.sh_size);
defer gpa.free(verdefs);
const nverdefs = self.verdefNum();
try self.verstrings.resize(gpa, self.verstrings.items.len + nverdefs);
var i: u32 = 0;
var offset: u32 = 0;
while (i < nverdefs) : (i += 1) {
const verdef = @as(*align(1) const elf.Elf64_Verdef, @ptrCast(verdefs.ptr + offset)).*;
defer offset += verdef.vd_next;
if (verdef.vd_flags == elf.VER_FLG_BASE) continue; // Skip BASE entry
const vda_name = if (verdef.vd_cnt > 0)
@as(*align(1) const elf.Elf64_Verdaux, @ptrCast(verdefs.ptr + offset + verdef.vd_aux)).vda_name
else
0;
self.verstrings.items[verdef.vd_ndx] = vda_name;
while (true) {
const verdef = mem.bytesAsValue(elf.Verdef, verdefs[offset..][0..@sizeOf(elf.Verdef)]);
if (verdef.ndx == .UNSPECIFIED) return error.VerDefSymbolTooLarge;
if (verstrings.items.len <= @intFromEnum(verdef.ndx))
try verstrings.appendNTimes(gpa, 0, @intFromEnum(verdef.ndx) + 1 - verstrings.items.len);
const aux = mem.bytesAsValue(elf.Verdaux, verdefs[offset + verdef.aux ..][0..@sizeOf(elf.Verdaux)]);
verstrings.items[@intFromEnum(verdef.ndx)] = aux.name;
if (verdef.next == 0) break;
offset += verdef.next;
}
}
try self.versyms.ensureTotalCapacityPrecise(gpa, opts.symtab.len);
const versyms = if (header.versym_sect_index) |versym_sect_index| vs: {
const shdr = header.sections[versym_sect_index];
if (shdr.sh_size != symtab.len * @sizeOf(elf.Versym)) return error.BadVerSymSectionSize;
if (opts.versym_sect_index) |shndx| {
const shdr = self.shdrs.items[shndx];
const versyms_raw = try Elf.preadAllAlloc(gpa, handle, shdr.sh_offset, shdr.sh_size);
defer gpa.free(versyms_raw);
const nversyms = @divExact(versyms_raw.len, @sizeOf(elf.Elf64_Versym));
const versyms = @as([*]align(1) const elf.Elf64_Versym, @ptrCast(versyms_raw.ptr))[0..nversyms];
for (versyms) |ver| {
const normalized_ver = if (ver & elf.VERSYM_VERSION >= self.verstrings.items.len - 1)
elf.VER_NDX_GLOBAL
else
ver;
self.versyms.appendAssumeCapacity(normalized_ver);
}
} else for (0..opts.symtab.len) |_| {
self.versyms.appendAssumeCapacity(elf.VER_NDX_GLOBAL);
const versyms = try gpa.alloc(elf.Versym, symtab.len);
errdefer gpa.free(versyms);
const buf = mem.sliceAsBytes(versyms);
const amt = try fs_file.preadAll(buf, shdr.sh_offset);
if (amt != buf.len) return error.UnexpectedEndOfFile;
break :vs versyms;
} else &.{};
defer gpa.free(versyms);
var nonlocal_esyms: std.ArrayListUnmanaged(elf.Elf64_Sym) = .empty;
defer nonlocal_esyms.deinit(gpa);
var nonlocal_versyms: std.ArrayListUnmanaged(elf.Versym) = .empty;
defer nonlocal_versyms.deinit(gpa);
var nonlocal_symbols: std.ArrayListUnmanaged(Parsed.Symbol) = .empty;
defer nonlocal_symbols.deinit(gpa);
var strtab = header.strtab;
header.strtab = .empty;
defer strtab.deinit(gpa);
for (symtab, 0..) |sym, i| {
const ver: elf.Versym = if (versyms.len == 0 or sym.st_shndx == elf.SHN_UNDEF)
.GLOBAL
else
.{ .VERSION = versyms[i].VERSION, .HIDDEN = false };
// https://github.com/ziglang/zig/issues/21678
//if (ver == .LOCAL) continue;
if (@as(u16, @bitCast(ver)) == 0) continue;
try nonlocal_esyms.ensureUnusedCapacity(gpa, 1);
try nonlocal_versyms.ensureUnusedCapacity(gpa, 1);
try nonlocal_symbols.ensureUnusedCapacity(gpa, 1);
const name = Elf.stringTableLookup(strtab.items, sym.st_name);
const is_default = versyms.len == 0 or !versyms[i].HIDDEN;
const mangled_name = if (is_default) sym.st_name else mn: {
const off: u32 = @intCast(strtab.items.len);
const version_string = versionStringLookup(strtab.items, verstrings.items, versyms[i]);
try strtab.ensureUnusedCapacity(gpa, name.len + version_string.len + 2);
// Reload since the string table might have been resized.
const name2 = Elf.stringTableLookup(strtab.items, sym.st_name);
const version_string2 = versionStringLookup(strtab.items, verstrings.items, versyms[i]);
strtab.appendSliceAssumeCapacity(name2);
strtab.appendAssumeCapacity('@');
strtab.appendSliceAssumeCapacity(version_string2);
strtab.appendAssumeCapacity(0);
break :mn off;
};
nonlocal_esyms.appendAssumeCapacity(sym);
nonlocal_versyms.appendAssumeCapacity(ver);
nonlocal_symbols.appendAssumeCapacity(.{
.mangled_name = mangled_name,
});
}
}
fn initSymbols(self: *SharedObject, elf_file: *Elf, opts: struct {
symtab: []align(1) const elf.Elf64_Sym,
strtab: []const u8,
}) !void {
const gpa = elf_file.base.comp.gpa;
const nsyms = opts.symtab.len;
const sections = header.sections;
header.sections = &.{};
errdefer gpa.free(sections);
try self.strtab.appendSlice(gpa, opts.strtab);
try self.symtab.ensureTotalCapacityPrecise(gpa, nsyms);
try self.symbols.ensureTotalCapacityPrecise(gpa, nsyms);
try self.symbols_extra.ensureTotalCapacityPrecise(gpa, nsyms * @sizeOf(Symbol.Extra));
try self.symbols_resolver.ensureTotalCapacityPrecise(gpa, nsyms);
self.symbols_resolver.resize(gpa, nsyms) catch unreachable;
@memset(self.symbols_resolver.items, 0);
for (opts.symtab, 0..) |sym, i| {
const hidden = self.versyms.items[i] & elf.VERSYM_HIDDEN != 0;
const name = self.getString(sym.st_name);
// We need to garble up the name so that we don't pick this symbol
// during symbol resolution. Thank you GNU!
const name_off = if (hidden) blk: {
const mangled = try std.fmt.allocPrint(gpa, "{s}@{s}", .{
name,
self.versionString(self.versyms.items[i]),
});
defer gpa.free(mangled);
break :blk try self.addString(gpa, mangled);
} else sym.st_name;
const out_esym_index: u32 = @intCast(self.symtab.items.len);
const out_esym = self.symtab.addOneAssumeCapacity();
out_esym.* = sym;
out_esym.st_name = name_off;
const out_sym_index = self.addSymbolAssumeCapacity();
const out_sym = &self.symbols.items[out_sym_index];
out_sym.value = @intCast(out_esym.st_value);
out_sym.name_offset = name_off;
out_sym.ref = .{ .index = 0, .file = 0 };
out_sym.esym_index = out_esym_index;
out_sym.version_index = self.versyms.items[out_esym_index];
out_sym.extra_index = self.addSymbolExtraAssumeCapacity(.{});
}
return .{
.sections = sections,
.stat = header.stat,
.soname_index = header.soname_index,
.strtab = try strtab.toOwnedSlice(gpa),
.symtab = try nonlocal_esyms.toOwnedSlice(gpa),
.versyms = try nonlocal_versyms.toOwnedSlice(gpa),
.symbols = try nonlocal_symbols.toOwnedSlice(gpa),
.verstrings = try verstrings.toOwnedSlice(gpa),
};
}
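
A standalone sketch of the mangling performed in the loop above for hidden versioned symbols: the name is rewritten as `name@version` so it is not matched during normal resolution (symbol and version names are illustrative):

const std = @import("std");

test "mangle hidden versioned symbol name" {
    var strtab = std.ArrayList(u8).init(std.testing.allocator);
    defer strtab.deinit();

    const name = "pthread_create";
    const version = "GLIBC_2.2.5";

    const off = strtab.items.len;
    try strtab.ensureUnusedCapacity(name.len + version.len + 2);
    strtab.appendSliceAssumeCapacity(name);
    strtab.appendAssumeCapacity('@');
    strtab.appendSliceAssumeCapacity(version);
    strtab.appendAssumeCapacity(0);

    try std.testing.expectEqualStrings(
        "pthread_create@GLIBC_2.2.5",
        std.mem.sliceTo(strtab.items[off..], 0),
    );
}
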
pub fn resolveSymbols(self: *SharedObject, elf_file: *Elf) !void {
const gpa = elf_file.base.comp.gpa;
for (self.symtab.items, self.symbols_resolver.items, 0..) |esym, *resolv, i| {
for (self.parsed.symtab, self.symbols_resolver.items, 0..) |esym, *resolv, i| {
const gop = try elf_file.resolver.getOrPut(gpa, .{
.index = @intCast(i),
.file = self.index,
@ -253,7 +334,7 @@ pub fn resolveSymbols(self: *SharedObject, elf_file: *Elf) !void {
}
pub fn markLive(self: *SharedObject, elf_file: *Elf) void {
for (self.symtab.items, 0..) |esym, i| {
for (self.parsed.symtab, 0..) |esym, i| {
if (esym.st_shndx != elf.SHN_UNDEF) continue;
const ref = self.resolveSymbol(@intCast(i), elf_file);
@ -308,29 +389,21 @@ pub fn writeSymtab(self: *SharedObject, elf_file: *Elf) void {
}
}
pub fn versionString(self: SharedObject, index: elf.Elf64_Versym) [:0]const u8 {
const off = self.verstrings.items[index & elf.VERSYM_VERSION];
return self.getString(off);
pub fn versionString(self: SharedObject, index: elf.Versym) [:0]const u8 {
return self.parsed.versionString(index);
}
fn versionStringLookup(strtab: []const u8, verstrings: []const u32, index: elf.Versym) [:0]const u8 {
const off = verstrings[index.VERSION];
return Elf.stringTableLookup(strtab, off);
}
pub fn asFile(self: *SharedObject) File {
return .{ .shared_object = self };
}
fn verdefNum(self: *SharedObject) u32 {
for (self.dynamic_table.items) |entry| switch (entry.d_tag) {
elf.DT_VERDEFNUM => return @intCast(entry.d_val),
else => {},
};
return 0;
}
pub fn soname(self: *SharedObject) []const u8 {
for (self.dynamic_table.items) |entry| switch (entry.d_tag) {
elf.DT_SONAME => return self.getString(@intCast(entry.d_val)),
else => {},
};
return std.fs.path.basename(self.path.sub_path);
return self.parsed.soname() orelse self.path.basename();
}
pub fn initSymbolAliases(self: *SharedObject, elf_file: *Elf) !void {
@ -360,7 +433,7 @@ pub fn initSymbolAliases(self: *SharedObject, elf_file: *Elf) !void {
aliases.appendAssumeCapacity(@intCast(index));
}
std.mem.sort(u32, aliases.items, SortAlias{ .so = self, .ef = elf_file }, SortAlias.lessThan);
mem.sort(u32, aliases.items, SortAlias{ .so = self, .ef = elf_file }, SortAlias.lessThan);
self.aliases = aliases.moveToUnmanaged();
}
@ -384,17 +457,8 @@ pub fn symbolAliases(self: *SharedObject, index: u32, elf_file: *Elf) []const u3
return aliases.items[start..end];
}
fn addString(self: *SharedObject, allocator: Allocator, str: []const u8) !u32 {
const off: u32 = @intCast(self.strtab.items.len);
try self.strtab.ensureUnusedCapacity(allocator, str.len + 1);
self.strtab.appendSliceAssumeCapacity(str);
self.strtab.appendAssumeCapacity(0);
return off;
}
pub fn getString(self: SharedObject, off: u32) [:0]const u8 {
assert(off < self.strtab.items.len);
return mem.sliceTo(@as([*:0]const u8, @ptrCast(self.strtab.items.ptr + off)), 0);
return Elf.stringTableLookup(self.parsed.strtab, off);
}
pub fn resolveSymbol(self: SharedObject, index: Symbol.Index, elf_file: *Elf) Elf.Ref {
@ -402,25 +466,14 @@ pub fn resolveSymbol(self: SharedObject, index: Symbol.Index, elf_file: *Elf) El
return elf_file.resolver.get(resolv).?;
}
fn addSymbol(self: *SharedObject, allocator: Allocator) !Symbol.Index {
try self.symbols.ensureUnusedCapacity(allocator, 1);
return self.addSymbolAssumeCapacity();
}
fn addSymbolAssumeCapacity(self: *SharedObject) Symbol.Index {
pub fn addSymbolAssumeCapacity(self: *SharedObject) Symbol.Index {
const index: Symbol.Index = @intCast(self.symbols.items.len);
self.symbols.appendAssumeCapacity(.{ .file_index = self.index });
return index;
}
pub fn addSymbolExtra(self: *SharedObject, allocator: Allocator, extra: Symbol.Extra) !u32 {
const fields = @typeInfo(Symbol.Extra).@"struct".fields;
try self.symbols_extra.ensureUnusedCapacity(allocator, fields.len);
return self.addSymbolExtraAssumeCapacity(extra);
}
pub fn addSymbolExtraAssumeCapacity(self: *SharedObject, extra: Symbol.Extra) u32 {
const index = @as(u32, @intCast(self.symbols_extra.items.len));
const index: u32 = @intCast(self.symbols_extra.items.len);
const fields = @typeInfo(Symbol.Extra).@"struct".fields;
inline for (fields) |field| {
self.symbols_extra.appendAssumeCapacity(switch (field.type) {
@ -465,7 +518,7 @@ pub fn format(
_ = unused_fmt_string;
_ = options;
_ = writer;
@compileError("do not format shared objects directly");
@compileError("unreachable");
}
pub fn fmtSymtab(self: SharedObject, elf_file: *Elf) std.fmt.Formatter(formatSymtab) {
@ -509,8 +562,10 @@ const elf = std.elf;
const log = std.log.scoped(.elf);
const mem = std.mem;
const Path = std.Build.Cache.Path;
const Stat = std.Build.Cache.File.Stat;
const Allocator = mem.Allocator;
const Elf = @import("../Elf.zig");
const File = @import("file.zig").File;
const Symbol = @import("Symbol.zig");
const Diags = @import("../../link.zig").Diags;

View File

@ -22,7 +22,7 @@ esym_index: Index = 0,
/// Index of the source version symbol this symbol references if any.
/// If the symbol is unversioned it will have either VER_NDX_LOCAL or VER_NDX_GLOBAL.
version_index: elf.Elf64_Versym = elf.VER_NDX_LOCAL,
version_index: elf.Versym = .LOCAL,
/// Misc flags for the symbol packaged as packed struct for compression.
flags: Flags = .{},
@ -87,6 +87,7 @@ pub fn file(symbol: Symbol, elf_file: *Elf) ?File {
pub fn elfSym(symbol: Symbol, elf_file: *Elf) elf.Elf64_Sym {
return switch (symbol.file(elf_file).?) {
.zig_object => |x| x.symtab.items(.elf_sym)[symbol.esym_index],
.shared_object => |so| so.parsed.symtab[symbol.esym_index],
inline else => |x| x.symtab.items[symbol.esym_index],
};
}
@ -235,7 +236,7 @@ pub fn dsoAlignment(symbol: Symbol, elf_file: *Elf) !u64 {
assert(file_ptr == .shared_object);
const shared_object = file_ptr.shared_object;
const esym = symbol.elfSym(elf_file);
const shdr = shared_object.shdrs.items[esym.st_shndx];
const shdr = shared_object.parsed.sections[esym.st_shndx];
const alignment = @max(1, shdr.sh_addralign);
return if (esym.st_value == 0)
alignment
@ -351,8 +352,8 @@ fn formatName(
const elf_file = ctx.elf_file;
const symbol = ctx.symbol;
try writer.writeAll(symbol.name(elf_file));
switch (symbol.version_index & elf.VERSYM_VERSION) {
elf.VER_NDX_LOCAL, elf.VER_NDX_GLOBAL => {},
switch (symbol.version_index.VERSION) {
@intFromEnum(elf.VER_NDX.LOCAL), @intFromEnum(elf.VER_NDX.GLOBAL) => {},
else => {
const file_ptr = symbol.file(elf_file).?;
assert(file_ptr == .shared_object);

View File

@ -264,7 +264,7 @@ pub fn deinit(self: *ZigObject, allocator: Allocator) void {
}
}
pub fn flushModule(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !void {
pub fn flush(self: *ZigObject, elf_file: *Elf, tid: Zcu.PerThread.Id) !void {
// Handle any lazy symbols that were emitted by incremental compilation.
if (self.lazy_syms.getPtr(.anyerror_type)) |metadata| {
const pt: Zcu.PerThread = .{ .zcu = elf_file.base.comp.zcu.?, .tid = tid };
@ -623,7 +623,7 @@ pub fn claimUnresolved(self: *ZigObject, elf_file: *Elf) void {
global.ref = .{ .index = 0, .file = 0 };
global.esym_index = @intCast(index);
global.file_index = self.index;
global.version_index = if (is_import) elf.VER_NDX_LOCAL else elf_file.default_sym_version;
global.version_index = if (is_import) .LOCAL else elf_file.default_sym_version;
global.flags.import = is_import;
const idx = self.symbols_resolver.items[i];
@ -689,8 +689,9 @@ pub fn markImportsExports(self: *ZigObject, elf_file: *Elf) void {
const ref = self.resolveSymbol(@intCast(i | global_symbol_bit), elf_file);
const sym = elf_file.symbol(ref) orelse continue;
const file = sym.file(elf_file).?;
if (sym.version_index == elf.VER_NDX_LOCAL) continue;
const vis = @as(elf.STV, @enumFromInt(sym.elfSym(elf_file).st_other));
// https://github.com/ziglang/zig/issues/21678
if (@as(u16, @bitCast(sym.version_index)) == @as(u16, @bitCast(elf.Versym.LOCAL))) continue;
const vis: elf.STV = @enumFromInt(sym.elfSym(elf_file).st_other);
if (vis == .HIDDEN) continue;
if (file == .shared_object and !sym.isAbs(elf_file)) {
sym.flags.import = true;

View File

@ -4,22 +4,22 @@ pub fn flushStaticLib(elf_file: *Elf, comp: *Compilation, module_obj_path: ?Path
for (comp.objects) |obj| {
switch (Compilation.classifyFileExt(obj.path.sub_path)) {
.object => try parseObjectStaticLibReportingFailure(elf_file, obj.path),
.static_library => try parseArchiveStaticLibReportingFailure(elf_file, obj.path),
else => try elf_file.addParseError(obj.path, "unrecognized file extension", .{}),
.object => parseObjectStaticLibReportingFailure(elf_file, obj.path),
.static_library => parseArchiveStaticLibReportingFailure(elf_file, obj.path),
else => diags.addParseError(obj.path, "unrecognized file extension", .{}),
}
}
for (comp.c_object_table.keys()) |key| {
try parseObjectStaticLibReportingFailure(elf_file, key.status.success.object_path);
parseObjectStaticLibReportingFailure(elf_file, key.status.success.object_path);
}
if (module_obj_path) |path| {
try parseObjectStaticLibReportingFailure(elf_file, path);
parseObjectStaticLibReportingFailure(elf_file, path);
}
if (comp.include_compiler_rt) {
try parseObjectStaticLibReportingFailure(elf_file, comp.compiler_rt_obj.?.full_object_path);
parseObjectStaticLibReportingFailure(elf_file, comp.compiler_rt_obj.?.full_object_path);
}
if (diags.hasErrors()) return error.FlushFailure;
@ -154,21 +154,17 @@ pub fn flushObject(elf_file: *Elf, comp: *Compilation, module_obj_path: ?Path) l
const diags = &comp.link_diags;
for (comp.objects) |obj| {
if (obj.isObject()) {
try elf_file.parseObjectReportingFailure(obj.path);
} else {
try elf_file.parseLibraryReportingFailure(.{ .path = obj.path }, obj.must_link);
}
elf_file.parseInputReportingFailure(obj.path, false, obj.must_link);
}
// This is a set of object files emitted by clang in a single `build-exe` invocation.
// For instance, the implicit `a.o` as compiled by `zig build-exe a.c` will end up
// in this set.
for (comp.c_object_table.keys()) |key| {
try elf_file.parseObjectReportingFailure(key.status.success.object_path);
elf_file.parseObjectReportingFailure(key.status.success.object_path);
}
if (module_obj_path) |path| try elf_file.parseObjectReportingFailure(path);
if (module_obj_path) |path| elf_file.parseObjectReportingFailure(path);
if (diags.hasErrors()) return error.FlushFailure;
@ -219,19 +215,19 @@ pub fn flushObject(elf_file: *Elf, comp: *Compilation, module_obj_path: ?Path) l
if (diags.hasErrors()) return error.FlushFailure;
}
fn parseObjectStaticLibReportingFailure(elf_file: *Elf, path: Path) error{OutOfMemory}!void {
fn parseObjectStaticLibReportingFailure(elf_file: *Elf, path: Path) void {
const diags = &elf_file.base.comp.link_diags;
parseObjectStaticLib(elf_file, path) catch |err| switch (err) {
error.LinkFailure => return,
error.OutOfMemory => return error.OutOfMemory,
else => |e| try elf_file.addParseError(path, "parsing object failed: {s}", .{@errorName(e)}),
else => |e| diags.addParseError(path, "parsing object failed: {s}", .{@errorName(e)}),
};
}
fn parseArchiveStaticLibReportingFailure(elf_file: *Elf, path: Path) error{OutOfMemory}!void {
fn parseArchiveStaticLibReportingFailure(elf_file: *Elf, path: Path) void {
const diags = &elf_file.base.comp.link_diags;
parseArchiveStaticLib(elf_file, path) catch |err| switch (err) {
error.LinkFailure => return,
error.OutOfMemory => return error.OutOfMemory,
else => |e| try elf_file.addParseError(path, "parsing static library failed: {s}", .{@errorName(e)}),
else => |e| diags.addParseError(path, "parsing static library failed: {s}", .{@errorName(e)}),
};
}

View File

@ -1345,8 +1345,8 @@ pub const GnuHashSection = struct {
pub const VerneedSection = struct {
verneed: std.ArrayListUnmanaged(elf.Elf64_Verneed) = .empty,
vernaux: std.ArrayListUnmanaged(elf.Elf64_Vernaux) = .empty,
index: elf.Elf64_Versym = elf.VER_NDX_GLOBAL + 1,
vernaux: std.ArrayListUnmanaged(elf.Vernaux) = .empty,
index: elf.Versym = .{ .VERSION = elf.Versym.GLOBAL.VERSION + 1, .HIDDEN = false },
pub fn deinit(vern: *VerneedSection, allocator: Allocator) void {
vern.verneed.deinit(allocator);
@ -1363,7 +1363,7 @@ pub const VerneedSection = struct {
/// Index of the defining this symbol version shared object file
shared_object: File.Index,
/// Version index
version_index: elf.Elf64_Versym,
version_index: elf.Versym,
fn soname(this: @This(), ctx: *Elf) []const u8 {
const shared_object = ctx.file(this.shared_object).?.shared_object;
@ -1376,7 +1376,8 @@ pub const VerneedSection = struct {
}
pub fn lessThan(ctx: *Elf, lhs: @This(), rhs: @This()) bool {
if (lhs.shared_object == rhs.shared_object) return lhs.version_index < rhs.version_index;
if (lhs.shared_object == rhs.shared_object)
return @as(u16, @bitCast(lhs.version_index)) < @as(u16, @bitCast(rhs.version_index));
return mem.lessThan(u8, lhs.soname(ctx), rhs.soname(ctx));
}
};
@ -1389,7 +1390,7 @@ pub const VerneedSection = struct {
for (dynsyms, 1..) |entry, i| {
const symbol = elf_file.symbol(entry.ref).?;
if (symbol.flags.import and symbol.version_index & elf.VERSYM_VERSION > elf.VER_NDX_GLOBAL) {
if (symbol.flags.import and symbol.version_index.VERSION > elf.Versym.GLOBAL.VERSION) {
const shared_object = symbol.file(elf_file).?.shared_object;
verneed.appendAssumeCapacity(.{
.index = i,
@ -1404,11 +1405,12 @@ pub const VerneedSection = struct {
var last = verneed.items[0];
var last_verneed = try vern.addVerneed(last.soname(elf_file), elf_file);
var last_vernaux = try vern.addVernaux(last_verneed, last.versionString(elf_file), elf_file);
versyms[last.index] = last_vernaux.vna_other;
versyms[last.index] = @bitCast(last_vernaux.other);
for (verneed.items[1..]) |ver| {
if (ver.shared_object == last.shared_object) {
if (ver.version_index != last.version_index) {
// https://github.com/ziglang/zig/issues/21678
if (@as(u16, @bitCast(ver.version_index)) != @as(u16, @bitCast(last.version_index))) {
last_vernaux = try vern.addVernaux(last_verneed, ver.versionString(elf_file), elf_file);
}
} else {
@ -1416,7 +1418,7 @@ pub const VerneedSection = struct {
last_vernaux = try vern.addVernaux(last_verneed, ver.versionString(elf_file), elf_file);
}
last = ver;
versyms[ver.index] = last_vernaux.vna_other;
versyms[ver.index] = @bitCast(last_vernaux.other);
}
// Fixup offsets
@ -1428,8 +1430,8 @@ pub const VerneedSection = struct {
vsym.vn_aux = vernaux_off - verneed_off;
var inner_off: u32 = 0;
for (vern.vernaux.items[count..][0..vsym.vn_cnt], 0..) |*vaux, vaux_i| {
if (vaux_i < vsym.vn_cnt - 1) vaux.vna_next = @sizeOf(elf.Elf64_Vernaux);
inner_off += @sizeOf(elf.Elf64_Vernaux);
if (vaux_i < vsym.vn_cnt - 1) vaux.next = @sizeOf(elf.Vernaux);
inner_off += @sizeOf(elf.Vernaux);
}
vernaux_off += inner_off;
verneed_off += @sizeOf(elf.Elf64_Verneed);
@ -1456,24 +1458,24 @@ pub const VerneedSection = struct {
verneed_sym: *elf.Elf64_Verneed,
version: [:0]const u8,
elf_file: *Elf,
) !elf.Elf64_Vernaux {
) !elf.Vernaux {
const comp = elf_file.base.comp;
const gpa = comp.gpa;
const sym = try vern.vernaux.addOne(gpa);
sym.* = .{
.vna_hash = HashSection.hasher(version),
.vna_flags = 0,
.vna_other = vern.index,
.vna_name = try elf_file.insertDynString(version),
.vna_next = 0,
.hash = HashSection.hasher(version),
.flags = 0,
.other = @bitCast(vern.index),
.name = try elf_file.insertDynString(version),
.next = 0,
};
verneed_sym.vn_cnt += 1;
vern.index += 1;
vern.index.VERSION += 1;
return sym.*;
}
pub fn size(vern: VerneedSection) usize {
return vern.verneed.items.len * @sizeOf(elf.Elf64_Verneed) + vern.vernaux.items.len * @sizeOf(elf.Elf64_Vernaux);
return vern.verneed.items.len * @sizeOf(elf.Elf64_Verneed) + vern.vernaux.items.len * @sizeOf(elf.Vernaux);
}
pub fn write(vern: VerneedSection, writer: anytype) !void {

View File

@ -396,14 +396,8 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
}
for (positionals.items) |obj| {
self.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err| switch (err) {
error.UnknownFileType => try diags.reportParseError(obj.path, "unknown file type for an input file", .{}),
else => |e| try diags.reportParseError(
obj.path,
"unexpected error: reading input file failed with error {s}",
.{@errorName(e)},
),
};
self.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err|
diags.addParseError(obj.path, "failed to read input file: {s}", .{@errorName(err)});
}
var system_libs = std.ArrayList(SystemLib).init(gpa);
@ -443,14 +437,8 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
};
for (system_libs.items) |lib| {
self.classifyInputFile(lib.path, lib, false) catch |err| switch (err) {
error.UnknownFileType => try diags.reportParseError(lib.path, "unknown file type for an input file", .{}),
else => |e| try diags.reportParseError(
lib.path,
"unexpected error: parsing input file failed with error {s}",
.{@errorName(e)},
),
};
self.classifyInputFile(lib.path, lib, false) catch |err|
diags.addParseError(lib.path, "failed to parse input file: {s}", .{@errorName(err)});
}
// Finally, link against compiler_rt.
@ -460,14 +448,8 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
break :blk null;
};
if (compiler_rt_path) |path| {
self.classifyInputFile(path, .{ .path = path }, false) catch |err| switch (err) {
error.UnknownFileType => try diags.reportParseError(path, "unknown file type for an input file", .{}),
else => |e| try diags.reportParseError(
path,
"unexpected error: parsing input file failed with error {s}",
.{@errorName(e)},
),
};
self.classifyInputFile(path, .{ .path = path }, false) catch |err|
diags.addParseError(path, "failed to parse input file: {s}", .{@errorName(err)});
}
try self.parseInputFiles();
@ -796,7 +778,7 @@ pub fn resolveLibSystem(
if (try accessLibPath(arena, &test_path, &checked_paths, dir, "System")) break :success;
}
try diags.reportMissingLibraryError(checked_paths.items, "unable to find libSystem system library", .{});
diags.addMissingLibraryError(checked_paths.items, "unable to find libSystem system library", .{});
return error.MissingLibSystem;
}
@ -847,10 +829,7 @@ fn parseFatFile(self: *MachO, file: std.fs.File, path: Path) !?fat.Arch {
for (fat_archs) |arch| {
if (arch.tag == cpu_arch) return arch;
}
try diags.reportParseError(path, "missing arch in universal file: expected {s}", .{
@tagName(cpu_arch),
});
return error.MissingCpuArch;
return diags.failParse(path, "missing arch in universal file: expected {s}", .{@tagName(cpu_arch)});
}
pub fn readMachHeader(file: std.fs.File, offset: usize) !macho.mach_header_64 {

View File

@ -29,10 +29,9 @@ pub fn unpack(self: *Archive, macho_file: *MachO, path: Path, handle_index: File
pos += @sizeOf(ar_hdr);
if (!mem.eql(u8, &hdr.ar_fmag, ARFMAG)) {
try diags.reportParseError(path, "invalid header delimiter: expected '{s}', found '{s}'", .{
return diags.failParse(path, "invalid header delimiter: expected '{s}', found '{s}'", .{
std.fmt.fmtSliceEscapeLower(ARFMAG), std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag),
});
return error.MalformedArchive;
}
var hdr_size = try hdr.size();

View File

@ -308,7 +308,7 @@ fn initSubsections(self: *Object, allocator: Allocator, nlists: anytype) !void {
} else nlists.len;
if (nlist_start == nlist_end or nlists[nlist_start].nlist.n_value > sect.addr) {
const name = try std.fmt.allocPrintZ(allocator, "{s}${s}", .{ sect.segName(), sect.sectName() });
const name = try std.fmt.allocPrintZ(allocator, "{s}${s}$begin", .{ sect.segName(), sect.sectName() });
defer allocator.free(name);
const size = if (nlist_start == nlist_end) sect.size else nlists[nlist_start].nlist.n_value - sect.addr;
const atom_index = try self.addAtom(allocator, .{
@ -359,6 +359,25 @@ fn initSubsections(self: *Object, allocator: Allocator, nlists: anytype) !void {
self.symtab.items(.size)[nlists[i].idx] = size;
}
}
// Some compilers, such as Go, reference the end of a section (addr + size),
// an address that cannot fall inside any atom of nonzero size (such an atom
// would extend past the section's boundary). To support this, we create a
// dummy zero-sized atom at the section's end (addr + size).
const name = try std.fmt.allocPrintZ(allocator, "{s}${s}$end", .{ sect.segName(), sect.sectName() });
defer allocator.free(name);
const atom_index = try self.addAtom(allocator, .{
.name = try self.addString(allocator, name),
.n_sect = @intCast(n_sect),
.off = sect.size,
.size = 0,
.alignment = sect.@"align",
});
try self.atoms_indexes.append(allocator, atom_index);
try subsections.append(allocator, .{
.atom = atom_index,
.off = sect.size,
});
}
}
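
The dummy end atom added above interacts with atom lookup: a symbol at the section's start maps to the first atom, one at the section's end to this dummy atom, and everything else is found by binary search (made explicit in linkNlistToAtom further below). A standalone sketch with simplified types, not the linker's actual data structures:

const std = @import("std");

const Atom = struct { off: u64, size: u64 };

fn findAtom(sect_addr: u64, sect_size: u64, atoms: []const Atom, addr: u64) ?usize {
    if (atoms.len == 0) return null;
    if (addr == sect_addr) return 0; // section start: always the first atom
    if (addr == sect_addr + sect_size) return atoms.len - 1; // section end: the dummy zero-sized atom
    // Otherwise, binary search for the atom whose range contains the address.
    var lo: usize = 0;
    var hi: usize = atoms.len;
    while (lo < hi) {
        const mid = lo + (hi - lo) / 2;
        const start = sect_addr + atoms[mid].off;
        if (addr < start) {
            hi = mid;
        } else if (addr >= start + atoms[mid].size) {
            lo = mid + 1;
        } else {
            return mid;
        }
    }
    return null;
}

test findAtom {
    const atoms = [_]Atom{
        .{ .off = 0, .size = 16 },
        .{ .off = 16, .size = 16 },
        .{ .off = 32, .size = 0 }, // dummy end atom
    };
    try std.testing.expectEqual(@as(?usize, 0), findAtom(0x1000, 32, &atoms, 0x1000));
    try std.testing.expectEqual(@as(?usize, 1), findAtom(0x1000, 32, &atoms, 0x1018));
    try std.testing.expectEqual(@as(?usize, 2), findAtom(0x1000, 32, &atoms, 0x1020));
}
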
@ -743,7 +762,7 @@ pub fn findAtom(self: Object, addr: u64) ?Atom.Index {
const slice = self.sections.slice();
for (slice.items(.header), slice.items(.subsections), 0..) |sect, subs, n_sect| {
if (subs.items.len == 0) continue;
if (sect.addr == addr) return subs.items[0].atom;
if (addr == sect.addr) return subs.items[0].atom;
if (sect.addr < addr and addr < sect.addr + sect.size) {
return self.findAtomInSection(addr, @intCast(n_sect));
}
@ -794,7 +813,19 @@ fn linkNlistToAtom(self: *Object, macho_file: *MachO) !void {
defer tracy.end();
for (self.symtab.items(.nlist), self.symtab.items(.atom)) |nlist, *atom| {
if (!nlist.stab() and nlist.sect()) {
if (self.findAtomInSection(nlist.n_value, nlist.n_sect - 1)) |atom_index| {
const sect = self.sections.items(.header)[nlist.n_sect - 1];
const subs = self.sections.items(.subsections)[nlist.n_sect - 1].items;
if (nlist.n_value == sect.addr) {
// If the nlist address is the start of the section, return the first atom,
// since it is guaranteed to start at the section's start address.
atom.* = subs[0].atom;
} else if (nlist.n_value == sect.addr + sect.size) {
// If the nlist address matches the section's end boundary (address + size),
// return the last atom, which is guaranteed to sit exactly at that boundary.
atom.* = subs[subs.len - 1].atom;
} else if (self.findAtomInSection(nlist.n_value, nlist.n_sect - 1)) |atom_index| {
// In all other cases, do a binary search to find a matching atom for the symbol.
atom.* = atom_index;
} else {
try macho_file.reportParseError2(self.index, "symbol {s} not attached to any (sub)section", .{

View File

@ -29,14 +29,8 @@ pub fn flushObject(macho_file: *MachO, comp: *Compilation, module_obj_path: ?Pat
}
for (positionals.items) |obj| {
macho_file.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err| switch (err) {
error.UnknownFileType => try diags.reportParseError(obj.path, "unknown file type for an input file", .{}),
else => |e| try diags.reportParseError(
obj.path,
"unexpected error: reading input file failed with error {s}",
.{@errorName(e)},
),
};
macho_file.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err|
diags.addParseError(obj.path, "failed to read input file: {s}", .{@errorName(err)});
}
if (diags.hasErrors()) return error.FlushFailure;
@ -95,14 +89,8 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
}
for (positionals.items) |obj| {
macho_file.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err| switch (err) {
error.UnknownFileType => try diags.reportParseError(obj.path, "unknown file type for an input file", .{}),
else => |e| try diags.reportParseError(
obj.path,
"unexpected error: reading input file failed with error {s}",
.{@errorName(e)},
),
};
macho_file.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err|
diags.addParseError(obj.path, "failed to read input file: {s}", .{@errorName(err)});
}
if (diags.hasErrors()) return error.FlushFailure;

View File

@ -140,7 +140,7 @@ pub fn updateNav(self: *SpirV, pt: Zcu.PerThread, nav: InternPool.Nav.Index) !vo
}
const ip = &pt.zcu.intern_pool;
log.debug("lowering declaration {}", .{ip.getNav(nav).name.fmt(ip)});
log.debug("lowering nav {}({d})", .{ ip.getNav(nav).fqn.fmt(ip), nav });
try self.object.updateNav(pt, nav);
}

View File

@ -1260,6 +1260,7 @@ test "integer compare <= 64 bits" {
test "integer compare <= 128 bits" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
inline for (.{ u65, u96, u127, u128 }) |T| {
try testUnsignedCmp(T);

View File

@ -73,6 +73,8 @@ test "call decl literal" {
}
test "call decl literal with error union" {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
const S = struct {
x: u32,
fn init(err: bool) !@This() {

View File

@ -1618,6 +1618,8 @@ test "struct in comptime false branch is not evaluated" {
}
test "result of nested switch assigned to variable" {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
var zds: u32 = 0;
zds = switch (zds) {
0 => switch (zds) {

View File

@ -113,6 +113,7 @@ test "inline else enum" {
test "inline else int with gaps" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
var a: u8 = 0;
_ = &a;

View File

@ -833,6 +833,8 @@ test "@addWithOverflow > 64 bits" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
try testAddWithOverflow(u65, 4, 105, 109, 0);
try testAddWithOverflow(u65, 1000, 100, 1100, 0);
@ -986,6 +988,7 @@ test "@mulWithOverflow bitsize 128 bits" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
try testMulWithOverflow(u128, 3, 0x5555555555555555_5555555555555555, 0xffffffffffffffff_ffffffffffffffff, 0);
try testMulWithOverflow(u128, 3, 0x5555555555555555_5555555555555556, 2, 1);
@ -1065,6 +1068,7 @@ test "@subWithOverflow > 64 bits" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
try testSubWithOverflow(u65, 4, 105, maxInt(u65) - 100, 1);
try testSubWithOverflow(u65, 1000, 100, 900, 0);

View File

@ -1297,3 +1297,23 @@ test "packed struct contains optional pointer" {
} = .{};
try expect(foo.a == null);
}
test "packed struct equality" {
const Foo = packed struct {
a: u4,
b: u4,
};
const S = struct {
fn doTest(x: Foo, y: Foo) !void {
try expect(x == y);
try expect(!(x != y));
}
};
const x: Foo = .{ .a = 1, .b = 2 };
const y: Foo = .{ .b = 2, .a = 1 };
try S.doTest(x, y);
comptime try S.doTest(x, y);
}
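
The new behavior test above exercises `==`/`!=` on a packed struct. Since a packed struct's bit layout is fully defined by its fields, equality should agree with comparing the `@bitCast`'ed backing integers; a short standalone sketch of that idea follows, where the `FooSketch` type and the test name are made up for illustration.

const std = @import("std");

const FooSketch = packed struct { a: u4, b: u4 };

test "packed struct equality agrees with the backing integer (sketch)" {
    const x: FooSketch = .{ .a = 1, .b = 2 };
    const y: FooSketch = .{ .b = 2, .a = 1 };
    // Field order in the initializer does not matter; the stored bits do.
    try std.testing.expect(x == y);
    try std.testing.expect(@as(u8, @bitCast(x)) == @as(u8, @bitCast(y)));
}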

View File

@ -45,6 +45,7 @@ test "pointer-integer arithmetic" {
test "pointer subtraction" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
{
const a: *u8 = @ptrFromInt(100);

View File

@ -12,6 +12,8 @@ test "switch with numbers" {
}
fn testSwitchWithNumbers(x: u32) !void {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
const result = switch (x) {
1, 2, 3, 4...8 => false,
13 => true,
@ -22,6 +24,7 @@ fn testSwitchWithNumbers(x: u32) !void {
test "switch with all ranges" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
try expect(testSwitchWithAllRanges(50, 3) == 1);
try expect(testSwitchWithAllRanges(101, 0) == 2);
@ -173,6 +176,7 @@ test "undefined.u0" {
test "switch with disjoint range" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
var q: u8 = 0;
_ = &q;
@ -184,6 +188,8 @@ test "switch with disjoint range" {
}
test "switch variable for range and multiple prongs" {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
const S = struct {
fn doTheTest() !void {
try doTheSwitch(16);
@ -281,6 +287,8 @@ test "switch handles all cases of number" {
}
fn testSwitchHandleAllCases() !void {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
try expect(testSwitchHandleAllCasesExhaustive(0) == 3);
try expect(testSwitchHandleAllCasesExhaustive(1) == 2);
try expect(testSwitchHandleAllCasesExhaustive(2) == 1);
@ -497,6 +505,7 @@ test "switch prongs with error set cases make a new error set type for capture v
test "return result loc and then switch with range implicit casted to error union" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
const S = struct {
fn doTheTest() !void {
@ -714,6 +723,7 @@ test "switch capture copies its payload" {
test "capture of integer forwards the switch condition directly" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
const S = struct {
fn foo(x: u8) !void {
@ -854,6 +864,7 @@ test "inline switch range that includes the maximum value of the switched type"
test "nested break ignores switch conditions and breaks instead" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const S = struct {
fn register_to_address(ident: []const u8) !u8 {
@ -901,6 +912,7 @@ test "peer type resolution on switch captures ignores unused payload bits" {
test "switch prong captures range" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
const S = struct {
fn a(b: []u3, c: u3) void {
@ -935,6 +947,8 @@ test "prong with inline call to unreachable" {
}
test "block error return trace index is reset between prongs" {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
const S = struct {
fn returnError() error{TestFailed} {
return error.TestFailed;
@ -963,6 +977,8 @@ test "block error return trace index is reset between prongs" {
}
test "labeled switch with break" {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
var six: u32 = undefined;
six = 6;

View File

@ -0,0 +1,35 @@
const x: Foo = .{};
const y: Foo = .{};
export fn a() void {
_ = x > y;
}
export fn b() void {
_ = x < y;
}
export fn c() void {
_ = x >= y;
}
export fn d() void {
_ = x <= y;
}
const Foo = packed struct {
a: u4 = 10,
b: u4 = 5,
};
// error
// backend=stage2
// target=native
//
// :5:11: error: operator > not allowed for type 'tmp.Foo'
// :19:20: note: struct declared here
// :9:11: error: operator < not allowed for type 'tmp.Foo'
// :19:20: note: struct declared here
// :13:11: error: operator >= not allowed for type 'tmp.Foo'
// :19:20: note: struct declared here
// :16:11: error: operator <= not allowed for type 'tmp.Foo'
// :19:20: note: struct declared here