add CBuf to standard library

and fix ability to take address of variables from other namespaces
Andrew Kelley 2016-08-11 22:25:13 -07:00
parent 0a482bbbfe
commit 0ae9023832
8 changed files with 206 additions and 72 deletions
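
To illustrate the second half of this commit: a minimal sketch (mine, not part of the commit) of the pattern that previously failed and now works, namely taking the address of a variable declared in another namespace. The hypothetical test below leans on debug.global_allocator and List, both touched by this commit.

const debug = @import("debug.zig");
const List = @import("list.zig").List;

#attribute("test")
fn take_address_across_namespaces() {
    // `global_allocator` lives in the debug namespace; before this fix,
    // codegen could not produce a pointer from `&debug.global_allocator`.
    var list: List(u8) = undefined;
    list.init(&debug.global_allocator);
    defer list.deinit();
}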

src/analyze.cpp View File

@@ -2688,6 +2688,7 @@ static TypeTableEntry *analyze_field_access_expr(CodeGen *g, ImportTableEntry *i
return node->data.field_access_expr.type_struct_field->type_entry;
} else if (wrapped_in_fn_call) {
BlockContext *container_block_context = get_container_block_context(bare_struct_type);
assert(container_block_context);
auto entry = container_block_context->decl_table.maybe_get(field_name);
AstNode *fn_decl_node = entry ? entry->value : nullptr;
if (fn_decl_node && fn_decl_node->type == NodeTypeFnProto) {
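
For context, the decl_table lookup above is what resolves a method called through a field access. A hedged Zig sketch (my illustration, using a hypothetical Point struct):

pub struct Point {
    x: i32,
    pub fn abs_x(self: &Point) -> i32 {
        if (self.x < 0) -self.x else self.x
    }
}

#attribute("test")
fn call_member_fn() {
    var p = Point { .x = -3 };
    // `p.abs_x()` is a field access wrapped in a fn call; the analyzer
    // looks up "abs_x" in Point's container decl_table, as in the hunk above.
    if (p.abs_x() != 3) unreachable{}
}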

src/codegen.cpp View File

@@ -1243,6 +1243,11 @@ static LLVMValueRef gen_field_ptr(CodeGen *g, AstNode *node, TypeTableEntry **ou
AstNode *struct_expr_node = node->data.field_access_expr.struct_expr;
+*out_type_entry = node->data.field_access_expr.type_struct_field->type_entry;
+if (!type_has_bits(*out_type_entry)) {
+return nullptr;
+}
LLVMValueRef struct_ptr;
if (struct_expr_node->type == NodeTypeSymbol) {
VariableTableEntry *var = get_resolved_expr(struct_expr_node)->variable;
@@ -1272,8 +1277,6 @@ static LLVMValueRef gen_field_ptr(CodeGen *g, AstNode *node, TypeTableEntry **ou
int gen_field_index = node->data.field_access_expr.type_struct_field->gen_index;
assert(gen_field_index >= 0);
-*out_type_entry = node->data.field_access_expr.type_struct_field->type_entry;
-set_debug_source_node(g, node);
return LLVMBuildStructGEP(g->builder, struct_ptr, gen_field_index, "");
}
@@ -1490,7 +1493,14 @@ static LLVMValueRef gen_lvalue(CodeGen *g, AstNode *expr_node, AstNode *node,
zig_unreachable();
}
} else if (node->type == NodeTypeFieldAccessExpr) {
-target_ref = gen_field_ptr(g, node, out_type_entry);
+AstNode *struct_expr_node = node->data.field_access_expr.struct_expr;
+TypeTableEntry *struct_type = get_expr_type(struct_expr_node);
+if (struct_type->id == TypeTableEntryIdNamespace) {
+target_ref = gen_field_access_expr(g, node, true);
+*out_type_entry = get_expr_type(node);
+} else {
+target_ref = gen_field_ptr(g, node, out_type_entry);
+}
} else if (node->type == NodeTypePrefixOpExpr) {
assert(node->data.prefix_op_expr.prefix_op == PrefixOpDereference);
AstNode *target_expr = node->data.prefix_op_expr.primary_expr;

std/cstr.zig View File

@@ -1,3 +1,11 @@
+const List = @import("list.zig").List;
+const mem = @import("mem.zig");
+const Allocator = mem.Allocator;
+const debug = @import("debug.zig");
+const assert = debug.assert;
+const strlen = len;
// TODO fix https://github.com/andrewrk/zig/issues/140
// and then make this able to run at compile time
#static_eval_enable(false)
@@ -17,10 +25,121 @@ pub fn cmp(a: &const u8, b: &const u8) -> i32 {
}
pub fn to_slice_const(str: &const u8) -> []const u8 {
-return str[0...len(str)];
+return str[0...strlen(str)];
}
pub fn to_slice(str: &u8) -> []u8 {
-return str[0...len(str)];
+return str[0...strlen(str)];
}
+/// A buffer that allocates memory and maintains a null byte at the end.
+pub struct CBuf {
+list: List(u8),
+/// Must deinitialize with deinit.
+pub fn init(self: &CBuf, allocator: &Allocator) {
+self.list.init(allocator);
+// This resize is guaranteed to not have an error because we use a list
+// with preallocated memory of at least 1 byte.
+%%self.resize(0);
+}
+/// Must deinitialize with deinit.
+pub fn init_from_mem(self: &CBuf, allocator: &Allocator, m: []const u8) -> %void {
+self.init(allocator);
+%return self.resize(m.len);
+mem.copy(u8, self.list.items, m);
+}
+/// Must deinitialize with deinit.
+pub fn init_from_cstr(self: &CBuf, allocator: &Allocator, s: &const u8) -> %void {
+self.init_from_mem(allocator, s[0...strlen(s)])
+}
+/// Must deinitialize with deinit.
+pub fn init_from_cbuf(self: &CBuf, cbuf: &const CBuf) -> %void {
+self.init_from_mem(cbuf.list.allocator, cbuf.list.items[0...cbuf.len()])
+}
+/// Must deinitialize with deinit.
+pub fn init_from_slice(self: &CBuf, other: &const CBuf, start: usize, end: usize) -> %void {
+self.init_from_mem(other.list.allocator, other.list.items[start...end])
+}
+pub fn deinit(self: &CBuf) {
+self.list.deinit();
+}
+pub fn resize(self: &CBuf, new_len: usize) -> %void {
+%return self.list.resize(new_len + 1);
+self.list.items[self.len()] = 0;
+}
+pub fn len(self: &const CBuf) -> usize {
+return self.list.len - 1;
+}
+pub fn append_mem(self: &CBuf, m: []const u8) -> %void {
+const old_len = self.len();
+%return self.resize(old_len + m.len);
+mem.copy(u8, self.list.items[old_len...], m);
+}
+pub fn append_cstr(self: &CBuf, s: &const u8) -> %void {
+self.append_mem(s[0...strlen(s)])
+}
+pub fn append_char(self: &CBuf, c: u8) -> %void {
+%return self.resize(self.len() + 1);
+self.list.items[self.len() - 1] = c;
+}
+pub fn eql_mem(self: &const CBuf, m: []const u8) -> bool {
+if (self.len() != m.len) return false;
+return mem.cmp(u8, self.list.items[0...m.len], m) == mem.Cmp.Equal;
+}
+pub fn eql_cstr(self: &const CBuf, s: &const u8) -> bool {
+self.eql_mem(s[0...strlen(s)])
+}
+pub fn eql_cbuf(self: &const CBuf, other: &const CBuf) -> bool {
+self.eql_mem(other.list.items[0...other.len()])
+}
+pub fn starts_with_mem(self: &const CBuf, m: []const u8) -> bool {
+if (self.len() < m.len) return false;
+return mem.cmp(u8, self.list.items[0...m.len], m) == mem.Cmp.Equal;
+}
+pub fn starts_with_cbuf(self: &const CBuf, other: &const CBuf) -> bool {
+self.starts_with_mem(other.list.items[0...other.len()])
+}
+pub fn starts_with_cstr(self: &const CBuf, s: &const u8) -> bool {
+self.starts_with_mem(s[0...strlen(s)])
+}
+}
+#attribute("test")
+fn test_simple_cbuf() {
+var buf: CBuf = undefined;
+buf.init(&debug.global_allocator);
+assert(buf.len() == 0);
+%%buf.append_cstr(c"hello");
+%%buf.append_char(' ');
+%%buf.append_mem("world");
+assert(buf.eql_cstr(c"hello world"));
+assert(buf.eql_mem("hello world"));
+var buf2: CBuf = undefined;
+%%buf2.init_from_cbuf(&buf);
+assert(buf.eql_cbuf(&buf2));
+assert(buf.starts_with_mem("hell"));
+assert(buf.starts_with_cstr(c"hell"));
+%%buf2.resize(4);
+assert(buf.starts_with_cbuf(&buf2));
+}
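
One more sketch (mine, not from the commit) to spell out the invariant the doc comment above promises: resize always allocates one extra byte and writes a 0 terminator, so the backing items can be handed to C APIs that expect a null-terminated string.

#attribute("test")
fn null_byte_invariant() {
    var buf: CBuf = undefined;
    buf.init(&debug.global_allocator);
    %%buf.append_mem("hi");
    assert(buf.len() == 2);
    // The list actually holds 3 bytes: 'h', 'i', and the terminator.
    assert(buf.list.items[buf.len()] == 0);
}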

std/debug.zig View File

@@ -1,10 +1,11 @@
+const Allocator = @import("mem.zig").Allocator;
const io = @import("io.zig");
pub fn assert(b: bool) {
if (!b) unreachable{}
}
-pub fn print_stack_trace() {
+pub fn printStackTrace() {
var maybe_fp: ?&const u8 = @frame_address();
while (true) {
const fp = maybe_fp ?? break;
@@ -14,3 +15,27 @@ pub fn print_stack_trace() {
maybe_fp = *(&const ?&const u8)(fp);
}
}
+pub var global_allocator = Allocator {
+.alloc_fn = globalAlloc,
+.realloc_fn = globalRealloc,
+.free_fn = globalFree,
+.context = null,
+};
+var some_mem: [10 * 1024]u8 = undefined;
+var some_mem_index: usize = 0;
+fn globalAlloc(self: &Allocator, n: usize) -> %[]u8 {
+const result = some_mem[some_mem_index ... some_mem_index + n];
+some_mem_index += n;
+return result;
+}
+fn globalRealloc(self: &Allocator, old_mem: []u8, new_size: usize) -> %[]u8 {
+const result = %return globalAlloc(self, new_size);
+@memcpy(result.ptr, old_mem.ptr, old_mem.len);
+return result;
+}
+fn globalFree(self: &Allocator, old_mem: []u8) { }
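
global_allocator, hoisted here out of the individual test files (see the deletions in std/hash_map.zig and std/list.zig below), is a bump allocator over a fixed 10 KiB buffer: globalAlloc advances some_mem_index, globalRealloc copies into a fresh allocation, and globalFree is a no-op, which is fine for short-lived tests. Note globalAlloc has no bounds check, so the sketch below (mine) assumes the buffer is not exhausted.

const debug = @import("debug.zig");
const List = @import("list.zig").List;

#attribute("test")
fn use_shared_test_allocator() {
    var list: List(i32) = undefined;
    list.init(&debug.global_allocator); // one allocator shared by all tests
    defer list.deinit();                // free_fn is a no-op here
    %%list.append(7);
}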

std/hash_map.zig View File

@@ -1,4 +1,5 @@
-const assert = @import("debug.zig").assert;
+const debug = @import("debug.zig");
+const assert = debug.assert;
const math = @import("math.zig");
const mem = @import("mem.zig");
const Allocator = mem.Allocator;
@@ -9,7 +10,7 @@ const debug_u32 = if (want_modification_safety) u32 else void;
pub fn HashMap(inline K: type, inline V: type, inline hash: fn(key: K)->u32,
inline eql: fn(a: K, b: K)->bool) -> type
{
-SmallHashMap(K, V, hash, eql, 8)
+SmallHashMap(K, V, hash, eql, @sizeof(usize))
}
pub struct SmallHashMap(K: type, V: type, hash: fn(key: K)->u32, eql: fn(a: K, b: K)->bool, STATIC_SIZE: usize) {
@@ -63,9 +64,8 @@ pub struct SmallHashMap(K: type, V: type, hash: fn(key: K)->u32, eql: fn(a: K, b
hm.allocator = allocator;
hm.size = 0;
hm.max_distance_from_start_index = 0;
-for (hm.entries) |*entry| {
-entry.used = false;
-}
+hm.prealloc_entries = zeroes; // sets used to false for all entries
+hm.modification_count = zeroes;
}
pub fn deinit(hm: &Self) {
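
A hedged aside on zeroes (my example, assuming the usual semantics of the builtin value): it zero-initializes a value of any type, so the single assignment above clears every entry's used flag, and the debug-only modification counter, without an explicit loop.

const assert = @import("debug.zig").assert;

#attribute("test")
fn zeroes_clears_everything() {
    var counter: u32 = zeroes;   // equivalent to 0
    var flags: [4]bool = zeroes; // every element becomes false
    assert(counter == 0);
    assert(!flags[0]);
}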
@@ -162,7 +162,7 @@ pub struct SmallHashMap(K: type, V: type, hash: fn(key: K)->u32, eql: fn(a: K, b
fn increment_modification_count(hm: &Self) {
if (want_modification_safety) {
-hm.modification_count += 1;
+hm.modification_count +%= 1;
}
}
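
The switch from += to +%= matters because plain addition is overflow-checked, while +%= wraps; a counter that exists only to detect concurrent modification should wrap rather than trap when it reaches the top of u32. A minimal sketch (mine):

const assert = @import("debug.zig").assert;

#attribute("test")
fn wrapping_increment() {
    var count: u32 = 0xffffffff; // maximum u32
    count +%= 1;                 // wraps to 0; plain += would be an overflow error
    assert(count == 0);
}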
@@ -231,35 +231,10 @@ pub struct SmallHashMap(K: type, V: type, hash: fn(key: K)->u32, eql: fn(a: K, b
}
}
-var global_allocator = Allocator {
-.alloc_fn = global_alloc,
-.realloc_fn = global_realloc,
-.free_fn = global_free,
-.context = null,
-};
-var some_mem: [200]u8 = undefined;
-var some_mem_index: usize = 0;
-fn global_alloc(self: &Allocator, n: usize) -> %[]u8 {
-const result = some_mem[some_mem_index ... some_mem_index + n];
-some_mem_index += n;
-return result;
-}
-fn global_realloc(self: &Allocator, old_mem: []u8, new_size: usize) -> %[]u8 {
-const result = %return global_alloc(self, new_size);
-@memcpy(result.ptr, old_mem.ptr, old_mem.len);
-return result;
-}
-fn global_free(self: &Allocator, old_mem: []u8) {
-}
#attribute("test")
fn basic_hash_map_test() {
var map: HashMap(i32, i32, hash_i32, eql_i32) = undefined;
-map.init(&global_allocator);
+map.init(&debug.global_allocator);
defer map.deinit();
%%map.put(1, 11);

std/list.zig View File

@@ -1,22 +1,25 @@
-const assert = @import("debug.zig").assert;
+const debug = @import("debug.zig");
+const assert = debug.assert;
const mem = @import("mem.zig");
const Allocator = mem.Allocator;
pub fn List(inline T: type) -> type {
-SmallList(T, 8)
+SmallList(T, @sizeof(usize))
}
+// TODO: make sure that setting STATIC_SIZE to 0 codegens to the same code
+// as if this were programmed without STATIC_SIZE at all.
pub struct SmallList(T: type, STATIC_SIZE: usize) {
const Self = SmallList(T, STATIC_SIZE);
items: []T,
-length: usize,
+len: usize,
prealloc_items: [STATIC_SIZE]T,
allocator: &Allocator,
pub fn init(l: &Self, allocator: &Allocator) {
l.items = l.prealloc_items[0...];
-l.length = 0;
+l.len = 0;
l.allocator = allocator;
}
@@ -27,10 +30,15 @@ pub struct SmallList(T: type, STATIC_SIZE: usize) {
}
pub fn append(l: &Self, item: T) -> %void {
-const new_length = l.length + 1;
+const new_length = l.len + 1;
%return l.ensure_capacity(new_length);
-l.items[l.length] = item;
-l.length = new_length;
+l.items[l.len] = item;
+l.len = new_length;
}
+pub fn resize(l: &Self, new_len: usize) -> %void {
+%return l.ensure_capacity(new_len);
+l.len = new_len;
+}
pub fn ensure_capacity(l: &Self, new_capacity: usize) -> %void {
@@ -50,35 +58,10 @@ pub struct SmallList(T: type, STATIC_SIZE: usize) {
}
}
-var global_allocator = Allocator {
-.alloc_fn = global_alloc,
-.realloc_fn = global_realloc,
-.free_fn = global_free,
-.context = null,
-};
-var some_mem: [200]u8 = undefined;
-var some_mem_index: usize = 0;
-fn global_alloc(self: &Allocator, n: usize) -> %[]u8 {
-const result = some_mem[some_mem_index ... some_mem_index + n];
-some_mem_index += n;
-return result;
-}
-fn global_realloc(self: &Allocator, old_mem: []u8, new_size: usize) -> %[]u8 {
-const result = %return global_alloc(self, new_size);
-@memcpy(result.ptr, old_mem.ptr, old_mem.len);
-return result;
-}
-fn global_free(self: &Allocator, old_mem: []u8) {
-}
#attribute("test")
fn basic_list_test() {
var list: List(i32) = undefined;
-list.init(&global_allocator);
+list.init(&debug.global_allocator);
defer list.deinit();
{var i: usize = 0; while (i < 10; i += 1) {
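
The renamed len field and the new resize method are what CBuf builds on. A usage sketch (mine, not from the commit, assuming the imports at the top of this file):

#attribute("test")
fn resize_list() {
    var list: List(u8) = undefined;
    list.init(&debug.global_allocator);
    defer list.deinit();
    %%list.resize(5);      // ensure_capacity plus setting len, in one step
    assert(list.len == 5); // `len` is the field formerly called `length`
}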

std/math.zig View File

@@ -1,3 +1,9 @@
+pub enum Cmp {
+Equal,
+Greater,
+Less,
+}
pub fn f64_from_bits(bits: u64) -> f64 {
*(&f64)(&bits)
}

std/mem.zig View File

@@ -3,6 +3,8 @@ const math = @import("math.zig");
const os = @import("os.zig");
const io = @import("io.zig");
+pub const Cmp = math.Cmp;
pub error NoMem;
pub type Context = u8;
@@ -40,7 +42,20 @@ pub struct Allocator {
/// Copy all of source into dest at position 0.
/// dest.len must be >= source.len.
-pub fn copy(inline T: type, dest: []T, source: []T) {
+pub fn copy(inline T: type, dest: []T, source: []const T) {
assert(dest.len >= source.len);
@memcpy(dest.ptr, source.ptr, @sizeof(T) * source.len);
}
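
Loosening source to []const T lets callers pass read-only memory, string literals in particular; CBuf.init_from_mem above depends on this. A sketch (mine):

const mem = @import("mem.zig");

#attribute("test")
fn copy_from_literal() {
    var dest: [5]u8 = undefined;
    // A string literal is []const u8; with `source: []T` this call
    // would not have type-checked.
    mem.copy(u8, dest[0...], "hello");
}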
+/// Returns Cmp.Less, Cmp.Equal, or Cmp.Greater if memory a is less than,
+/// equal to, or greater than memory b, respectively.
+pub fn cmp(inline T: type, a: []const T, b: []const T) -> Cmp {
+const n = math.min(usize, a.len, b.len);
+var i: usize = 0;
+while (i < n; i += 1) {
+if (a[i] == b[i]) continue;
+return if (a[i] > b[i]) Cmp.Greater else if (a[i] < b[i]) Cmp.Less else Cmp.Equal;
+}
+return if (a.len > b.len) Cmp.Greater else if (a.len < b.len) Cmp.Less else Cmp.Equal;
+}