Merged
3 changes: 1 addition & 2 deletions build.zig
@@ -3,7 +3,6 @@ const builtin = std.builtin;
const tests = @import("test/tests.zig");
const BufMap = std.BufMap;
const mem = std.mem;
const ArrayList = std.ArrayList;
const io = std.io;
const fs = std.fs;
const InstallDirectoryOptions = std.Build.InstallDirectoryOptions;
@@ -925,7 +924,7 @@ fn addCxxKnownPath(
return error.RequiredLibraryNotFound;

const path_padded = run: {
var args = std.ArrayList([]const u8).init(b.allocator);
var args = std.array_list.Managed([]const u8).init(b.allocator);
try args.append(ctx.cxx_compiler);
var it = std.mem.tokenizeAny(u8, ctx.cxx_compiler_arg1, &std.ascii.whitespace);
while (it.next()) |arg| try args.append(arg);
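The change above illustrates the pattern applied throughout this diff: the allocator-carrying list formerly written std.ArrayList(T) is now spelled std.array_list.Managed(T), while the init/append/deinit calls stay the same. A minimal, self-contained sketch of the new spelling, using std.heap.page_allocator as a stand-in for the build graph's b.allocator:

const std = @import("std");

pub fn main() !void {
    const gpa = std.heap.page_allocator; // stand-in for b.allocator

    // Same managed-list API as before; only the type's name changed.
    var args = std.array_list.Managed([]const u8).init(gpa);
    defer args.deinit();

    try args.append("c++");
    try args.append("--version");

    std.debug.print("collected {d} args\n", .{args.items.len});
}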
5 changes: 2 additions & 3 deletions doc/langref.html.in
@@ -6241,9 +6241,8 @@ fn cmpxchgWeakButNotAtomic(comptime T: type, ptr: *T, expected_value: T, new_val
C has a default allocator - <code>malloc</code>, <code>realloc</code>, and <code>free</code>.
When linking against libc, Zig exposes this allocator with {#syntax#}std.heap.c_allocator{#endsyntax#}.
However, by convention, there is no default allocator in Zig. Instead, functions which need to
allocate accept an {#syntax#}Allocator{#endsyntax#} parameter. Likewise, data structures such as
{#syntax#}std.ArrayList{#endsyntax#} accept an {#syntax#}Allocator{#endsyntax#} parameter in
their initialization functions:
allocate accept an {#syntax#}Allocator{#endsyntax#} parameter. Likewise, some data structures
accept an {#syntax#}Allocator{#endsyntax#} parameter in their initialization functions:
</p>
{#code|test_allocator.zig#}

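The reworded paragraph no longer names std.ArrayList as the canonical example, but the convention it describes is unchanged: anything that allocates receives an Allocator explicitly. A rough sketch of that convention (this is not the langref's test_allocator.zig; the helper name is made up):

const std = @import("std");

// Hypothetical helper: a function that allocates takes the Allocator as a parameter.
fn repeatByte(allocator: std.mem.Allocator, byte: u8, n: usize) ![]u8 {
    const buf = try allocator.alloc(u8, n);
    @memset(buf, byte);
    return buf;
}

test "allocators are passed explicitly" {
    const allocator = std.testing.allocator;

    const dashes = try repeatByte(allocator, '-', 8);
    defer allocator.free(dashes);

    // A managed container remembers the allocator it was initialized with.
    var list = std.array_list.Managed(u8).init(allocator);
    defer list.deinit();
    try list.appendSlice(dashes);

    try std.testing.expectEqual(@as(usize, 8), list.items.len);
}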
2 changes: 1 addition & 1 deletion doc/langref/testing_detect_leak.zig
@@ -1,7 +1,7 @@
const std = @import("std");

test "detect leak" {
var list = std.ArrayList(u21).init(std.testing.allocator);
var list = std.array_list.Managed(u21).init(std.testing.allocator);
// missing `defer list.deinit();`
try list.append('☔');

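The snippet above deliberately omits the deferred cleanup so that std.testing.allocator reports a leak. For contrast, a sketch of the leak-free form with the updated type name:

const std = @import("std");

test "no leak when deinit is deferred" {
    var list = std.array_list.Managed(u21).init(std.testing.allocator);
    // This is the line the langref example intentionally leaves out.
    defer list.deinit();

    try list.append('☔');
    try std.testing.expectEqual(@as(usize, 1), list.items.len);
}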
6 changes: 3 additions & 3 deletions lib/compiler/aro/aro/Compilation.zig
@@ -533,7 +533,7 @@ fn generateSystemDefines(comp: *Compilation, w: anytype) !void {
pub fn generateBuiltinMacros(comp: *Compilation, system_defines_mode: SystemDefinesMode) !Source {
try comp.generateBuiltinTypes();

var buf = std.ArrayList(u8).init(comp.gpa);
var buf = std.array_list.Managed(u8).init(comp.gpa);
defer buf.deinit();

if (system_defines_mode == .include_system_defines) {
@@ -1143,7 +1143,7 @@ pub fn addSourceFromOwnedBuffer(comp: *Compilation, buf: []u8, path: []const u8,
const duped_path = try comp.gpa.dupe(u8, path);
errdefer comp.gpa.free(duped_path);

var splice_list = std.ArrayList(u32).init(comp.gpa);
var splice_list = std.array_list.Managed(u32).init(comp.gpa);
defer splice_list.deinit();

const source_id: Source.Id = @enumFromInt(comp.sources.count() + 2);
@@ -1428,7 +1428,7 @@ fn getFileContents(comp: *Compilation, path: []const u8, limit: ?u32) ![]const u
const file = try comp.cwd.openFile(path, .{});
defer file.close();

var buf = std.ArrayList(u8).init(comp.gpa);
var buf = std.array_list.Managed(u8).init(comp.gpa);
defer buf.deinit();

const max = limit orelse std.math.maxInt(u32);
4 changes: 2 additions & 2 deletions lib/compiler/aro/aro/Driver.zig
@@ -590,7 +590,7 @@ var stdout_buffer: [4096]u8 = undefined;
/// The entry point of the Aro compiler.
/// **MAY call `exit` if `fast_exit` is set.**
pub fn main(d: *Driver, tc: *Toolchain, args: []const []const u8, comptime fast_exit: bool) !void {
var macro_buf = std.ArrayList(u8).init(d.comp.gpa);
var macro_buf = std.array_list.Managed(u8).init(d.comp.gpa);
defer macro_buf.deinit();

const std_out = std.fs.File.stdout().deprecatedWriter();
@@ -817,7 +817,7 @@ fn dumpLinkerArgs(items: []const []const u8) !void {
/// The entry point of the Aro compiler.
/// **MAY call `exit` if `fast_exit` is set.**
pub fn invokeLinker(d: *Driver, tc: *Toolchain, comptime fast_exit: bool) !void {
var argv = std.ArrayList([]const u8).init(d.comp.gpa);
var argv = std.array_list.Managed([]const u8).init(d.comp.gpa);
defer argv.deinit();

var linker_path_buf: [std.fs.max_path_bytes]u8 = undefined;
2 changes: 1 addition & 1 deletion lib/compiler/aro/aro/InitList.zig
@@ -9,7 +9,7 @@ const TokenIndex = Tree.TokenIndex;
const NodeIndex = Tree.NodeIndex;
const Type = @import("Type.zig");
const Diagnostics = @import("Diagnostics.zig");
const NodeList = std.ArrayList(NodeIndex);
const NodeList = std.array_list.Managed(NodeIndex);
const Parser = @import("Parser.zig");

const Item = struct {
38 changes: 19 additions & 19 deletions lib/compiler/aro/aro/Parser.zig
@@ -15,7 +15,7 @@ const TokenIndex = Tree.TokenIndex;
const NodeIndex = Tree.NodeIndex;
const Type = @import("Type.zig");
const Diagnostics = @import("Diagnostics.zig");
const NodeList = std.ArrayList(NodeIndex);
const NodeList = std.array_list.Managed(NodeIndex);
const InitList = @import("InitList.zig");
const Attribute = @import("Attribute.zig");
const char_info = @import("char_info.zig");
@@ -33,7 +33,7 @@ const target_util = @import("target.zig");

const Switch = struct {
default: ?TokenIndex = null,
ranges: std.ArrayList(Range),
ranges: std.array_list.Managed(Range),
ty: Type,
comp: *Compilation,

@@ -101,16 +101,16 @@ value_map: Tree.ValueMap,

// buffers used during compilation
syms: SymbolStack = .{},
strings: std.ArrayListAligned(u8, .@"4"),
labels: std.ArrayList(Label),
strings: std.array_list.AlignedManaged(u8, .@"4"),
labels: std.array_list.Managed(Label),
list_buf: NodeList,
decl_buf: NodeList,
param_buf: std.ArrayList(Type.Func.Param),
enum_buf: std.ArrayList(Type.Enum.Field),
record_buf: std.ArrayList(Type.Record.Field),
param_buf: std.array_list.Managed(Type.Func.Param),
enum_buf: std.array_list.Managed(Type.Enum.Field),
record_buf: std.array_list.Managed(Type.Record.Field),
attr_buf: std.MultiArrayList(TentativeAttribute) = .{},
attr_application_buf: std.ArrayListUnmanaged(Attribute) = .empty,
field_attr_buf: std.ArrayList([]const Attribute),
field_attr_buf: std.array_list.Managed([]const Attribute),
/// type name -> variable name location for tentative definitions (top-level defs with thus-far-incomplete types)
/// e.g. `struct Foo bar;` where `struct Foo` is not defined yet.
/// The key is the StringId of `Foo` and the value is the TokenIndex of `bar`
@@ -693,16 +693,16 @@ pub fn parse(pp: *Preprocessor) Compilation.Error!Tree {
.gpa = pp.comp.gpa,
.arena = arena.allocator(),
.tok_ids = pp.tokens.items(.id),
.strings = std.ArrayListAligned(u8, .@"4").init(pp.comp.gpa),
.strings = std.array_list.AlignedManaged(u8, .@"4").init(pp.comp.gpa),
.value_map = Tree.ValueMap.init(pp.comp.gpa),
.data = NodeList.init(pp.comp.gpa),
.labels = std.ArrayList(Label).init(pp.comp.gpa),
.labels = std.array_list.Managed(Label).init(pp.comp.gpa),
.list_buf = NodeList.init(pp.comp.gpa),
.decl_buf = NodeList.init(pp.comp.gpa),
.param_buf = std.ArrayList(Type.Func.Param).init(pp.comp.gpa),
.enum_buf = std.ArrayList(Type.Enum.Field).init(pp.comp.gpa),
.record_buf = std.ArrayList(Type.Record.Field).init(pp.comp.gpa),
.field_attr_buf = std.ArrayList([]const Attribute).init(pp.comp.gpa),
.param_buf = std.array_list.Managed(Type.Func.Param).init(pp.comp.gpa),
.enum_buf = std.array_list.Managed(Type.Enum.Field).init(pp.comp.gpa),
.record_buf = std.array_list.Managed(Type.Record.Field).init(pp.comp.gpa),
.field_attr_buf = std.array_list.Managed([]const Attribute).init(pp.comp.gpa),
.string_ids = .{
.declspec_id = try StrInt.intern(pp.comp, "__declspec"),
.main_id = try StrInt.intern(pp.comp, "main"),
@@ -1222,7 +1222,7 @@ fn staticAssertMessage(p: *Parser, cond_node: NodeIndex, message: Result) !?[]co
const cond_tag = p.nodes.items(.tag)[@intFromEnum(cond_node)];
if (cond_tag != .builtin_types_compatible_p and message.node == .none) return null;

var buf = std.ArrayList(u8).init(p.gpa);
var buf = std.array_list.Managed(u8).init(p.gpa);
defer buf.deinit();

if (cond_tag == .builtin_types_compatible_p) {
@@ -3994,7 +3994,7 @@ fn msvcAsmStmt(p: *Parser) Error!?NodeIndex {
}

/// asmOperand : ('[' IDENTIFIER ']')? asmStr '(' expr ')'
fn asmOperand(p: *Parser, names: *std.ArrayList(?TokenIndex), constraints: *NodeList, exprs: *NodeList) Error!void {
fn asmOperand(p: *Parser, names: *std.array_list.Managed(?TokenIndex), constraints: *NodeList, exprs: *NodeList) Error!void {
if (p.eatToken(.l_bracket)) |l_bracket| {
const ident = (try p.eatIdentifier()) orelse {
try p.err(.expected_identifier);
@@ -4044,7 +4044,7 @@ fn gnuAsmStmt(p: *Parser, quals: Tree.GNUAssemblyQualifiers, asm_tok: TokenIndex
const allocator = stack_fallback.get();

// TODO: Consider using a TokenIndex of 0 instead of null if we need to store the names in the tree
var names = std.ArrayList(?TokenIndex).initCapacity(allocator, expected_items) catch unreachable; // stack allocation already succeeded
var names = std.array_list.Managed(?TokenIndex).initCapacity(allocator, expected_items) catch unreachable; // stack allocation already succeeded
defer names.deinit();
var constraints = NodeList.initCapacity(allocator, expected_items) catch unreachable; // stack allocation already succeeded
defer constraints.deinit();
@@ -4317,7 +4317,7 @@ fn stmt(p: *Parser) Error!NodeIndex {

const old_switch = p.@"switch";
var @"switch" = Switch{
.ranges = std.ArrayList(Switch.Range).init(p.gpa),
.ranges = std.array_list.Managed(Switch.Range).init(p.gpa),
.ty = cond.ty,
.comp = p.comp,
};
@@ -8268,7 +8268,7 @@ fn charLiteral(p: *Parser) Error!Result {

const max_chars_expected = 4;
var stack_fallback = std.heap.stackFallback(max_chars_expected * @sizeOf(u32), p.comp.gpa);
var chars = std.ArrayList(u32).initCapacity(stack_fallback.get(), max_chars_expected) catch unreachable; // stack allocation already succeeded
var chars = std.array_list.Managed(u32).initCapacity(stack_fallback.get(), max_chars_expected) catch unreachable; // stack allocation already succeeded
defer chars.deinit();

while (char_literal_parser.next()) |item| switch (item) {
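Several Parser hunks pair std.heap.stackFallback with initCapacity(...) catch unreachable: the requested capacity fits the stack buffer (or, failing that, the fallback allocator), which is what the "stack allocation already succeeded" comment relies on. A standalone sketch of that pattern with the renamed list type; the element type and sizes here are illustrative, not Aro's:

const std = @import("std");

test "stack-backed managed list" {
    const max_items = 4;

    // Reserve room for `max_items` u32s on the stack; anything beyond that
    // would fall back to the testing allocator.
    var stack_fallback = std.heap.stackFallback(max_items * @sizeOf(u32), std.testing.allocator);
    var chars = std.array_list.Managed(u32).initCapacity(stack_fallback.get(), max_items) catch unreachable; // see the comment in the diff above
    defer chars.deinit();

    chars.appendAssumeCapacity('a');
    chars.appendAssumeCapacity('b');

    try std.testing.expectEqual(@as(usize, 2), chars.items.len);
}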
20 changes: 10 additions & 10 deletions lib/compiler/aro/aro/Preprocessor.zig
@@ -17,7 +17,7 @@ const features = @import("features.zig");
const Hideset = @import("Hideset.zig");

const DefineMap = std.StringHashMapUnmanaged(Macro);
const RawTokenList = std.ArrayList(RawToken);
const RawTokenList = std.array_list.Managed(RawToken);
const max_include_depth = 200;

/// Errors that can be returned when expanding a macro.
@@ -84,7 +84,7 @@ tokens: Token.List = .{},
/// Do not directly mutate this; must be kept in sync with `tokens`
expansion_entries: std.MultiArrayList(ExpansionEntry) = .{},
token_buf: RawTokenList,
char_buf: std.ArrayList(u8),
char_buf: std.array_list.Managed(u8),
/// Counter that is incremented each time preprocess() is called
/// Can be used to distinguish multiple preprocessings of the same file
preprocess_count: u32 = 0,
@@ -131,7 +131,7 @@ pub fn init(comp: *Compilation) Preprocessor {
.gpa = comp.gpa,
.arena = std.heap.ArenaAllocator.init(comp.gpa),
.token_buf = RawTokenList.init(comp.gpa),
.char_buf = std.ArrayList(u8).init(comp.gpa),
.char_buf = std.array_list.Managed(u8).init(comp.gpa),
.poisoned_identifiers = std.StringHashMap(void).init(comp.gpa),
.top_expansion_buf = ExpandBuf.init(comp.gpa),
.hideset = .{ .comp = comp },
@@ -982,7 +982,7 @@ fn expr(pp: *Preprocessor, tokenizer: *Tokenizer) MacroError!bool {
.tok_i = @intCast(token_state.tokens_len),
.arena = pp.arena.allocator(),
.in_macro = true,
.strings = std.ArrayListAligned(u8, .@"4").init(pp.comp.gpa),
.strings = std.array_list.AlignedManaged(u8, .@"4").init(pp.comp.gpa),

.data = undefined,
.value_map = undefined,
@@ -1140,7 +1140,7 @@ fn skipToNl(tokenizer: *Tokenizer) void {
}
}

const ExpandBuf = std.ArrayList(TokenWithExpansionLocs);
const ExpandBuf = std.array_list.Managed(TokenWithExpansionLocs);
fn removePlacemarkers(buf: *ExpandBuf) void {
var i: usize = buf.items.len -% 1;
while (i < buf.items.len) : (i -%= 1) {
@@ -1151,7 +1151,7 @@ fn removePlacemarkers(buf: *ExpandBuf) void {
}
}

const MacroArguments = std.ArrayList([]const TokenWithExpansionLocs);
const MacroArguments = std.array_list.Managed([]const TokenWithExpansionLocs);
fn deinitMacroArguments(allocator: Allocator, args: *const MacroArguments) void {
for (args.items) |item| {
for (item) |tok| TokenWithExpansionLocs.free(tok.expansion_locs, allocator);
@@ -2075,7 +2075,7 @@ fn collectMacroFuncArguments(
var parens: u32 = 0;
var args = MacroArguments.init(pp.gpa);
errdefer deinitMacroArguments(pp.gpa, &args);
var curArgument = std.ArrayList(TokenWithExpansionLocs).init(pp.gpa);
var curArgument = std.array_list.Managed(TokenWithExpansionLocs).init(pp.gpa);
defer curArgument.deinit();
while (true) {
var tok = try nextBufToken(pp, tokenizer, buf, start_idx, end_idx, extend_buf);
@@ -2645,7 +2645,7 @@ fn define(pp: *Preprocessor, tokenizer: *Tokenizer, define_tok: RawToken) Error!
/// Handle a function like #define directive.
fn defineFn(pp: *Preprocessor, tokenizer: *Tokenizer, define_tok: RawToken, macro_name: RawToken, l_paren: RawToken) Error!void {
assert(macro_name.id.isMacroIdentifier());
var params = std.ArrayList([]const u8).init(pp.gpa);
var params = std.array_list.Managed([]const u8).init(pp.gpa);
defer params.deinit();

// Parse the parameter list.
@@ -3471,7 +3471,7 @@ test "Preserve pragma tokens sometimes" {
const allocator = std.testing.allocator;
const Test = struct {
fn runPreprocessor(source_text: []const u8) ![]const u8 {
var buf = std.ArrayList(u8).init(allocator);
var buf = std.array_list.Managed(u8).init(allocator);
defer buf.deinit();

var comp = Compilation.init(allocator, std.fs.cwd());
@@ -3602,7 +3602,7 @@ test "Include guards" {

_ = try comp.addSourceFromBuffer(path, "int bar = 5;\n");

var buf = std.ArrayList(u8).init(allocator);
var buf = std.array_list.Managed(u8).init(allocator);
defer buf.deinit();

var writer = buf.writer();
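MacroArguments above is a managed list whose elements are themselves heap-allocated slices, which is why deinitMacroArguments frees each element before dropping the list itself. A simplified sketch of that ownership pattern; the helper name and contents are hypothetical:

const std = @import("std");

// Hypothetical helper mirroring the shape of deinitMacroArguments:
// free the owned element slices first, then the list itself.
fn deinitOwnedSlices(allocator: std.mem.Allocator, list: *std.array_list.Managed([]const u8)) void {
    for (list.items) |item| allocator.free(item);
    list.deinit();
}

test "free nested allocations before deinit" {
    const gpa = std.testing.allocator;

    var args = std.array_list.Managed([]const u8).init(gpa);
    defer deinitOwnedSlices(gpa, &args);

    try args.append(try gpa.dupe(u8, "-DFOO"));
    try args.append(try gpa.dupe(u8, "-DBAR=1"));

    try std.testing.expectEqual(@as(usize, 2), args.items.len);
}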
12 changes: 6 additions & 6 deletions lib/compiler/aro/aro/Toolchain.zig
@@ -157,7 +157,7 @@ pub fn getLinkerPath(tc: *const Toolchain, buf: []u8) ![]const u8 {
return use_linker;
}
} else {
var linker_name = try std.ArrayList(u8).initCapacity(tc.driver.comp.gpa, 5 + use_linker.len); // "ld64." ++ use_linker
var linker_name = try std.array_list.Managed(u8).initCapacity(tc.driver.comp.gpa, 5 + use_linker.len); // "ld64." ++ use_linker
defer linker_name.deinit();
if (tc.getTarget().os.tag.isDarwin()) {
linker_name.appendSliceAssumeCapacity("ld64.");
@@ -198,7 +198,7 @@ fn possibleProgramNames(raw_triple: ?[]const u8, name: []const u8, buf: *[64]u8)
}

/// Add toolchain `file_paths` to argv as `-L` arguments
pub fn addFilePathLibArgs(tc: *const Toolchain, argv: *std.ArrayList([]const u8)) !void {
pub fn addFilePathLibArgs(tc: *const Toolchain, argv: *std.array_list.Managed([]const u8)) !void {
try argv.ensureUnusedCapacity(tc.file_paths.items.len);

var bytes_needed: usize = 0;
@@ -332,7 +332,7 @@ pub fn addPathFromComponents(tc: *Toolchain, components: []const []const u8, des

/// Add linker args to `argv`. Does not add path to linker executable as first item; that must be handled separately
/// Items added to `argv` will be string literals or owned by `tc.arena` so they must not be individually freed
pub fn buildLinkerArgs(tc: *Toolchain, argv: *std.ArrayList([]const u8)) !void {
pub fn buildLinkerArgs(tc: *Toolchain, argv: *std.array_list.Managed([]const u8)) !void {
return switch (tc.inner) {
.uninitialized => unreachable,
.linux => |*linux| linux.buildLinkerArgs(tc, argv),
@@ -412,7 +412,7 @@ fn getAsNeededOption(is_solaris: bool, needed: bool) []const u8 {
}
}

fn addUnwindLibrary(tc: *const Toolchain, argv: *std.ArrayList([]const u8)) !void {
fn addUnwindLibrary(tc: *const Toolchain, argv: *std.array_list.Managed([]const u8)) !void {
const unw = try tc.getUnwindLibKind();
const target = tc.getTarget();
if ((target.abi.isAndroid() and unw == .libgcc) or
@@ -450,7 +450,7 @@ fn addUnwindLibrary(tc: *const Toolchain, argv: *std.ArrayList([]const u8)) !voi
}
}

fn addLibGCC(tc: *const Toolchain, argv: *std.ArrayList([]const u8)) !void {
fn addLibGCC(tc: *const Toolchain, argv: *std.array_list.Managed([]const u8)) !void {
const libgcc_kind = tc.getLibGCCKind();
if (libgcc_kind == .static or libgcc_kind == .unspecified) {
try argv.append("-lgcc");
@@ -461,7 +461,7 @@ fn addLibGCC(tc: *const Toolchain, argv: *std.ArrayList([]const u8)) !void {
}
}

pub fn addRuntimeLibs(tc: *const Toolchain, argv: *std.ArrayList([]const u8)) !void {
pub fn addRuntimeLibs(tc: *const Toolchain, argv: *std.array_list.Managed([]const u8)) !void {
const target = tc.getTarget();
const rlt = tc.getRuntimeLibKind();
switch (rlt) {
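The addFilePathLibArgs hunk above reserves capacity with ensureUnusedCapacity before filling in the -L arguments; a common reason to reserve up front like this is to make the per-item appends infallible. A minimal sketch of that reserve-then-append pattern; the directory names are placeholders, not real toolchain output:

const std = @import("std");

test "reserve argv capacity before appending" {
    var argv = std.array_list.Managed([]const u8).init(std.testing.allocator);
    defer argv.deinit();

    const lib_dirs = [_][]const u8{ "/usr/lib", "/usr/local/lib" };

    // One capacity reservation up front; the loop below cannot fail.
    try argv.ensureUnusedCapacity(lib_dirs.len);
    for (lib_dirs) |dir| argv.appendAssumeCapacity(dir);

    try std.testing.expectEqual(@as(usize, 2), argv.items.len);
}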
2 changes: 1 addition & 1 deletion lib/compiler/aro/aro/Tree.zig
@@ -41,7 +41,7 @@ pub const TokenWithExpansionLocs = struct {

pub fn addExpansionLocation(tok: *TokenWithExpansionLocs, gpa: std.mem.Allocator, new: []const Source.Location) !void {
if (new.len == 0 or tok.id == .whitespace or tok.id == .macro_ws or tok.id == .placemarker) return;
var list = std.ArrayList(Source.Location).init(gpa);
var list = std.array_list.Managed(Source.Location).init(gpa);
defer {
@memset(list.items.ptr[list.items.len..list.capacity], .{});
// Add a sentinel to indicate the end of the list since
4 changes: 2 additions & 2 deletions lib/compiler/aro/aro/toolchains/Linux.zig
@@ -162,7 +162,7 @@ pub fn getDefaultLinker(self: *const Linux, target: std.Target) []const u8 {
return "ld";
}

pub fn buildLinkerArgs(self: *const Linux, tc: *const Toolchain, argv: *std.ArrayList([]const u8)) Compilation.Error!void {
pub fn buildLinkerArgs(self: *const Linux, tc: *const Toolchain, argv: *std.array_list.Managed([]const u8)) Compilation.Error!void {
const d = tc.driver;
const target = tc.getTarget();

@@ -465,7 +465,7 @@ test Linux {

try toolchain.discover();

var argv = std.ArrayList([]const u8).init(driver.comp.gpa);
var argv = std.array_list.Managed([]const u8).init(driver.comp.gpa);
defer argv.deinit();

var linker_path_buf: [std.fs.max_path_bytes]u8 = undefined;
2 changes: 1 addition & 1 deletion lib/compiler/aro/backend/Object.zig
@@ -30,7 +30,7 @@ pub const Section = union(enum) {
custom: []const u8,
};

pub fn getSection(obj: *Object, section: Section) !*std.ArrayList(u8) {
pub fn getSection(obj: *Object, section: Section) !*std.array_list.Managed(u8) {
switch (obj.format) {
.elf => return @as(*Elf, @alignCast(@fieldParentPtr("obj", obj))).getSection(section),
else => unreachable,