initial commit
This commit is contained in:
commit
7687a38864
8
.gitignore
vendored
Normal file
8
.gitignore
vendored
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
Test/teapot_v1_test.hxa
|
||||||
|
Test/teapot_v3_test.hxa
|
||||||
|
Test/teapot_v1_print_test.txt
|
||||||
|
Test/teapot_v3_print_test.txt
|
||||||
|
Test/teapot_converted_v1_v3.hxa
|
||||||
|
|
||||||
|
zig-cache/*
|
||||||
|
zig-out/*
|
34
README.md
Normal file
34
README.md
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
# zig-HxA
|
||||||
|
|
||||||
|
This is a parser for the [HxA](https://github.com/quelsolaar/HxA) file format (by the wonderful [Eskil Steenberg](http://www.quelsolaar.com/)) in the Zig programming language.
|
||||||
|
|
||||||
|
This is an alpha project and certain aspects of this implementation could not be tested. Feel free to open an issue or pull request if something catches your eye! Known correct HxA files are also welcome as a test tool!
|
||||||
|
|
||||||
|
## The Standard
|
||||||
|
The actual standard can be found in `types.hxa`. The struct represented there is the actual HxA format.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
```zig
|
||||||
|
const std = @import("std");
|
||||||
|
const hxa = @import("HxA.zig");
|
||||||
|
|
||||||
|
const allocator = std.heap.page_allocator;
|
||||||
|
|
||||||
|
const file1 = &try std.fs.cwd().openFile("Test/teapot_v1.hxa", .{ .read = true });
|
||||||
|
const file2 = &try std.fs.cwd().createFile("Test/teapot_v1_test.hxa", .{ .read = true });
|
||||||
|
const stdout = &std.io.getStdOut().writer();
|
||||||
|
|
||||||
|
// Load HxA file
|
||||||
|
const teapot_v1 = try hxa.load(allocator, file1);
|
||||||
|
// Free when done
|
||||||
|
defer hxa.free(allocator, teapot_v1);
|
||||||
|
|
||||||
|
// Print HxA in human readable format to stdout
|
||||||
|
try hxa.print(teapot_v1, stdout, .data);
|
||||||
|
|
||||||
|
// Save HxA to file
|
||||||
|
try hxa.save(teapot_v1, file2);
|
||||||
|
```
|
||||||
|
|
||||||
|
## Tests
|
||||||
|
Run all tests with `zig build test`. Since I am unsure if the parser handles all cases correctly, tests are more of a tool to communicate breaking code changes than to validate absolute feature correctness.
|
BIN
Test/teapot_v1.hxa
Normal file
BIN
Test/teapot_v1.hxa
Normal file
Binary file not shown.
1102
Test/teapot_v1_print.txt
Normal file
1102
Test/teapot_v1_print.txt
Normal file
File diff suppressed because it is too large
Load Diff
BIN
Test/teapot_v3.hxa
Normal file
BIN
Test/teapot_v3.hxa
Normal file
Binary file not shown.
1103
Test/teapot_v3_print.txt
Normal file
1103
Test/teapot_v3_print.txt
Normal file
File diff suppressed because it is too large
Load Diff
25
build.zig
Normal file
25
build.zig
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
const std = @import("std");
|
||||||
|
|
||||||
|
// Build script: produces the `hxa` executable and wires up the `test` step.
pub fn build(b: *std.build.Builder) void {
    // Let the invoker pick the compile target; defaults to the host machine.
    const target = b.standardTargetOptions(.{});

    // Let the invoker pick Debug / ReleaseSafe / ReleaseFast / ReleaseSmall.
    const mode = b.standardReleaseOptions();

    // Main artifact, built from the test driver source.
    const exe = b.addExecutable("hxa", "src/test.zig");
    exe.setTarget(target);
    exe.setBuildMode(mode);
    exe.install();

    // Unit tests over the same source file.
    const exe_tests = b.addTest("src/test.zig");
    exe_tests.setTarget(target);
    exe_tests.setBuildMode(mode);

    // `zig build test` runs the unit tests.
    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&exe_tests.step);
}
|
493
src/HxA.zig
Normal file
493
src/HxA.zig
Normal file
@ -0,0 +1,493 @@
|
|||||||
|
const std = @import("std");
const Types = @import("types.zig");

// Re-exported format constants.
pub const MAGIC_NUMBER = Types.MAGIC_NUMBER;
pub const VERSION = Types.VERSION;

// Local aliases for the format's type declarations.
const NodeType = Types.NodeType;
const ImageType = Types.ImageType;
const MetaType = Types.MetaType;
const Meta = Types.Meta;
const LayerType = Types.LayerType;
const Layer = Types.Layer;
const LayerStack = Types.LayerStack;
const Node = Types.Node;
const File = Types.File;

pub const Conventions = Types.Conventions {};

// Errors this parser can produce, merged with the allocator and file I/O error sets.
const Error = error {
    UnexpectedEOF,
    NoHxaFile,
    NodeTypeNotRecognized,
    MetaTypeNotRecognized,
    LayerTypeNotRecognized,
    UnexpectedPointerType,
    InvalidComponents,
} || std.mem.Allocator.Error || std.fs.File.OpenError || std.os.ReadError || std.os.WriteError;
|
||||||
|
|
||||||
|
|
||||||
|
// TODO: The function relies on alloc not failing (if it does, an errdefer will try to free unallocated memory)
// Read a HxA file and close it afterwards.
// Caller owns the returned object; release it with `free`.
pub fn load(allocator: std.mem.Allocator, file: *std.fs.File) Error!*File {
    // Close the file when done, success or failure.
    defer file.close();

    // Temporaries so length fields are only assigned after a successful read;
    // this keeps `free` safe on the error path (no dirty counts).
    var count: u32 = undefined;
    var file_version: u32 = undefined;

    // Verify the magic number before allocating anything.
    var magic: @TypeOf(MAGIC_NUMBER) = undefined;
    try loadData(file, &magic);
    if (magic != MAGIC_NUMBER) return Error.NoHxaFile;

    const hxa = try allocator.create(File);
    errdefer {
        // std.debug.print("Error while reading file {s} at location {X}", .{ file., BYTES_READ });
        free(allocator, hxa);
    }

    hxa.version = 0;
    hxa.len = 0;

    // File header: version, then node count.
    try loadData(file, &file_version);
    hxa.version = file_version;

    try loadData(file, &count);
    hxa.len = count;

    hxa.nodes = try allocator.alloc(Node, hxa.len);

    for (hxa.nodes) |*node| {
        node.len = 0;

        try loadData(file, &node.type);
        if (@enumToInt(node.type) >= @typeInfo(NodeType).Enum.fields.len)
            return Error.NodeTypeNotRecognized;

        // Zero defaults so `free` never walks uninitialized memory.
        // NOTE(review): geometry.corner_stack is not zero-defaulted here even though
        // freeNode frees it — confirm whether this is intentional.
        switch (node.type) {
            .geometry => {
                node.content.geometry.vertex_len = 0;
                node.content.geometry.edge_corner_len = 0;
                node.content.geometry.face_len = 0;
                node.content.geometry.vertex_stack.len = 0;
                node.content.geometry.edge_stack.len = 0;
                node.content.geometry.face_stack.len = 0;
            },
            .image => {
                node.content.image.type = .cube;
                node.content.image.resolution = [_]u32 {1, 1, 1};
                node.content.image.image_stack.len = 0;
            },
            else => {},
        }

        // Per-node meta entry count, then the entries themselves.
        try loadData(file, &count);
        node.len = count;

        node.meta = try allocator.alloc(Meta, node.len);
        try loadMeta(allocator, file, &node.meta);

        switch (node.type) {
            .geometry => {
                try loadData(file, &node.content.geometry.vertex_len);
                try loadStack(allocator, file, &node.content.geometry.vertex_stack, node.content.geometry.vertex_len);
                try loadData(file, &node.content.geometry.edge_corner_len);
                try loadStack(allocator, file, &node.content.geometry.corner_stack, node.content.geometry.edge_corner_len);
                // Edge stacks only exist in HxA version 3 and later.
                if (hxa.version > 2)
                    try loadStack(allocator, file, &node.content.geometry.edge_stack, node.content.geometry.edge_corner_len);
                try loadData(file, &node.content.geometry.face_len);
                try loadStack(allocator, file, &node.content.geometry.face_stack, node.content.geometry.face_len);
            },
            .image => {
                try loadData(file, &node.content.image.type);
                // Cube maps store two resolution axes; otherwise the image type
                // enum value doubles as the dimension count.
                var dimensions = @enumToInt(node.content.image.type);
                if (node.content.image.type == .cube)
                    dimensions = 2;
                try loadData(file, node.content.image.resolution[0..dimensions]);

                var size: u32 = node.content.image.resolution[0] * node.content.image.resolution[1] * node.content.image.resolution[2];
                if (node.content.image.type == .cube)
                    size *= 6; // six faces
                try loadStack(allocator, file, &node.content.image.image_stack, size);
            },
            else => {},
        }
    }

    return hxa;
}
|
||||||
|
|
||||||
|
// Read exactly enough bytes from `file` to fill `buffer`.
// `buffer` must be a single-item pointer or a slice; anything else is rejected.
// Returns UnexpectedEOF if the file ends before the buffer is full.
fn loadData(file: *std.fs.File, buffer: anytype) Error!void {
    const info = @typeInfo(@TypeOf(buffer)).Pointer;

    // View the pointee as a raw byte slice.
    const bytes: []u8 = switch (info.size) {
        .Slice => @ptrCast([*]u8, buffer)[0 .. @sizeOf(info.child) * buffer.len],
        .One => @ptrCast([*]u8, buffer)[0 .. @sizeOf(info.child)],
        else => return Error.UnexpectedPointerType,
    };

    // A short read means the file is truncated.
    const read = try file.read(bytes);
    if (read != bytes.len) return Error.UnexpectedEOF;
}
|
||||||
|
|
||||||
|
// Read a length-prefixed name (u8 length, then that many bytes).
// Allocates the name buffer; freed later via the owning free* routine.
fn loadName(allocator: std.mem.Allocator, file: *std.fs.File, name: *[]u8) !void {
    var name_len: u8 = undefined;
    try loadData(file, &name_len);

    name.* = try allocator.alloc(u8, name_len);
    try loadData(file, name.*);
}
|
||||||
|
|
||||||
|
// Fill a pre-allocated array of meta entries from the file.
// Each entry: name, type tag, value count, then the payload; recurses for nested meta.
fn loadMeta(allocator: std.mem.Allocator, file: *std.fs.File, meta: *[]Meta) Error!void {
    for (meta.*) |*data| {
        // Zero default so `free` stays safe if a later read fails.
        data.len = 0;
        var count: u32 = undefined;

        try loadName(allocator, file, &data.name);
        try loadData(file, &data.type);
        if (@enumToInt(data.type) >= @typeInfo(MetaType).Enum.fields.len)
            return Error.MetaTypeNotRecognized;

        try loadData(file, &count);
        data.len = count;

        // Allocate and read the payload for the active union member.
        switch (data.type) {
            .u64 => {
                data.values.u64 = try allocator.alloc(u64, data.len);
                try loadData(file, data.values.u64);
            },
            .f64 => {
                data.values.f64 = try allocator.alloc(f64, data.len);
                try loadData(file, data.values.f64);
            },
            .node => {
                data.values.node = try allocator.alloc(Node, data.len);
                try loadData(file, data.values.node);
            },
            .text => {
                data.values.text = try allocator.alloc(u8, data.len);
                try loadData(file, data.values.text);
            },
            .binary => {
                data.values.binary = try allocator.alloc(u8, data.len);
                try loadData(file, data.values.binary);
            },
            .meta => {
                data.values.meta = try allocator.alloc(Meta, data.len);
                try loadMeta(allocator, file, &data.values.meta);
            },
        }
    }
}
|
||||||
|
|
||||||
|
// Read one layer stack: layer count, then per layer its name, component
// count, element type, and raw data (layer_len elements of `components` channels).
fn loadStack(allocator: std.mem.Allocator, file: *std.fs.File, stack: *LayerStack, layer_len: u32) Error!void {
    // Byte size per element, indexed by LayerType tag.
    const type_sizes = [_]u32 { @sizeOf(u8), @sizeOf(u32), @sizeOf(f32), @sizeOf(f64) };
    stack.len = 0;
    var count: u32 = undefined;

    try loadData(file, &count);
    stack.len = count;

    stack.layers = try allocator.alloc(Layer, stack.len);

    for (stack.layers) |*layer| {
        // Zero default keeps `free` safe if a later read fails.
        layer.components = 0;

        try loadName(allocator, file, &layer.name);
        try loadData(file, &layer.components);
        if (layer.components == 0)
            return Error.InvalidComponents;
        try loadData(file, &layer.type);

        if (@enumToInt(layer.type) >= @typeInfo(LayerType).Enum.fields.len)
            return Error.LayerTypeNotRecognized;

        // Raw layer payload: element size * channels * element count.
        layer.data = try allocator.alloc(u8, type_sizes[@enumToInt(layer.type)] * layer.components * layer_len);
        try loadData(file, layer.data);
    }
}
|
||||||
|
|
||||||
|
// Frees a HxA object: every node's contents, the node array, then the object itself.
pub fn free(allocator: std.mem.Allocator, hxa: *File) void {
    for (hxa.nodes) |*node|
        freeNode(allocator, node, hxa.version);
    allocator.free(hxa.nodes);

    allocator.destroy(hxa);
}
|
||||||
|
|
||||||
|
// Free a single node: its meta entries and any layer stacks its content owns.
fn freeNode(allocator: std.mem.Allocator, node: *Node, version: u32) void {
    for (node.meta) |*data|
        freeMeta(allocator, data);

    allocator.free(node.meta);

    switch (node.type) {
        .geometry => {
            freeStack(allocator, &node.content.geometry.corner_stack);
            freeStack(allocator, &node.content.geometry.vertex_stack);
            freeStack(allocator, &node.content.geometry.face_stack);
            // Edge stacks only exist for HxA version 3 and later.
            if (version > 2)
                freeStack(allocator, &node.content.geometry.edge_stack);
        },
        .image => freeStack(allocator, &node.content.image.image_stack),
        else => {},
    }
}
|
||||||
|
|
||||||
|
// Free one meta entry: its name and the payload of the active union member,
// recursing for nested meta.
fn freeMeta(allocator: std.mem.Allocator, meta: *Meta) void {
    allocator.free(meta.name);

    switch (meta.type) {
        .u64 => allocator.free(meta.values.u64),
        .f64 => allocator.free(meta.values.f64),
        .node => allocator.free(meta.values.node),
        .text => allocator.free(meta.values.text),
        .binary => allocator.free(meta.values.binary),
        .meta => {
            for (meta.values.meta) |*data|
                freeMeta(allocator, data);
            allocator.free(meta.values.meta);
        },
    }
}
|
||||||
|
|
||||||
|
// Free a layer stack: each layer's name and data buffers, then the layer array.
fn freeStack(allocator: std.mem.Allocator, stack: *LayerStack) void {
    for (stack.layers) |*layer| {
        allocator.free(layer.name);
        allocator.free(layer.data);
    }

    allocator.free(stack.layers);
}
|
||||||
|
|
||||||
|
// Save a HxA object to the file and close it afterwards.
// Writes header (magic, version, node count) then each node's meta and content.
pub fn save(hxa: *File, file: *std.fs.File) !void {
    defer file.close();

    _ = try file.write(std.mem.asBytes(&MAGIC_NUMBER));
    _ = try file.write(std.mem.asBytes(&hxa.version));
    _ = try file.write(std.mem.asBytes(&hxa.len));

    for (hxa.nodes) |*node| {
        _ = try file.write(std.mem.asBytes(&node.type));
        _ = try file.write(std.mem.asBytes(&node.len));

        for (node.meta) |*data|
            try saveMeta(file, data);

        switch (node.type) {
            .geometry => {
                _ = try file.write(std.mem.asBytes(&node.content.geometry.vertex_len));
                try saveStack(file, &node.content.geometry.vertex_stack);
                _ = try file.write(std.mem.asBytes(&node.content.geometry.edge_corner_len));
                try saveStack(file, &node.content.geometry.corner_stack);
                // Edge stacks are only written for version 3 and later files.
                if (hxa.version > 2)
                    try saveStack(file, &node.content.geometry.edge_stack);
                _ = try file.write(std.mem.asBytes(&node.content.geometry.face_len));
                try saveStack(file, &node.content.geometry.face_stack);
            },
            .image => {
                _ = try file.write(std.mem.asBytes(&node.content.image.type));
                // Cube maps write two resolution axes; otherwise the type tag
                // doubles as the dimension count (mirrors `load`).
                var dimension: u32 = @enumToInt(node.content.image.type);
                if (node.content.image.type == .cube)
                    dimension = 2;
                _ = try file.write(std.mem.sliceAsBytes(node.content.image.resolution[0..dimension]));
                try saveStack(file, &node.content.image.image_stack);
            },
            else => {},
        }
    }
}
|
||||||
|
|
||||||
|
// Serialize one meta entry: length-prefixed name, type tag, count, then the
// payload of the active union member; recurses for nested meta.
fn saveMeta(file: *std.fs.File, meta: *Meta) Error!void {
    _ = try file.write(std.mem.asBytes(&@intCast(u8, meta.name.len)));
    _ = try file.write(meta.name);
    _ = try file.write(std.mem.asBytes(&meta.type));
    _ = try file.write(std.mem.asBytes(&meta.len));

    switch (meta.type) {
        .u64 => { _ = try file.write(std.mem.sliceAsBytes(meta.values.u64)); },
        .f64 => { _ = try file.write(std.mem.sliceAsBytes(meta.values.f64)); },
        .node => { _ = try file.write(std.mem.sliceAsBytes(meta.values.node)); },
        .text => { _ = try file.write(std.mem.sliceAsBytes(meta.values.text)); },
        .binary => { _ = try file.write(std.mem.sliceAsBytes(meta.values.binary)); },
        .meta => {
            for (meta.values.meta) |*data|
                try saveMeta(file, data);
        },
    }
}
|
||||||
|
|
||||||
|
// Serialize a layer stack: layer count, then each layer's name, component
// count, element type, and raw data bytes.
fn saveStack(file: *std.fs.File, stack: *LayerStack) !void {
    _ = try file.write(std.mem.asBytes(&stack.len));

    for (stack.layers) |*layer| {
        _ = try file.write(std.mem.asBytes(&@intCast(u8, layer.name.len)));
        _ = try file.write(layer.name);
        _ = try file.write(std.mem.asBytes(&layer.components));
        _ = try file.write(std.mem.asBytes(&layer.type));
        _ = try file.write(layer.data);
    }
}
|
||||||
|
|
||||||
|
// Outline does not print layer data
const PrintOptions = enum { outline, data };

// Print a HxA file in human readable format to the writer object.
// All format strings are reproduced byte-for-byte — the golden files under
// Test/ (teapot_*_print.txt) are compared against this exact output, which is
// why the "Node lenght" typo below is deliberately left in place.
pub fn print(hxa: *File, writer: *std.fs.File.Writer, option: PrintOptions) !void {
    try writer.print("HxA version: {}\n", .{ hxa.version });
    try writer.print("Node lenght: {}\n", .{ hxa.len });

    for (hxa.nodes) |*node, i| {
        try writer.print("-Node id: {}\n", .{ i });
        try writer.print("\t-Node type: {s}\n", .{ @tagName(node.type) });
        try writer.print("\t-Node meta length: {}\n", .{ node.len });

        try printMeta(writer, &node.meta, 2, option);

        switch (node.type) {
            .geometry => {
                try writer.print("\t-Geometry vertex length: {}\n", .{ node.content.geometry.vertex_len });
                try printStack(writer, &node.content.geometry.vertex_stack, "Vertex", option);
                try writer.print("\t-Geometry edge length: {}\n", .{ node.content.geometry.edge_corner_len });
                try printStack(writer, &node.content.geometry.corner_stack, "Corner", option);
                if (hxa.version > 2)
                    try printStack(writer, &node.content.geometry.edge_stack, "Edge", option);
                try writer.print("\t-Geometry face length: {}\n", .{ node.content.geometry.face_len });
                try printStack(writer, &node.content.geometry.face_stack, "Face", option);
            },
            .image => {
                try writer.print("\t-Pixel type: {s}\n", .{ @tagName(node.content.image.type) });
                switch (node.content.image.type) {
                    .cube => try writer.print("\t-Pixel resolution: {} x {} x 6", .{ node.content.image.resolution[0], node.content.image.resolution[1] }),
                    .@"1d" => try writer.print("\t-Pixel resolution: {}\n", .{ node.content.image.resolution[0] }),
                    .@"2d" => try writer.print("\t-Pixel resolution: {} x {}\n", .{ node.content.image.resolution[0], node.content.image.resolution[1] }),
                    .@"3d" => try writer.print("\t-Pixel resolution: {} x {} x {}\n", .{ node.content.image.resolution[0], node.content.image.resolution[1], node.content.image.resolution[2] }),
                }
            },
            else => {},
        }
    }
}
|
||||||
|
|
||||||
|
// Print an array of meta entries (recursing into nested meta).
// `tab_len` is the indentation depth; value slices are clamped to at most
// `tabs.len` (16) elements, followed by " ..." when truncated.
fn printMeta(writer: *std.fs.File.Writer, meta: *[]Meta, tab_len: usize, option: PrintOptions) Error!void {
    const tabs = [_]u8 {'\t'} ** 16;

    for (meta.*) |*data| {
        try writer.print("{s}-Meta {s} \"{s}\" [{}]:", .{ tabs[0..tab_len % tabs.len], @tagName(data.type), data.name, data.len });

        // print data
        if (option == .data) {
            switch(data.type) {
                .u64 => try writer.print(" {any}", .{ data.values.u64[0..@minimum(tabs.len, data.values.u64.len)] }),
                .f64 => try writer.print(" {any}", .{ data.values.f64[0..@minimum(tabs.len, data.values.f64.len)] }),
                // BUG FIX: these two arms previously clamped with the lengths of
                // *other* union members (.node used .text.len, .text used
                // .binary.len), reading inactive union fields. Each arm now
                // clamps with its own slice's length.
                .node => try writer.print("{any}", .{ data.values.node[0..@minimum(tabs.len, data.values.node.len)] }),
                .text => try writer.print(" {s}", .{ data.values.text[0..@minimum(tabs.len, data.values.text.len)] }),
                .binary => try writer.print(" {}", .{ std.fmt.fmtSliceHexUpper(data.values.binary) }),
                .meta => try printMeta(writer, &data.values.meta, tab_len + 1, option),
            }
        }

        // Nested meta handles its own newlines; everything else ends the line
        // here, marking truncated (non-text) values with an ellipsis.
        if (data.type != .meta) {
            if (data.len > tabs.len and data.type != .text) {
                try writer.print(" ...\n", .{});
            } else {
                try writer.print("\n", .{});
            }
        }
    }
}
|
||||||
|
|
||||||
|
// Print one layer stack; when `option == .data` the raw bytes are reinterpreted
// per the layer's element type and printed `components` values per row.
fn printStack(writer: *std.fs.File.Writer, stack: *LayerStack, name: []const u8, option: PrintOptions) !void {
    try writer.print("\t-{s} Layer length: {}\n", .{ name, stack.layers.len });

    for (stack.layers) |*layer| {
        try writer.print("\t\tLayer name: {s}\n", .{ layer.name });
        try writer.print("\t\tLayer components: {}\n", .{ layer.components });
        try writer.print("\t\tLayer type: {s}\n", .{ @tagName(layer.type) });

        if (option == .data) {
            switch (layer.type) {
                .u8 => {
                    var i: usize = 0;
                    while (i < layer.data.len) : (i += layer.components )
                        try writer.print("\t\t\t{d} \n", .{ layer.data[i..i + layer.components] });
                },
                .i32 => {
                    if (std.mem.eql(u8, layer.name, "reference" ) and layer.components == 1) {
                        // Reference layers encode polygons: a negative value ends
                        // a polygon, so print one polygon per row.
                        const data = @ptrCast([*]align(1) i32, layer.data)[0 .. layer.data.len / @sizeOf(i32)];
                        var i: usize = 0;
                        for (data) |d, j| {
                            if (d < 0) {
                                try writer.print("\t\t\t{d}\n", .{ data[i..j+1] });
                                i = j+1;
                            }
                        }
                    } else {
                        var i: usize = 0;
                        const data = @ptrCast([*]align(1) i32, layer.data)[0 .. layer.data.len / @sizeOf(i32)];
                        while (i < data.len) : (i += layer.components )
                            try writer.print("\t\t\t{d} \n", .{ data[i..i + layer.components] });
                    }
                },
                .f32 => {
                    var i: usize = 0;
                    const data = @ptrCast([*]align(1) f32, layer.data)[0 .. layer.data.len / @sizeOf(f32)];
                    while (i < data.len) : (i += layer.components )
                        try writer.print("\t\t\t{d:.6} \n", .{ data[i..i + layer.components] });
                },
                .f64 => {
                    var i: usize = 0;
                    const data = @ptrCast([*]align(1) f64, layer.data)[0 .. layer.data.len / @sizeOf(f64)];
                    while (i < data.len) : (i += layer.components )
                        try writer.print("\t\t\t{d:.6} \n", .{ data[i..i + layer.components] });
                },
            }
        }
    }
}
|
||||||
|
|
||||||
|
// Upgrade old files to newer version
// Silent if hxa.version is newer than version
// Please upgrade right after loading, before working on the file
pub fn upgradeVersion(allocator: std.mem.Allocator, hxa: *File, version: u32) !void {
    // Nothing to do if the file is already at (or past) the requested version.
    if (hxa.version >= version) return;

    if (version > 2) {
        hxa.version = version;
        // v3 introduces edge stacks on geometry nodes; give each one an empty stack.
        for (hxa.nodes) |*node| {
            if (node.type == .geometry)
                node.content.geometry.edge_stack.layers = try allocator.alloc(Layer, 0);
        }
    }
}
|
117
src/test.zig
Normal file
117
src/test.zig
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
const std = @import("std");
|
||||||
|
const hxa = @import("HxA.zig");
|
||||||
|
|
||||||
|
const allocator = std.testing.allocator;
|
||||||
|
|
||||||
|
test "Load and free HxA file v1" {
    // Round-trip allocation check: std.testing.allocator fails the test on leaks.
    const teapot_v1 = try hxa.load(allocator, &try std.fs.cwd().openFile("Test/teapot_v1.hxa", .{ .read = true }));
    defer hxa.free(allocator, teapot_v1);
}

test "Load and free HxA file v3" {
    const teapot_v3 = try hxa.load(allocator, &try std.fs.cwd().openFile("Test/teapot_v3.hxa", .{ .read = true }));
    defer hxa.free(allocator, teapot_v3);
}

test "Load, save v1" {
    // Load, write back out, then compare against the original byte-for-byte.
    const teapot_v1 = try hxa.load(allocator, &try std.fs.cwd().openFile("Test/teapot_v1.hxa", .{ .read = true }));
    defer hxa.free(allocator, teapot_v1);
    try hxa.save(teapot_v1, &try std.fs.cwd().createFile("Test/teapot_v1_test.hxa", .{ .read = true }));

    try checkFileEql("Test/teapot_v1.hxa", "Test/teapot_v1_test.hxa");
}

test "Load, save v3" {
    const teapot_v3 = try hxa.load(allocator, &try std.fs.cwd().openFile("Test/teapot_v3.hxa", .{ .read = true }));
    defer hxa.free(allocator, teapot_v3);
    try hxa.save(teapot_v3, &try std.fs.cwd().createFile("Test/teapot_v3_test.hxa", .{ .read = true }));

    try checkFileEql("Test/teapot_v3.hxa", "Test/teapot_v3_test.hxa");
}

test "Load and convert HxA file v1 to v3" {
    // Upgrading a v1 file should produce output identical to the known-good v3 file.
    const teapot = try hxa.load(allocator, &try std.fs.cwd().openFile("Test/teapot_v1.hxa", .{ .read = true }));
    defer hxa.free(allocator, teapot);

    try hxa.upgradeVersion(allocator, teapot, 3);
    try hxa.save(teapot, &try std.fs.cwd().createFile("Test/teapot_converted_v1_v3.hxa", .{ .read = true }));

    try checkFileEql("Test/teapot_v3.hxa", "Test/teapot_converted_v1_v3.hxa");
}
|
||||||
|
|
||||||
|
test "Load and print HxA file v1" {
    // Print to a scratch file and compare against the golden print output.
    const file = try std.fs.cwd().createFile("Test/teapot_v1_print_test.txt", .{});
    defer file.close();

    const writer = &file.writer();

    const teapot_v1 = try hxa.load(allocator, &try std.fs.cwd().openFile("Test/teapot_v1.hxa", .{ .read = true }));
    defer hxa.free(allocator, teapot_v1);

    try hxa.print(teapot_v1, writer, .data);

    try checkFileEql("Test/teapot_v1_print.txt", "Test/teapot_v1_print_test.txt");
}

test "Load and print HxA file v3" {
    const file = try std.fs.cwd().createFile("Test/teapot_v3_print_test.txt", .{});
    defer file.close();

    const writer = &file.writer();

    const teapot_v3 = try hxa.load(allocator, &try std.fs.cwd().openFile("Test/teapot_v3.hxa", .{ .read = true }));
    defer hxa.free(allocator, teapot_v3);

    try hxa.print(teapot_v3, writer, .data);

    try checkFileEql("Test/teapot_v3_print.txt", "Test/teapot_v3_print_test.txt");
}
|
||||||
|
|
||||||
|
// Compare two files byte-by-byte; fails the test (via expect) if their sizes
// or contents differ, printing the first differing offset and a diff count.
fn checkFileEql(filepath1: []const u8, filepath2: []const u8) !void {
    const file1 = &try std.fs.cwd().openFile(filepath1, .{ .read = true });
    defer file1.close();
    const file2 = &try std.fs.cwd().openFile(filepath2, .{ .read = true });
    defer file2.close();

    const file1_stat = try file1.stat();
    const file2_stat = try file2.stat();

    if (file1_stat.size != file2_stat.size) {
        std.debug.print("Files are of different sizes.\n\t{s}: {}\n{s}: {}\n", .{ filepath1, file1_stat.size, filepath2, file2_stat.size });
        try std.testing.expect(false);
    }

    const contents1 = try allocator.alloc(u8, file1_stat.size);
    defer allocator.free(contents1);

    const contents2 = try allocator.alloc(u8, file2_stat.size);
    defer allocator.free(contents2);

    _ = try file1.read(contents1);
    _ = try file2.read(contents2);

    var i: usize = 0;
    var diffs: usize = 0;
    while (i < contents1.len) : (i += 1) {
        if (contents1[i] != contents2[i]) {
            if (diffs == 0)
                std.debug.print("Differences were detected.\n\tFirst Diff at: {X}\n", .{ i });
            diffs += 1;
        }
    }
    if (diffs != 0) {
        // BUG FIX: previously printed `i` (the total byte count after the loop)
        // instead of the actual number of differing bytes.
        std.debug.print("\tTotal Differences: {}\n", .{ diffs });
        try std.testing.expect(false);
    }
}
|
218
src/types.zig
Normal file
218
src/types.zig
Normal file
@ -0,0 +1,218 @@
|
|||||||
|
//
|
||||||
|
// HxA is a interchangeable graphics asset format. Written by Eskil Steenberg. @quelsolaar / eskil 'at' obsession 'dot' se / www.quelsolaar.com
|
||||||
|
//
|
||||||
|
|
||||||
|
//
|
||||||
|
// TODO: Structure
|
||||||
|
//
|
||||||
|
// HxA is designed to be extremely simple to parse, and is therefore based around conventions.
|
||||||
|
// It has a few basic structures, and depending on how they are used they mean different things.
|
||||||
|
// This means that you can implement a tool that loads the entire file, modifies the parts it cares about and leaves the rest intact.
|
||||||
|
// It is also possible to write a tool that makes all data in the file editable without the need to understand its use.
|
||||||
|
// It is also possible for anyone to use the format to store auxiliary data.
|
||||||
|
// Anyone who wants to store data not covered by a convention can submit a convention to extend the format.
|
||||||
|
// There should never be a convention for storing the same data in two differed ways.
|
||||||
|
|
||||||
|
// The data is stored in a number of nodes that are kept in an array.
|
||||||
|
// Each node stores an array of meta data.
|
||||||
|
// Meta data can describe anything you want, and a lot of conventions will use meta data to store additional information, for things like transforms, lights, shaders and animation.
|
||||||
|
// Data for Vertices, Corners, Faces, and Pixels are stored in named layer stacks. Each stack consists of a number of named layers. All layers in the stack have the same number of elements.
|
||||||
|
// Each layer describes one property of the primitive. Each layer can have multiple channels and each layer can store data of a different type.
|
||||||
|
|
||||||
|
// HxA stores 3 kinds of nodes
|
||||||
|
// -Pixel data.
|
||||||
|
// -Polygon geometry data.
|
||||||
|
// -Meta data only.
|
||||||
|
|
||||||
|
// Pixel Nodes stores pixels in a layer stack. A layer may store things like Albedo, Roughness, Reflectance, Light maps, Masks, Normal maps, and Displacements.
|
||||||
|
// Layers use the channels of the layers to store things like color.
|
||||||
|
|
||||||
|
// Geometry data is stored in 3 separate layer stacks for: vertex data, corner data and face data.
|
||||||
|
// The vertex data stores things like vertices, blend shapes, weight maps, and vertex colors.
|
||||||
|
// The first layer in a vertex stack has to be a 3 channel layer named "position" describing the base position of the vertices.
|
||||||
|
// The corner stack describes data per corner or edge of the polygons. It can be used for things like UV, normals, and adjacency.
|
||||||
|
// The first layer in a corner stack has to be a 1 channel integer layer named "index" describing the vertices used to form polygons.
|
||||||
|
// The last value in each polygon is stored as -(index + 1) (i.e. negated and offset by one, making it negative) to indicate the end of the polygon.
|
||||||
|
|
||||||
|
// Example:
|
||||||
|
// A quad and a tri with the vertex index:
|
||||||
|
// [0, 1, 2, 3] [1, 4, 2]
|
||||||
|
// are stored as:
|
||||||
|
// [0, 1, 2, -4, 1, 4, -3]
|
||||||
|
|
||||||
|
// The face stack stores values per face. the length of the face stack has to match the number of negative values in the index layer in the corner stack.
|
||||||
|
// The face stack can be used to store things like material index.
|
||||||
|
|
||||||
|
//
|
||||||
|
// Storage
|
||||||
|
//
|
||||||
|
// All data is stored in little endian byte order with no padding. The layout mirrors the struct defined below with a few exceptions.
|
||||||
|
// All names are stored as a u8 indicating the length of the name followed by that many characters. Termination is not stored in the file.
|
||||||
|
// Text strings stored in meta data are stored the same way as names, but instead of a u8 for size a u32 is used.
|
||||||
|
|
||||||
|
// The first 4 bytes of every HxA file spell "HxA" (the string literal's
// terminating zero supplies the 4th byte). The bytes are reinterpreted as a
// native u32 here, matching a u32 read verbatim from the start of a file.
// NOTE: the two-argument @ptrCast form targets Zig <= 0.10, consistent with
// the rest of this file.
pub const MAGIC_NUMBER = @ptrCast(*const u32, "HxA").*;

// Latest format version understood by this implementation
// (edge stacks were added in version 3, see Node.content.Geometry.edge_stack).
pub const VERSION: u32 = 3;

// Names are length-prefixed with a u8, so they can be at most 255 bytes long.
// Deprecated misspelling, kept so existing callers keep compiling.
pub const NAME_MAX_LENGHT: u8 = 255;

// Correctly spelled alias for NAME_MAX_LENGHT; prefer this in new code.
pub const NAME_MAX_LENGTH: u8 = NAME_MAX_LENGHT;
|
||||||
|
|
||||||
|
//
|
||||||
|
// HxA stores 3 types of nodes
|
||||||
|
//
|
||||||
|
// What a node contains; stored in the file as a single u8.
pub const NodeType = enum(u8) {
    meta, // Node only containing meta data
    geometry, // Node containing a geometry mesh and meta data
    image, // Node containing a 1D, 2D, 3D, or Cube image and meta data
};
|
||||||
|
|
||||||
|
//
|
||||||
|
// Pixel data is arranged in the following configurations
|
||||||
|
//
|
||||||
|
// Dimensionality of an image node's pixel data; stored in the file as a u8.
pub const ImageType = enum(u8) {
    cube, // 6-sided cube in the order: +x, -x, +y, -y, +z, -z
    @"1d", // One dimensional pixel data
    @"2d", // Two dimensional pixel data
    @"3d", // Three dimensional pixel data
};
|
||||||
|
|
||||||
|
// The type of a Meta entry's values; stored in the file as a u8.
// Selects which member of Meta.values is active.
pub const MetaType = enum(u8) {
    u64, // array of unsigned 64-bit integers
    f64, // array of 64-bit floats
    node, // array of references to other nodes
    text, // text string (length stored as a u32, no termination — see "Storage" above)
    binary, // raw binary data
    meta, // nested array of Meta entries (allows tree structures)
};
|
||||||
|
|
||||||
|
// A named, typed meta data entry (key/value). Which member of `values` is
// active is determined by `type`; `len` gives how many values are stored.
pub const Meta = struct {
    name: []u8, // Name of meta data value
    type: MetaType, // Type of values - stored in the file as a u8
    len: u32, // How many values are stored / The length of the stored text string (excluding termination)
    // extern union: untagged, so the active member is implied solely by `type`.
    values: extern union {
        u64: []u64,
        f64: []f64,
        node: []Node, // A reference to another Node
        text: []u8,
        binary: []u8,
        meta: []Meta, // nested meta data
    },
};
|
||||||
|
|
||||||
|
//
|
||||||
|
// HxA stores layer data in the following types
|
||||||
|
//
|
||||||
|
//
// HxA stores layer data in the following types; stored in the file as a u8.
//
pub const LayerType = enum(u8) {
    u8, // unsigned 8-bit integer values
    i32, // signed 32-bit integer values
    f32, // 32-bit float values
    f64, // 64-bit float values
};
|
||||||
|
|
||||||
|
//
|
||||||
|
// Layers are arrays of data used to store geometry and pixel data
|
||||||
|
//
|
||||||
|
// One property array of a primitive (vertex/corner/edge/face/pixel).
pub const Layer = struct {
    name: []u8, // Name of the layer. List of predefined names for common usages like uv, reference, blendshapes, weights...
    components: u8, // 2 for uv, 3 for xyz or rgb, 4 for rgba. From 1 - 255 is legal TODO: 0 check
    type: LayerType, // stored in the file as a u8
    data: []u8, // raw bytes of the layer's values; interpretation depends on `type` and `components`
};
|
||||||
|
|
||||||
|
// A collection of layers. All layers in a stack describe the same elements,
// so they all have the same number of entries (see the format overview above).
pub const LayerStack = struct {
    len: u32, // Number of layers in a stack
    layers: []Layer, // An array of layers
};
|
||||||
|
|
||||||
|
//
|
||||||
|
// A file consists of an array of nodes. All nodes have meta data. Geometry nodes have geometry. Image nodes have pixel data
|
||||||
|
//
|
||||||
|
// One entry of a file's node array. Every node carries meta data; geometry
// and image nodes additionally carry content (selected by `type`).
pub const Node = struct {
    type: NodeType, // stored as u8 in file
    len: u32, // how many meta data key/values are stored in the node
    meta: []Meta, // array of key/values
    // Untagged union: which member is valid is implied by `type`.
    content: extern union { // extern because zig includes extra fields for safety checking and that messes with later code (search: switch (node.type))
        geometry: Geometry,
        image: Image,

        const Geometry = struct {
            vertex_len: u32, // Number of vertices
            vertex_stack: LayerStack, // Stack of vertex arrays. The first layer is always the vertex positions
            edge_corner_len: u32, // Number of corners
            corner_stack: LayerStack, // Stack of corner arrays: The first layer is always a reference array (see below) TODO: see below
            edge_stack: LayerStack, // Stack of edge arrays // Version > 2
            face_len: u32, // Number of polygons
            face_stack: LayerStack, // Stack of per polygon data.
        };

        const Image = struct {
            type: ImageType = .cube, // type of image
            resolution: [3]u32, // resolution in X, Y and Z dimensions
            image_stack: LayerStack, // the number of values in the stack is equal to the number of pixels depending on resolution
        };
    },
};
|
||||||
|
|
||||||
|
// In-memory representation of a whole HxA file.
pub const File = struct {
    // The file begins with a file identifier. The first 4 bytes spell "HxA". See definition of MAGIC_NUMBER. Since the magic number is always the same we do not store it in this structure, even if it is always present in files.
    // magic_number: u32
    version: u32, // VERSION
    len: u32, // number of nodes in the file
    nodes: []Node, // array of nodes
};
|
||||||
|
|
||||||
|
//
|
||||||
|
// Conventions
|
||||||
|
//
|
||||||
|
// Much of HxA's use is based on conventions. HxA lets users store arbitrary data in its structure that can be parsed but whose semantic meaning does not need to be understood.
|
||||||
|
// A few conventions are hard, and some are soft.
|
||||||
|
// Hard conventions HAVE to be followed by users in order to produce a valid file. Hard conventions simplify parsing because the parser can make some assumptions.
|
||||||
|
// Soft conventions are basically recommendations of how to store common data.
|
||||||
|
// Well-known layer and meta data names (see "Conventions" above). Hard
// conventions are mandatory for valid files; soft conventions are
// recommended names for common data.
// NOTE(review): the format overview above calls the base vertex layer
// "position" and the base corner layer "index", while these hard-convention
// constants use "vertex" and "reference" — verify against the upstream HxA
// reference before relying on either spelling.
pub const Conventions = struct {
    Hard: Hard = Hard {},
    Soft: Soft = Soft {},

    const Hard = struct {
        base_vertex_layer_name: []const u8 = "vertex", // required first layer of the vertex stack
        base_vertex_layer_id: u32 = 0,
        base_vertex_layer_components: u32 = 3, // x, y, z position
        base_corner_layer_name: []const u8 = "reference", // required first layer of the corner stack
        base_corner_layer_id: u32 = 0,
        base_corner_layer_components: u32 = 1,
        base_corner_layer_type: type = i32, // signed: negative values terminate polygons
        edge_neighbour_layer_name: []const u8 = "neighbour",
        edge_neighbour_layer_type: type = i32,
    };

    const Soft = struct {
        Geometry: Geometry = Geometry {},
        Image: Image = Image {},
        Tags: Tags = Tags {},

        // Recommended layer names for geometry nodes.
        const Geometry = struct {
            sequence0: []const u8 = "sequence",
            uv0: []const u8 = "uv",
            normals: []const u8 = "normal",
            binormal: []const u8 = "binormal",
            tangent: []const u8 = "tangent",
            color: []const u8 = "color",
            creases: []const u8 = "creases",
            selection: []const u8 = "select",
            skin_weight: []const u8 = "skin_weight",
            skin_reference: []const u8 = "skin_reference",
            blendshape: []const u8 = "blendshape",
            add_blendshape: []const u8 = "addblendshape",
            material_id: []const u8 = "material",
            group_id: []const u8 = "group",
        };

        // Recommended layer names for image nodes.
        const Image = struct {
            albedo: []const u8 = "albedo",
            light: []const u8 = "light",
            displacement: []const u8 = "displacement",
            distortion: []const u8 = "distortion",
            ambient_occlusion: []const u8 = "ambient_occlusion",
        };

        // Recommended meta data key names.
        const Tags = struct {
            name: []const u8 = "name",
            transform: []const u8 = "transform",
        };
    };
};
|
Loading…
Reference in New Issue
Block a user