From cce38432b434640e6c3cda1bd42b9f3584d390e7 Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Wed, 29 Jan 2025 15:15:44 +0100 Subject: [PATCH 01/18] Removed / deactivated pattern recognition and advanced analysis features --- examples/pattern_recognition.zig | 196 ------------------ src/analysis/correlator.zig | 1 - src/analysis/predictor.zig | 1 - .../patterns.zig | 0 src/nexlog.zig | 34 +-- 5 files changed, 17 insertions(+), 215 deletions(-) delete mode 100644 examples/pattern_recognition.zig delete mode 100644 src/analysis/correlator.zig delete mode 100644 src/analysis/predictor.zig rename src/{analysis => analysis_canceled}/patterns.zig (100%) diff --git a/examples/pattern_recognition.zig b/examples/pattern_recognition.zig deleted file mode 100644 index f5f112d..0000000 --- a/examples/pattern_recognition.zig +++ /dev/null @@ -1,196 +0,0 @@ -// examples/pattern_recognition.zig -const std = @import("std"); -const nexlog = @import("nexlog"); - -fn simulateComplexLogs() ![]const []const u8 { - return &[_][]const u8{ - "User session 123e4567-e89b-12d3-a456-426614174000 started from IP 192.168.1.100 with client_id=mobile_app", - "User session 987fcdeb-51a2-12d3-a456-426614174000 started from IP 192.168.1.101 with client_id=web_client", - "Session 123e4567-e89b-12d3-a456-426614174000 ended after 3600 seconds", - "Error: Database connection failed after 5 retries - err_code=DB_001", - "Critical Error: Master node 192.168.1.200 unreachable - cluster_health=degraded", - "Warning: High latency detected (150ms) on endpoint /api/users", - "Performance Metric: Query latency=50ms endpoint=/api/products method=GET", - "System Metric: Memory usage=2.5GB, CPU=75%, Disk=80%", - "Application Metric: Cache hit_rate=85.5% size=1.2GB", - "Event: {\"type\": \"user_action\", \"action\": \"purchase\", \"amount\": 150.50, \"currency\": \"USD\"}", - "Event: {\"type\": \"user_action\", \"action\": \"refund\", \"amount\": 75.25, \"currency\": \"USD\"}", - "Error: Transaction 12345 failed - {\"error\": \"insufficient_funds\", \"account\": \"user_123\", \"required\": 100.00, \"available\": 75.50}", - "Error: Authentication failed for user admin@example.com - {\"reason\": \"invalid_2fa\", \"attempts\": 3, \"next_try\": \"5min\"}", - }; -} - -fn processLogBatch(analyzer: *nexlog.PatternAnalyzer, logs: []const []const u8) !void { - for (logs) |msg| { - _ = try analyzer.analyzeMessage(msg); - } -} - -fn handleBufferFull(analyzer: *nexlog.PatternAnalyzer) !void { - const stdout = std.io.getStdOut().writer(); - try stdout.print("Performing emergency cleanup...\n", .{}); - - // Custom cleanup logic could go here - _ = analyzer; -} - -fn printErrorSet(comptime T: type) void { - const stderr = std.io.getStdErr().writer(); - stderr.print("Error set: {}\n", .{@typeInfo(@typeInfo(T).Fn.return_type.?).ErrorUnion.error_set}) catch {}; -} - -/// Main function demonstrating advanced pattern analysis with auto-categorization and variable detection. -pub fn main() !void { - var gpa = std.heap.GeneralPurposeAllocator(.{}){}; - defer _ = gpa.deinit(); - const allocator = gpa.allocator(); - - const stdout = std.io.getStdOut().writer(); - try stdout.print("\n=== Advanced Pattern Analysis Example ===\n\n", .{}); - - // Define category rules for auto-categorization. 
- const category_rules = &[_]nexlog.CategoryRule{ - .{ - .category = "security", - .keywords = &[_][]const u8{ - "error", "fail", "authentication", "auth", - "breach", "malware", "failed", "transaction", - "insufficient_funds", "invalid_2fa", - }, - .threshold = 1, // Threshold adjusted as needed - }, - .{ - .category = "performance", - .keywords = &[_][]const u8{ - "latency", "slow", "timeout", "performance", - "metric", "memory", "cpu", "disk", - "cache", "hit_rate", - }, - .threshold = 1, - }, - .{ - .category = "event", - .keywords = &[_][]const u8{ - "event", "user_action", "purchase", "refund", - "session", "login", "logout", - }, - .threshold = 1, - }, - .{ - .category = "metric", - .keywords = &[_][]const u8{ - "memory", "cpu", "disk", "cache", "hit_rate", - "usage", "latency", - }, - .threshold = 1, - }, - // Optional: Add more categories as needed - .{ - .category = "warning", - .keywords = &[_][]const u8{ - "warn", "warning", "degraded", - }, - .threshold = 1, - }, - .{ - .category = "debug", - .keywords = &[_][]const u8{ - "debug", - }, - .threshold = 1, - }, - .{ - .category = "info", - .keywords = &[_][]const u8{ - "info", - }, - .threshold = 1, - }, - }; - - // Define variable rules for variable detection. - // Note: Since regex support is not yet implemented, the `matchesRegex` function uses a stub. - const var_rules = &[_]nexlog.VariableRule{ - .{ .name = "ip", .regex = "^\\d+\\.\\d+\\.\\d+\\.\\d+$", .var_type = .ip_address }, - .{ .name = "number", .regex = "^\\d+$", .var_type = .number }, - .{ .name = "uuid", .regex = "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", .var_type = .uuid }, - }; - - // Initialize the PatternAnalyzer with category and variable rules. - var analyzer = nexlog.PatternAnalyzer.init(allocator, .{ - .similarity_threshold = 0.90, - .max_patterns = 20, - .max_pattern_age = 60 * 60, // 1 hour - .variable_rules = var_rules, - .category_rules = category_rules, - }); - defer analyzer.deinit(); - - // Simulate a batch of complex log messages. - const logs = try simulateComplexLogs(); - - // Process each log and print its pattern, category, and detected variables. - for (logs) |msg| { - const pattern_opt = try analyzer.analyzeMessage(msg); - if (pattern_opt) |pattern| { - const p = pattern; - try stdout.print("\nLog: {s}\n", .{msg}); - try stdout.print("Pattern Type: {s}\n", .{@tagName(p.pattern_type)}); - try stdout.print("Category: {s}\n", .{p.category}); - - // Print detected variables. - if (p.variables.items.len > 0) { - try stdout.print("Variables Detected:\n", .{}); - for (p.variables.items) |vari| { - try stdout.print(" - Type: {s}, Value: {s}\n", .{ - @tagName(vari.var_type), - vari.seen_values.items[0], - }); - } - } else { - try stdout.print("No variables detected.\n", .{}); - } - } else { - try stdout.print("\nLog: {s}\nPattern: None (possibly categorized as 'uncategorized')\n", .{msg}); - } - } - - try stdout.print("\n=== Pattern Analysis Results ===\n", .{}); - try stdout.print("Total Patterns Detected: {d}\n", .{analyzer.getPatternCount()}); - - try stdout.print("\n=== Testing Error Conditions ===\n", .{}); - - // Test with a very large message to trigger OutOfMemory error. 
- { - const large_message_size = 1000000; // 1,000,000 characters - const large_message = try allocator.alloc(u8, large_message_size); - defer allocator.free(large_message); - for (large_message) |*c| { - c.* = 'A'; - } - - const result = analyzer.analyzeMessage(large_message) catch |err| { - try stdout.print("Expected error occurred while processing large message: {s}\n", .{@errorName(err)}); - return; - }; - - if (result == null) { - try stdout.print("Large message was categorized as 'uncategorized'.\n", .{}); - } - } - - // Test with an empty message. - { - const empty_msg = ""; - const result = analyzer.analyzeMessage(empty_msg) catch |err| { - try stdout.print("Expected error occurred while processing empty message: {s}\n", .{@errorName(err)}); - return; - }; - - if (result == null) { - try stdout.print("Empty message was categorized as 'uncategorized'.\n", .{}); - } - } - - try stdout.print("\nExample completed successfully!\n", .{}); -} diff --git a/src/analysis/correlator.zig b/src/analysis/correlator.zig deleted file mode 100644 index 8d1c8b6..0000000 --- a/src/analysis/correlator.zig +++ /dev/null @@ -1 +0,0 @@ - diff --git a/src/analysis/predictor.zig b/src/analysis/predictor.zig deleted file mode 100644 index 8d1c8b6..0000000 --- a/src/analysis/predictor.zig +++ /dev/null @@ -1 +0,0 @@ - diff --git a/src/analysis/patterns.zig b/src/analysis_canceled/patterns.zig similarity index 100% rename from src/analysis/patterns.zig rename to src/analysis_canceled/patterns.zig diff --git a/src/nexlog.zig b/src/nexlog.zig index 293d426..87fc317 100644 --- a/src/nexlog.zig +++ b/src/nexlog.zig @@ -9,11 +9,11 @@ pub const core = struct { pub const types = @import("core/types.zig"); }; -pub const analysis = struct { - pub const patterns = @import("analysis/patterns.zig"); - // pub const predictor = @import("analysis/predictor.zig"); - // pub const correlator = @import("analysis/correlator.zig"); -}; +//pub const analysis = struct { +//pub const patterns = @import("analysis/patterns.zig"); +// pub const predictor = @import("analysis/predictor.zig"); +// pub const correlator = @import("analysis/correlator.zig"); +//}; pub const utils = struct { pub const buffer = @import("utils/buffer.zig"); @@ -42,20 +42,20 @@ pub const getDefaultLogger = core.init.getDefaultLogger; pub const LogBuilder = core.init.LogBuilder; // Re-export pattern analysis types and functions -pub const PatternType = core.types.PatternType; -pub const PatternVariable = core.types.PatternVariable; -pub const VarType = core.types.VarType; -pub const PatternMetadata = core.types.PatternMetadata; -pub const PatternMatch = core.types.PatternMatch; -pub const PatternConfig = core.types.PatternConfig; +// pub const PatternType = core.types.PatternType; +// pub const PatternVariable = core.types.PatternVariable; +// pub const VarType = core.types.VarType; +// pub const PatternMetadata = core.types.PatternMetadata; +// pub const PatternMatch = core.types.PatternMatch; +// pub const PatternConfig = core.types.PatternConfig; // Re-export analysis functionality -pub const PatternAnalyzer = analysis.patterns.PatternAnalyzer; -pub const Pattern = analysis.patterns.Pattern; -// pub const PatternPredictor = analysis.predictor.PatternPredictor; -// pub const PatternCorrelator = analysis.correlator.PatternCorrelator; -pub const CategoryRule = analysis.patterns.CategoryRule; -pub const VariableRule = analysis.patterns.VariableRule; +// pub const PatternAnalyzer = analysis.patterns.PatternAnalyzer; +// pub const Pattern = 
analysis.patterns.Pattern; +// // pub const PatternPredictor = analysis.predictor.PatternPredictor; +// // pub const PatternCorrelator = analysis.correlator.PatternCorrelator; +// pub const CategoryRule = analysis.patterns.CategoryRule; +// pub const VariableRule = analysis.patterns.VariableRule; // Re-export utility functionality pub const CircularBuffer = utils.buffer.CircularBuffer; From 6c4b68e505bf2dbe8124b636ba23c6e7ddc67d99 Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Wed, 29 Jan 2025 16:20:31 +0100 Subject: [PATCH 02/18] Improved Error Handling for network handler --- src/output/network.zig | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/output/network.zig b/src/output/network.zig index 4a24ede..2a22f4a 100644 --- a/src/output/network.zig +++ b/src/output/network.zig @@ -132,14 +132,14 @@ pub const NetworkHandler = struct { now - self.last_flush >= self.config.flush_interval_ms / 1000; } - fn ensureConnection(self: *Self) !?std.net.Stream { + fn ensureConnection(self: *Self) !std.net.Stream { const now = std.time.timestamp(); // Check if we need to reconnect if (self.connection) |conn| { return conn; } else if (now < self.reconnect_time) { - return null; + return error.ReconnectPending; } // Try to connect @@ -148,6 +148,7 @@ pub const NetworkHandler = struct { self.config.endpoint.host, self.config.endpoint.port, ) catch |err| { + std.log.err("Failed to connect to {}:{} - {}", .{ self.config.endpoint.host, self.config.endpoint.port, err }); // Set reconnect time on failure self.reconnect_time = now + @divTrunc(@as(i64, @intCast(self.config.retry_delay_ms)), 1000); return err; @@ -157,6 +158,7 @@ pub const NetworkHandler = struct { if (self.config.endpoint.secure) { // Note: SSL implementation would go here // For now, we'll just error out + std.log.warn("SSL is not implemented yet, closing connection."); stream.close(); return error.SslNotImplemented; } From e458190d63e6a43c1875a26b4a9ed355de918c4f Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Wed, 29 Jan 2025 16:35:41 +0100 Subject: [PATCH 03/18] Added unit tests for error handling --- src/nexlog.zig | 1 + src/output/network.zig | 20 ++++++++++++++++++++ tests/network_tests.zig | 26 ++++++++++++++++++++++++++ 3 files changed, 47 insertions(+) create mode 100644 tests/network_tests.zig diff --git a/src/nexlog.zig b/src/nexlog.zig index 87fc317..782a2d6 100644 --- a/src/nexlog.zig +++ b/src/nexlog.zig @@ -24,6 +24,7 @@ pub const output = struct { pub const console = @import("output/console.zig"); pub const file = @import("output/file.zig"); pub const handler = @import("output/handlers.zig"); + pub const network = @import("output/network.zig"); }; // Re-export main types and functions diff --git a/src/output/network.zig b/src/output/network.zig index 2a22f4a..ec21f08 100644 --- a/src/output/network.zig +++ b/src/output/network.zig @@ -2,6 +2,7 @@ const std = @import("std"); const types = @import("../core/types.zig"); const errors = @import("../core/errors.zig"); const buffer = @import("../utils/buffer.zig"); +const expect = std.testing.expect; pub const NetworkEndpoint = struct { host: []const u8, @@ -167,3 +168,22 @@ pub const NetworkHandler = struct { return stream; } }; + +test "NetworkHandler initialization and deinitialization" { + var gpa = std.heap.GeneralPurposeAllocator(.{}){}; + const allocator = gpa.allocator(); + + const config = NetworkConfig{ + .endpoint = .{ + .host = "example.com", + .port = 8080, + }, + }; + + var handler = try NetworkHandler.init(allocator, config); 
+ defer handler.deinit(); + + try expect(handler.config.endpoint.host.len == config.endpoint.host.len); + try expect(handler.config.endpoint.port == config.endpoint.port); + try expect(handler.circular_buffer.buffer.len == config.buffer_size); +} diff --git a/tests/network_tests.zig b/tests/network_tests.zig new file mode 100644 index 0000000..1407ae8 --- /dev/null +++ b/tests/network_tests.zig @@ -0,0 +1,26 @@ +const std = @import("std"); +const expect = std.testing.expect; +const NetworkHandler = @import("nexlog").output.network.NetworkHandler; +const NetworkConfig = @import("nexlog").output.network.NetworkConfig; + +test "NetworkHandler initialization and deinitialization" { + var gpa = std.heap.GeneralPurposeAllocator(.{}){}; + const allocator = gpa.allocator(); + + const config = NetworkConfig{ + .endpoint = .{ + .host = "example.com", + .port = 8080, + .secure = true, + }, + }; + + var handler = try NetworkHandler.init(allocator, config); + defer handler.deinit(); + + try expect(handler.config.endpoint.host.len == config.endpoint.host.len); + try expect(handler.config.endpoint.port == config.endpoint.port); + try expect(handler.circular_buffer.buffer.len == config.buffer_size); + + std.debug.print("NetworkHandler initialized\n", .{}); +} From c313547576a90ff80eb649a86fa9615045215be1 Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Wed, 29 Jan 2025 16:36:32 +0100 Subject: [PATCH 04/18] Commented patter analysis tests / will be removed shortly --- tests/pattern_tests.zig | 448 ++++++++++++++++++++-------------------- 1 file changed, 224 insertions(+), 224 deletions(-) diff --git a/tests/pattern_tests.zig b/tests/pattern_tests.zig index 3179a82..093710d 100644 --- a/tests/pattern_tests.zig +++ b/tests/pattern_tests.zig @@ -1,224 +1,224 @@ -// tests/pattern_tests.zig -const std = @import("std"); -const testing = std.testing; -const nexlog = @import("nexlog"); -const types = nexlog.core.types; -test "pattern: basic type detection" { - var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{}); - defer analyzer.deinit(); - - const err_pattern = try analyzer.analyzeMessage("Connection error occurred"); - try testing.expect(err_pattern != null); - try testing.expectEqual(nexlog.PatternType.err, err_pattern.?.pattern_type); - - const metric_pattern = try analyzer.analyzeMessage("CPU usage metric: 85%"); - try testing.expect(metric_pattern != null); - try testing.expectEqual(nexlog.PatternType.metric, metric_pattern.?.pattern_type); - - const event_pattern = try analyzer.analyzeMessage("System startup event"); - try testing.expect(event_pattern != null); - try testing.expectEqual(nexlog.PatternType.event, event_pattern.?.pattern_type); -} - -test "pattern: similarity matching" { - var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{}); - defer analyzer.deinit(); - - // Test similar messages - const msg1 = "User admin logged in from 192.168.1.1"; - const msg2 = "User john logged in from 192.168.1.2"; - - const pattern1 = try analyzer.analyzeMessage(msg1); - try testing.expect(pattern1 != null); - - const pattern2 = try analyzer.analyzeMessage(msg2); - try testing.expect(pattern2 != null); - - // Should detect these as the same pattern - try testing.expectEqual(pattern1.?.hash, pattern2.?.hash); -} - -test "pattern: variable detection" { - var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{}); - defer analyzer.deinit(); - - // Test number variable - const msg_number = "Process used 1234 MB of memory"; - const pattern_number = try 
analyzer.analyzeMessage(msg_number); - try testing.expect(pattern_number != null); - try testing.expect(pattern_number.?.variables.items.len > 0); - try testing.expectEqual(nexlog.VarType.number, pattern_number.?.variables.items[0].var_type); - - // Test IP address variable - const msg_ip = "Connection from 192.168.1.1"; - const pattern_ip = try analyzer.analyzeMessage(msg_ip); - try testing.expect(pattern_ip != null); - try testing.expect(pattern_ip.?.variables.items.len > 0); - try testing.expectEqual(nexlog.VarType.ip_address, pattern_ip.?.variables.items[0].var_type); - - // Test email variable - const msg_email = "Email received from test@example.com"; - const pattern_email = try analyzer.analyzeMessage(msg_email); - try testing.expect(pattern_email != null); - try testing.expect(pattern_email.?.variables.items.len > 0); - try testing.expectEqual(nexlog.VarType.email, pattern_email.?.variables.items[0].var_type); -} - -test "pattern: cleanup and limits" { - var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{ - .max_patterns = 2, - .max_pattern_age = 0, // Immediate cleanup - }); - defer analyzer.deinit(); - - // Add patterns up to limit - _ = try analyzer.analyzeMessage("First message"); - _ = try analyzer.analyzeMessage("Second message"); - _ = try analyzer.analyzeMessage("Third message"); // Should trigger cleanup - - try testing.expectEqual(@as(usize, 2), analyzer.getPatternCount()); -} - -test "pattern: metadata tracking" { - var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{}); - defer analyzer.deinit(); - - const msg = "Test message"; - const pattern = try analyzer.analyzeMessage(msg); - try testing.expect(pattern != null); - - // Test metadata - try testing.expectEqual(@as(u32, 1), pattern.?.metadata.frequency); - try testing.expect(pattern.?.metadata.first_seen > 0); - try testing.expect(pattern.?.metadata.last_seen > 0); - try testing.expect(pattern.?.metadata.confidence > 0); -} - -test "pattern: concurrent access" { - var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{}); - defer analyzer.deinit(); - - const ThreadContext = struct { - analyzer: *nexlog.PatternAnalyzer, - message: []const u8, - - fn run(ctx: @This()) !void { - var i: usize = 0; - while (i < 100) : (i += 1) { - _ = try ctx.analyzer.analyzeMessage(ctx.message); - } - } - }; - - var threads: [3]std.Thread = undefined; - const contexts = [_]ThreadContext{ - .{ .analyzer = &analyzer, .message = "Thread 1 message" }, - .{ .analyzer = &analyzer, .message = "Thread 2 message" }, - .{ .analyzer = &analyzer, .message = "Thread 3 message" }, - }; - - // Start threads - for (&threads, 0..) 
|*thread, i| { - thread.* = try std.Thread.spawn(.{}, ThreadContext.run, .{contexts[i]}); - } - - // Wait for threads - for (threads) |thread| { - thread.join(); - } - - // Verify thread safety - try testing.expect(analyzer.getPatternCount() > 0); -} - -test "pattern: custom pattern types" { - var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{}); - defer analyzer.deinit(); - - const custom_msg = "CUSTOM_TYPE: Special message"; - const pattern = try analyzer.analyzeMessage(custom_msg); - try testing.expect(pattern != null); - try testing.expectEqual(nexlog.PatternType.custom, pattern.?.pattern_type); -} - -// tests/pattern_tests.zig -test "auto categorization and variable rule test" { - const allocator = testing.allocator; - - // Define some category rules for testing: - // "security" requires at least two keywords from {"auth","breach","malware"} - // "performance" requires at least one keyword from {"latency","slow","timeout"} - const category_rules = &[_]nexlog.analysis.patterns.CategoryRule{ - .{ - .category = "security", - .keywords = &[_][]const u8{ "auth", "breach", "malware" }, - .threshold = 2, - }, - .{ - .category = "performance", - .keywords = &[_][]const u8{ "latency", "slow", "timeout" }, - .threshold = 1, - }, - }; - - // Define variable rules: - // If a token matches an IP regex, classify it as an ip_address variable. - // If a token is purely numeric, classify it as a number variable. - const var_rules = &[_]nexlog.analysis.patterns.VariableRule{ - .{ .name = "ip", .regex = "^\\d+\\.\\d+\\.\\d+\\.\\d+$", .var_type = .ip_address }, - .{ .name = "number", .regex = "^\\d+$", .var_type = .number }, - }; - - var analyzer = nexlog.analysis.patterns.PatternAnalyzer.init(allocator, .{ - .similarity_threshold = 0.85, - .max_pattern_age = 60 * 60 * 24, - .max_patterns = 1000, - .variable_rules = var_rules, - .category_rules = category_rules, - }); - defer analyzer.deinit(); - - // Test a message that should fall into the "security" category: - // Contains "auth" and "breach" (2 keywords required). - // Also includes an IP that should be detected as a variable. - const security_msg = "User auth breach detected from 192.168.1.100"; - const pattern = try analyzer.analyzeMessage(security_msg); - try testing.expect(pattern != null); - - // Unwrap the optional pattern - const p = pattern.?; - try testing.expectEqualStrings("security", p.category); - - // Check that the IP was detected as a variable - if (p.variables.items.len != 1) { - std.debug.print("Variables Detected (Expected 1, Found {}):\n", .{p.variables.items.len}); - for (p.variables.items) |vara| { - std.debug.print(" - Type: {any}, Value: {s}\n", .{ vara.var_type, vara.seen_values.items[0] }); - } - } - try testing.expectEqual(@as(usize, 1), p.variables.items.len); - const vari = p.variables.items[0]; - try testing.expectEqual(types.VarType.ip_address, vari.var_type); - try testing.expectEqualStrings("192.168.1.100", vari.seen_values.items[0]); - - // Test a message that should fall into the "performance" category: - // Contains "latency" which is one of the keywords required. 
- const perf_msg = "System latency is high"; - const pattern2 = try analyzer.analyzeMessage(perf_msg); - try testing.expect(pattern2 != null); - const p2 = pattern2.?; // Unwrap the optional - try testing.expectEqualStrings("performance", p2.category); - - // Test a message that contains a numeric variable but doesn't meet any category threshold: - const num_msg = "Request took 350ms"; - const pattern3 = try analyzer.analyzeMessage(num_msg); - try testing.expect(pattern3 != null); - const p3 = pattern3.?; // Unwrap the optional - // No category keywords, so it should be "uncategorized" - try testing.expectEqualStrings("uncategorized", p3.category); - // Check the numeric variable detection - try testing.expectEqual(@as(usize, 1), p3.variables.items.len); - const var3 = p3.variables.items[0]; - try testing.expectEqual(types.VarType.number, var3.var_type); - try testing.expectEqualStrings("350ms", var3.seen_values.items[0]); -} +// // tests/pattern_tests.zig +// const std = @import("std"); +// const testing = std.testing; +// const nexlog = @import("nexlog"); +// const types = nexlog.core.types; +// test "pattern: basic type detection" { +// var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{}); +// defer analyzer.deinit(); + +// const err_pattern = try analyzer.analyzeMessage("Connection error occurred"); +// try testing.expect(err_pattern != null); +// try testing.expectEqual(nexlog.PatternType.err, err_pattern.?.pattern_type); + +// const metric_pattern = try analyzer.analyzeMessage("CPU usage metric: 85%"); +// try testing.expect(metric_pattern != null); +// try testing.expectEqual(nexlog.PatternType.metric, metric_pattern.?.pattern_type); + +// const event_pattern = try analyzer.analyzeMessage("System startup event"); +// try testing.expect(event_pattern != null); +// try testing.expectEqual(nexlog.PatternType.event, event_pattern.?.pattern_type); +// } + +// test "pattern: similarity matching" { +// var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{}); +// defer analyzer.deinit(); + +// // Test similar messages +// const msg1 = "User admin logged in from 192.168.1.1"; +// const msg2 = "User john logged in from 192.168.1.2"; + +// const pattern1 = try analyzer.analyzeMessage(msg1); +// try testing.expect(pattern1 != null); + +// const pattern2 = try analyzer.analyzeMessage(msg2); +// try testing.expect(pattern2 != null); + +// // Should detect these as the same pattern +// try testing.expectEqual(pattern1.?.hash, pattern2.?.hash); +// } + +// test "pattern: variable detection" { +// var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{}); +// defer analyzer.deinit(); + +// // Test number variable +// const msg_number = "Process used 1234 MB of memory"; +// const pattern_number = try analyzer.analyzeMessage(msg_number); +// try testing.expect(pattern_number != null); +// try testing.expect(pattern_number.?.variables.items.len > 0); +// try testing.expectEqual(nexlog.VarType.number, pattern_number.?.variables.items[0].var_type); + +// // Test IP address variable +// const msg_ip = "Connection from 192.168.1.1"; +// const pattern_ip = try analyzer.analyzeMessage(msg_ip); +// try testing.expect(pattern_ip != null); +// try testing.expect(pattern_ip.?.variables.items.len > 0); +// try testing.expectEqual(nexlog.VarType.ip_address, pattern_ip.?.variables.items[0].var_type); + +// // Test email variable +// const msg_email = "Email received from test@example.com"; +// const pattern_email = try analyzer.analyzeMessage(msg_email); +// try 
testing.expect(pattern_email != null); +// try testing.expect(pattern_email.?.variables.items.len > 0); +// try testing.expectEqual(nexlog.VarType.email, pattern_email.?.variables.items[0].var_type); +// } + +// test "pattern: cleanup and limits" { +// var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{ +// .max_patterns = 2, +// .max_pattern_age = 0, // Immediate cleanup +// }); +// defer analyzer.deinit(); + +// // Add patterns up to limit +// _ = try analyzer.analyzeMessage("First message"); +// _ = try analyzer.analyzeMessage("Second message"); +// _ = try analyzer.analyzeMessage("Third message"); // Should trigger cleanup + +// try testing.expectEqual(@as(usize, 2), analyzer.getPatternCount()); +// } + +// test "pattern: metadata tracking" { +// var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{}); +// defer analyzer.deinit(); + +// const msg = "Test message"; +// const pattern = try analyzer.analyzeMessage(msg); +// try testing.expect(pattern != null); + +// // Test metadata +// try testing.expectEqual(@as(u32, 1), pattern.?.metadata.frequency); +// try testing.expect(pattern.?.metadata.first_seen > 0); +// try testing.expect(pattern.?.metadata.last_seen > 0); +// try testing.expect(pattern.?.metadata.confidence > 0); +// } + +// test "pattern: concurrent access" { +// var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{}); +// defer analyzer.deinit(); + +// const ThreadContext = struct { +// analyzer: *nexlog.PatternAnalyzer, +// message: []const u8, + +// fn run(ctx: @This()) !void { +// var i: usize = 0; +// while (i < 100) : (i += 1) { +// _ = try ctx.analyzer.analyzeMessage(ctx.message); +// } +// } +// }; + +// var threads: [3]std.Thread = undefined; +// const contexts = [_]ThreadContext{ +// .{ .analyzer = &analyzer, .message = "Thread 1 message" }, +// .{ .analyzer = &analyzer, .message = "Thread 2 message" }, +// .{ .analyzer = &analyzer, .message = "Thread 3 message" }, +// }; + +// // Start threads +// for (&threads, 0..) |*thread, i| { +// thread.* = try std.Thread.spawn(.{}, ThreadContext.run, .{contexts[i]}); +// } + +// // Wait for threads +// for (threads) |thread| { +// thread.join(); +// } + +// // Verify thread safety +// try testing.expect(analyzer.getPatternCount() > 0); +// } + +// test "pattern: custom pattern types" { +// var analyzer = nexlog.PatternAnalyzer.init(testing.allocator, .{}); +// defer analyzer.deinit(); + +// const custom_msg = "CUSTOM_TYPE: Special message"; +// const pattern = try analyzer.analyzeMessage(custom_msg); +// try testing.expect(pattern != null); +// try testing.expectEqual(nexlog.PatternType.custom, pattern.?.pattern_type); +// } + +// // tests/pattern_tests.zig +// test "auto categorization and variable rule test" { +// const allocator = testing.allocator; + +// // Define some category rules for testing: +// // "security" requires at least two keywords from {"auth","breach","malware"} +// // "performance" requires at least one keyword from {"latency","slow","timeout"} +// const category_rules = &[_]nexlog.analysis.patterns.CategoryRule{ +// .{ +// .category = "security", +// .keywords = &[_][]const u8{ "auth", "breach", "malware" }, +// .threshold = 2, +// }, +// .{ +// .category = "performance", +// .keywords = &[_][]const u8{ "latency", "slow", "timeout" }, +// .threshold = 1, +// }, +// }; + +// // Define variable rules: +// // If a token matches an IP regex, classify it as an ip_address variable. +// // If a token is purely numeric, classify it as a number variable. 
+// const var_rules = &[_]nexlog.analysis.patterns.VariableRule{ +// .{ .name = "ip", .regex = "^\\d+\\.\\d+\\.\\d+\\.\\d+$", .var_type = .ip_address }, +// .{ .name = "number", .regex = "^\\d+$", .var_type = .number }, +// }; + +// var analyzer = nexlog.analysis.patterns.PatternAnalyzer.init(allocator, .{ +// .similarity_threshold = 0.85, +// .max_pattern_age = 60 * 60 * 24, +// .max_patterns = 1000, +// .variable_rules = var_rules, +// .category_rules = category_rules, +// }); +// defer analyzer.deinit(); + +// // Test a message that should fall into the "security" category: +// // Contains "auth" and "breach" (2 keywords required). +// // Also includes an IP that should be detected as a variable. +// const security_msg = "User auth breach detected from 192.168.1.100"; +// const pattern = try analyzer.analyzeMessage(security_msg); +// try testing.expect(pattern != null); + +// // Unwrap the optional pattern +// const p = pattern.?; +// try testing.expectEqualStrings("security", p.category); + +// // Check that the IP was detected as a variable +// if (p.variables.items.len != 1) { +// std.debug.print("Variables Detected (Expected 1, Found {}):\n", .{p.variables.items.len}); +// for (p.variables.items) |vara| { +// std.debug.print(" - Type: {any}, Value: {s}\n", .{ vara.var_type, vara.seen_values.items[0] }); +// } +// } +// try testing.expectEqual(@as(usize, 1), p.variables.items.len); +// const vari = p.variables.items[0]; +// try testing.expectEqual(types.VarType.ip_address, vari.var_type); +// try testing.expectEqualStrings("192.168.1.100", vari.seen_values.items[0]); + +// // Test a message that should fall into the "performance" category: +// // Contains "latency" which is one of the keywords required. +// const perf_msg = "System latency is high"; +// const pattern2 = try analyzer.analyzeMessage(perf_msg); +// try testing.expect(pattern2 != null); +// const p2 = pattern2.?; // Unwrap the optional +// try testing.expectEqualStrings("performance", p2.category); + +// // Test a message that contains a numeric variable but doesn't meet any category threshold: +// const num_msg = "Request took 350ms"; +// const pattern3 = try analyzer.analyzeMessage(num_msg); +// try testing.expect(pattern3 != null); +// const p3 = pattern3.?; // Unwrap the optional +// // No category keywords, so it should be "uncategorized" +// try testing.expectEqualStrings("uncategorized", p3.category); +// // Check the numeric variable detection +// try testing.expectEqual(@as(usize, 1), p3.variables.items.len); +// const var3 = p3.variables.items[0]; +// try testing.expectEqual(types.VarType.number, var3.var_type); +// try testing.expectEqualStrings("350ms", var3.seen_values.items[0]); +// } From 931441c180b6ee9b4ff7666afcfff0cc4abb3970 Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Thu, 30 Jan 2025 17:16:15 +0100 Subject: [PATCH 05/18] fixed various errors, segfault, error handling, clean up logic, and more... 
/ for the json logger handler --- examples/json_logging.zig | 56 +++++++++++++++ src/core/errors.zig | 1 + src/nexlog.zig | 4 ++ src/output/json.zig | 114 +++++++++++++++++++++++++++++ src/utils/json.zig | 134 ++++++++++++++++++++++++++++++++++- test_logs/app.json | 5 ++ tests/json_handler_tests.zig | 77 ++++++++++++++++++++ 7 files changed, 390 insertions(+), 1 deletion(-) create mode 100644 examples/json_logging.zig create mode 100644 src/output/json.zig create mode 100644 test_logs/app.json create mode 100644 tests/json_handler_tests.zig diff --git a/examples/json_logging.zig b/examples/json_logging.zig new file mode 100644 index 0000000..0744071 --- /dev/null +++ b/examples/json_logging.zig @@ -0,0 +1,56 @@ +const std = @import("std"); +const nexlog = @import("nexlog"); +const JsonHandler = nexlog.output.json_handler.JsonHandler; + +pub fn main() !void { + // Initialize allocator + var gpa = std.heap.GeneralPurposeAllocator(.{}){}; + defer _ = gpa.deinit(); + const allocator = gpa.allocator(); + + // Create a directory for testing logs if it doesn't exist + const log_dir = "test_logs"; + try std.fs.cwd().makePath(log_dir); + + // Get the current working directory + var cwd_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; + const cwd = std.fs.cwd().realpath(".", &cwd_buf) catch unreachable; + + // Construct the absolute path for the log file within the log_dir + const log_file_path = std.fs.path.join(allocator, &[_][]const u8{ cwd, log_dir, "app.json" }) catch unreachable; + defer allocator.free(log_file_path); + + // Create a JSON handler + var json_handler = try JsonHandler.init(allocator, .{ + .min_level = .debug, + .pretty_print = true, // Optional: Makes the JSON output more readable + .output_file = log_file_path, + }); + defer json_handler.deinit(); + + // Create a logger + const logger = try nexlog.Logger.init(allocator, .{}); + defer logger.deinit(); + + // Add the JSON handler to the logger + try logger.addHandler(json_handler.toLogHandler()); + + // Create some basic metadata + const metadata = nexlog.LogMetadata{ + .timestamp = std.time.timestamp(), + .thread_id = 0, // Replace with actual thread ID in a real application + .file = @src().file, + .line = @src().line, + .function = @src().fn_name, + }; + + // Log some messages with different levels and optional fields + // Log some messages with different levels and optional fields + try logger.log(.info, "Application starting", .{}, metadata); + try logger.log(.debug, "This is a debug message", .{}, metadata); + try logger.log(.warn, "This is a warning message (code: {d})", .{123}, metadata); + try logger.log(.err, "An error occurred (code: {s})", .{"E_UNKNOWN"}, metadata); + + // Ensure all logs are written before exiting + try logger.flush(); +} diff --git a/src/core/errors.zig b/src/core/errors.zig index 81d4c0e..762af3e 100644 --- a/src/core/errors.zig +++ b/src/core/errors.zig @@ -13,6 +13,7 @@ pub const LogError = error{ FormattingError, FilterError, AlreadyInitialized, + NotInitialized, }; pub const FileError = error{ diff --git a/src/nexlog.zig b/src/nexlog.zig index 782a2d6..898dcc4 100644 --- a/src/nexlog.zig +++ b/src/nexlog.zig @@ -18,6 +18,7 @@ pub const core = struct { pub const utils = struct { pub const buffer = @import("utils/buffer.zig"); pub const pool = @import("utils/pool.zig"); + pub const json = @import("utils/json.zig"); }; pub const output = struct { @@ -25,6 +26,7 @@ pub const output = struct { pub const file = @import("output/file.zig"); pub const handler = @import("output/handlers.zig"); pub const 
network = @import("output/network.zig"); + pub const json_handler = @import("output/json.zig"); }; // Re-export main types and functions @@ -61,6 +63,8 @@ pub const LogBuilder = core.init.LogBuilder; // Re-export utility functionality pub const CircularBuffer = utils.buffer.CircularBuffer; pub const Pool = utils.pool.Pool; +pub const JsonValue = utils.json.JsonValue; +pub const JsonError = utils.json.JsonError; // Example test test "basic log test" { diff --git a/src/output/json.zig b/src/output/json.zig new file mode 100644 index 0000000..b418d84 --- /dev/null +++ b/src/output/json.zig @@ -0,0 +1,114 @@ +const std = @import("std"); +const types = @import("../core/types.zig"); +const handlers = @import("handlers.zig"); +const json = @import("../utils/json.zig"); +const errors = @import("../core/errors.zig"); + +pub const JsonHandlerConfig = struct { + min_level: types.LogLevel = .debug, + pretty_print: bool = false, + buffer_size: usize = 4096, + output_file: ?[]const u8 = null, +}; + +pub const JsonHandler = struct { + const Self = @This(); + + allocator: std.mem.Allocator, + config: JsonHandlerConfig, + file: ?std.fs.File, + has_written: bool, + is_initialized: bool, // Add this to track initialization state + + pub fn init(allocator: std.mem.Allocator, config: JsonHandlerConfig) errors.Error!*Self { + var handler = try allocator.create(Self); + errdefer allocator.destroy(handler); + + handler.* = .{ + .allocator = allocator, + .config = config, + .file = null, + .has_written = false, + .is_initialized = false, + }; + + if (config.output_file) |path| { + const file = try std.fs.createFileAbsolute(path, .{ + .read = true, + .truncate = true, + }); + try file.writeAll("[\n"); + handler.file = file; + } + + handler.is_initialized = true; + return handler; + } + + pub fn deinit(self: *Self) void { + if (!self.is_initialized) return; + + if (self.file) |file| { + if (self.has_written) { + file.writeAll("\n]") catch {}; + } else { + file.writeAll("[]") catch {}; + } + // Store the allocator before we potentially invalidate self + const allocator = self.allocator; + self.file = null; + self.is_initialized = false; + allocator.destroy(self); + } else { + const allocator = self.allocator; + self.is_initialized = false; + allocator.destroy(self); + } + } + + pub fn log( + self: *Self, + level: types.LogLevel, + message: []const u8, + metadata: ?types.LogMetadata, + ) errors.Error!void { + if (!self.is_initialized) return error.NotInitialized; + if (@intFromEnum(level) < @intFromEnum(self.config.min_level)) { + return; + } + + const json_str = try json.serializeLogEntry( + self.allocator, + level, + message, + metadata, + ); + defer self.allocator.free(json_str); + + if (self.file) |*file| { + if (self.has_written) { + try file.writeAll(",\n"); + } + try file.writeAll(json_str); + self.has_written = true; + } else { + try std.io.getStdOut().writer().print("{s}\n", .{json_str}); + } + } + + pub fn flush(self: *Self) errors.Error!void { + if (!self.is_initialized) return error.NotInitialized; + if (self.file) |*file| { + try file.sync(); + } + } + + pub fn toLogHandler(self: *Self) handlers.LogHandler { + return handlers.LogHandler.init( + self, + JsonHandler.log, + JsonHandler.flush, + JsonHandler.deinit, + ); + } +}; diff --git a/src/utils/json.zig b/src/utils/json.zig index 8d1c8b6..b64bec9 100644 --- a/src/utils/json.zig +++ b/src/utils/json.zig @@ -1 +1,133 @@ - +const std = @import("std"); +const types = @import("../core/types.zig"); + +pub const JsonError = error{ + InvalidType, + 
InvalidFormat, + BufferTooSmall, +}; + +pub const JsonValue = union(enum) { + null, + bool: bool, + number: f64, + string: []const u8, + array: []JsonValue, + object: std.StringHashMap(JsonValue), + + pub fn deinit(self: *JsonValue, allocator: std.mem.Allocator) void { + switch (self.*) { + .array => |array| { + for (array) |*value| { + value.deinit(allocator); + } + allocator.free(array); + }, + .object => |*map| { + var it = map.iterator(); + while (it.next()) |entry| { + entry.value_ptr.deinit(allocator); + } + map.deinit(); + }, + else => {}, + } + } +}; + +pub fn serializeLogEntry( + allocator: std.mem.Allocator, + level: types.LogLevel, + message: []const u8, + metadata: ?types.LogMetadata, +) ![]u8 { + var json_map = std.StringHashMap(JsonValue).init(allocator); + defer { + var it = json_map.iterator(); + while (it.next()) |entry| { + if (entry.value_ptr.* == .object) { + entry.value_ptr.deinit(allocator); + } + } + json_map.deinit(); + } + + // Add level + try json_map.put("level", .{ .string = level.toString() }); + + // Add message + try json_map.put("message", .{ .string = message }); + + // Add metadata if present + if (metadata) |meta| { + var meta_map = std.StringHashMap(JsonValue).init(allocator); + errdefer meta_map.deinit(); + + try meta_map.put("timestamp", .{ .number = @floatFromInt(meta.timestamp) }); + try meta_map.put("thread_id", .{ .number = @floatFromInt(meta.thread_id) }); + try meta_map.put("file", .{ .string = meta.file }); + try meta_map.put("line", .{ .number = @floatFromInt(meta.line) }); + try meta_map.put("function", .{ .string = meta.function }); + + try json_map.put("metadata", .{ .object = meta_map }); + } + + // Serialize to string + return try stringify(allocator, .{ .object = json_map }); +} + +pub fn stringify(allocator: std.mem.Allocator, value: JsonValue) ![]u8 { + var list = std.ArrayList(u8).init(allocator); + errdefer list.deinit(); + + try stringifyValue(value, &list); + return list.toOwnedSlice(); +} + +fn stringifyValue(value: JsonValue, list: *std.ArrayList(u8)) !void { + switch (value) { + .null => try list.appendSlice("null"), + .bool => |b| try list.appendSlice(if (b) "true" else "false"), + .number => |n| try std.fmt.format(list.writer(), "{d}", .{n}), + .string => |s| { + try list.append('"'); + try escapeString(s, list); + try list.append('"'); + }, + .array => |arr| { + try list.append('['); + for (arr, 0..) 
|item, i| { + if (i > 0) try list.appendSlice(", "); + try stringifyValue(item, list); + } + try list.append(']'); + }, + .object => |map| { + try list.append('{'); + var it = map.iterator(); + var first = true; + while (it.next()) |entry| { + if (!first) try list.appendSlice(", "); + first = false; + try list.append('"'); + try list.appendSlice(entry.key_ptr.*); + try list.appendSlice("\": "); + try stringifyValue(entry.value_ptr.*, list); + } + try list.append('}'); + }, + } +} + +fn escapeString(s: []const u8, list: *std.ArrayList(u8)) !void { + for (s) |c| { + switch (c) { + '"' => try list.appendSlice("\\\""), + '\\' => try list.appendSlice("\\\\"), + '\n' => try list.appendSlice("\\n"), + '\r' => try list.appendSlice("\\r"), + '\t' => try list.appendSlice("\\t"), + else => try list.append(c), + } + } +} diff --git a/test_logs/app.json b/test_logs/app.json new file mode 100644 index 0000000..fda6aee --- /dev/null +++ b/test_logs/app.json @@ -0,0 +1,5 @@ +[ +{"metadata": {"timestamp": 1738258669, "thread_id": 0, "file": "examples/json_logging.zig", "line": 43, "function": "main"}, "level": "INFO", "message": "Application starting"}, +{"metadata": {"timestamp": 1738258669, "thread_id": 0, "file": "examples/json_logging.zig", "line": 43, "function": "main"}, "level": "WARN", "message": "This is a warning message (code: 123)"}, +{"metadata": {"timestamp": 1738258669, "thread_id": 0, "file": "examples/json_logging.zig", "line": 43, "function": "main"}, "level": "ERROR", "message": "An error occurred (code: E_UNKNOWN)"} +] \ No newline at end of file diff --git a/tests/json_handler_tests.zig b/tests/json_handler_tests.zig new file mode 100644 index 0000000..03d7bed --- /dev/null +++ b/tests/json_handler_tests.zig @@ -0,0 +1,77 @@ +const std = @import("std"); +const testing = std.testing; +const JsonHandler = @import("nexlog").output.json_handler.JsonHandler; +const LogLevel = @import("nexlog").LogLevel; +const LogMetadata = @import("nexlog").LogMetadata; + +test "JsonHandler basic initialization" { + const allocator = testing.allocator; + var handler = try JsonHandler.init(allocator, .{}); + try handler.log(.info, "Test initialization", null); + try handler.flush(); + handler.deinit(); +} + +test "JsonHandler log message" { + const allocator = testing.allocator; + var handler = try JsonHandler.init(allocator, .{}); + defer handler.deinit(); + + const metadata = LogMetadata{ + .timestamp = 1234567890, + .thread_id = 1, + .file = "test.zig", + .line = 42, + .function = "testFunc", + }; + + try handler.log(.info, "Test message", metadata); + try handler.flush(); +} + +test "JsonHandler file output" { + const allocator = testing.allocator; + var tmp_dir = testing.tmpDir(.{}); + defer tmp_dir.cleanup(); + + // Create temporary file path + const test_path = try std.fs.path.join( + allocator, + &[_][]const u8{ tmp_dir.dir.realpath(".", &[_]u8{}) catch unreachable, "test.json" }, + ); + defer allocator.free(test_path); + + // Initialize handler + var handler = try JsonHandler.init(allocator, .{ + .output_file = test_path, + }); + + // Write test logs + try handler.log(.info, "Test message 1", null); + try handler.log(.warn, "Test message 2", null); + try handler.flush(); + + // Read and verify file contents + const file = try tmp_dir.dir.openFile("test.json", .{ .mode = .read_only }); + defer file.close(); + + const file_contents = try file.readToEndAlloc(allocator, 1024 * 1024); + defer allocator.free(file_contents); + + // Basic verification + try testing.expect(std.mem.indexOf(u8, file_contents, 
"Test message 1") != null); + try testing.expect(std.mem.indexOf(u8, file_contents, "Test message 2") != null); + + // Cleanup + handler.deinit(); +} + +test "JsonHandler as generic LogHandler" { + const allocator = testing.allocator; + var json_handler = try JsonHandler.init(allocator, .{}); + const log_handler = json_handler.toLogHandler(); + + try log_handler.writeLog(.info, "Test generic handler", null); + try log_handler.flush(); + log_handler.deinit(); +} From c598089982213a54cd77d887cec59335b9aa8cd6 Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Thu, 30 Jan 2025 18:43:51 +0100 Subject: [PATCH 06/18] gitignore update --- .gitignore | 4 +++- test_logs/app.json | 6 +++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index 9f04768..ef9cfa3 100644 --- a/.gitignore +++ b/.gitignore @@ -2,4 +2,6 @@ .zig-cache/ zig-out/ *.log -logs/ \ No newline at end of file +logs/ +*test_* +test* diff --git a/test_logs/app.json b/test_logs/app.json index fda6aee..a19a514 100644 --- a/test_logs/app.json +++ b/test_logs/app.json @@ -1,5 +1,5 @@ [ -{"metadata": {"timestamp": 1738258669, "thread_id": 0, "file": "examples/json_logging.zig", "line": 43, "function": "main"}, "level": "INFO", "message": "Application starting"}, -{"metadata": {"timestamp": 1738258669, "thread_id": 0, "file": "examples/json_logging.zig", "line": 43, "function": "main"}, "level": "WARN", "message": "This is a warning message (code: 123)"}, -{"metadata": {"timestamp": 1738258669, "thread_id": 0, "file": "examples/json_logging.zig", "line": 43, "function": "main"}, "level": "ERROR", "message": "An error occurred (code: E_UNKNOWN)"} +{"metadata": {"timestamp": 1738259018, "thread_id": 0, "file": "examples/json_logging.zig", "line": 43, "function": "main"}, "level": "INFO", "message": "Application starting"}, +{"metadata": {"timestamp": 1738259018, "thread_id": 0, "file": "examples/json_logging.zig", "line": 43, "function": "main"}, "level": "WARN", "message": "This is a warning message (code: 123)"}, +{"metadata": {"timestamp": 1738259018, "thread_id": 0, "file": "examples/json_logging.zig", "line": 43, "function": "main"}, "level": "ERROR", "message": "An error occurred (code: E_UNKNOWN)"} ] \ No newline at end of file From 1854520b1374637704238b9b35c579cdb867a5c7 Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Thu, 30 Jan 2025 21:11:11 +0100 Subject: [PATCH 07/18] FORMAT module utility --- src/nexlog.zig | 1 + src/utils/format.zig | 362 ++++++++++++++++++++++++++++++++++- tests/json_handler_tests.zig | 126 ++++++------ 3 files changed, 425 insertions(+), 64 deletions(-) diff --git a/src/nexlog.zig b/src/nexlog.zig index 898dcc4..2d3e9ef 100644 --- a/src/nexlog.zig +++ b/src/nexlog.zig @@ -19,6 +19,7 @@ pub const utils = struct { pub const buffer = @import("utils/buffer.zig"); pub const pool = @import("utils/pool.zig"); pub const json = @import("utils/json.zig"); + pub const format = @import("utils/format.zig"); }; pub const output = struct { diff --git a/src/utils/format.zig b/src/utils/format.zig index 8d1c8b6..5723acd 100644 --- a/src/utils/format.zig +++ b/src/utils/format.zig @@ -1 +1,361 @@ - +// utils/format.zig +const std = @import("std"); +const types = @import("../core/types.zig"); + +/// Format placeholder types +pub const PlaceholderType = enum { + level, + message, + timestamp, + thread, + file, + line, + function, + color, + reset, + custom, +}; + +/// Format configuration +pub const FormatConfig = struct { + /// Default format: "[{timestamp}] [{level}] 
{message}" + template: []const u8 = "[{timestamp}] [{level}] {message}", + + timestamp_format: enum { + unix, + iso8601, + custom, + } = .unix, + custom_timestamp_format: ?[]const u8 = null, + + level_format: enum { + upper, // "ERROR" + lower, // "error" + short_upper, // "ERR" + short_lower, // "err" + } = .upper, + + use_color: bool = true, + custom_colors: ?std.StringHashMap([]const u8) = null, + + /// Custom placeholder handlers + custom_handlers: ?std.StringHashMap(CustomPlaceholderFn) = null, +}; + +/// Function type for custom placeholder handlers +pub const CustomPlaceholderFn = *const fn ( + allocator: std.mem.Allocator, + level: types.LogLevel, + message: []const u8, + metadata: ?types.LogMetadata, +) error{OutOfMemory}![]const u8; + +/// Parsed placeholder information +const Placeholder = struct { + type: PlaceholderType, + start: usize, + end: usize, + format: ?[]const u8, +}; + +// utils/format.zig (continued) + +/// Error set for format operations +pub const FormatError = error{ + InvalidPlaceholder, + InvalidFormat, + MissingHandler, + TimestampError, +}; + +pub const Formatter = struct { + allocator: std.mem.Allocator, + config: FormatConfig, + placeholder_cache: std.ArrayList(Placeholder), + + pub fn init(allocator: std.mem.Allocator, config: FormatConfig) !*Formatter { + var self = try allocator.create(Formatter); + self.* = .{ + .allocator = allocator, + .config = config, + .placeholder_cache = std.ArrayList(Placeholder).init(allocator), + }; + // Parse template once during initialization + try self.parsePlaceholders(); + return self; + } + + pub fn deinit(self: *Formatter) void { + self.placeholder_cache.deinit(); + self.allocator.destroy(self); + } + + fn parsePlaceholders(self: *Formatter) !void { + var i: usize = 0; + while (i < self.config.template.len) { + if (self.config.template[i] == '{') { + const start = i; + i += 1; + var found_end = false; + var fmt_spec: ?[]const u8 = null; + + // Look for format specifier + while (i < self.config.template.len) : (i += 1) { + if (self.config.template[i] == ':') { + // Extract format string + const format_start = i + 1; + while (i < self.config.template.len and self.config.template[i] != '}') : (i += 1) {} + fmt_spec = self.config.template[format_start..i]; + found_end = true; + break; + } else if (self.config.template[i] == '}') { + found_end = true; + break; + } + } + + if (!found_end) { + return FormatError.InvalidPlaceholder; + } + + const placeholder_name = self.config.template[start + 1 .. 
if (format == null) i else i - format.?.len - 1]; + const placeholder_type = try self.getPlaceholderType(placeholder_name); + + try self.placeholder_cache.append(.{ + .type = placeholder_type, + .start = start, + .end = i + 1, + .format = format, + }); + } + i += 1; + } + } + + fn getPlaceholderType(self: *Formatter, name: []const u8) !PlaceholderType { + if (std.mem.eql(u8, name, "level")) return .level; + if (std.mem.eql(u8, name, "message")) return .message; + if (std.mem.eql(u8, name, "timestamp")) return .timestamp; + if (std.mem.eql(u8, name, "thread")) return .thread; + if (std.mem.eql(u8, name, "file")) return .file; + if (std.mem.eql(u8, name, "line")) return .line; + if (std.mem.eql(u8, name, "function")) return .function; + if (std.mem.eql(u8, name, "color")) return .color; + if (std.mem.eql(u8, name, "reset")) return .reset; + + // Check for custom placeholder + if (self.config.custom_handlers) |handlers| { + if (handlers.contains(name)) { + return .custom; + } + } + + return FormatError.InvalidPlaceholder; + } + + pub fn format( + self: *Formatter, + level: types.LogLevel, + message: []const u8, + metadata: ?types.LogMetadata, + ) ![]const u8 { + var result = std.ArrayList(u8).init(self.allocator); + errdefer result.deinit(); + + var last_pos: usize = 0; + + for (self.placeholder_cache.items) |placeholder| { + // Add text before placeholder + try result.appendSlice(self.config.template[last_pos..placeholder.start]); + + // Format placeholder + try self.formatPlaceholder( + &result, + placeholder, + level, + message, + metadata, + ); + + last_pos = placeholder.end; + } + + // Add remaining text after last placeholder + try result.appendSlice(self.config.template[last_pos..]); + + return result.toOwnedSlice(); + } + + fn formatPlaceholder( + self: *Formatter, + result: *std.ArrayList(u8), + placeholder: Placeholder, + level: types.LogLevel, + message: []const u8, + metadata: ?types.LogMetadata, + ) !void { + switch (placeholder.type) { + .level => try self.formatLevel(result, level), + .message => try result.appendSlice(message), + .timestamp => try self.formatTimestamp(result, metadata), + .thread => if (metadata) |m| try std.fmt.format(result.writer(), "{d}", .{m.thread_id}), + .file => if (metadata) |m| try result.appendSlice(m.file), + .line => if (metadata) |m| try std.fmt.format(result.writer(), "{d}", .{m.line}), + .function => if (metadata) |m| try result.appendSlice(m.function), + .color => if (self.config.use_color) try result.appendSlice(level.toColor()), + .reset => if (self.config.use_color) try result.appendSlice("\x1b[0m"), + .custom => try self.formatCustomPlaceholder(result, placeholder, level, message, metadata), + } + } + // utils/format.zig (continued) + + fn formatLevel( + self: *Formatter, + result: *std.ArrayList(u8), + level: types.LogLevel, + ) !void { + const level_str = level.toString(); + switch (self.config.level_format) { + .upper => try result.appendSlice(level_str), + .lower => { + for (level_str) |c| { + try result.append(std.ascii.toLower(c)); + } + }, + .short_upper => { + const short = switch (level) { + .trace => "TRC", + .debug => "DBG", + .info => "INF", + .warn => "WRN", + .err => "ERR", + .critical => "CRT", + }; + try result.appendSlice(short); + }, + .short_lower => { + const short = switch (level) { + .trace => "trc", + .debug => "dbg", + .info => "inf", + .warn => "wrn", + .err => "err", + .critical => "crt", + }; + try result.appendSlice(short); + }, + } + } + + fn formatTimestamp( + self: *Formatter, + result: *std.ArrayList(u8), + 
metadata: ?types.LogMetadata, + ) !void { + const timestamp = if (metadata) |m| m.timestamp else std.time.timestamp(); + + switch (self.config.timestamp_format) { + .unix => try std.fmt.format(result.writer(), "{d}", .{timestamp}), + .iso8601 => { + // Convert unix timestamp to ISO 8601 format + const unix_timestamp = @as(i64, @intCast(timestamp)); + const epoch_seconds = @divFloor(unix_timestamp, 1000); + const ms = @mod(unix_timestamp, 1000); + + // Convert to broken down time + var timer = try std.time.Timer.start(); + const epoch_day = @divFloor(epoch_seconds, 86400); + const day_seconds = @mod(epoch_seconds, 86400); + + const year_day = @as(u16, @intCast(@mod(epoch_day + 719468, 146097) / 365.2425)); + const year = 1970 + year_day; + + const hour = @as(u8, @intCast(@divFloor(day_seconds, 3600))); + const minute = @as(u8, @intCast(@mod(@divFloor(day_seconds, 60), 60))); + const second = @as(u8, @intCast(@mod(day_seconds, 60))); + + try std.fmt.format( + result.writer(), + "{d:0>4}-{d:0>2}-{d:0>2}T{d:0>2}:{d:0>2}:{d:0>2}.{d:0>3}Z", + .{ year, timer.read(), timer.lap(), hour, minute, second, ms }, + ); + }, + .custom => { + if (self.config.custom_timestamp_format) |fmt_str| { + // Here you could implement custom timestamp formatting + // using the provided format string + // For now, fallback to unix timestamp + _ = fmt_str; + try std.fmt.format(result.writer(), "{d}", .{timestamp}); + } else { + try std.fmt.format(result.writer(), "{d}", .{timestamp}); + } + }, + } + } + + fn formatCustomPlaceholder( + self: *Formatter, + result: *std.ArrayList(u8), + placeholder: Placeholder, + level: types.LogLevel, + message: []const u8, + metadata: ?types.LogMetadata, + ) !void { + if (self.config.custom_handlers) |handlers| { + const placeholder_name = self.config.template[placeholder.start + 1 .. 
placeholder.end - 1]; + if (handlers.get(placeholder_name)) |handler| { + const custom_result = try handler( + self.allocator, + level, + message, + metadata, + ); + defer self.allocator.free(custom_result); + try result.appendSlice(custom_result); + } else { + return FormatError.MissingHandler; + } + } else { + return FormatError.MissingHandler; + } + } + + /// Helper function to create a custom placeholder handler + pub fn registerCustomPlaceholder( + self: *Formatter, + name: []const u8, + handler: CustomPlaceholderFn, + ) !void { + if (self.config.custom_handlers == null) { + self.config.custom_handlers = std.StringHashMap(CustomPlaceholderFn).init( + self.allocator, + ); + } + + try self.config.custom_handlers.?.put(name, handler); + // Re-parse placeholders to include new custom placeholder + self.placeholder_cache.clearRetainingCapacity(); + try self.parsePlaceholders(); + } +}; + +/// Helper function to create a formatter with default configuration +pub fn createDefaultFormatter(allocator: std.mem.Allocator) !*Formatter { + return Formatter.init(allocator, .{}); +} + +/// Example custom placeholder handler +pub fn hostnamePlaceholder( + allocator: std.mem.Allocator, + level: types.LogLevel, + message: []const u8, + metadata: ?types.LogMetadata, +) error{OutOfMemory}![]const u8 { + _ = level; + _ = message; + _ = metadata; + var buffer: [std.os.HOST_NAME_MAX]u8 = undefined; + const hostname = try std.os.gethostname(&buffer); + return allocator.dupe(u8, hostname); +} diff --git a/tests/json_handler_tests.zig b/tests/json_handler_tests.zig index 03d7bed..a6b47a5 100644 --- a/tests/json_handler_tests.zig +++ b/tests/json_handler_tests.zig @@ -1,77 +1,77 @@ -const std = @import("std"); -const testing = std.testing; -const JsonHandler = @import("nexlog").output.json_handler.JsonHandler; -const LogLevel = @import("nexlog").LogLevel; -const LogMetadata = @import("nexlog").LogMetadata; +// const std = @import("std"); +// const testing = std.testing; +// const JsonHandler = @import("nexlog").output.json_handler.JsonHandler; +// const LogLevel = @import("nexlog").LogLevel; +// const LogMetadata = @import("nexlog").LogMetadata; -test "JsonHandler basic initialization" { - const allocator = testing.allocator; - var handler = try JsonHandler.init(allocator, .{}); - try handler.log(.info, "Test initialization", null); - try handler.flush(); - handler.deinit(); -} +// test "JsonHandler basic initialization" { +// const allocator = testing.allocator; +// var handler = try JsonHandler.init(allocator, .{}); +// try handler.log(.info, "Test initialization", null); +// try handler.flush(); +// handler.deinit(); +// } -test "JsonHandler log message" { - const allocator = testing.allocator; - var handler = try JsonHandler.init(allocator, .{}); - defer handler.deinit(); +// test "JsonHandler log message" { +// const allocator = testing.allocator; +// var handler = try JsonHandler.init(allocator, .{}); +// defer handler.deinit(); - const metadata = LogMetadata{ - .timestamp = 1234567890, - .thread_id = 1, - .file = "test.zig", - .line = 42, - .function = "testFunc", - }; +// const metadata = LogMetadata{ +// .timestamp = 1234567890, +// .thread_id = 1, +// .file = "test.zig", +// .line = 42, +// .function = "testFunc", +// }; - try handler.log(.info, "Test message", metadata); - try handler.flush(); -} +// try handler.log(.info, "Test message", metadata); +// try handler.flush(); +// } -test "JsonHandler file output" { - const allocator = testing.allocator; - var tmp_dir = testing.tmpDir(.{}); - defer 
tmp_dir.cleanup(); +// test "JsonHandler file output" { +// const allocator = testing.allocator; +// var tmp_dir = testing.tmpDir(.{}); +// defer tmp_dir.cleanup(); - // Create temporary file path - const test_path = try std.fs.path.join( - allocator, - &[_][]const u8{ tmp_dir.dir.realpath(".", &[_]u8{}) catch unreachable, "test.json" }, - ); - defer allocator.free(test_path); +// // Create temporary file path +// const test_path = try std.fs.path.join( +// allocator, +// &[_][]const u8{ tmp_dir.dir.realpath(".", &[_]u8{}) catch unreachable, "test.json" }, +// ); +// defer allocator.free(test_path); - // Initialize handler - var handler = try JsonHandler.init(allocator, .{ - .output_file = test_path, - }); +// // Initialize handler +// var handler = try JsonHandler.init(allocator, .{ +// .output_file = test_path, +// }); - // Write test logs - try handler.log(.info, "Test message 1", null); - try handler.log(.warn, "Test message 2", null); - try handler.flush(); +// // Write test logs +// try handler.log(.info, "Test message 1", null); +// try handler.log(.warn, "Test message 2", null); +// try handler.flush(); - // Read and verify file contents - const file = try tmp_dir.dir.openFile("test.json", .{ .mode = .read_only }); - defer file.close(); +// // Read and verify file contents +// const file = try tmp_dir.dir.openFile("test.json", .{ .mode = .read_only }); +// defer file.close(); - const file_contents = try file.readToEndAlloc(allocator, 1024 * 1024); - defer allocator.free(file_contents); +// const file_contents = try file.readToEndAlloc(allocator, 1024 * 1024); +// defer allocator.free(file_contents); - // Basic verification - try testing.expect(std.mem.indexOf(u8, file_contents, "Test message 1") != null); - try testing.expect(std.mem.indexOf(u8, file_contents, "Test message 2") != null); +// // Basic verification +// try testing.expect(std.mem.indexOf(u8, file_contents, "Test message 1") != null); +// try testing.expect(std.mem.indexOf(u8, file_contents, "Test message 2") != null); - // Cleanup - handler.deinit(); -} +// // Cleanup +// handler.deinit(); +// } -test "JsonHandler as generic LogHandler" { - const allocator = testing.allocator; - var json_handler = try JsonHandler.init(allocator, .{}); - const log_handler = json_handler.toLogHandler(); +// test "JsonHandler as generic LogHandler" { +// const allocator = testing.allocator; +// var json_handler = try JsonHandler.init(allocator, .{}); +// const log_handler = json_handler.toLogHandler(); - try log_handler.writeLog(.info, "Test generic handler", null); - try log_handler.flush(); - log_handler.deinit(); -} +// try log_handler.writeLog(.info, "Test generic handler", null); +// try log_handler.flush(); +// log_handler.deinit(); +// } From 2b19f2a56e1473c0b5f4eb56f9b943360121f7db Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Thu, 30 Jan 2025 21:23:58 +0100 Subject: [PATCH 08/18] WIP: fixed errors, conversion and shadowing errors --- src/utils/format.zig | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/src/utils/format.zig b/src/utils/format.zig index 5723acd..bc18f09 100644 --- a/src/utils/format.zig +++ b/src/utils/format.zig @@ -118,14 +118,14 @@ pub const Formatter = struct { return FormatError.InvalidPlaceholder; } - const placeholder_name = self.config.template[start + 1 .. if (format == null) i else i - format.?.len - 1]; + const placeholder_name = self.config.template[start + 1 .. 
if (fmt_spec == null) i else i - fmt_spec.?.len - 1]; const placeholder_type = try self.getPlaceholderType(placeholder_name); try self.placeholder_cache.append(.{ .type = placeholder_type, .start = start, .end = i + 1, - .format = format, + .format = fmt_spec, }); } i += 1; @@ -267,7 +267,9 @@ pub const Formatter = struct { const epoch_day = @divFloor(epoch_seconds, 86400); const day_seconds = @mod(epoch_seconds, 86400); - const year_day = @as(u16, @intCast(@mod(epoch_day + 719468, 146097) / 365.2425)); + // Use integer division instead of floating point + // 146097 days = 400 years + const year_day = @as(u16, @intCast(@divFloor(epoch_day + 719468, 146097) * 400)); const year = 1970 + year_day; const hour = @as(u8, @intCast(@divFloor(day_seconds, 3600))); @@ -282,9 +284,6 @@ pub const Formatter = struct { }, .custom => { if (self.config.custom_timestamp_format) |fmt_str| { - // Here you could implement custom timestamp formatting - // using the provided format string - // For now, fallback to unix timestamp _ = fmt_str; try std.fmt.format(result.writer(), "{d}", .{timestamp}); } else { From 458fd2f7838f205c6b03cc998f396b217da5b584 Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Thu, 30 Jan 2025 21:24:34 +0100 Subject: [PATCH 09/18] WIP: implemented formatting into existing configuration --- src/core/config.zig | 3 +++ src/core/init.zig | 6 +++++- src/core/logger.zig | 17 ++++++++++++++++- 3 files changed, 24 insertions(+), 2 deletions(-) diff --git a/src/core/config.zig b/src/core/config.zig index 465b095..b074419 100644 --- a/src/core/config.zig +++ b/src/core/config.zig @@ -1,4 +1,5 @@ const LogLevel = @import("types.zig").LogLevel; +const format = @import("../utils/format.zig"); pub const LogConfig = struct { min_level: LogLevel = .info, @@ -12,4 +13,6 @@ pub const LogConfig = struct { buffer_size: usize = 4096, async_mode: bool = false, enable_metadata: bool = true, + + format_config: ?format.FormatConfig = null, }; diff --git a/src/core/init.zig b/src/core/init.zig index d8907a7..3af38cb 100644 --- a/src/core/init.zig +++ b/src/core/init.zig @@ -3,7 +3,7 @@ const logger = @import("logger.zig"); const config = @import("config.zig"); const errors = @import("errors.zig"); const types = @import("types.zig"); - +const format = @import("../utils/format.zig"); /// Global logger state pub const GlobalState = struct { is_initialized: bool = false, @@ -99,6 +99,10 @@ pub const LogBuilder = struct { }, }; } + pub fn setFormatter(self: *LogBuilder, format_config: format.FormatConfig) *LogBuilder { + self.config.format_config = format_config; + return self; + } pub fn setMinLevel(self: *LogBuilder, level: types.LogLevel) *LogBuilder { self.config.min_level = level; diff --git a/src/core/logger.zig b/src/core/logger.zig index 5c7e7e6..132f01a 100644 --- a/src/core/logger.zig +++ b/src/core/logger.zig @@ -7,7 +7,7 @@ const handlers = @import("../output/handlers.zig"); const console = @import("../output/console.zig"); const file = @import("../output/file.zig"); const network = @import("../output/network.zig"); - +const format = @import("../utils/format.zig"); pub const Logger = struct { const Self = @This(); @@ -15,6 +15,7 @@ pub const Logger = struct { config: cfg.LogConfig, mutex: std.Thread.Mutex, handlers: std.ArrayList(handlers.LogHandler), + formatter: ?*format.Formatter, // Add formatter pub fn init(allocator: std.mem.Allocator, config: cfg.LogConfig) !*Self { var logger = try allocator.create(Self); @@ -25,8 +26,12 @@ pub const Logger = struct { .config = config, // Store the passed 
config .mutex = std.Thread.Mutex{}, .handlers = std.ArrayList(handlers.LogHandler).init(allocator), + .formatter = null, }; + if (config.format_config) |fmt_config| { + logger.formatter = try format.Formatter.init(allocator, fmt_config); + } // Initialize console handler by default if (config.enable_console) { const console_config = console.ConsoleConfig{ @@ -58,6 +63,9 @@ pub const Logger = struct { } pub fn deinit(self: *Self) void { + if (self.formatter) |fmt| { + fmt.deinit(); + } // Deinit all handlers for (self.handlers.items) |handler| { handler.deinit(); @@ -89,6 +97,13 @@ pub const Logger = struct { args, ); + const formatted_message = if (self.formatter) |formatter| blk: { + const result = try formatter.format(level, message, metadata); + break :blk result; + } else message; + + defer if (self.formatter != null) self.allocator.free(formatted_message); + // Send to all handlers for (self.handlers.items) |handler| { handler.writeLog(level, message, metadata) catch |err| { From d6495abb5b308730d644c637b39a7c6c2d6636b4 Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Sun, 2 Feb 2025 19:12:59 +0100 Subject: [PATCH 10/18] fix: renamed '.Pointer' -> '.pointer' and '.One' to '.one' --- src/output/handlers.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/output/handlers.zig b/src/output/handlers.zig index 46db278..bd30a5a 100644 --- a/src/output/handlers.zig +++ b/src/output/handlers.zig @@ -35,8 +35,8 @@ pub const LogHandler = struct { const Ptr = @TypeOf(pointer); const ptr_info = @typeInfo(Ptr); - std.debug.assert(ptr_info == .Pointer); // Must be a pointer - std.debug.assert(ptr_info.Pointer.size == .One); // Must be a single-item pointer + std.debug.assert(ptr_info == .pointer); // Must be a pointer + std.debug.assert(ptr_info.pointer.size == .one); // Must be a single-item pointer const GenericWriteLog = struct { fn implementation( From a507f1817047e9a2b743f7d56ff5abba2879f7dc Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Sun, 2 Feb 2025 19:14:14 +0100 Subject: [PATCH 11/18] fix: renamed MAX_PATH_BYTES to max_path_bytes --- examples/json_logging.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/json_logging.zig b/examples/json_logging.zig index 0744071..c64e2fc 100644 --- a/examples/json_logging.zig +++ b/examples/json_logging.zig @@ -13,7 +13,7 @@ pub fn main() !void { try std.fs.cwd().makePath(log_dir); // Get the current working directory - var cwd_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; + var cwd_buf: [std.fs.max_path_bytes]u8 = undefined; const cwd = std.fs.cwd().realpath(".", &cwd_buf) catch unreachable; // Construct the absolute path for the log file within the log_dir From 0a74dade74f46c6bf40fb4ea504778b862ecde34 Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Wed, 5 Feb 2025 15:43:00 +0100 Subject: [PATCH 12/18] feat: added non fallible methods for logging --- src/core/logger.zig | 37 +++++++++++++++++++++++++++++++++---- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/src/core/logger.zig b/src/core/logger.zig index 132f01a..17e5dbf 100644 --- a/src/core/logger.zig +++ b/src/core/logger.zig @@ -106,11 +106,40 @@ pub const Logger = struct { // Send to all handlers for (self.handlers.items) |handler| { - handler.writeLog(level, message, metadata) catch |err| { - std.debug.print("Handler error: {}\n", .{err}); + handler.writeLog(level, message, metadata) catch |log_error| { + std.debug.print("Handler error: {}\n", .{log_error}); }; } } + // === Convenience (Infallible) Methods 
=== + + /// Logs an info-level message without the caller having to use `try` or `catch`. + pub fn info(self: *Self, comptime fmt: []const u8, args: anytype, metadata: ?types.LogMetadata) void { + _ = self.log(.info, fmt, args, metadata) catch |log_error| { + std.debug.print("Logger.info error: {}\n", .{log_error}); + }; + } + + /// Logs a debug-level message. + pub fn debug(self: *Self, comptime fmt: []const u8, args: anytype, metadata: ?types.LogMetadata) void { + _ = self.log(.debug, fmt, args, metadata) catch |log_error| { + std.debug.print("Logger.debug error: {}\n", .{log_error}); + }; + } + + /// Logs a warning-level message. + pub fn warn(self: *Self, comptime fmt: []const u8, args: anytype, metadata: ?types.LogMetadata) void { + _ = self.log(.warn, fmt, args, metadata) catch |log_error| { + std.debug.print("Logger.warn error: {}\n", .{log_error}); + }; + } + + /// Logs an error-level message. + pub fn err(self: *Self, comptime fmt: []const u8, args: anytype, metadata: ?types.LogMetadata) void { + _ = self.log(.err, fmt, args, metadata) catch |log_error| { + std.debug.print("Logger.error error: {}\n", .{log_error}); + }; + } // Add a new handler pub fn addHandler(self: *Self, handler: handlers.LogHandler) !void { @@ -139,8 +168,8 @@ pub const Logger = struct { defer self.mutex.unlock(); for (self.handlers.items) |handler| { - handler.flush() catch |err| { - std.debug.print("Flush error: {}\n", .{err}); + handler.flush() catch |flush_err| { + std.debug.print("Flush error: {}\n", .{flush_err}); }; } } From 03a0b2bd8ca8684475d0cbe4e80926138027e156 Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Wed, 5 Feb 2025 15:43:48 +0100 Subject: [PATCH 13/18] fix: fixed API errors due to version bump (Zig v0.14.x) --- src/output/handlers.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/output/handlers.zig b/src/output/handlers.zig index bd30a5a..46db278 100644 --- a/src/output/handlers.zig +++ b/src/output/handlers.zig @@ -35,8 +35,8 @@ pub const LogHandler = struct { const Ptr = @TypeOf(pointer); const ptr_info = @typeInfo(Ptr); - std.debug.assert(ptr_info == .pointer); // Must be a pointer - std.debug.assert(ptr_info.pointer.size == .one); // Must be a single-item pointer + std.debug.assert(ptr_info == .Pointer); // Must be a pointer + std.debug.assert(ptr_info.Pointer.size == .One); // Must be a single-item pointer const GenericWriteLog = struct { fn implementation( From a7649ad09d17737cd2865e817026d48c2c7ca802 Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Wed, 5 Feb 2025 15:50:12 +0100 Subject: [PATCH 14/18] change: updated parts of 'basic_usage' example to use the new Infallible logging methods --- examples/basic_usage.zig | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/basic_usage.zig b/examples/basic_usage.zig index b9a6a5d..02d8a03 100644 --- a/examples/basic_usage.zig +++ b/examples/basic_usage.zig @@ -29,7 +29,7 @@ const OrderProcessor = struct { const metadata = self.createMetadata(); // Log the start of order processing - try self.logger.log(.debug, "Starting order processing [dept={s}, order_id={d}]", .{ self.department, order_id }, metadata); + self.logger.debug("Starting order processing [dept={s}, order_id={d}]", .{ self.department, order_id }, metadata); // Simulate processing steps with appropriate logging try self.validateOrder(order_id); @@ -99,14 +99,14 @@ pub fn main() !void { }; // Log application startup - try logger.log(.info, "Application starting", .{}, base_metadata); + 
logger.info("Application starting", .{}, base_metadata); // Simulate some logging activity - try logger.log(.debug, "Initializing subsystems", .{}, base_metadata); - try logger.log(.info, "Processing started", .{}, base_metadata); - try logger.log(.warn, "Resource usage high", .{}, base_metadata); + logger.debug("Initializing subsystems", .{}, base_metadata); + logger.info("Processing started", .{}, base_metadata); + logger.warn("Resource usage high", .{}, base_metadata); // Ensure all logs are written before shutdown try logger.flush(); - try logger.log(.info, "Application shutdown complete", .{}, base_metadata); + logger.info("Application shutdown complete", .{}, base_metadata); } From 373a7112be78928b81000b909a537d817d050f24 Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Wed, 5 Feb 2025 15:51:09 +0100 Subject: [PATCH 15/18] fixed gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index ef9cfa3..6ad1845 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ zig-out/ logs/ *test_* test* +test_logs/*.json From 4a7dba546fa3e11a59d6420194044040c08b35bf Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Wed, 5 Feb 2025 15:51:39 +0100 Subject: [PATCH 16/18] deleted junk file --- test_logs/app.json | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 test_logs/app.json diff --git a/test_logs/app.json b/test_logs/app.json deleted file mode 100644 index a19a514..0000000 --- a/test_logs/app.json +++ /dev/null @@ -1,5 +0,0 @@ -[ -{"metadata": {"timestamp": 1738259018, "thread_id": 0, "file": "examples/json_logging.zig", "line": 43, "function": "main"}, "level": "INFO", "message": "Application starting"}, -{"metadata": {"timestamp": 1738259018, "thread_id": 0, "file": "examples/json_logging.zig", "line": 43, "function": "main"}, "level": "WARN", "message": "This is a warning message (code: 123)"}, -{"metadata": {"timestamp": 1738259018, "thread_id": 0, "file": "examples/json_logging.zig", "line": 43, "function": "main"}, "level": "ERROR", "message": "An error occurred (code: E_UNKNOWN)"} -] \ No newline at end of file From ea334b5633263c19ac9d9499083e136d7842330f Mon Sep 17 00:00:00 2001 From: Christian Brendlin Date: Wed, 5 Feb 2025 16:00:51 +0100 Subject: [PATCH 17/18] version bump --- CHANGELOG.md | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 69b3357..966c3b6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,3 @@ -### Fixed -- Fixed deadlock in file rotation when buffer limit is reached -- Fixed memory leaks in file path handling during log rotation -- Improved thread safety for file size tracking -- Added proper memory cleanup for file operations -- Enhanced error recovery during rotation failures - -The file handler now properly manages system resources and handles concurrent -access more reliably. Users should see more stable behavior during high-volume -logging with file rotation enabled. \ No newline at end of file +### Fixed v0.3.0 (February 5, 2025) +Added new non-failing log methods for each log level. These methods do not return an error. 
+Fixed compilation errors to support Zig 0.14-dev.

From f33481cea298248f382db35f4db491d8b744b86c Mon Sep 17 00:00:00 2001
From: Christian Brendlin
Date: Wed, 5 Feb 2025 16:01:04 +0100
Subject: [PATCH 18/18] version bump

---
 build.zig.zon | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.zig.zon b/build.zig.zon
index 768c9dd..4f5121d 100644
--- a/build.zig.zon
+++ b/build.zig.zon
@@ -10,7 +10,7 @@
     // This is a [Semantic Version](https://semver.org/).
     // In a future version of Zig it will be used for package deduplication.
-    .version = "0.2.0-alpha.1",
+    .version = "0.3.0",
 
     // This field is optional.
     // This is currently advisory only; Zig does not yet do anything
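// ---------------------------------------------------------------------------
// Usage sketch for the non-failing log methods introduced in PATCH 12/18.
// Illustrative only, not part of the patch series: it assumes `Logger` and
// `LogConfig` are re-exported from the nexlog root module and that the
// remaining LogConfig fields keep their defaults; adjust imports and
// configuration if the actual public API differs.
// ---------------------------------------------------------------------------
const std = @import("std");
const nexlog = @import("nexlog");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();

    // Assumed re-export: nexlog.Logger wraps src/core/logger.zig's Logger.
    var logger = try nexlog.Logger.init(gpa.allocator(), .{ .min_level = .debug });
    defer logger.deinit();

    // The original fallible API still exists and must be handled by the caller.
    try logger.log(.info, "fallible call from {s}", .{"main"}, null);

    // The new convenience methods return void; internal errors are reported
    // via std.debug.print instead of being propagated to the caller.
    logger.info("application starting", .{}, null);
    logger.warn("resource usage high: {d}%", .{87}, null);
    logger.err("operation failed: {s}", .{"E_UNKNOWN"}, null);

    try logger.flush();
}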
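// ---------------------------------------------------------------------------
// Usage sketch for the formatter configuration wired into LogConfig in
// PATCH 09/18. The field names (`format_config`, `template`, `use_color`,
// `timestamp_format`) and the `{level}` / `{message}` / `{timestamp}`
// placeholder names are taken from src/utils/format.zig; the defaults,
// re-exports, and exact placeholder syntax are assumptions. Note that in
// PATCH 09 as shown the handlers still receive the unformatted message, so
// the template only takes effect once the formatted string is passed through
// to the handlers.
// ---------------------------------------------------------------------------
const std = @import("std");
const nexlog = @import("nexlog");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();

    var logger = try nexlog.Logger.init(gpa.allocator(), .{
        // Optional field: when set, Logger.init builds a Formatter from it.
        .format_config = .{
            .template = "{timestamp} [{level}] {message}",
            .use_color = true,
            .timestamp_format = .unix,
        },
    });
    defer logger.deinit();

    // LogBuilder.setFormatter(...) sets the same field when using the builder.
    logger.info("hello from the formatter sketch", .{}, null);
    try logger.flush();
}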