19 changes: 19 additions & 0 deletions rs/log-fetcher/src/journald_parser.rs
@@ -354,4 +354,23 @@ mod tests {
let entries = parse_journal_entries_new(body);
assert_eq!(entries.len(), 0);
}

#[test]
fn test_cycles_canister() {
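// Serialize a single MESSAGE field and check that the parser round-trips
// it, returning the payload with its timestamp prefix intact.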
let mut body = vec![];
let mut serialized_data = vec![];
serialize_string_field("MESSAGE", "2025-05-19 07:24:19.132448340 UTC: [Canister rkp4c-7iaaa-aaaaa-aaaca-cai] [cycles] conversion rate update: IcpXdrConversionRate { timestamp_seconds: 1747639380, xdr_permyriad_per_icp: 37520 }", &mut serialized_data).unwrap();
body.extend(serialized_data);
let entries = parse_journal_entries_new(&body);

let first = entries.first().unwrap();
let (_, message) = first.fields.iter().find(|(k, _)| k == "MESSAGE").unwrap();

let message = match message {
JournalField::Utf8(_) => panic!("Expected a binary field"),
JournalField::Binary(s) => s,
};

assert!(message.starts_with("2025-05-19 07:24:19"));
}
}
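For context on why the test expects a JournalField::Binary: in systemd's journal export format, a binary-safe field is written as the field name, a newline, a 64-bit little-endian payload length, the payload itself, and a trailing newline (see systemd.io/JOURNAL_EXPORT_FORMATS). A minimal sketch of that layout, assuming serialize_string_field follows the same convention; the helper below is illustrative, not the crate's actual function:

```rust
// Sketch of the journal export format's length-prefixed ("binary") field
// encoding. Hypothetical helper for illustration only; it may differ from
// the crate's serialize_string_field.
fn serialize_binary_field(name: &str, value: &[u8], out: &mut Vec<u8>) {
    out.extend_from_slice(name.as_bytes()); // field name, e.g. "MESSAGE"
    out.push(b'\n');
    out.extend_from_slice(&(value.len() as u64).to_le_bytes()); // 64-bit LE size
    out.extend_from_slice(value); // raw payload, any bytes allowed
    out.push(b'\n'); // field terminator
}

fn main() {
    let mut buf = Vec::new();
    let payload = b"2025-05-19 07:24:19 UTC: [cycles] ...";
    serialize_binary_field("MESSAGE", payload, &mut buf);
    assert!(buf.starts_with(b"MESSAGE\n"));
    assert_eq!(buf.len(), "MESSAGE\n".len() + 8 + payload.len() + 1);
}
```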
41 changes: 28 additions & 13 deletions rs/log-fetcher/src/main.rs
@@ -50,7 +50,7 @@ async fn main() -> Result<(), anyhow::Error> {
let mut interval = tokio::time::interval(Duration::from_secs(15));
let mut should_run = true;
while should_run {
let mut leftover_buffer = String::new();
let mut leftover_buffer = vec![];
select! {
biased;
_ = token.cancelled() => {
@@ -114,21 +114,20 @@ async fn main() -> Result<(), anyhow::Error> {
break;
}
};
let chunk_str = String::from_utf8_lossy(&chunk);
let combined = leftover_buffer.clone() + &chunk_str;

let to_parse = if let Some(pos) = combined.rfind("\n\n") {
// Splitting at the pos + two new lines to finish the entry
let (to_parse, rest) = combined.split_at(pos + 2);

leftover_buffer = rest.to_string();
to_parse
leftover_buffer.extend_from_slice(&chunk);

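// Journal export entries are separated by a blank line (b"\n\n"); split at
// the last separator so only complete entries are parsed now and the
// incomplete tail is carried over into the next chunk.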
let split_pos = leftover_buffer.windows(2).rposition(|window| window == b"\n\n");
let to_parse = if let Some(pos) = split_pos {
let (to_parse, rest) = leftover_buffer.split_at(pos + 2);
let parsed = to_parse.to_vec();
leftover_buffer = rest.to_vec();
parsed
} else {
leftover_buffer = combined;
// Not a complete single entry, nothing to parse
continue;
};

let entries = parse_journal_entries_new(to_parse.as_bytes());
let entries = parse_journal_entries_new(&to_parse);

for entry in &entries {
let map: BTreeMap<String, String> = entry
@@ -139,7 +138,23 @@ async fn main() -> Result<(), anyhow::Error> {
})
.collect();

if map["__CURSOR"] == cursor {
let curr_cursor = match map.get("__CURSOR") {
Some(v) => v,
None => {
error!(
"Didn't find a cursor for the following entry: \n{:?}\n\n\\
Leftover buffer: {}\n\n\\
Current chunk: {}\n\n\\
",
map,
std::str::from_utf8(&leftover_buffer).unwrap(),
Copilot AI May 20, 2025

Using unwrap here may cause a panic if leftover_buffer contains non-UTF-8 bytes. Consider using std::str::from_utf8_lossy to safely display the data.

Suggested change:
-    std::str::from_utf8(&leftover_buffer).unwrap(),
+    std::str::from_utf8_lossy(&leftover_buffer),

std::str::from_utf8(&chunk).unwrap()
Copilot AI May 20, 2025

Using unwrap here may cause a panic if chunk contains non-UTF-8 bytes. Consider using std::str::from_utf8_lossy to safely convert the bytes for logging.

Suggested change:
-    std::str::from_utf8(&chunk).unwrap()
+    std::str::from_utf8_lossy(&chunk)

);
continue;
}
};

if curr_cursor == &cursor {
continue;
}

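Stepping back from the diff: the switch from String to Vec<u8> buffering matters because an HTTP chunk boundary can fall inside a multi-byte UTF-8 sequence or a binary field, which the old String::from_utf8_lossy path would silently corrupt. A self-contained sketch of the split-at-last-separator technique the new code uses; the function name and signature are illustrative, not the PR's actual API:

```rust
/// Split `buf` at the last b"\n\n" separator. Returns the complete entries,
/// leaving the incomplete tail in `buf` for the next chunk.
/// Illustrative sketch; main.rs inlines this logic rather than calling a helper.
fn take_complete_entries(buf: &mut Vec<u8>) -> Option<Vec<u8>> {
    let pos = buf.windows(2).rposition(|w| w == b"\n\n")?;
    let tail = buf.split_off(pos + 2);  // bytes after the last separator
    Some(std::mem::replace(buf, tail))  // hand back the complete prefix
}

fn main() {
    let mut buf = b"CURSOR=a\n\nCURSOR=b\n\nCURSOR=part".to_vec();
    let complete = take_complete_entries(&mut buf).unwrap();
    assert_eq!(complete, b"CURSOR=a\n\nCURSOR=b\n\n");
    assert_eq!(buf, b"CURSOR=part"); // carried over to the next iteration
}
```

Compared with the to_vec() copies in the PR, split_off plus mem::replace reuses the existing allocation, though at journald chunk sizes either approach is fine.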