Skip to content

Commit a063c3f

Browse files
authored
Merge pull request #1544 from epage/pub
fix(tokens): Ignore ssh ed25519 pub keys
2 parents 93c857b + 5d5e80b commit a063c3f

1 file changed

Lines changed: 67 additions & 0 deletions

File tree

crates/typos/src/tokens.rs

Lines changed: 67 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -193,6 +193,7 @@ mod parser {
193193
terminated(email_literal, sep1),
194194
terminated(url_literal, sep1),
195195
terminated(jwt, sep1),
196+
terminated(ssh_ed25519_pub_key, sep1),
196197
terminated(base64_literal, sep1), // base64 should be quoted or something
197198
alt((
198199
terminated(hash_literal, sep1),
@@ -348,6 +349,26 @@ mod parser {
348349
|| c == '-'
349350
}
350351

352+
fn ssh_ed25519_pub_key<'i, T>(input: &mut T) -> Result<<T as Stream>::Slice, ()>
353+
where
354+
T: Compare<char>,
355+
T: Compare<&'i str>,
356+
T: Stream + StreamIsPartial + PartialEq,
357+
<T as Stream>::Slice: AsBStr + SliceLen + Default,
358+
<T as Stream>::Token: AsChar + Copy,
359+
{
360+
trace(
361+
"ssh_ed25519_pub_key",
362+
(
363+
"AAAAC3NzaC1lZDI1NTE5AAAAI",
364+
// Technically the next digit can only be in `[A-P]` but not worth the complexity
365+
take_while(43, is_base64_digit),
366+
)
367+
.take(),
368+
)
369+
.parse_next(input)
370+
}
371+
351372
fn uuid_literal<T>(input: &mut T) -> Result<<T as Stream>::Slice, ()>
352373
where
353374
T: Compare<char>,
@@ -1614,6 +1635,52 @@ mod test {
16141635
},
16151636
]
16161637
1638+
"#]]
1639+
);
1640+
}
1641+
1642+
// Verifies that an ssh-ed25519 public key embedded in otherwise-normal text is
// skipped entirely: only the surrounding words ("Start", "end") are emitted as
// identifiers, and nothing between them produces a token.
#[test]
1643+
fn tokenize_ignore_ssh_ed25519_public_key() {
1644+
// Tokenizer with default builder settings.
let parser = TokenizerBuilder::new().build();
1645+
1646+
let input =
1647+
"Start AAAAC3NzaC1lZDI1NTE5AAAAIJSKdqkBEQY2y9f8C8RXxee3ZKXyWYR0QIW7oxISXrrf end";
1648+
// Byte-oriented tokenization path. `offset: 75` in the snapshot is the byte
// position of "end", confirming the whole key in between yielded no tokens.
let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
1649+
assert_data_eq!(
1650+
actual.to_debug(),
1651+
str![[r#"
1652+
[
1653+
Identifier {
1654+
token: "Start",
1655+
case: None,
1656+
offset: 0,
1657+
},
1658+
Identifier {
1659+
token: "end",
1660+
case: None,
1661+
offset: 75,
1662+
},
1663+
]
1664+
1665+
"#]]
1666+
);
1667+
// The str-oriented path must produce the identical token stream.
let actual: Vec<_> = parser.parse_str(input).collect();
1668+
assert_data_eq!(
1669+
actual.to_debug(),
1670+
str![[r#"
1671+
[
1672+
Identifier {
1673+
token: "Start",
1674+
case: None,
1675+
offset: 0,
1676+
},
1677+
Identifier {
1678+
token: "end",
1679+
case: None,
1680+
offset: 75,
1681+
},
1682+
]
1683+
16171684
"#]]
16181685
);
16191686
}

0 commit comments

Comments
 (0)