feat(users/Profpatsch/{netencode,http-parse}): use HashMap
`U::Record` is required to be a hash map (later keys should be
ignored), so why not build the hash map immediately.

This surfaced a problem with read-http: duplicate headers are possible
in HTTP, but previously they would be silently ignored. Now we merge
them into a `U::List` in that case, to be handled by consumers of
read-http.

Change-Id: Ifd594916f76e5acf9d08e705e0dec2c10a0081c9
Reviewed-on: https://cl.tvl.fyi/c/depot/+/2490
Tested-by: BuildkiteCI
Reviewed-by: Profpatsch <mail@profpatsch.de>
parent 7d28f121c7
commit e207785e1f

3 changed files with 48 additions and 30 deletions
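The interesting behaviour change is in read-http: duplicate HTTP header names are no longer silently ignored but merged into a `U::List`. Below is a minimal sketch of that merging strategy, not the actual read-http code; the `U` type is trimmed to the two variants needed here, and `merge_headers` is a hypothetical helper:

use std::collections::HashMap;

// Trimmed-down copy of netencode's U type; only the variants this
// sketch needs.
#[derive(Debug)]
enum U<'a> {
    Text(&'a str),
    List(Vec<U<'a>>),
}

// Hypothetical helper showing the strategy from the commit message:
// a repeated header name upgrades the entry to a U::List instead of
// being silently dropped.
fn merge_headers<'a>(headers: &[(&'a str, &'a str)]) -> HashMap<&'a str, U<'a>> {
    let mut map: HashMap<&'a str, U<'a>> = HashMap::new();
    for &(name, value) in headers {
        match map.remove(name) {
            // first occurrence: store the value directly
            None => {
                map.insert(name, U::Text(value));
            }
            // already a list: append the new value
            Some(U::List(mut vals)) => {
                vals.push(U::Text(value));
                map.insert(name, U::List(vals));
            }
            // second occurrence: turn the single value into a list
            Some(single) => {
                map.insert(name, U::List(vec![single, U::Text(value)]));
            }
        }
    }
    map
}

fn main() {
    let headers = [("Host", "example.com"), ("Accept", "text/html"), ("Accept", "*/*")];
    let merged = merge_headers(&headers);
    match merged.get("Accept") {
        Some(U::List(vals)) => assert_eq!(vals.len(), 2),
        _ => panic!("expected the duplicated Accept header to be merged into a list"),
    }
}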
@@ -48,7 +48,7 @@ pub enum U<'a> {
     Binary(&'a [u8]),
     // Tags
     Sum(Tag<&'a str, U<'a>>),
-    Record(Vec<(&'a str, U<'a>)>),
+    Record(HashMap<&'a str, U<'a>>),
     List(Vec<U<'a>>),
 }
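Note on the hunk above: with `Record` carrying a `HashMap` instead of a `Vec` of pairs, consumers can look fields up by key directly. A small sketch under the same assumptions (trimmed `U` type, hypothetical `get_field` helper):

use std::collections::HashMap;

#[derive(Debug)]
enum U<'a> {
    Text(&'a str),
    Record(HashMap<&'a str, U<'a>>),
}

// Direct key lookup; the old Vec<(&str, U)> representation needed a
// linear scan plus a policy for duplicate keys.
fn get_field<'a>(u: &'a U<'a>, key: &str) -> Option<&'a U<'a>> {
    match u {
        U::Record(m) => m.get(key),
        _ => None,
    }
}

fn main() {
    let mut m = HashMap::new();
    m.insert("method", U::Text("GET"));
    match get_field(&U::Record(m), "method") {
        Some(U::Text(v)) => assert_eq!(*v, "GET"),
        _ => panic!("field missing"),
    }
}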
@@ -290,14 +290,11 @@ pub mod parse {
         let (s, r) = record_g(t_t)(s)?;
         Ok((s,
            r.into_iter()
-           // ignore duplicated tag names that appear later
-           // by reverting the vector now
-           .rev()
            .map(|(k, v)| (k.to_string(), v))
            .collect::<HashMap<_,_>>()))
     }

-    fn record_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], Vec<(&'a str, O)>>
+    fn record_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], HashMap<&'a str, O>>
     where
         O: Clone,
         P: Fn(&'a [u8]) -> IResult<&'a [u8], O>
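The deleted `.rev()` in this hunk was a workaround: collecting pairs into a `HashMap` keeps the last value inserted for a duplicate key, so reversing the vector first made the first occurrence win. Since `record_g` now deduplicates while folding (next hunk), the reversal is obsolete. A standalone demonstration of the two collect behaviours:

use std::collections::HashMap;

fn main() {
    let pairs = vec![("a", 1), ("a", 2)];

    // collect() keeps the last value seen for a duplicate key ...
    let last: HashMap<_, _> = pairs.iter().cloned().collect();
    assert_eq!(last["a"], 2);

    // ... so reversing first makes the first occurrence win, which is
    // what the removed .rev() relied on.
    let first: HashMap<_, _> = pairs.iter().rev().cloned().collect();
    assert_eq!(first["a"], 1);
}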
@@ -306,9 +303,13 @@ pub mod parse {
             sized('{', '}'),
             nom::multi::fold_many1(
                 tag_g(inner),
-                Vec::new(),
-                |mut acc: Vec<_>, Tag { tag, mut val }| {
-                    acc.push((tag, *val));
+                HashMap::new(),
+                |mut acc: HashMap<_,_>, Tag { tag, mut val }| {
+                    // ignore duplicated tag names that appear later
+                    // according to netencode spec
+                    if ! acc.contains_key(tag) {
+                        acc.insert(tag, *val);
+                    }
                     acc
                 },
             )
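A standalone illustration of the new fold semantics: a duplicated tag name keeps its first value, matching the "later keys should be ignored" rule the commit message cites. This is a plain iterator fold, not the nom-based parser itself:

use std::collections::HashMap;

fn main() {
    let tags = vec![("a", 1), ("b", 2), ("a", 3)];
    let record = tags.into_iter().fold(HashMap::new(), |mut acc: HashMap<_, _>, (tag, val)| {
        // first-key-wins, as in the fold_many1 closure above
        if !acc.contains_key(tag) {
            acc.insert(tag, val);
        }
        acc
    });
    assert_eq!(record["a"], 1); // ("a", 3) was ignored
    assert_eq!(record["b"], 2);
}

The `contains_key`/`insert` pair could also be written as `acc.entry(tag).or_insert(val);`, which performs the same first-wins insert with a single map lookup.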