about summary refs log tree commit diff
diff options
context:
space:
mode:
authorHimbeerserverDE <himbeerserverde@gmail.com>2023-07-23 19:05:48 +0200
committerHimbeerserverDE <himbeerserverde@gmail.com>2023-07-23 19:05:48 +0200
commit2345b5e13f1899d291e55a4cf1617f26961bd13f (patch)
treee0f5ca0876757d94112936f3883f9e705375798c
parentd68e4e0126195397aff67db48457c46f82d6de0a (diff)
reverse order of len_for and discriminant_for
-rw-r--r--src/lib.rs46
1 file changed, 23 insertions(+), 23 deletions(-)
diff --git a/src/lib.rs b/src/lib.rs
index 45b7219..4e9a23d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -26,23 +26,23 @@ pub fn derive_serialize(item: TokenStream) -> TokenStream {
let args = Args::from_attributes(&field.attrs).unwrap();
- if let Some(attr) = args.len_for {
+ if let Some(attr) = args.discriminant_for {
let attr_ident = Ident::new(&attr, Span::call_site());
out.extend(
vec![quote!(
- self.#attr_ident.len().serialize(w)?;
+ self.#attr_ident.discriminant().serialize(w)?;
)]
.into_iter(),
);
}
- if let Some(attr) = args.discriminant_for {
+ if let Some(attr) = args.len_for {
let attr_ident = Ident::new(&attr, Span::call_site());
out.extend(
vec![quote!(
- self.#attr_ident.discriminant().serialize(w)?;
+ self.#attr_ident.len().serialize(w)?;
)]
.into_iter(),
);
@@ -75,34 +75,34 @@ pub fn derive_deserialize(item: TokenStream) -> TokenStream {
let ast: ItemStruct = parse(item).unwrap();
let name = ast.ident;
- let mut len_for = HashMap::new();
let mut discriminant_for = HashMap::new();
+ let mut len_for = HashMap::new();
- let has_len_annotations = ast.fields.iter().any(|field| {
+ let has_discriminant_annotations = ast.fields.iter().any(|field| {
let args = Args::from_attributes(&field.attrs).unwrap();
- args.len_for.is_some()
+ args.discriminant_for.is_some()
});
- let has_discriminant_annotations = ast.fields.iter().any(|field| {
+ let has_len_annotations = ast.fields.iter().any(|field| {
let args = Args::from_attributes(&field.attrs).unwrap();
- args.discriminant_for.is_some()
+ args.len_for.is_some()
});
let mut map_declarations = TokenStream2::new();
- if has_len_annotations {
+ if has_discriminant_annotations {
map_declarations.extend(
vec![quote!(
- let mut len_for = std::collections::HashMap::new();
+ let mut discriminant_for = std::collections::HashMap::new();
)]
.into_iter(),
);
}
- if has_discriminant_annotations {
+ if has_len_annotations {
map_declarations.extend(
vec![quote!(
- let mut discriminant_for = std::collections::HashMap::new();
+ let mut len_for = std::collections::HashMap::new();
)]
.into_iter(),
);
@@ -116,32 +116,32 @@ pub fn derive_deserialize(item: TokenStream) -> TokenStream {
let args = Args::from_attributes(&field.attrs).unwrap();
- if let Some(attr) = args.len_for {
+ if let Some(attr) = args.discriminant_for {
out.extend(
vec![quote!(
- let mut len = 0u16;
- len.deserialize(r)?;
+ let mut discriminant = 0u8;
+ discriminant.deserialize(r)?;
- len_for.insert(#attr, len);
+ discriminant_for.insert(#attr, discriminant);
)]
.into_iter(),
);
- len_for.insert(attr, ());
+ discriminant_for.insert(attr, ());
}
- if let Some(attr) = args.discriminant_for {
+ if let Some(attr) = args.len_for {
out.extend(
vec![quote!(
- let mut discriminant = 0u8;
- discriminant.deserialize(r)?;
+ let mut len = 0u16;
+ len.deserialize(r)?;
- discriminant_for.insert(#attr, discriminant);
+ len_for.insert(#attr, len);
)]
.into_iter(),
);
- discriminant_for.insert(attr, ());
+ len_for.insert(attr, ());
}
if len_for.contains_key(&field_name.to_string()) {