diff --git a/.travis.yml b/.travis.yml index 0a3a7769..50a35d7d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,3 +9,6 @@ matrix: allow_failures: - rust: nightly fast_finish: true +before_script: + - rustup component add rustfmt-preview + - which rustfmt diff --git a/Cargo.toml b/Cargo.toml index 3f47802e..49fce8a0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,19 +1,5 @@ -[package] -name = "ublox" -version = "0.1.0" -authors = ["Lane Kolbly "] -edition = "2018" -license = "MIT" -description = "A crate to communicate with u-blox GPS devices using the UBX protocol" +[workspace] +members = ["ublox", "ublox_derive", "ubx_protocol"] -[dependencies] -serde = "1.0" -serde_derive = "1.0" -serialport = "3.3.0" -bincode = "1.2.1" -chrono = "0.4" -crc = "1.8.1" -syn = "1.0.14" -ublox_derive = { path = "ublox_derive/" } -num-traits = "0.2" -num-derive = "0.2" +[patch.'crates-io'] +ublox_derive = { path = "ublox_derive" } \ No newline at end of file diff --git a/ublox/Cargo.toml b/ublox/Cargo.toml new file mode 100644 index 00000000..f3c331d5 --- /dev/null +++ b/ublox/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "ublox" +version = "0.1.0" +authors = ["Lane Kolbly "] +edition = "2018" +license = "MIT" +description = "A crate to communicate with u-blox GPS devices using the UBX protocol" + +[dependencies] +serde = "1.0" +serde_derive = "1.0" +serialport = "3.3.0" +bincode = "1.2.1" +chrono = "0.4" +crc = "1.8.1" diff --git a/src/error.rs b/ublox/src/error.rs similarity index 100% rename from src/error.rs rename to ublox/src/error.rs diff --git a/src/lib.rs b/ublox/src/lib.rs similarity index 96% rename from src/lib.rs rename to ublox/src/lib.rs index ca9e9ae2..f46d28a0 100644 --- a/src/lib.rs +++ b/ublox/src/lib.rs @@ -3,18 +3,18 @@ //! `ublox` is a library to talk to u-blox GPS devices using the UBX protocol. //! At time of writing this library is developed for a device which behaves like //! a NEO-6M device. +use crate::error::{Error, Result}; use chrono::prelude::*; use crc::{crc16, Hasher16}; use std::io; use std::time::{Duration, Instant}; -use crate::error::{Error, Result}; -pub use crate::ubx_packets::*; pub use crate::segmenter::Segmenter; +pub use crate::ubx_packets::*; mod error; -mod ubx_packets; mod segmenter; +mod ubx_packets; #[derive(Debug)] pub enum ResetType { @@ -32,7 +32,6 @@ pub struct Device { port: Box, segmenter: Segmenter, //buf: Vec, - alp_data: Vec, alp_file_id: u16, @@ -61,7 +60,7 @@ impl Device { /// /// This function will panic if it cannot open the serial port. 
pub fn new(device: &str) -> Result { - let s = serialport::SerialPortSettings{ + let s = serialport::SerialPortSettings { baud_rate: 9600, data_bits: serialport::DataBits::Eight, flow_control: serialport::FlowControl::None, @@ -70,7 +69,7 @@ impl Device { timeout: Duration::from_millis(1), }; let port = serialport::open_with_settings(device, &s).unwrap(); - let mut dev = Device{ + let mut dev = Device { port: port, segmenter: Segmenter::new(), alp_data: Vec::new(), @@ -174,7 +173,7 @@ impl Device { pub fn get_position(&mut self) -> Option { match (&self.navstatus, &self.navpos) { (Some(status), Some(pos)) => { - if status.itow != pos.get_itow() { + if status.itow != pos.itow { None } else if status.flags & 0x1 == 0 { None @@ -190,7 +189,7 @@ impl Device { pub fn get_velocity(&mut self) -> Option { match (&self.navstatus, &self.navvel) { (Some(status), Some(vel)) => { - if status.itow != vel.get_itow() { + if status.itow != vel.itow { None } else if status.flags & 0x1 == 0 { None @@ -264,7 +263,7 @@ impl Device { pub fn load_aid_data( &mut self, position: Option, - tm: Option> + tm: Option>, ) -> Result<()> { let mut aid = AidIni::new(); match position { @@ -313,7 +312,10 @@ impl Device { return Ok(Some(Packet::AckAck(packet))); } Some(Packet::MonVer(packet)) => { - println!("Got versions: SW={} HW={}", packet.sw_version, packet.hw_version); + println!( + "Got versions: SW={} HW={}", + packet.sw_version, packet.hw_version + ); return Ok(None); } Some(Packet::NavPosVelTime(packet)) => { @@ -382,7 +384,12 @@ impl Device { } fn send(&mut self, packet: UbxPacket) -> Result<()> { - CfgMsg{classid: 5, msgid: 4, rates: [0, 0, 0, 0, 0, 0]}.to_bytes(); + CfgMsg { + classid: 5, + msgid: 4, + rates: [0, 0, 0, 0, 0, 0], + } + .to_bytes(); let serialized = packet.serialize(); self.port.write_all(&serialized)?; Ok(()) diff --git a/src/main.rs b/ublox/src/main.rs similarity index 82% rename from src/main.rs rename to ublox/src/main.rs index 70318695..b6bc6e62 100644 --- a/src/main.rs +++ b/ublox/src/main.rs @@ -1,16 +1,20 @@ -use ublox::{Device, Position}; -use std::time::Duration; use chrono::prelude::*; +use std::time::Duration; +use ublox::{Device, Position}; fn main() { let mut dev = Device::new("/dev/ttyUSB0").unwrap(); - let pos = Position{lon: -97.5, lat: 30.2, alt: 200.0}; + let pos = Position { + lon: -97.5, + lat: 30.2, + alt: 200.0, + }; println!("Setting AID data..."); match dev.load_aid_data(Some(pos), Some(Utc::now())) { Err(e) => { println!("Got error setting AID data: {:?}", e); - }, + } _ => {} } diff --git a/src/segmenter.rs b/ublox/src/segmenter.rs similarity index 100% rename from src/segmenter.rs rename to ublox/src/segmenter.rs diff --git a/src/ubx_packets.rs b/ublox/src/ubx_packets.rs similarity index 78% rename from src/ubx_packets.rs rename to ublox/src/ubx_packets.rs index e99beaca..af5acbef 100644 --- a/src/ubx_packets.rs +++ b/ublox/src/ubx_packets.rs @@ -2,15 +2,8 @@ use crate::error::Result; use bincode; use chrono::prelude::*; use serde_derive::{Deserialize, Serialize}; -use std::vec::Vec; use std::str; -//use syn::{parse_macro_input, parse_quote, DeriveInput, Data, TokenStream}; -use ublox_derive::ubx_packet; - -// These are needed for ubx_packet -use std::convert::TryInto; -use num_derive::{FromPrimitive, ToPrimitive}; -use num_traits::{FromPrimitive, ToPrimitive}; +use std::vec::Vec; #[derive(Debug)] pub struct Position { @@ -73,35 +66,6 @@ impl UbxPacket { } } -/*#[proc_macro_attribute] -fn ubx_packet(attr: TokenStream, input: TokenStream) -> TokenStream { - let input = 
parse_macro_input!(input as DeriveInput); - match input.data { - Data::Struct(ref data) => { - match data.fields { - Fields::Named(ref fields) => { - println!("{:?}", fields); - } - Fields::Unnamed(ref fields) => { - // - } - } - } - Data::Enum(_) | Data::Union(_) => unimplemented!() - } -}*/ - -/*#[ubx_packet] -struct MyPacket { - tow: u32, - lon: i32, - lat: i32, - height: i32, - height_msl: i32, - h_acc: u32, - v_acc: u32, -}*/ - pub trait UbxMeta { fn get_classid() -> u8; fn get_msgid() -> u8; @@ -148,18 +112,7 @@ macro_rules! ubx_meta { }; } -#[ubx_packet] -pub struct NavPosLLH { - itow: u32, - lon: i32, - lat: i32, - height: i32, - height_msl: i32, - horizontal_accuracy: u32, - vertical_accuracy: u32, -} - -/*#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct NavPosLLH { pub itow: u32, pub lon: i32, @@ -168,45 +121,21 @@ pub struct NavPosLLH { pub height_msl: i32, pub horizontal_accuracy: u32, pub vertical_accuracy: u32, -}*/ +} -//ubx_meta!(NavPosLLH, 0x01, 0x02); +ubx_meta!(NavPosLLH, 0x01, 0x02); impl From<&NavPosLLH> for Position { fn from(packet: &NavPosLLH) -> Self { Position { - lon: packet.get_lon() as f32 / 10_000_000.0, - lat: packet.get_lat() as f32 / 10_000_000.0, - alt: packet.get_height_msl() as f32 / 1000.0, - } - } -} - -trait FooTrait { - fn foo() -> u32; -} - -impl From<&T> for Velocity { - fn from(packet: &T) -> Self { - Velocity { - speed: 0.0, - heading: 0.0, + lon: packet.lon as f32 / 10_000_000.0, + lat: packet.lat as f32 / 10_000_000.0, + alt: packet.height_msl as f32 / 1000.0, } } } -#[ubx_packet] -pub struct NavVelNED { - pub itow: u32, - pub vel_north: i32, // cm/s - pub vel_east: i32, - pub vel_down: i32, - pub speed: u32, - pub ground_speed: u32, - pub heading: i32, // 1e-5 degrees -} - -/*#[derive(Serialize, Deserialize, Debug, PartialEq)] +#[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct NavVelNED { pub itow: u32, pub vel_north: i32, // cm/s @@ -217,33 +146,17 @@ pub struct NavVelNED { pub heading: i32, // 1e-5 degrees } -ubx_meta!(NavVelNED, 0x01, 0x12);*/ +ubx_meta!(NavVelNED, 0x01, 0x12); impl From<&NavVelNED> for Velocity { fn from(packet: &NavVelNED) -> Self { Velocity { - speed: packet.get_ground_speed() as f32 / 1_000.0, - heading: packet.get_heading() as f32 / 100_000.0, + speed: packet.ground_speed as f32 / 1_000.0, + heading: packet.heading as f32 / 100_000.0, } } } -/*pub struct NavPosVelTime { - itow: u32, - year: u16, - month: u8, - day: u8, - hour: u8, - min: u8, - sec: u8, - - #[ubx_bitfield(8)] - #[ubx_range(0:0)] - valid: bool, - - // etc. 
-}*/ - #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct NavPosVelTime { pub itow: u32, @@ -301,14 +214,13 @@ impl From<&NavPosVelTime> for Velocity { impl From<&NavPosVelTime> for DateTime { fn from(sol: &NavPosVelTime) -> Self { - let ns = if sol.nanosecond < 0 { 0 } else { sol.nanosecond } as u32; + let ns = if sol.nanosecond < 0 { + 0 + } else { + sol.nanosecond + } as u32; Utc.ymd(sol.year as i32, sol.month.into(), sol.day.into()) - .and_hms_nano( - sol.hour.into(), - sol.min.into(), - sol.sec.into(), - ns, - ) + .and_hms_nano(sol.hour.into(), sol.min.into(), sol.sec.into(), ns) } } @@ -482,8 +394,12 @@ pub struct MonVer { } impl UbxMeta for MonVer { - fn get_classid() -> u8 { 0x0a } - fn get_msgid() -> u8 { 0x04 } + fn get_classid() -> u8 { + 0x0a + } + fn get_msgid() -> u8 { + 0x04 + } fn to_bytes(&self) -> Vec { unimplemented!("Sending MonVer packets is unimplemented"); @@ -516,16 +432,10 @@ macro_rules! parse_packet_branch { impl Packet { pub fn deserialize(classid: u8, msgid: u8, payload: &[u8]) -> Result { match (classid, msgid) { - //(0x01, 0x02) => parse_packet_branch!(Packet::NavPosLLH, payload), - (0x01, 0x02) => { - Ok(Packet::NavPosLLH(NavPosLLH::new(payload.try_into().unwrap()))) - }, + (0x01, 0x02) => parse_packet_branch!(Packet::NavPosLLH, payload), (0x01, 0x03) => parse_packet_branch!(Packet::NavStatus, payload), (0x01, 0x07) => parse_packet_branch!(Packet::NavPosVelTime, payload), - //(0x01, 0x12) => parse_packet_branch!(Packet::NavVelNED, payload), - (0x01, 0x12) => { - Ok(Packet::NavVelNED(NavVelNED::new(payload.try_into().unwrap()))) - } + (0x01, 0x12) => parse_packet_branch!(Packet::NavVelNED, payload), (0x05, 0x01) => parse_packet_branch!(Packet::AckAck, payload), (0x06, 0x00) => { // Depending on the port ID, we parse different packets @@ -542,11 +452,11 @@ impl Packet { (0x0A, 0x04) => { let sw_version = str::from_utf8(&payload[0..30]).unwrap(); let hw_version = str::from_utf8(&payload[31..40]).unwrap(); - return Ok(Packet::MonVer(MonVer{ + return Ok(Packet::MonVer(MonVer { sw_version: sw_version.to_string(), hw_version: hw_version.to_string(), })); - }, + } (0x0B, 0x01) => parse_packet_branch!(Packet::AidIni, payload), (0x0B, 0x32) => parse_packet_branch!(Packet::AlpSrv, payload), (c, m) => { diff --git a/ublox_derive/Cargo.toml b/ublox_derive/Cargo.toml index d401160d..ee5a4ff0 100644 --- a/ublox_derive/Cargo.toml +++ b/ublox_derive/Cargo.toml @@ -1,19 +1,15 @@ [package] name = "ublox_derive" -version = "0.0.0" +version = "0.0.1" authors = ["Lane Kolbly "] edition = "2018" publish = false -[lib] -proc-macro = true - [dependencies] -proc-macro2 = "1.0" quote = "1.0" -syn = { version = "1.0.14", features = ["extra-traits"] } -Inflector = "0.11.4" -proc-macro-error = "0.4.8" -itertools = "0.8.2" -num-traits = "0.2" -num-derive = "0.2" +syn = { version = "1.0.14", features = ["extra-traits", "parsing", "full"] } +proc-macro2 = { version = "1.0", features = ["span-locations"] } +log = "0.4" + +[dev-dependencies] +which = { version = "3.0", default-features = false } \ No newline at end of file diff --git a/ublox_derive/src/error.rs b/ublox_derive/src/error.rs new file mode 100644 index 00000000..f8207229 --- /dev/null +++ b/ublox_derive/src/error.rs @@ -0,0 +1,47 @@ +use std::{fmt::Write, path::Path}; +use syn::Error; + +pub fn panic_on_parse_error((src_path, src_cnt): (&Path, &str), err: &Error) -> ! 
{ + let span = err.span(); + let start = span.start(); + let end = span.end(); + + let mut code_problem = String::new(); + let nlines = end.line - start.line + 1; + for (i, line) in src_cnt + .lines() + .skip(start.line - 1) + .take(nlines) + .enumerate() + { + code_problem.push_str(&line); + code_problem.push('\n'); + if i == 0 && start.column > 0 { + write!(&mut code_problem, "{:1$}", ' ', start.column).expect("write to String failed"); + } + let code_problem_len = if i == 0 { + if i == nlines - 1 { + end.column - start.column + } else { + line.len() - start.column - 1 + } + } else if i != nlines - 1 { + line.len() + } else { + end.column + }; + writeln!(&mut code_problem, "{:^^1$}", '^', code_problem_len).expect("Not enought memory"); + if i == end.line { + break; + } + } + + panic!( + "parsing of {name} failed\nerror: {err}\n{code_problem}\nAt {name}:{line_s}:{col_s}", + name = src_path.display(), + err = err, + code_problem = code_problem, + line_s = start.line, + col_s = start.column, + ); +} diff --git a/ublox_derive/src/file_cache.rs b/ublox_derive/src/file_cache.rs new file mode 100644 index 00000000..1358b0b8 --- /dev/null +++ b/ublox_derive/src/file_cache.rs @@ -0,0 +1,47 @@ +/// To prevent modification time changing +use std::{ + fs::File, + io, + io::{Read, Write}, + path::PathBuf, +}; + +/// Implement write cache in memory, and update file only if necessary +pub struct FileWriteCache { + cnt: Vec, + path: PathBuf, +} + +impl FileWriteCache { + pub fn new>(p: P) -> FileWriteCache { + let path = p.into(); + FileWriteCache { cnt: vec![], path } + } + + pub fn update_file_if_necessary(self) -> Result<(), io::Error> { + if let Ok(mut f) = File::open(&self.path) { + let mut cur_cnt = vec![]; + f.read_to_end(&mut cur_cnt)?; + if cur_cnt == self.cnt { + return Ok(()); + } + } + let mut f = File::create(&self.path)?; + f.write_all(&self.cnt)?; + Ok(()) + } + + pub fn replace_content(&mut self, bytes: Vec) { + self.cnt = bytes; + } +} + +impl io::Write for FileWriteCache { + fn write(&mut self, data: &[u8]) -> Result { + self.cnt.extend_from_slice(data); + Ok(data.len()) + } + fn flush(&mut self) -> Result<(), io::Error> { + Ok(()) + } +} diff --git a/ublox_derive/src/input.rs b/ublox_derive/src/input.rs new file mode 100644 index 00000000..5c50bb71 --- /dev/null +++ b/ublox_derive/src/input.rs @@ -0,0 +1,371 @@ +use crate::types::{ + PackDesc, PackField, PackFieldMap, PackHeader, UbxEnum, UbxEnumRestHandling, UbxTypeFromFn, +}; +use log::trace; +use std::num::NonZeroUsize; +use syn::{parse::Parse, spanned::Spanned, Attribute, Error, Ident, Token, Type}; + +pub fn parse_packet_description(input: syn::ItemStruct) -> syn::Result { + let struct_name = &input.ident; + let main_sp = input.span(); + + let header = parse_ubx_attr(&input.attrs, &struct_name)?; + let struct_comment = extract_item_comment(&input.attrs)?; + + let name = struct_name.to_string(); + let fields = parse_fields(input)?; + + let ret = PackDesc { + name, + header, + comment: struct_comment, + fields, + }; + + if ret.header.fixed_payload_len.map(usize::from) == ret.packet_payload_size() { + Ok(ret) + } else { + Err(Error::new( + main_sp, + format!( + "Calculated packet size ({:?}) doesn't match specified ({:?})", + ret.packet_payload_size(), + ret.header.fixed_payload_len + ), + )) + } +} + +pub fn parse_ubx_enum_type(input: syn::ItemEnum) -> syn::Result { + let enum_name = &input.ident; + let attr = input + .attrs + .iter() + .find(|a| a.path.is_ident("ubx")) + .ok_or_else(|| { + Error::new( + enum_name.span(), + 
format!("No ubx attribute for ubx_type enum {}", enum_name), + ) + })?; + let meta = attr.parse_meta()?; + trace!("parse_ubx_enum_type: ubx_type meta {:?}", meta); + let mut from_fn = None; + let mut to_fn = false; + let mut rest_handling = None; + match meta { + syn::Meta::List(list) => { + for item in list.nested { + if let syn::NestedMeta::Meta(syn::Meta::Path(p)) = item { + if p.is_ident("from") { + from_fn = Some(UbxTypeFromFn::From); + } else if p.is_ident("to") { + to_fn = true; + } else if p.is_ident("from_unchecked") { + from_fn = Some(UbxTypeFromFn::FromUnchecked); + } else if p.is_ident("rest_reserved") { + rest_handling = Some(UbxEnumRestHandling::Reserved); + } else if p.is_ident("rest_error") { + rest_handling = Some(UbxEnumRestHandling::ErrorProne); + } else { + return Err(syn::Error::new(p.span(), "Invalid ubx attribute")); + } + } else { + return Err(syn::Error::new(item.span(), "Invalid ubx attribute")); + } + } + } + _ => return Err(syn::Error::new(attr.span(), "Invalid ubx attributes")), + } + + let attr = input + .attrs + .iter() + .find(|a| a.path.is_ident("repr")) + .ok_or_else(|| { + Error::new( + enum_name.span(), + format!("No repr attribute for ubx_type enum {}", enum_name), + ) + })?; + let meta = attr.parse_meta()?; + let repr: Type = match meta { + syn::Meta::List(list) if list.nested.len() == 1 => { + if let syn::NestedMeta::Meta(syn::Meta::Path(ref p)) = list.nested[0] { + if !p.is_ident("u8") { + unimplemented!(); + } + } else { + return Err(syn::Error::new( + list.nested[0].span(), + "Invalid repr attribute for ubx_type enum", + )); + } + syn::parse_quote! { u8 } + } + _ => { + return Err(syn::Error::new( + attr.span(), + "Invalid repr attribute for ubx_type enum", + )) + } + }; + let mut variants = Vec::with_capacity(input.variants.len()); + for var in input.variants { + if syn::Fields::Unit != var.fields { + return Err(syn::Error::new( + var.fields.span(), + "Invalid variant for ubx_type enum", + )); + } + let var_sp = var.ident.span(); + let (_, expr) = var + .discriminant + .ok_or_else(|| Error::new(var_sp, "ubx_type enum variant should has value"))?; + let variant_value = if let syn::Expr::Lit(syn::ExprLit { + lit: syn::Lit::Int(litint), + .. + }) = expr + { + litint.base10_parse::()? + } else { + return Err(syn::Error::new( + expr.span(), + "Invalid variant value for ubx_type enum", + )); + }; + variants.push((var.ident, variant_value)); + } + + let attrs = input + .attrs + .into_iter() + .filter(|x| !x.path.is_ident("ubx") && !x.path.is_ident("ubx_type")) + .collect(); + + Ok(UbxEnum { + attrs, + name: input.ident, + repr, + from_fn, + to_fn, + rest_handling, + variants, + }) +} + +fn parse_ubx_attr(attrs: &[Attribute], struct_name: &Ident) -> syn::Result { + let attr = attrs + .iter() + .find(|a| a.path.is_ident("ubx")) + .ok_or_else(|| { + Error::new( + struct_name.span(), + format!("No ubx attribute for payload struct {}", struct_name), + ) + })?; + let meta = attr.parse_meta()?; + trace!("parse_ubx_attr: ubx meta {:?}", meta); + let meta = match meta { + syn::Meta::List(x) => x, + _ => return Err(Error::new(meta.span(), "Invalid ubx attribute syntax")), + }; + + let mut class = None; + let mut id = None; + let mut fixed_payload_len = None; + + for e in &meta.nested { + match e { + syn::NestedMeta::Meta(syn::Meta::NameValue(syn::MetaNameValue { + path, lit, .. 
+ })) => { + if path.is_ident("class") { + if class.is_some() { + return Err(Error::new(e.span(), "Duplicate \"class\" attribute")); + } + class = match lit { + syn::Lit::Int(x) => Some(x.base10_parse::()?), + _ => return Err(Error::new(lit.span(), "Should be integer literal")), + }; + } else if path.is_ident("id") { + if id.is_some() { + return Err(Error::new(e.span(), "Duplicate \"id\" attribute")); + } + id = match lit { + syn::Lit::Int(x) => Some(x.base10_parse::()?), + _ => return Err(Error::new(lit.span(), "Should be integer literal")), + }; + } else if path.is_ident("fixed_payload_len") { + if fixed_payload_len.is_some() { + return Err(Error::new( + e.span(), + "Duplicate \"fixed_payload_len\" attribute", + )); + } + fixed_payload_len = match lit { + syn::Lit::Int(x) => Some(x.base10_parse::()?), + _ => return Err(Error::new(lit.span(), "Should be integer literal")), + }; + } else { + return Err(Error::new(path.span(), "Unsupported attribute")); + } + } + _ => return Err(Error::new(e.span(), "Unsupported attribute")), + } + } + let class = class.ok_or_else(|| Error::new(meta.span(), "No \"class\" attribute"))?; + let id = id.ok_or_else(|| Error::new(meta.span(), "No \"id\" attribute"))?; + + Ok(PackHeader { + class, + id, + fixed_payload_len, + }) +} + +fn extract_item_comment(attrs: &[Attribute]) -> syn::Result { + let mut doc_comments = String::new(); + for a in attrs { + if a.path.is_ident("doc") { + let meta = a.parse_meta()?; + match meta { + syn::Meta::NameValue(syn::MetaNameValue { lit, .. }) => { + let lit = match lit { + syn::Lit::Str(s) => s, + _ => return Err(Error::new(lit.span(), "Invalid comment")), + }; + doc_comments.push_str(&lit.value()); + } + _ => return Err(Error::new(a.span(), "Invalid comments")), + } + } + } + Ok(doc_comments) +} + +fn parse_fields(struct_data: syn::ItemStruct) -> syn::Result> { + let fields = match struct_data.fields { + syn::Fields::Named(x) => x, + _ => { + return Err(Error::new( + struct_data.fields.span(), + "Unsupported fields format", + )); + } + }; + let mut ret = Vec::with_capacity(fields.named.len()); + for f in fields.named { + let f_sp = f.span(); + let syn::Field { + ident: name, + attrs, + ty, + .. 
+ } = f; + let size_bytes = field_size_bytes(&ty)?; + let name = name.ok_or_else(|| Error::new(f_sp, "No field name"))?; + let comment = extract_item_comment(&attrs)?; + let mut map = PackFieldMap::none(); + for a in attrs { + if !a.path.is_ident("doc") { + if !map.is_none() { + return Err(Error::new( + a.span(), + "Two map attributes for the same field", + )); + } + map = a.parse_args::()?; + } + } + + if let Some(ref map_ty) = map.map_type { + if *map_ty == ty { + return Err(Error::new(map_ty.span(), "You map type to the same type")); + } + } + + ret.push(PackField { + name, + ty, + map, + comment, + size_bytes, + }); + } + + Ok(ret) +} + +mod kw { + syn::custom_keyword!(map_type); + syn::custom_keyword!(scale); + syn::custom_keyword!(alias); +} + +impl Parse for PackFieldMap { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let mut map = PackFieldMap::none(); + if input.peek(kw::map_type) { + input.parse::()?; + input.parse::()?; + map.map_type = Some(input.parse()?); + } + if input.peek(Token![,]) { + input.parse::()?; + } + if input.peek(kw::scale) { + input.parse::()?; + input.parse::()?; + map.scale = Some(input.parse()?); + } + if input.peek(Token![,]) { + input.parse::()?; + } + if input.peek(kw::alias) { + input.parse::()?; + input.parse::()?; + map.alias = Some(input.parse()?); + } + if input.peek(Token![,]) { + input.parse::()?; + } + + Ok(map) + } +} + +struct Comment(String); + +impl Parse for Comment { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + if input.peek(Token![#]) && input.peek2(syn::token::Bracket) && input.peek3(Ident) { + let attrs = input.call(Attribute::parse_outer)?; + + Ok(Comment(extract_item_comment(&attrs)?)) + } else { + Ok(Comment(String::new())) + } + } +} + +fn field_size_bytes(ty: &Type) -> syn::Result> { + //TODO: make this array static + //TODO: support f32, f64 + let valid_types: [(Type, NonZeroUsize); 6] = [ + (syn::parse_quote!(u8), NonZeroUsize::new(1).unwrap()), + (syn::parse_quote!(i8), NonZeroUsize::new(1).unwrap()), + (syn::parse_quote!(u16), NonZeroUsize::new(2).unwrap()), + (syn::parse_quote!(i16), NonZeroUsize::new(2).unwrap()), + (syn::parse_quote!(u32), NonZeroUsize::new(4).unwrap()), + (syn::parse_quote!(i32), NonZeroUsize::new(4).unwrap()), + ]; + if let Some((_ty, size)) = valid_types.iter().find(|x| x.0 == *ty) { + Ok(Some(*size)) + } else { + Err(Error::new( + ty.span(), + format!("Not supported type, expect one of {:?}", valid_types), + )) + } +} diff --git a/ublox_derive/src/lib.rs b/ublox_derive/src/lib.rs index 61ce5983..4283d899 100644 --- a/ublox_derive/src/lib.rs +++ b/ublox_derive/src/lib.rs @@ -1,546 +1,105 @@ -extern crate proc_macro; -use proc_macro2::TokenStream; -use syn::{parse_macro_input, parse_quote, DeriveInput, Data, Fields, FieldsNamed, Ident, Attribute}; -use syn::parse::Parser; -use syn::spanned::Spanned; -use quote::{quote, quote_spanned, format_ident}; -use std::convert::TryInto; -use inflector::Inflector; -use proc_macro_error::{abort, proc_macro_error}; -use syn::parse::Parse; -use itertools::Itertools; - -#[derive(Clone, Debug)] -struct Bitrange { - lsb: usize, - msb: usize, - ident: Ident, - enum_type: Option, - field_type: syn::TypePath, -} - -#[derive(Clone, Debug)] -struct Bitfield { - num_bits: usize, - ranges: Vec, -} - -impl Bitfield { - fn new(num_bits: usize) -> Bitfield { - Bitfield { - num_bits: num_bits, - ranges: vec!(), - } - } - - fn add_range(&mut self, range: Bitrange) { - self.ranges.push(range); - } -} - -#[derive(Debug)] -enum Member { - Bitfield(Bitfield), 
- Primitive(syn::Field), -} - -enum UbxAttribute { - UbxBitRange((usize, usize)), - UbxBitField(usize), - UbxEnum(Ident), -} - -fn parse_attribute(attr: &Attribute) -> Option { - //println!("{:#?}", attr); - let parser = syn::punctuated::Punctuated::::parse_separated_nonempty; - - let name = attr.path.get_ident().unwrap(); - let arguments = attr.parse_args_with(parser).unwrap(); - - match name.to_string().as_str() { - "ubx_bitfield" => { - if arguments.len() != 1 { - panic!("Incorrect number of arguments to ubx_bitfield"); - } - let arg = syn::parse2(arguments[0].clone()).unwrap(); - match &arg { - syn::Lit::Int(litint) => { - Some(UbxAttribute::UbxBitField(litint.base10_parse().unwrap())) - } - _ => { abort!(&arguments[0], "Only int literals allowed!"); } - } - } - "ubx_bitrange" => { - if arguments.len() != 1 { - panic!("Incorrect number of arguments to ubx_bitrange"); - } - let parser = syn::punctuated::Punctuated::::parse_separated_nonempty; - let bits = parser.parse2(arguments[0].clone()).unwrap(); - if bits.len() != 2 { - panic!("Bit slice may only contain 2 elements in ubx_bitrange"); - } - let msb: usize = match &bits[0] { - syn::Lit::Int(litint) => { - litint.base10_parse().unwrap() - } - _ => { abort!(&bits[0], "Only int literals allowed!"); } - }; - let lsb: usize = match &bits[1] { - syn::Lit::Int(litint) => { - litint.base10_parse().unwrap() - } - _ => { abort!(&bits[1], "Only int literals allowed!"); } - }; - Some(UbxAttribute::UbxBitRange((msb, lsb))) - } - "ubx_enum" => { - if arguments.len() != 1 { - panic!("Incorrect number of arguments to ubx_enum"); - } - Some(UbxAttribute::UbxEnum(match arguments[0].clone().into_iter().next().unwrap() { - proc_macro2::TokenTree::Ident(ident) => ident, - _ => { abort!(arguments[0], "Must specify an identifier for ubx_enum"); } - })) - } - _ => { None } - } +mod error; +mod file_cache; +mod input; +mod output; +mod types; + +pub use error::panic_on_parse_error; +use quote::ToTokens; +use std::{collections::HashMap, path::Path}; +use types::HowCodeForPackage; + +/// process `src` and save result of macro expansion to `dst` +/// +/// # Panics +/// Panics on error +pub fn expand_ubx_packets_code_in_file(src: S, dst: D) +where + S: AsRef, + D: AsRef, +{ + let src_cnt = std::fs::read_to_string(src.as_ref()).unwrap_or_else(|err| { + panic!( + "Error during read for file {}: {}", + src.as_ref().display(), + err + ) + }); + let data = match expand_ubx_packets_code_in_str(&src_cnt) { + Ok(x) => x, + Err(ref err) => panic_on_parse_error((src.as_ref(), &src_cnt), err), + }; + let mut file = file_cache::FileWriteCache::new(dst.as_ref()); + file.replace_content(data.into_bytes()); + file.update_file_if_necessary().unwrap_or_else(|err| { + panic!( + "Error during write to file {}: {}", + dst.as_ref().display(), + err + ); + }); } -fn find_struct_segments(fields: &FieldsNamed) -> Vec { - let mut segments = vec!(); - let mut current_bitfield: Option = None; - for field in fields.named.iter() { - let tags: Vec<_> = field.attrs.iter().map(parse_attribute).collect(); - let bitfield: Vec<_> = tags.iter().filter_map(|x| { - match x { - Some(UbxAttribute::UbxBitField(size)) => Some(size), - _ => None, - } - }).collect(); - let has_bitfield = bitfield.len() > 0; - - let bitrange: Vec<_> = tags.iter().filter_map(|x| { - match x { - Some(UbxAttribute::UbxBitRange((msb, lsb))) => Some((msb, lsb)), - _ => None, - } - }).collect(); - let has_bitrange = bitrange.len() > 0; - - let enum_type: Vec<_> = tags.iter().filter_map(|x| { - match x { - 
Some(UbxAttribute::UbxEnum(e)) => Some(e), - _ => None, - } - }).collect(); - let enum_type = if enum_type.len() > 0 { - Some(enum_type[0].clone()) - } else { - None - }; - - if has_bitfield { - if let Some(field) = current_bitfield { - segments.push(Member::Bitfield(field)); - } - current_bitfield = Some(Bitfield::new(*bitfield[0])); - } - - if has_bitrange { - let (msb, lsb) = bitrange[0]; - let bitrange = Bitrange{ - lsb: *lsb, - msb: *msb, - ident: field.ident.as_ref().unwrap().clone(), - enum_type: enum_type, - field_type: match &field.ty { - syn::Type::Path(path) => { - path.clone() +pub fn expand_ubx_packets_code_in_str(src_cnt: &str) -> syn::Result { + let mut ret = String::new(); + let syn_file = syn::parse_file(src_cnt)?; + let mut packets = Vec::with_capacity(100); + let mut ubx_types = HashMap::new(); + for item in syn_file.items { + match item { + syn::Item::Struct(s) => { + let mut send = false; + let mut recv = false; + for a in &s.attrs { + if !send { + send = a.path.is_ident("ubx_packet_send"); } - _ => { - abort!(field, "Only path types allowed for bitmap ranges"); + if !recv { + recv = a.path.is_ident("ubx_packet_recv"); } } - }; - match &mut current_bitfield { - Some(bitfield) => { - bitfield.add_range(bitrange); - } - None => { - abort!(field, "Must have an active bitfield to specify a bitrange!"); - } - } - } else { - if let Some(bitfield) = current_bitfield { - segments.push(Member::Bitfield(bitfield)); - } - current_bitfield = None; - - segments.push(Member::Primitive(field.clone())); - } - } - if let Some(field) = current_bitfield { - segments.push(Member::Bitfield(field)); - } - //println!("{:#?}", segments); - segments -} - -struct Accessor { - getter: TokenStream, - setter: TokenStream, - trait_getter: TokenStream, - trait_setter: TokenStream, -} - -//fn build_bitrange_accessors(offset: &TokenStream, bitfield: &Bitfield, bitrange: &Bitrange) -> (TokenStream, TokenStream, TokenStream) { -fn build_bitrange_accessors(offset: &TokenStream, bitfield: &Bitfield, bitrange: &Bitrange) -> Accessor { - let underlying_fn_name = format_ident!("get_{}_underlying", bitrange.ident); - let underlying_set_fn_name = format_ident!("set_{}_underlying", bitrange.ident); - let underlying_type = format_ident!("u{}", bitfield.num_bits); - let getter_fn_name = format_ident!("get_{}", bitrange.ident); - let setter_fn_name = format_ident!("set_{}", bitrange.ident); - let span = bitrange.ident.span(); - let return_type = if let Some(ident) = &bitrange.enum_type { - let field_type = &ident; - parse_quote!{ Option<#field_type> } - } else { - bitrange.field_type.clone() - }; - let field_type = if let Some(ident) = &bitrange.enum_type { - parse_quote!{ #ident } - } else { - bitrange.field_type.clone() - }; - let msb = bitrange.msb; - let lsb = bitrange.lsb; - let shifter_fn_name = format_ident!("shift_{}", bitrange.ident); - let type_cvt_name = format_ident!("type_cvt_{}", bitrange.ident); - let type_uncvt_name = format_ident!("type_uncvt_{}", bitrange.ident); - let type_cvt = if let Some(enumtype) = &bitrange.enum_type { - let fromname = format_ident!("from_{}", underlying_type); - quote_spanned! { - span => - #enumtype::#fromname(value) - } - } else if return_type.path.get_ident().unwrap().to_string() == "bool" { - quote_spanned! { - span => - value != 0 - } - } else { - quote_spanned! { - span => - value.try_into().unwrap() - } - }; - let type_uncvt = if let Some(enumtype) = &bitrange.enum_type { - let toname = format_ident!("to_{}", underlying_type); - quote_spanned! 
{ - span => - value.#toname().unwrap() - } - } else if return_type.path.get_ident().unwrap().to_string() == "bool" { - quote_spanned! { - span => - if value { 1 } else { 0 } - } - } else { - quote_spanned! { - span => - value.try_into().unwrap() - } - }; - Accessor { - getter: quote_spanned! { - span => - fn #underlying_fn_name(&self) -> #underlying_type { - #underlying_type::from_le_bytes(self.data[#offset..#offset + std::mem::size_of::<#underlying_type>()].try_into().unwrap()) - } - - fn #type_cvt_name(value: #underlying_type) -> #return_type { - #type_cvt - } - - fn #shifter_fn_name(&self) -> #underlying_type { - let underlying = self.#underlying_fn_name(); - (underlying >> #lsb) & ((1 << (#msb - #lsb + 1)) - 1) - } - - fn #getter_fn_name(&self) -> #return_type { - Self::#type_cvt_name(self.#shifter_fn_name()) - } - }, - trait_getter: quote_spanned! { - span => - fn #getter_fn_name(&self) -> #return_type; - }, - setter: quote_spanned! { - span => - fn #underlying_set_fn_name(&mut self, value: #underlying_type) { - let bytes = value.to_le_bytes(); - self.data[#offset..#offset + std::mem::size_of::<#underlying_type>()].clone_from_slice(&bytes); - } - - fn #type_uncvt_name(value: #field_type) -> #underlying_type { - #type_uncvt - } - - fn #setter_fn_name(&mut self, value: #field_type) { - let original = self.#underlying_fn_name(); - let new_field_value = Self::#type_uncvt_name(value); - let mask = ((1 << (#msb - #lsb + 1)) - 1) << #lsb; - let newval = (original & !mask) | (new_field_value << #lsb); - //println!("{} {} {} {}", original, new_field_value, mask, newval); - self.#underlying_set_fn_name(newval); - } - }, - trait_setter: quote_spanned! { - span => - fn #setter_fn_name(&mut self, value: #field_type); - }, - } -} - -fn process_struct(struct_name: &Ident, fields: &FieldsNamed) -> TokenStream { - let segments = find_struct_segments(fields); - - let fs: Vec<_> = fields.named.iter().map(|f| { - for attr in f.attrs.iter() { - parse_attribute(&attr); - } - }).collect(); - - let sizes: Vec<_> = segments.iter().map(|f| { - match f { - Member::Bitfield(bitfield) => { - let nbits = bitfield.num_bits; - quote_spanned! { - bitfield.ranges[0].ident.span() => - (#nbits / 8) - } - } - Member::Primitive(f) => { - let ftype = &f.ty; - quote_spanned! { - f.span() => - std::mem::size_of::<#ftype>() - } - } - } - }).collect(); - - let offsets: Vec<_> = sizes.iter().scan(quote! { 0 }, |state, size| { - let orig_state = state.clone(); - *state = quote! { - #state + #size - }; - Some(orig_state) - }).collect(); - - let accessors: Vec = segments.iter().zip(offsets).map(|(f, o)| { - match f { - Member::Bitfield(bitfield) => { - let accessors: Vec = bitfield.ranges.iter().map(|range| { - //bitfield.ranges.iter().map(|range| { - build_bitrange_accessors(&o, &bitfield, &range) - }).collect(); - accessors - /*((quote_spanned! { - bitfield.ranges[0].ident.span() => - #(#getters)* - }, quote_spanned! { - bitfield.ranges[0].ident.span() => - #(#trait_getters)* - }), - quote_spanned! 
{ - bitfield.ranges[0].ident.span() => - #(#setters)* - })*/ - } - Member::Primitive(f) => { - let enum_attrs: Vec<_> = f.attrs.iter().filter_map(|attr| { - parse_attribute(attr) - }).filter_map(|attr| { - match attr { - UbxAttribute::UbxEnum(ident) => Some(ident), - _ => None, - } - }).collect(); - - let membername = f.ident.as_ref().unwrap(); - let get_fname = format_ident!("get_{}", membername); - let set_fname = format_ident!("set_{}", membername); - let ftype = &f.ty; - if enum_attrs.len() > 0 { - let enumtype = &enum_attrs[0]; - let fromname = format_ident!("from_{}", match ftype { - syn::Type::Path(path) => { path.path.get_ident().unwrap() } - _ => { abort!(f, "Must specify a primitive int field type"); } - }); - let enum_cvt = format_ident!("to_{}", match ftype { - syn::Type::Path(path) => { path.path.get_ident().unwrap() } - _ => { abort!(f, "Must specify a primitive int field type"); } - }); - vec![Accessor { - getter: quote_spanned! { - f.span() => - fn #get_fname(&self) -> Option<#enumtype> { - let x = #ftype::from_le_bytes(self.data[#o..#o + std::mem::size_of::<#ftype>()].try_into().unwrap()); - #enumtype::#fromname(x) - } - }, - trait_getter: quote_spanned! { - f.span() => - fn #get_fname(&self) -> Option<#enumtype>; - }, - setter: quote_spanned! { - f.span() => - fn #set_fname(&mut self, value: #enumtype) { - let value = value.#enum_cvt(); - let bytes = value.to_le_bytes(); - self.data[#o..#o + std::mem::size_of::<#ftype>()].clone_from_slice(&bytes); - } - }, - trait_setter: quote_spanned! { - f.span() => - fn #set_fname(&mut self, value: #enumtype); - } - }] + if send || recv { + let mode = if send && recv { + HowCodeForPackage::SendRecv + } else if send { + HowCodeForPackage::SendOnly + } else if recv { + HowCodeForPackage::RecvOnly + } else { + unreachable!(); + }; + packets.push((s, mode)); } else { - vec![Accessor { - getter: quote_spanned! { - f.span() => - fn #get_fname(&self) -> #ftype { - #ftype::from_le_bytes(self.data[#o..#o + std::mem::size_of::<#ftype>()].try_into().unwrap()) - } - }, - trait_getter: quote_spanned! { - f.span() => - fn #get_fname(&self) -> #ftype; - }, - setter: quote_spanned! { - f.span() => - fn #set_fname(&mut self, value: #ftype) { - let bytes = value.to_le_bytes(); - self.data[#o..#o + std::mem::size_of::<#ftype>()].clone_from_slice(&bytes); - } - }, - trait_setter: quote_spanned! { - f.span() => - fn #set_fname(&mut self, value: #ftype); - } - }] - } - } - } - }).flatten().collect(); - //let (getters, trait_getters) = getters.iter().unzip(); - - let mut getters = vec!(); - let mut trait_getters = vec!(); - let mut setters = vec!(); - let mut trait_setters = vec!(); - for accessor in accessors.iter() { - getters.push(&accessor.getter); - trait_getters.push(&accessor.trait_getter); - setters.push(&accessor.setter); - trait_setters.push(&accessor.trait_setter); - } - - let getter_trait_name = format_ident!("{}Getter", struct_name); - let setter_trait_name = format_ident!("{}Setter", struct_name); - let ref_struct_name = format_ident!("{}Ref", struct_name); - let mut_ref_struct_name = format_ident!("{}MutRef", struct_name); - - quote! 
{ - pub trait #getter_trait_name { - #(#trait_getters)* - } - - pub trait #setter_trait_name { - #(#trait_setters)* - } - - struct #ref_struct_name<'a> { - data: &'a [u8; 0 #(+ #sizes)*], - } - - impl<'a> #ref_struct_name<'a> { - pub fn new(data: &'a [u8; 0 #(+ #sizes)*]) -> #ref_struct_name { - #ref_struct_name { - data: data, + ret.push_str(&s.into_token_stream().to_string()); } } - } - - impl<'a> #getter_trait_name for #ref_struct_name<'a> { - #(#getters)* - } - - struct #mut_ref_struct_name<'a> { - data: &'a mut [u8; 0 #(+ #sizes)*], - } - - impl<'a> #mut_ref_struct_name<'a> { - pub fn new(data: &'a mut [u8; 0 #(+ #sizes)*]) -> #mut_ref_struct_name { - #mut_ref_struct_name { - data: data, + syn::Item::Enum(e) => { + if e.attrs.iter().any(|x| x.path.is_ident("ubx_type")) { + let en = input::parse_ubx_enum_type(e)?; + let code = output::generate_code_for_ubx_enum(&en); + ubx_types.insert(en.name.to_string(), en); + ret.push_str(&code); + } else { + ret.push_str(&e.into_token_stream().to_string()); } } - } - - impl<'a> #getter_trait_name for #mut_ref_struct_name<'a> { - #(#getters)* - } - - impl<'a> #setter_trait_name for #mut_ref_struct_name<'a> { - #(#setters)* - } - - // TODO: Implement a more logical Debug trait - // TODO: Make the pub-ness optional - #[derive(Debug, PartialEq)] - pub struct #struct_name { - data: [u8; 0 #(+ #sizes)*], - } - - impl #struct_name { - pub fn new(data: [u8; 0 #(+ #sizes)*]) -> #struct_name { - #struct_name { - data: data, - } + _ => { + ret.push_str(&item.into_token_stream().to_string()); } } + } - impl #getter_trait_name for #struct_name { - #(#getters)* - } + let mut all_packs = Vec::with_capacity(packets.len()); - impl #setter_trait_name for #struct_name { - #(#setters)* - } + for (pack_desc, mode) in packets { + let pack = input::parse_packet_description(pack_desc)?; + let code = output::generate_code_for_packet(&pack, &ubx_types, mode); + all_packs.push((pack, mode)); + ret.push_str(&code); } -} -#[proc_macro_error] -#[proc_macro_attribute] -pub fn ubx_packet(attr: proc_macro::TokenStream, input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = parse_macro_input!(input as DeriveInput); - match input.data { - Data::Struct(ref data) => { - match data.fields { - Fields::Named(ref fields) => { - proc_macro::TokenStream::from(process_struct(&input.ident, fields)) - } - Fields::Unnamed(ref fields) => { - unimplemented!(); - } - Fields::Unit => { - unimplemented!(); - } - } - } - Data::Enum(_) | Data::Union(_) => unimplemented!() - } + let code = output::generate_code_for_packet_parser(&all_packs, &ubx_types); + ret.push_str(&code); + + Ok(ret) } diff --git a/ublox_derive/src/output.rs b/ublox_derive/src/output.rs new file mode 100644 index 00000000..e76e5f5d --- /dev/null +++ b/ublox_derive/src/output.rs @@ -0,0 +1,408 @@ +use crate::types::{HowCodeForPackage, PackDesc, UbxEnum, UbxEnumRestHandling, UbxTypeFromFn}; +use proc_macro2::{Span, TokenStream}; +use quote::{format_ident, quote, ToTokens}; +use std::{ + collections::{HashMap, HashSet}, + convert::TryFrom, +}; +use syn::Ident; + +pub fn generate_code_for_packet( + pack_descr: &PackDesc, + ubx_types: &HashMap, + mode: HowCodeForPackage, +) -> String { + let mut ret = String::with_capacity(10 * 1024); + let code = generate_types_for_packet(pack_descr); + ret.push_str(&code.to_string()); + + if mode == HowCodeForPackage::RecvOnly || mode == HowCodeForPackage::SendRecv { + let code = generate_recv_code_for_packet(pack_descr, ubx_types); + ret.push_str(&code.to_string()); + } + + if 
mode == HowCodeForPackage::SendOnly || mode == HowCodeForPackage::SendRecv { + let code_frags = generate_send_code_for_packet(pack_descr); + for code in code_frags { + ret.push_str(&code.to_string()); + } + } + + ret +} + +pub fn generate_code_for_ubx_enum(ubx_enum: &UbxEnum) -> String { + assert_eq!(ubx_enum.repr, { + let ty: syn::Type = syn::parse_quote! { u8 }; + ty + }); + let name = &ubx_enum.name; + let mut variants = ubx_enum.variants.clone(); + let attrs = &ubx_enum.attrs; + if let Some(UbxEnumRestHandling::Reserved) = ubx_enum.rest_handling { + let defined: HashSet = ubx_enum.variants.iter().map(|x| x.1).collect(); + for i in 0..=u8::max_value() { + if !defined.contains(&i) { + let name = format_ident!("Reserved{}", i); + variants.push((name, i)); + } + } + } + let repr_ty = &ubx_enum.repr; + let from_code = match ubx_enum.from_fn { + Some(UbxTypeFromFn::From) => { + assert_ne!( + Some(UbxEnumRestHandling::ErrorProne), + ubx_enum.rest_handling + ); + let mut match_branches = Vec::with_capacity(variants.len()); + for (id, val) in &variants { + match_branches.push(quote! { #val => #name :: #id }); + } + + quote! { + impl #name { + fn from(x: #repr_ty) -> Self { + match x { + #(#match_branches),* + } + } + } + } + } + Some(UbxTypeFromFn::FromUnchecked) => { + assert_ne!(Some(UbxEnumRestHandling::Reserved), ubx_enum.rest_handling); + let mut match_branches = Vec::with_capacity(variants.len()); + for (id, val) in &variants { + match_branches.push(quote! { #val => #name :: #id }); + } + + let mut values = Vec::with_capacity(variants.len()); + for (i, (_, val)) in variants.iter().enumerate() { + if i != 0 { + values.push(quote! { | #val }); + } else { + values.push(quote! { #val }); + } + } + + quote! { + impl #name { + fn from_unchecked(x: #repr_ty) -> Self { + match x { + #(#match_branches),*, + _ => unreachable!(), + } + } + fn is_valid_to_convert(x: #repr_ty) -> bool { + match x { + #(#values)* => true, + _ => false, + } + } + } + } + } + None => quote! {}, + }; + + let mut enum_variants = Vec::with_capacity(variants.len()); + for (id, val) in &variants { + enum_variants.push(quote! { #id = #val }); + } + + let code = quote! { + #(#attrs)* + pub enum #name { + #(#enum_variants),* + } + + #from_code + }; + code.to_string() +} + +pub fn generate_code_for_packet_parser( + all_packs: &[(PackDesc, HowCodeForPackage)], + ubx_types: &HashMap, +) -> String { + let mut packet_enum_variants = vec![]; + let mut matches = vec![]; + + for (pack_descr, mode) in all_packs { + if *mode == HowCodeForPackage::RecvOnly || *mode == HowCodeForPackage::SendRecv { + let ref_name = format_ident!("{}Ref", pack_descr.name); + let name = Ident::new(&pack_descr.name, Span::call_site()); + packet_enum_variants.push(quote! { + #name(#ref_name <'a>) + }); + let packet_name = &pack_descr.name; + let check_len = match pack_descr.packet_payload_size() { + Some(len) => quote! { + if #len != payload.len() { + return Some(Err(ParserError::InvalidPacketLen(#packet_name))); + } + }, + None => quote! {}, + }; + + let mut validators = vec![]; + for f in &pack_descr.fields { + if let Some(ref out_ty) = f.map.map_type { + if let Some(ubx_type) = ubx_types.get(&out_ty.into_token_stream().to_string()) { + if ubx_type.from_fn == Some(UbxTypeFromFn::FromUnchecked) { + let name = &ubx_type.name; + let name_str = name.to_string(); + + validators.push(quote! { + if ! #name::is_valid_to_convert(TODO) { + return Some(Err(ParserError::InvalidField(#name_str))); + } + }); + } + } + } + } + + matches.push(quote! 
{ + (#name::CLASS, #name::ID) => { + #check_len + #(#validators)* + Some(Ok(PacketRef::#name(#ref_name(payload)))) + } + }); + } + } + + let packet_enum_define = quote! { + #[doc = "All possible packets enum"] + pub enum PacketRef<'a> { + #(#packet_enum_variants),*, + Unknown(UnknownPacketRef<'a>) + } + }; + + let matcher_func = quote! { + fn match_packet(class: u8, msg_id: u8, payload: &[u8]) -> Option> { + match (class, msg_id) { + #(#matches)* + _ => Some(Ok(PacketRef::Unknown(UnknownPacketRef { + payload, + class, + msg_id + }))), + } + } + }; + let mut code = packet_enum_define.to_string(); + code.push_str(&matcher_func.into_token_stream().to_string()); + code +} + +fn generate_recv_code_for_packet( + pack_descr: &PackDesc, + ubx_types: &HashMap, +) -> TokenStream { + let ref_name = format_ident!("{}Ref", pack_descr.name); + let mut getters = Vec::with_capacity(pack_descr.fields.len()); + + let mut off = 0usize; + for f in &pack_descr.fields { + let ty = f.intermidiate_type(); + let get_name = f.intermidiate_field_name(); + + let size_bytes = match f.size_bytes { + Some(x) => x, + None => unimplemented!(), + }; + let mut bytes = Vec::with_capacity(size_bytes.get()); + for i in 0..size_bytes.get() { + let byte_off = off.checked_add(i).unwrap(); + bytes.push(quote! { self.0[#byte_off] }); + } + let raw_ty = &f.ty; + + let mut get_value_lines = if size_bytes.get() != 1 { + vec![quote! { <#raw_ty>::from_le_bytes([#(#bytes),*]) }] + } else { + vec![quote! { self.0[#off] }] + }; + + if let Some(ref out_ty) = f.map.map_type { + let use_from_unchecked = + if let Some(ubx_type) = ubx_types.get(&out_ty.into_token_stream().to_string()) { + ubx_type.from_fn == Some(UbxTypeFromFn::FromUnchecked) + } else { + false + }; + + let get_raw = &get_value_lines[0]; + let new_line = quote! { let val = #get_raw ; }; + get_value_lines[0] = new_line; + if use_from_unchecked { + get_value_lines.push(quote! { + <#out_ty>::from_unchecked(val) + }); + } else { + get_value_lines.push(quote! { + <#out_ty>::from(val) + }); + } + } + + if let Some(ref scale) = f.map.scale { + let last_i = get_value_lines.len() - 1; + let last_line = &get_value_lines[last_i]; + let new_last_line = quote! { let val = #last_line ; }; + get_value_lines[last_i] = new_last_line; + get_value_lines.push(quote! {val * #scale }); + } + let field_comment = &f.comment; + getters.push(quote! { + #[doc = #field_comment] + #[inline] + pub fn #get_name(&self) -> #ty { + #(#get_value_lines)* + } + }); + off += size_bytes.get(); + } + let struct_comment = &pack_descr.comment; + + quote! { + #[doc = #struct_comment] + #[doc = "It is just reference to internal parser's buffer"] + pub struct #ref_name<'a>(&'a [u8]); + impl<'a> #ref_name<'a> { + #(#getters)* + } + } +} + +fn generate_types_for_packet(pack_descr: &PackDesc) -> TokenStream { + let name = Ident::new(&pack_descr.name, Span::call_site()); + let class = pack_descr.header.class; + let id = pack_descr.header.id; + let fixed_payload_len = match pack_descr.header.fixed_payload_len { + Some(x) => quote! { Some(#x) }, + None => quote! { None }, + }; + let struct_comment = &pack_descr.comment; + quote! 
{ + + #[doc = #struct_comment] + pub struct #name; + impl UbxPacket for #name { + const CLASS: u8 = #class; + const ID: u8 = #id; + const FIXED_PAYLOAD_LENGTH: Option = #fixed_payload_len; + } + } +} + +fn generate_send_code_for_packet(pack_descr: &PackDesc) -> Vec { + let main_name = Ident::new(&pack_descr.name, Span::call_site()); + let payload_struct = format_ident!("{}Builder", pack_descr.name); + + let mut fields = Vec::with_capacity(pack_descr.fields.len()); + let mut pack_fields = Vec::with_capacity(pack_descr.fields.len()); + let mut write_fields = Vec::with_capacity(pack_descr.fields.len()); + let mut off = 6usize; + for f in &pack_descr.fields { + let ty = f.intermidiate_type(); + let name = f.intermidiate_field_name(); + let field_comment = &f.comment; + fields.push(quote! { + #[doc = #field_comment] + pub #name: #ty + }); + let size_bytes = match f.size_bytes { + Some(x) => x, + None => unimplemented!(), + }; + if f.has_intermidiate_type() { + pack_fields.push(quote! { + let bytes = self.#name.as_raw_value().to_le_bytes() + }); + } else { + pack_fields.push(quote! { + let bytes = self.#name.to_le_bytes() + }); + } + write_fields.push(pack_fields.last().unwrap().clone()); + write_fields.push(quote! { + out.write(&bytes)?; + checksum_calc.update(&bytes) + }); + for i in 0..size_bytes.get() { + let byte_off = off.checked_add(i).unwrap(); + pack_fields.push(quote! { + ret[#byte_off] = bytes[#i] + }); + } + + off += size_bytes.get(); + } + + let mut ret = Vec::with_capacity(4); + let struct_comment = &pack_descr.comment; + ret.push(quote! { + #[doc = #struct_comment] + #[doc = "Struct that used as \"builder\" for packet"] + pub struct #payload_struct { + #(#fields),* + } + }); + + if let Some(packet_payload_size) = pack_descr.packet_payload_size() { + let packet_size = packet_payload_size + 8; + let packet_payload_size_u16 = u16::try_from(packet_payload_size).unwrap(); + ret.push(quote! { + impl #payload_struct { + #[inline] + pub fn to_packet_bytes(self) -> [u8; #packet_size] { + let mut ret = [0u8; #packet_size]; + ret[0] = SYNC_CHAR_1; + ret[1] = SYNC_CHAR_2; + ret[2] = #main_name::CLASS; + ret[3] = #main_name::ID; + let pack_len_bytes = #packet_payload_size_u16 .to_le_bytes(); + ret[4] = pack_len_bytes[0]; + ret[5] = pack_len_bytes[1]; + #(#pack_fields);*; + let (ck_a, ck_b) = ubx_checksum(&ret[2..#packet_size-2]); + ret[#packet_size-2] = ck_a; + ret[#packet_size-1] = ck_b; + ret + } + } + impl From<#payload_struct> for [u8; #packet_size] { + fn from(x: #payload_struct) -> Self { + x.to_packet_bytes() + } + } + }); + + ret.push(quote! 
{ + impl UbxPacketCreator for #payload_struct { + #[inline] + fn create_packet(self, out: &mut dyn MemWriter) -> Result<(), NotEnoughMem> { + out.reserve_allocate(#packet_size)?; + let len_bytes = #packet_payload_size_u16 .to_le_bytes(); + let header = [SYNC_CHAR_1, SYNC_CHAR_2, #main_name::CLASS, #main_name::ID, len_bytes[0], len_bytes[1]]; + out.write(&header)?; + let mut checksum_calc = UbxChecksumCalc::default(); + checksum_calc.update(&header[2..]); + #(#write_fields);*; + let (ck_a, ck_b) = checksum_calc.result(); + out.write(&[ck_a, ck_b])?; + Ok(()) + } + } + }); + } else { + unimplemented!(); + } + + ret +} diff --git a/ublox_derive/src/types.rs b/ublox_derive/src/types.rs new file mode 100644 index 00000000..946ec57f --- /dev/null +++ b/ublox_derive/src/types.rs @@ -0,0 +1,97 @@ +use std::num::NonZeroUsize; +use syn::{Ident, Type}; + +pub struct PackDesc { + pub name: String, + pub header: PackHeader, + pub comment: String, + pub fields: Vec, +} + +impl PackDesc { + /// if packet has variable size, then `None` + pub fn packet_payload_size(&self) -> Option { + let mut ret: usize = 0; + for f in &self.fields { + let size = f.size_bytes?; + ret = ret + .checked_add(size.get()) + .expect("overflow during packet size calculation"); + } + Some(ret) + } +} + +pub struct PackHeader { + pub class: u8, + pub id: u8, + pub fixed_payload_len: Option, +} + +pub struct PackField { + pub name: Ident, + pub ty: Type, + pub map: PackFieldMap, + pub comment: String, + pub size_bytes: Option, +} + +impl PackField { + pub fn has_intermidiate_type(&self) -> bool { + self.map.map_type.is_some() + } + pub fn intermidiate_type(&self) -> &Type { + self.map.map_type.as_ref().unwrap_or(&self.ty) + } + pub fn intermidiate_field_name(&self) -> &Ident { + self.map.alias.as_ref().unwrap_or(&self.name) + } +} + +pub struct PackFieldMap { + pub map_type: Option, + pub scale: Option, + pub alias: Option, +} + +impl PackFieldMap { + pub fn is_none(&self) -> bool { + self.map_type.is_none() && self.scale.is_none() && self.alias.is_none() + } + pub fn none() -> Self { + Self { + map_type: None, + scale: None, + alias: None, + } + } +} + +#[derive(Clone, Copy, PartialEq)] +pub enum HowCodeForPackage { + SendOnly, + RecvOnly, + SendRecv, +} + +pub struct UbxEnum { + pub name: Ident, + pub repr: Type, + pub from_fn: Option, + pub to_fn: bool, + pub rest_handling: Option, + pub variants: Vec<(Ident, u8)>, + pub attrs: Vec, +} + +#[derive(Clone, Copy, PartialEq)] +pub enum UbxTypeFromFn { + From, + FromUnchecked, +} + +#[derive(Clone, Copy, PartialEq, Debug)] +pub enum UbxEnumRestHandling { + Reserved, + ErrorProne, +} diff --git a/ublox_derive/tests/test.rs b/ublox_derive/tests/test.rs index 3e39e423..eb665254 100644 --- a/ublox_derive/tests/test.rs +++ b/ublox_derive/tests/test.rs @@ -1,138 +1,264 @@ -use std::convert::TryInto; -use ublox_derive::{ubx_packet}; -use num_derive::{FromPrimitive, ToPrimitive}; -use num_traits::{FromPrimitive, ToPrimitive}; - -#[ubx_packet] -struct TestPacket { - field1: u32, - field2: u32, - field3: u8, - field4: u16, - field5: i32, -} +use proc_macro2::TokenStream; +use quote::{quote, ToTokens}; +use std::{ + io::{self, Write}, + process::{Command, Stdio}, + sync::Arc, +}; +use ublox_derive::{expand_ubx_packets_code_in_str, panic_on_parse_error}; #[test] -fn foo() { - assert_eq!(std::mem::size_of::(), 15); -} +fn test_nav_pos_llh() { + run_compare_test( + quote! 
{ + #[ubx_packet_recv] + #[ubx(class = 1, id = 2, fixed_payload_len = 12)] + #[doc = "Geodetic Position Solution"] + struct NavPosLLH { + itow: u32, + #[ubx(map_type = f64, scale = 1e-7, alias = lon_degrees)] + lon: i32, + #[doc = "Horizontal Accuracy Estimate"] + horizontal_accuracy: u32, + } + }, + quote! { + #[doc = "Geodetic Position Solution"] + pub struct NavPosLLH; -#[ubx_packet] -struct TestPacket2 { - field1: u32, - field2: u8, - field3: u32, -} + impl UbxPacket for NavPosLLH { + const CLASS: u8 = 1u8; + const ID: u8 = 2u8; + const FIXED_PAYLOAD_LENGTH: Option = Some(12u16); + } -#[no_mangle] -#[inline(never)] -fn helper(packet: &TestPacket2) -> u32 { - packet.get_field3() -} + #[doc = "Geodetic Position Solution"] + #[doc = "It is just reference to internal parser's buffer"] + pub struct NavPosLLHRef<'a>(&'a [u8]); + impl<'a> NavPosLLHRef<'a> { + #[doc = ""] + #[inline] + pub fn itow(&self) -> u32 { + ::from_le_bytes([ + self.0[0usize], + self.0[1usize], + self.0[2usize], + self.0[3usize]] + ) + } + #[doc = ""] + #[inline] + pub fn lon_degrees(&self) -> f64 { + let val = ::from_le_bytes([ + self.0[4usize], + self.0[5usize], + self.0[6usize], + self.0[7usize]] + ); + let val = ::from(val); + val * 1e-7 + } -#[test] -#[no_mangle] -#[inline(never)] -fn foo2() { - let data = [1, 0, 0, 0, 0, 2, 0, 0, 0]; - let packet = TestPacket2::new(data); - assert_eq!(helper(&packet), 2); - assert_eq!(packet.get_field2(), 0); -} - -#[derive(Debug, PartialEq, FromPrimitive, ToPrimitive)] -enum CfgPrtId { - Usb = 1, - Spi = 2, -} + #[doc = "Horizontal Accuracy Estimate"] + #[inline] + pub fn horizontal_accuracy(&self) -> u32 { + ::from_le_bytes([ + self.0[8usize], + self.0[9usize], + self.0[10usize], + self.0[11usize] + ]) + } + } -#[derive(Debug, PartialEq, FromPrimitive, ToPrimitive)] -enum CfgPrtCharLen { - FiveBit = 0, - SixBit = 1, - SevenBit = 2, - EightBit = 3, + #[doc = "All possible packets enum"] + pub enum PacketRef<'a> { + NavPosLLH(NavPosLLHRef<'a>), + Unknown(UnknownPacketRef<'a>), + } + fn match_packet(class: u8, msg_id: u8, payload: &[u8]) -> Option> { + match (class, msg_id) { + (NavPosLLH::CLASS, NavPosLLH::ID) => { + if 12usize != payload.len() { + return Some(Err(ParserError::InvalidPacketLen("NavPosLLH"))); + } + Some(Ok(PacketRef::NavPosLLH(NavPosLLHRef(payload)))) + } + _ => Some(Ok(PacketRef::Unknown(UnknownPacketRef { + payload, + class, + msg_id, + }))), + } + } + }, + Flags::Equal, + ); } -#[ubx_packet] -struct TestPacket3 { - #[ubx_enum(CfgPrtId)] - port_id: u8, - - rfu0: u8, // TODO: This should be hidden from the user - - #[ubx_bitfield(16)] - #[ubx_bitrange(0:0)] - tx_ready_en: bool, - - #[ubx_bitrange(1:1)] - tx_ready_polarity: bool, - - #[ubx_bitrange(6:2)] - tx_ready_pin: u8, +#[test] +fn test_nav_status() { + run_compare_test( + quote! 
{ + #[ubx_type] + #[ubx(from, to, rest_reserved)] + #[repr(u8)] + #[derive(Debug, Copy, Clone)] + enum GpsFix { + NoFix = 0, + DeadReckoningOnly = 1, + Fix2D = 2, + Fix3D = 3, + GPS = 4, + } - #[ubx_bitrange(15:7)] - tx_ready_threshold: u16, // TODO: u8 should throw an error + #[ubx_type] + #[ubx(from_unchecked, to, rest_error)] + #[derive(Copy, Clone)] + #[repr(u8)] + enum DGPSCorrectionStatus { + None = 0, + PrPrrCorrected = 1, + } - #[ubx_bitfield(32)] - #[ubx_bitrange(7:6)] - #[ubx_enum(CfgPrtCharLen)] - mode_charlen: u8, - #[ubx_bitrange(11:9)] - parity: u8, + #[ubx_packet_recv] + #[ubx(class = 1, id = 3, fixed_payload_len = 6)] + struct Status { + itow: u32, + #[ubx(map_type = GpsFix)] + gps_fix: u8, + #[ubx(map_type = DGPSCorrectionStatus)] + dgps_status: u8, + } + }, + quote! { + #[doc = ""] + pub struct Status; + impl UbxPacket for Status { + const CLASS: u8 = 1u8; + const ID: u8 = 3u8; + const FIXED_PAYLOAD_LENGTH: Option = Some(6u16); + } - #[ubx_bitrange(13:12)] - num_stop_bits: u8, + #[doc = ""] + #[doc = "It is just reference to internal parser's buffer"] + pub struct StatusRef<'a>(&'a [u8]); + impl<'a> StatusRef<'a> { + #[doc = ""] + #[inline] + pub fn itow(&self) -> u32 { + ::from_le_bytes([ + self.0[0usize], + self.0[1usize], + self.0[2usize], + self.0[3usize], + ]) + } + #[doc = ""] + #[inline] + pub fn gps_fix(&self) -> GpsFix { + let val = self.0[4usize]; + ::from(val) + } + #[doc = ""] + #[inline] + pub fn dgps_status(&self) -> DGPSCorrectionStatus { + let val = self.0[5usize]; + ::from_unchecked(val) + } + } + }, + Flags::Contains, + ); +} - baudrate: u32, +enum Flags { + Equal, + Contains, +} - #[ubx_bitfield(16)] // TODO: Bitfield without bitrange should error - #[ubx_bitrange(0:0)] - in_ubx: bool, +fn run_compare_test(input: TokenStream, expect_output: TokenStream, flags: Flags) { + let src = input.to_string(); + let res = match expand_ubx_packets_code_in_str(&src) { + Ok(x) => x, + Err(err) => panic_on_parse_error((std::path::Path::new(""), &src), &err), + }; + let output = String::from_utf8(rustfmt_cnt(res.into_bytes()).unwrap()).unwrap(); - #[ubx_bitrange(1:1)] - in_nmea: bool, + let expect_output = expect_output.into_token_stream().to_string(); + let expect_output = + String::from_utf8(rustfmt_cnt(expect_output.into_bytes()).unwrap()).unwrap(); - #[ubx_bitrange(2:2)] // TODO: Bitrange without bitfield should error - in_rtcm: bool, + match flags { + Flags::Equal => { + if expect_output != output { + for (e, g) in expect_output.lines().zip(output.lines()) { + if e != g { + println!("first mismatch:\ne {}\ng {}", e, g); + break; + } + } + panic!("Expect:\n{}\nGot:\n{}\n", expect_output, output); + } + } + Flags::Contains => { + if !output.contains(&expect_output) { + panic!( + "Output doesn't contain Expect\n +Expect:\n{}\nGot:\n{}\n", + expect_output, output + ); + } + } + } +} - #[ubx_bitfield(16)] - #[ubx_bitrange(0:0)] - out_ubx: bool, +fn rustfmt_cnt(source: Vec) -> io::Result> { + let rustfmt = which::which("rustfmt") + .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("{}", e)))?; - #[ubx_bitrange(1:1)] - out_nmea: bool, + let mut cmd = Command::new(&*rustfmt); - #[ubx_bitfield(16)] - #[ubx_bitrange(0:0)] - extended_tx_timeout: bool, + cmd.stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::null()); - rfu5: u16, -} + let mut child = cmd.spawn()?; + let mut child_stdin = child.stdin.take().unwrap(); + let mut child_stdout = child.stdout.take().unwrap(); + let src_len = source.len(); + let src = Arc::new(source); + // Write to stdin in a 
new thread, so that we can read from stdout on this + // thread. This keeps the child from blocking on writing to its stdout which + // might block us from writing to its stdin. + let stdin_handle = ::std::thread::spawn(move || { + let _ = child_stdin.write_all(src.as_slice()); + src + }); -#[test] -#[no_mangle] -#[inline(never)] -fn bitfields() { - let mut data = [0; std::mem::size_of::()]; - data[0] = 1; - data[2] = 0x5; - data[3] = 0x1; - data[16] = 0x1; - let mut packet = TestPacket3::new(data); - assert_eq!(packet.get_port_id(), Some(CfgPrtId::Usb)); - assert_eq!(packet.get_tx_ready_en(), true); - assert_eq!(packet.get_tx_ready_polarity(), false); - assert_eq!(packet.get_tx_ready_pin(), 1); - assert_eq!(packet.get_tx_ready_threshold(), 2); - assert_eq!(packet.get_extended_tx_timeout(), true); - assert_eq!(packet.get_mode_charlen(), Some(CfgPrtCharLen::FiveBit)); - - packet.set_baudrate(9600); - assert_eq!(packet.get_baudrate(), 9600); - - packet.set_mode_charlen(CfgPrtCharLen::SixBit); - packet.set_parity(2); - assert_eq!(packet.get_mode_charlen(), Some(CfgPrtCharLen::SixBit)); + let mut output = Vec::with_capacity(src_len); + io::copy(&mut child_stdout, &mut output)?; + let status = child.wait()?; + let src = stdin_handle.join().expect( + "The thread writing to rustfmt's stdin doesn't do \ + anything that could panic", + ); + let src = + Arc::try_unwrap(src).expect("Internal error: rusftfmt_cnt should only one Arc refernce"); + match status.code() { + Some(0) => Ok(output), + Some(2) => Err(io::Error::new( + io::ErrorKind::Other, + "Rustfmt parsing errors.".to_string(), + )), + Some(3) => { + println!("warning=Rustfmt could not format some lines."); + Ok(src) + } + _ => { + println!("warning=Internal rustfmt error"); + Ok(src) + } + } } diff --git a/ubx_protocol/Cargo.toml b/ubx_protocol/Cargo.toml new file mode 100644 index 00000000..d4994da5 --- /dev/null +++ b/ubx_protocol/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "ubx_protocol" +version = "0.0.1" +authors = ["Evgeniy A. 
Dushistov ", "Lane Kolbly "] +edition = "2018" +license = "MIT" +description = "A crate to parse/generate messages of UBX protocol (proprietary u-blox GPS devices protocol)" +build = "build.rs" + +[dependencies] +bitflags = "1.2.1" + +[dev-dependencies] +rand = "0.7.3" +cpu-time = "1.0.0" + +[build-dependencies] +ublox_derive = "0.0.1" +env_logger = "0.7" \ No newline at end of file diff --git a/ubx_protocol/build.rs b/ubx_protocol/build.rs new file mode 100644 index 00000000..30de7d9f --- /dev/null +++ b/ubx_protocol/build.rs @@ -0,0 +1,13 @@ +use std::{env, path::Path}; + +fn main() { + env_logger::init(); + + let out_dir = env::var("OUT_DIR").unwrap(); + let in_src = Path::new("src").join("packets.rs.in"); + let out_src = Path::new(&out_dir).join("packets.rs"); + + ublox_derive::expand_ubx_packets_code_in_file(&in_src, &out_src); + + println!("cargo:rerun-if-changed={}", in_src.display()); +} diff --git a/ubx_protocol/src/error.rs b/ubx_protocol/src/error.rs new file mode 100644 index 00000000..feb72310 --- /dev/null +++ b/ubx_protocol/src/error.rs @@ -0,0 +1,22 @@ +use std::fmt; + +#[derive(Debug, Clone, Copy)] +pub struct NotEnoughMem; + +impl fmt::Display for NotEnoughMem { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("Not enough memory error") + } +} + +impl std::error::Error for NotEnoughMem {} + +/// Error that possible during packets parsing +#[derive(Debug, PartialEq)] +pub enum ParserError { + InvalidChecksum, + InvalidField(&'static str), + InvalidPacketLen(&'static str), +} + +//impl std::error::Error for ParserError {} diff --git a/ubx_protocol/src/lib.rs b/ubx_protocol/src/lib.rs new file mode 100644 index 00000000..bae41a2a --- /dev/null +++ b/ubx_protocol/src/lib.rs @@ -0,0 +1,86 @@ +mod error; +mod parser; + +pub use error::{NotEnoughMem, ParserError}; +pub use parser::{Parser, ParserIter}; + +/// Information about concrete UBX protocol's packet +pub trait UbxPacket { + const CLASS: u8; + const ID: u8; + const FIXED_PAYLOAD_LENGTH: Option; +} + +const SYNC_CHAR_1: u8 = 0xb5; +const SYNC_CHAR_2: u8 = 0x62; + +/// The checksum is calculated over the packet, starting and including the CLASS field, +/// up until, but excluding, the Checksum Field +/// So slice should starts with class id +/// Return ck_a and ck_b +fn ubx_checksum(data: &[u8]) -> (u8, u8) { + let mut ck_a = 0_u8; + let mut ck_b = 0_u8; + for byte in data { + ck_a = ck_a.overflowing_add(*byte).0; + ck_b = ck_b.overflowing_add(ck_a).0; + } + (ck_a, ck_b) +} + +/// For ubx checksum on the fly +#[derive(Default)] +struct UbxChecksumCalc { + ck_a: u8, + ck_b: u8, +} + +impl UbxChecksumCalc { + fn update(&mut self, chunk: &[u8]) { + for byte in chunk { + self.ck_a = self.ck_a.overflowing_add(*byte).0; + self.ck_b = self.ck_b.overflowing_add(self.ck_a).0; + } + } + fn result(self) -> (u8, u8) { + (self.ck_a, self.ck_b) + } +} + +/// Abstraction for buffer creation/reallocation +/// to storing packet +pub trait MemWriter { + /// make sure that we have at least `len` bytes for writing + fn reserve_allocate(&mut self, len: usize) -> Result<(), NotEnoughMem>; + fn write(&mut self, buf: &[u8]) -> Result<(), NotEnoughMem>; +} + +impl MemWriter for Vec { + fn reserve_allocate(&mut self, len: usize) -> Result<(), NotEnoughMem> { + self.reserve(len); + Ok(()) + } + fn write(&mut self, buf: &[u8]) -> Result<(), NotEnoughMem> { + let ret = ::write(self, buf).map_err(|_| NotEnoughMem)?; + if ret == buf.len() { + Ok(()) + } else { + Err(NotEnoughMem) + } + } +} + +pub trait UbxPacketCreator { + /// 
Create packet and store bytes sequence to somewhere using `out` + fn create_packet(self, out: &mut dyn MemWriter) -> Result<(), NotEnoughMem>; +} + +/// Packet not supported yet by this crate +#[derive(Debug)] +pub struct UnknownPacketRef<'a> { + pub payload: &'a [u8], + pub class: u8, + pub msg_id: u8, +} + +include!(concat!(env!("OUT_DIR"), "/packets.rs")); diff --git a/ubx_protocol/src/packets.rs.in b/ubx_protocol/src/packets.rs.in new file mode 100644 index 00000000..d1f1408b --- /dev/null +++ b/ubx_protocol/src/packets.rs.in @@ -0,0 +1,321 @@ +/// Geodetic Position Solution +#[ubx_packet_recv] +#[ubx(class = 1, id = 2, fixed_payload_len = 28)] +struct NavPosLLH { + /// GPS Millisecond Time of Week + itow: u32, + /// Longitude + #[ubx(map_type = f64, scale = 1e-7, alias = lon_degrees)] + lon: i32, + /// Latitude + #[ubx(map_type = f64, scale = 1e-7, alias = lat_degrees)] + lat: i32, + /// Height above Ellipsoid + #[ubx(map_type = f64, scale = 1e-3)] + height_meters: i32, + /// Height above mean sea level + #[ubx(map_type = f64, scale = 1e-3)] + height_msl: i32, + /// Horizontal Accuracy Estimate + #[ubx(map_type = f64, scale = 1e-3)] + h_ack: u32, + /// Vertical Accuracy Estimate + #[ubx(map_type = f64, scale = 1e-3)] + v_acc: u32, +} + +/// GPS fix Type +#[ubx_type] +#[ubx(from, rest_reserved)] +#[repr(u8)] +#[derive(Debug, Copy, Clone)] +enum GpsFix { + NoFix = 0, + DeadReckoningOnly = 1, + Fix2D = 2, + Fix3D = 3, + GPSPlusDeadReckoning = 4, + TimeOnlyFix = 5, +} + +bitflags::bitflags! { + /// Navigation Status Flags + pub struct NavStatusFlags: u8 { + /// position and velocity valid and within DOP and ACC Masks, + const GPS_FIX_OK = 1; + /// DGPS used + const DIFF_SOLN = 2; + /// Week Number valid + const WKN_SET = 4; + /// Time of Week valid + const TOW_SET = 8; + } +} + +impl NavStatusFlags { + const fn from(x: u8) -> Self { + NavStatusFlags::from_bits_truncate(x) + } +} + +/// Fix Status Information +#[repr(transparent)] +#[derive(Copy, Clone)] +pub struct FixStatusInfo(u8); + +impl FixStatusInfo { + pub const fn has_pr_prr_correction(self) -> bool { + (self.0 & 1) == 1 + } + pub fn map_matching(self) -> MapMatchingStatus { + let bits = (self.0 >> 6) & 3; + match bits { + 0 => MapMatchingStatus::None, + 1 => MapMatchingStatus::Valid, + 2 => MapMatchingStatus::Used, + 3 => MapMatchingStatus::Dr, + _ => unreachable!(), + } + } + pub const fn from(x: u8) -> Self { + Self(x) + } +} + +#[derive(Copy, Clone, Debug)] +pub enum MapMatchingStatus { + None = 0, + /// valid, i.e. 
map matching data was received, but was too old + Valid = 1, + /// used, map matching data was applied + Used = 2, + /// map matching was the reason to enable the dead reckoning + /// gpsFix type instead of publishing no fix + Dr = 3, +} + +/// Further information about navigation output +/// Only for FW version >= 7.01; undefined otherwise +#[ubx_type] +#[ubx(from, rest_reserved)] +#[repr(u8)] +#[derive(Debug, Copy, Clone)] +enum NavStatusFlags2 { + Acquisition = 0, + Tracking = 1, + PowerOptimizedTracking = 2, + Inactive = 3, +} + +/// Receiver Navigation Status +#[ubx_packet_recv] +#[ubx(class = 1, id = 3, fixed_payload_len = 16)] +struct NavStatus { + /// GPS Millisecond Time of Week + itow: u32, + /// GPS fix Type, this value does not qualify a fix as + /// valid and within the limits + #[ubx(map_type = GpsFix)] + gps_fix: u8, + /// Navigation Status Flags + #[ubx(map_type = NavStatusFlags)] + flags: u8, + /// Fix Status Information + #[ubx(map_type = FixStatusInfo)] + fix_stat: u8, + /// further information about navigation output + #[ubx(map_type = NavStatusFlags2)] + flags2: u8, + /// Time to first fix (millisecond time tag) + ttff: u32, + /// Milliseconds since Startup / Reset + msss: u32, +} + +bitflags::bitflags! { + /// Validity Flags of `NavTimeUTC` + pub struct NavTimeUtcFlags: u8 { + /// Valid Time of Week + const VALID_TOW = 1; + /// Valid Week Number + const VALID_WKN = 2; + /// Valid UTC (Leap Seconds already known) + const VALID_UTC = 4; + const RESERVED3 = (1u8 << 3); + const RESERVED4 = (1u8 << 4); + const RESERVED5 = (1u8 << 5); + const RESERVED6 = (1u8 << 6); + const RESERVED7 = (1u8 << 7); + } +} + +impl NavTimeUtcFlags { + const fn from(x: u8) -> Self { + Self::from_bits_truncate(x) + } +} + +/// UTC Time Solution +#[ubx_packet_recv] +#[ubx(class = 1, id = 0x21, fixed_payload_len = 20)] +struct NavTimeUTC { + /// GPS Millisecond Time of Week + itow: u32, + time_accuracy_estimate_ns: u32, + /// Nanoseconds of second, range -1e9 .. 1e9 + nanos: i32, + /// Year, range 1999..2099 + year: u16, + /// Month, range 1..12 + month: u8, + /// Day of Month, range 1..31 + day: u8, + /// Hour of Day, range 0..23 + hour: u8, + /// Minute of Hour, range 0..59 + min: u8, + /// Seconds of Minute, range 0..59 + sec: u8, + /// Validity Flags + #[ubx(map_type = NavTimeUtcFlags)] + valid: u8, +} + +/// Velocity Solution in NED +#[ubx_packet_recv] +#[ubx(class = 1, id = 0x12, fixed_payload_len = 36)] +struct NavVelNed { + /// GPS Millisecond Time of Week + itow: u32, + /// north velocity (m/s) + #[ubx(map_type = f64, scale = 1e-2)] + vel_north: i32, + /// east velocity (m/s) + #[ubx(map_type = f64, scale = 1e-2)] + vel_east: i32, + /// down velocity (m/s) + #[ubx(map_type = f64, scale = 1e-2)] + vel_down: i32, + /// Speed 3-D (m/s) + #[ubx(map_type = f64, scale = 1e-2)] + speed_3d: u32, + /// Ground speed (m/s) + #[ubx(map_type = f64, scale = 1e-2)] + ground_speed: u32, + /// Heading of motion 2-D (degrees) + #[ubx(map_type = f64, scale = 1e-5, alias = heading_degrees)] + heading: i32, + /// Speed Accuracy Estimate (m/s) + #[ubx(map_type = f64, scale = 1e-2)] + speed_accuracy_estimate: u32, + /// Course / Heading Accuracy Estimate (degrees) + #[ubx(map_type = f64, scale = 1e-5)] + course_heading_accuracy_estimate: u32, +} + +bitflags::bitflags! 
{ + /// Battery backed RAM sections to clear + pub struct NavBbrMask: u16 { + const EPHEMERIS = 1; + const ALMANACH = 2; + const HEALTH = 4; + const KLOBUCHARD = 8; + const POSITION = 16; + const CLOCK_DRIFT = (1 << 5); + const OSCILATOR_PARAMETER = (1 << 6); + const UTC_CORRECTION_PARAMETERS = (1 << 7); + const RTC = (1 << 8); + const SFDR_PARAMETERS = (1 << 11); + const SFDR_VEHICLE_MONITORING_PARAMETERS = (1 << 12); + const TCT_PARAMETERS = (1 << 13); + const AUTONOMOUS_ORBIT_PARAMETERS = (1 << 15); + } +} + +impl NavBbrMask { + fn as_raw_value(self) -> u16 { + self.bits() + } +} + +/// Reset Type +#[repr(u8)] +#[derive(Clone, Copy, Debug)] +pub enum ResetMode { + /// Hardware reset (Watchdog) immediately + HardwareResetImmediately = 0, + ControlledSoftwareReset = 0x1, + ControlledSoftwareResetGpsOnly = 0x02, + /// Hardware reset (Watchdog) after shutdown (>=FW6.0) + HardwareResetAfterShutdown = 0x04, + ControlledGpsStop = 0x08, + ControlledGpsStart = 0x09, +} + +impl ResetMode { + fn as_raw_value(self) -> u8 { + self as u8 + } +} + +/// Reset Receiver / Clear Backup Data Structures +#[ubx_packet_send] +#[ubx(class = 6, id = 4, fixed_payload_len = 4)] +struct CfgRst { + /// Battery backed RAM sections to clear + #[ubx(map_type = NavBbrMask)] + nav_bbr_mask: u16, + /// Reset Type + #[ubx(map_type = ResetMode)] + reset_mode: u8, + reserved1: u8, +} + +/// Alignment to reference time +#[repr(u16)] +#[derive(Clone, Copy, Debug)] +pub enum AlignmentToReferenceTime { + Utc = 0, + Gps = 1, +} + +impl AlignmentToReferenceTime { + fn as_raw_value(self) -> u16 { + self as u16 + } +} + +/// Navigation/Measurement Rate Settings +#[ubx_packet_send] +#[ubx(class = 6, id = 8, fixed_payload_len = 6)] +struct CfgRate { + /// Measurement Rate, GPS measurements are taken every `measure_rate_ms` milliseconds + measure_rate_ms: u16, + /// Navigation Rate, in number of measurement cycles. + /// On u-blox 5 and u-blox 6, this parameter cannot be changed, and is always equal to 1. + nav_rate: u16, + /// Alignment to reference time + #[ubx(map_type = AlignmentToReferenceTime)] + time_ref: u16, +} + +/// Set Message Rate +#[ubx_packet_send] +#[ubx(class = 6, id = 1, fixed_payload_len = 3)] +struct CfgMsg3 { + msg_class: u8, + msg_id: u8, + /// Send rate on the current target + rate: u8, +} + +/// Messages in this class are sent as a result of a CFG message being +/// received, decoded and processed by the receiver. 
+#[ubx_packet_recv] +#[ubx(class = 5, id = 1, fixed_payload_len = 2)] +struct AckAck { + /// Class ID of the Acknowledged Message + class: u8, + /// Message ID of the Acknowledged Message + msg_id: u8, +} diff --git a/ubx_protocol/src/parser.rs b/ubx_protocol/src/parser.rs new file mode 100644 index 00000000..d0f525e3 --- /dev/null +++ b/ubx_protocol/src/parser.rs @@ -0,0 +1,115 @@ +use crate::{error::ParserError, match_packet, ubx_checksum, PacketRef, SYNC_CHAR_1, SYNC_CHAR_2}; + +/// Some big number, TODO: need validation against all known packets +const MAX_PACK_LEN: usize = 1022; + +/// Streaming parser for the UBX protocol, with an internal buffer +#[derive(Default)] +pub struct Parser { + buf: Vec<u8>, +} + +impl Parser { + pub fn is_buffer_empty(&self) -> bool { + self.buf.is_empty() + } + pub fn buffer_len(&self) -> usize { + self.buf.len() + } + pub fn consume<'a, 'b, 'c>(&'a mut self, new_data: &'b [u8]) -> ParserIter<'c> + where + 'a: 'c, + { + match self + .buf + .iter() + .chain(new_data.iter()) + .position(|x| *x == SYNC_CHAR_1) + { + Some(mut off) => { + if off >= self.buf.len() { + off -= self.buf.len(); + self.buf.clear(); + self.buf.extend_from_slice(&new_data[off..]); + off = 0; + } else { + self.buf.extend_from_slice(new_data); + } + ParserIter { + buf: &mut self.buf, + off, + } + } + None => { + self.buf.clear(); + ParserIter { + buf: &mut self.buf, + off: 0, + } + } + } + } +} + +/// Iterator over data stored in `Parser` buffer +pub struct ParserIter<'a> { + buf: &'a mut Vec<u8>, + off: usize, +} + +impl<'a> Drop for ParserIter<'a> { + fn drop(&mut self) { + if self.off <= self.buf.len() { + self.buf.drain(0..self.off); + } + } +} + +impl<'a> ParserIter<'a> { + /// Analog of `core::iter::Iterator::next`, should be switched to + /// trait implementation after merge of https://github.com/rust-lang/rust/issues/44265 + pub fn next(&mut self) -> Option<Result<PacketRef, ParserError>> { + while self.off < self.buf.len() { + let data = &self.buf[self.off..]; + let pos = match data.iter().position(|x| *x == SYNC_CHAR_1) { + Some(x) => x, + None => return None, + }; + + if (pos + 1) >= data.len() { + return None; + } + if data[pos + 1] != SYNC_CHAR_2 { + self.off += pos + 1; + continue; + } + + if (pos + 5) >= data.len() { + return None; + } + + let pack_len: usize = u16::from_le_bytes([data[pos + 4], data[pos + 5]]).into(); + if pack_len > MAX_PACK_LEN { + self.off += pos + 1; + continue; + } + if (pos + pack_len + 6 + 2 - 1) >= data.len() { + return None; + } + let (ck_a, ck_b) = ubx_checksum(&data[(pos + 2)..(pos + pack_len + 4 + 2)]); + + let (expect_ck_a, expect_ck_b) = + (data[pos + 6 + pack_len], data[pos + 6 + pack_len + 1]); + if (ck_a, ck_b) != (expect_ck_a, expect_ck_b) { + self.off += pos + 2; + return Some(Err(ParserError::InvalidChecksum)); + } + let msg_data = &data[(pos + 6)..(pos + 6 + pack_len)]; + let class_id = data[pos + 2]; + let msg_id = data[pos + 3]; + self.off += pos + 6 + pack_len + 2; + return match_packet(class_id, msg_id, msg_data); + } + None + } +} diff --git a/ubx_protocol/tests/generator_test.rs b/ubx_protocol/tests/generator_test.rs new file mode 100644 index 00000000..e9a03504 --- /dev/null +++ b/ubx_protocol/tests/generator_test.rs @@ -0,0 +1,24 @@ +use ubx_protocol::{CfgMsg3Builder, NavPosLLH, NavStatus, UbxPacket}; + +#[test] +fn test_cfg_msg_simple() { + assert_eq!( + [0xb5, 0x62, 0x06, 0x01, 0x03, 0x00, 0x01, 0x02, 0x01, 0x0E, 0x47], + CfgMsg3Builder { + msg_class: NavPosLLH::CLASS, + msg_id: NavPosLLH::ID, + rate: 1, + } + .to_packet_bytes() + ); + + assert_eq!( + [0xb5, 0x62, 0x06, 0x01, 
0x03, 0x00, 0x01, 0x03, 0x01, 0x0F, 0x49], + CfgMsg3Builder { + msg_class: NavStatus::CLASS, + msg_id: NavStatus::ID, + rate: 1, + } + .to_packet_bytes() + ); +} diff --git a/ubx_protocol/tests/parser_tests.rs b/ubx_protocol/tests/parser_tests.rs new file mode 100644 index 00000000..5626f593 --- /dev/null +++ b/ubx_protocol/tests/parser_tests.rs @@ -0,0 +1,154 @@ +use cpu_time::ProcessTime; +use rand::{thread_rng, Rng}; +use std::{env, fs, path::Path}; +use ubx_protocol::{PacketRef, Parser, ParserError, ParserIter}; + +#[test] +fn test_ack_ack_simple() { + type ParseResult = Result<(u8, u8), ParserError>; + fn extract_ack_ack(mut it: ParserIter) -> Vec<ParseResult> { + let mut ret = vec![]; + while let Some(pack) = it.next() { + match pack { + Ok(PacketRef::AckAck(pack)) => { + ret.push(Ok((pack.class(), pack.msg_id()))); + } + Err(err) => ret.push(Err(err)), + _ => assert!(false), + } + } + ret + } + macro_rules! my_vec { + ($($x:expr),*) => {{ + let v: Vec<ParseResult> = vec![$($x),*]; + v + }} + } + + let mut parser = Parser::default(); + assert!(parser.is_buffer_empty()); + assert_eq!( + my_vec![], + extract_ack_ack(parser.consume(&[])), + "empty buffer parsing" + ); + assert!(parser.is_buffer_empty()); + + let full_pack = [0xb5, 0x62, 0x5, 0x1, 0x2, 0x0, 0x6, 0x1, 0xf, 0x38]; + assert_eq!( + my_vec![Ok((6, 1))], + extract_ack_ack(parser.consume(&full_pack)), + "full packet parsing" + ); + assert!(parser.is_buffer_empty()); + + let mut bad_pack = full_pack.clone(); + bad_pack[bad_pack.len() - 3] = 5; + assert_eq!( + my_vec![Err(ParserError::InvalidChecksum)], + extract_ack_ack(parser.consume(&bad_pack)), + "invalid checksum" + ); + assert_eq!(bad_pack.len() - 2, parser.buffer_len()); + + let mut two_packs = full_pack.to_vec(); + two_packs.extend_from_slice(&full_pack); + assert_eq!( + my_vec![Ok((6, 1)), Ok((6, 1))], + extract_ack_ack(parser.consume(&two_packs)), + "two packets" + ); + assert!(parser.is_buffer_empty()); + + assert_eq!( + my_vec![], + extract_ack_ack(parser.consume(&full_pack[0..5])), + "part of packet" + ); + assert_eq!(5, parser.buffer_len()); + let mut rest_and_next = (&full_pack[5..]).to_vec(); + rest_and_next.extend_from_slice(&full_pack); + assert_eq!( + my_vec![Ok((6, 1)), Ok((6, 1))], + extract_ack_ack(parser.consume(&rest_and_next)), + "rest of packet plus next packet" + ); + assert!(parser.is_buffer_empty()); + + let mut garbage_before = vec![0x00, 0x06, 0x01, 0x0f, 0x38]; + garbage_before.extend_from_slice(&full_pack); + assert_eq!( + my_vec![Ok((6, 1))], + extract_ack_ack(parser.consume(&garbage_before)), + "garbage before1" + ); + + let mut garbage_before = vec![0xb5, 0xb5, 0x62, 0x62, 0x38]; + garbage_before.extend_from_slice(&full_pack); + assert_eq!( + my_vec![Ok((6, 1))], + extract_ack_ack(parser.consume(&garbage_before)), + "garbage before2" + ); +} +#[test] +#[ignore] +fn test_parse_big_file() { + let ubx_big_log_path = env::var("UBX_BIG_LOG_PATH").unwrap(); + let ubx_big_log_path = Path::new(&ubx_big_log_path); + + let biglog = fs::read(ubx_big_log_path).unwrap(); + const MAX_SIZE: usize = 100; + let mut read_sizes = Vec::with_capacity(biglog.len() / MAX_SIZE / 2); + let mut rng = thread_rng(); + let mut i = 0; + while i < biglog.len() { + let chunk: usize = rng.gen_range(1, MAX_SIZE); + let chunk = (biglog.len() - i).min(chunk); + read_sizes.push(chunk); + i += chunk; + } + + let mut wrong_chksum = 0usize; + let mut other_errors = 0usize; + let mut nav_pos_llh = 0usize; + let mut nav_stat = 0usize; + let mut ack_ack = 0usize; + let mut unknown = 0usize; + + let mut log = biglog.as_slice(); + let 
mut parser = Parser::default(); + + let start = ProcessTime::now(); + for chunk_size in &read_sizes { + let (buf, rest) = log.split_at(*chunk_size); + log = rest; + let mut it = parser.consume(buf); + while let Some(pack) = it.next() { + match pack { + Ok(pack) => match pack { + PacketRef::AckAck(_) => ack_ack += 1, + PacketRef::NavPosLLH(_) => nav_pos_llh += 1, + PacketRef::NavStatus(_) => nav_stat += 1, + _ => unknown += 1, + }, + Err(ParserError::InvalidChecksum) => wrong_chksum += 1, + Err(_) => other_errors += 1, + } + } + } + let cpu_time = start.elapsed(); + println!( + "parse time of {}: {:?}", + ubx_big_log_path.display(), + cpu_time + ); + assert_eq!(0, wrong_chksum); + assert_eq!(0, other_errors); + assert_eq!(38291, nav_pos_llh); + assert_eq!(38291, nav_stat); + assert_eq!(120723, unknown); + assert_eq!(1, ack_ack); +}
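
Usage sketch (not part of the diff itself): the tests above exercise the sending and receiving halves of the new `ubx_protocol` crate separately; a minimal sketch of how they combine is below, assuming only what the change itself provides. `CfgMsg3Builder`, `to_packet_bytes`, `Parser::consume`, `PacketRef` and the generated `itow`/`lon_degrees` accessors come from the code and tests in this diff; the surrounding `poll_device` function, its `incoming` argument and the `println!` reporting are illustrative assumptions, and actually writing the request bytes to a serial port is out of scope here.

    use ubx_protocol::{CfgMsg3Builder, NavPosLLH, PacketRef, Parser, UbxPacket};

    // Hypothetical helper: build a request packet and digest whatever bytes the device sent back.
    fn poll_device(incoming: &[u8]) {
        // Ask the receiver to emit NAV-POSLLH once per navigation solution
        // (the caller is assumed to write these bytes out to the device).
        let request = CfgMsg3Builder {
            msg_class: NavPosLLH::CLASS,
            msg_id: NavPosLLH::ID,
            rate: 1,
        }
        .to_packet_bytes();
        let _ = request;

        // Feed received bytes into the streaming parser; it buffers partial
        // packets and yields complete, checksum-verified ones.
        let mut parser = Parser::default();
        let mut it = parser.consume(incoming);
        while let Some(pack) = it.next() {
            match pack {
                Ok(PacketRef::NavPosLLH(pos)) => {
                    println!("itow={} lon={}", pos.itow(), pos.lon_degrees());
                }
                Ok(_) => {}
                Err(err) => println!("parse error: {:?}", err),
            }
        }
    }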
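A second self-contained sketch, this time of the checksum rule documented in `ubx_protocol/src/lib.rs` (an 8-bit Fletcher sum over everything from CLASS up to, but excluding, the checksum bytes). The free function `fletcher8` below merely mirrors the private `ubx_checksum` for illustration and is checked against the ACK-ACK frame used as a test vector in `parser_tests.rs`.

    // Mirrors the private ubx_checksum() in lib.rs, for illustration only.
    fn fletcher8(data: &[u8]) -> (u8, u8) {
        let (mut ck_a, mut ck_b) = (0u8, 0u8);
        for byte in data {
            ck_a = ck_a.wrapping_add(*byte);
            ck_b = ck_b.wrapping_add(ck_a);
        }
        (ck_a, ck_b)
    }

    #[test]
    fn ack_ack_checksum_matches_test_vector() {
        // Full ACK-ACK frame from parser_tests.rs:
        // sync, class=5, id=1, len=2, payload=[6, 1], checksum=[0x0f, 0x38]
        let frame = [0xb5, 0x62, 0x5, 0x1, 0x2, 0x0, 0x6, 0x1, 0xf, 0x38];
        // The checksum covers CLASS through the end of the payload.
        assert_eq!((0x0f, 0x38), fletcher8(&frame[2..8]));
    }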