time parsing
commit 733d2d61a3
parent 4e5898c0c0
@@ -1,4 +1,4 @@
-use std::{convert::TryFrom, env::args, fs::File, io};
+use std::{collections::HashMap, convert::TryFrom, env::args, fs::File, io};
 
 use eyre::Context;
 use log::{error, info, trace};
@@ -7,7 +7,7 @@ use regex::Regex;
 use ros_message::{MessagePath, Msg};
 use rsbag::{
     chunk::ChunkHeader,
-    index::{BagIndex, ConnInfo},
+    index::{BagIndex, ConnInfo, IndexData},
     reader::{BagReader, MmapReader},
     Result,
 };
@@ -57,6 +57,23 @@ fn read_chunk<R: BagReader>(bag_reader: &mut R, pos: u64) -> Result<Vec<u8>> {
     Ok(data)
 }
 
+#[derive(Default, Debug)]
+struct BagInfo {
+    total_uncompressed: u64,
+    per_connection: HashMap<u32, u64>,
+}
+
+impl BagInfo {
+    fn combine(mut self, other: BagInfo) -> BagInfo {
+        self.total_uncompressed += other.total_uncompressed;
+        for (conn, count) in other.per_connection {
+            *self.per_connection.entry(conn).or_insert(0) += count;
+        }
+        self
+    }
+
+}
+
 fn main() -> Result<()> {
     color_eyre::install()?;
     env_logger::init();
@@ -91,28 +108,30 @@ fn main() -> Result<()> {
         }
     }
 
-    let total_size = index
+    let data = index
         .chunks
         .par_iter()
-        .try_fold(
-            || 0u64,
-            |total_size, chunk| -> rsbag::Result<_> {
-                let mut reader = bag_reader.clone();
-                let chunk_header = ChunkHeader::read(&mut reader, chunk.pos)?;
-                reader.skip_data()?;
+        .try_fold(BagInfo::default, |mut data, chunk| -> rsbag::Result<_> {
+            let mut reader = bag_reader.clone();
+            let chunk_header = ChunkHeader::read(&mut reader, chunk.pos)?;
+            data.total_uncompressed += chunk_header.uncompressed_size as u64;
+            reader.skip_data()?;
 
-                // let data = read_chunk(&mut bag_reader.clone(), chunk.pos)?;
-                // chunks.push(data);
-                Ok(total_size + chunk_header.uncompressed_size as u64)
-            },
-        )
-        .reduce(
-            // || Ok(Vec::new()),
-            || Ok(0),
-            |a, b| a.and_then(|a| b.map(|b| a + b)),
-        )
+            for _ in &chunk.connections {
+                let index = IndexData::read(&mut reader)?;
+                *data.per_connection.entry(index.conn_id).or_insert(0) +=
+                    index.entries.len() as u64;
+            }
+
+            // let data = read_chunk(&mut bag_reader.clone(), chunk.pos)?;
+            // chunks.push(data);
+            Ok(data)
+        })
+        .try_reduce(BagInfo::default, |a, b| Ok(a.combine(b)))
+        .unwrap();
+
+    info!("bag data: {:#?}", data);
 
     // let total_size = index
     // .chunks
     // .par_iter()
@@ -129,7 +148,5 @@ fn main() -> Result<()> {
     // |a, b| a.and_then(|a| b.map(|b| a + b)),
     // )?;
 
-    info!("total uncompressed size: {}", total_size);
-
     Ok(())
 }
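Note on the new accumulation: BagInfo::combine is a plain merge of two partial results, the total adds up and the per-connection message counts are summed key by key, which is what lets the rayon try_fold/try_reduce pair build a BagInfo per chunk in parallel and merge the pieces in any order. The standalone sketch below restates that merge outside the bag-reading code, with made-up counts, purely to make the semantics visible:

use std::collections::HashMap;

#[derive(Default, Debug)]
struct BagInfo {
    total_uncompressed: u64,
    per_connection: HashMap<u32, u64>,
}

impl BagInfo {
    fn combine(mut self, other: BagInfo) -> BagInfo {
        // Totals add; per-connection counts merge entry by entry.
        self.total_uncompressed += other.total_uncompressed;
        for (conn, count) in other.per_connection {
            *self.per_connection.entry(conn).or_insert(0) += count;
        }
        self
    }
}

fn main() {
    // Hypothetical partial results, as two parallel fold tasks might produce them.
    let a = BagInfo {
        total_uncompressed: 1024,
        per_connection: HashMap::from([(0, 10), (1, 3)]),
    };
    let b = BagInfo {
        total_uncompressed: 2048,
        per_connection: HashMap::from([(1, 7), (2, 5)]),
    };
    let merged = a.combine(b);
    assert_eq!(merged.total_uncompressed, 3072);
    assert_eq!(merged.per_connection[&1], 10);
}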
src/index.rs
@@ -6,7 +6,7 @@ use nom::{number::streaming::le_u32, sequence::tuple, Parser};
 
 use crate::{
     error,
-    parse::{self, Header, Op},
+    parse::{self, header::Time, Header, Op},
     reader::BagReader,
     Result,
 };
@@ -41,8 +41,8 @@ impl ConnInfo {
 #[derive(Debug)]
 pub struct ChunkInfo {
     pub pos: u64,
-    pub start_time: u64, // TODO: unpack time
-    pub end_time: u64,
+    pub start_time: Time,
+    pub end_time: Time,
     pub connections: Vec<ChunkConnection>,
 }
 
@@ -63,8 +63,8 @@ impl ChunkInfo {
         }
         Ok(ChunkInfo {
            pos: header.read_u64(b"chunk_pos")?,
-            start_time: header.read_u64(b"start_time")?,
-            end_time: header.read_u64(b"end_time")?,
+            start_time: header.read_time(b"start_time")?,
+            end_time: header.read_time(b"end_time")?,
             connections,
         })
     }
@@ -151,3 +151,49 @@ impl BagIndex {
         }
     }
 }
+
+#[derive(Debug)]
+pub struct IndexData {
+    pub conn_id: u32,
+    pub entries: Vec<IndexEntry>,
+}
+
+impl IndexData {
+    pub fn read<R: BagReader>(reader: &mut R) -> Result<Self> {
+        let header = reader.read_header_op(Op::IndexData)?;
+        if header.read_u32(b"ver")? != 1 {
+            bail!("unsupported IndexData version");
+        }
+
+        reader.read_data_length()?; // Data length not needed
+
+        let entry_count = header.read_u32(b"count")?;
+        let mut entries = Vec::with_capacity(entry_count as usize);
+        for _ in 0..entry_count {
+            let conn = IndexEntry::read(reader)?;
+            entries.push(conn);
+        }
+        Ok(IndexData {
+            conn_id: header.read_u32(b"conn")?,
+            entries,
+        })
+    }
+}
+
+#[derive(Debug)]
+pub struct IndexEntry {
+    pub time: Time,
+    pub offset: u32,
+}
+
+impl IndexEntry {
+    pub fn parse(input: parse::Input) -> parse::IResult<Self> {
+        tuple((Time::parse, le_u32))
+            .map(|(time, offset)| IndexEntry { time, offset })
+            .parse(input)
+    }
+
+    pub fn read<R: BagReader>(reader: &mut R) -> Result<Self> {
+        reader.read_parser(Self::parse)
+    }
+}
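For orientation (an aside, not part of the diff): each entry that IndexEntry::parse consumes is 12 bytes, a packed 8-byte time read as a little-endian u64 with the seconds in the low 32 bits and the nanoseconds in the high 32 bits (matching Time::from_packed), followed by a little-endian u32 offset into the owning chunk. A minimal sketch of the same decoding with plain slice handling and a made-up buffer, no nom involved:

// Decode one 12-byte index entry: packed time, then chunk offset.
fn decode_entry(buf: &[u8; 12]) -> (u32, u32, u32) {
    let packed = u64::from_le_bytes(buf[0..8].try_into().unwrap());
    let sec = packed as u32; // low 32 bits: seconds
    let nsec = (packed >> 32) as u32; // high 32 bits: nanoseconds
    let offset = u32::from_le_bytes(buf[8..12].try_into().unwrap());
    (sec, nsec, offset)
}

fn main() {
    // Made-up entry: t = 1700000000 s + 500000000 ns, message at offset 4096.
    let mut raw = [0u8; 12];
    raw[0..4].copy_from_slice(&1_700_000_000u32.to_le_bytes());
    raw[4..8].copy_from_slice(&500_000_000u32.to_le_bytes());
    raw[8..12].copy_from_slice(&4096u32.to_le_bytes());
    assert_eq!(decode_entry(&raw), (1_700_000_000, 500_000_000, 4096));
}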
@@ -19,7 +19,7 @@ mod fields;
 
 pub use self::{
     error::{FieldDataError, MissingFieldError},
-    fields::Op,
+    fields::{Op, Time},
 };
 
 #[derive(Clone, Debug)]
@@ -87,19 +87,23 @@ impl Header {
     }
 
     pub fn read_op(&self) -> Result<fields::Op> {
-        self.find_field(b"op").and_then(fields::Op::parse)
+        self.find_field(b"op").and_then(fields::Op::read)
     }
 
     pub fn read_u64(&self, field: &[u8]) -> Result<u64> {
-        self.find_field(field).and_then(fields::parse_u64)
+        self.find_field(field).and_then(fields::read_u64)
     }
 
+    pub fn read_time(&self, field: &[u8]) -> Result<Time> {
+        self.read_u64(field).map(Time::from_packed)
+    }
+
     pub fn read_u32(&self, field: &[u8]) -> Result<u32> {
-        self.find_field(field).and_then(fields::parse_u32)
+        self.find_field(field).and_then(fields::read_u32)
    }
 
     pub fn read_string(&self, field: &[u8]) -> Result<String> {
-        self.find_field(field).and_then(fields::parse_string)
+        self.find_field(field).and_then(fields::read_string)
     }
 }
 
@@ -5,6 +5,8 @@ use nom::{
 };
 use num_enum::TryFromPrimitive;
 
+use crate::parse::IResult;
+
 use super::{FieldDataError, Input, Result};
 
 #[derive(Clone, Copy, Debug, TryFromPrimitive, PartialEq)]
@@ -19,7 +21,7 @@ pub enum Op {
 }
 
 impl Op {
-    pub fn parse(input: Input) -> Result<Self> {
+    pub fn read(input: Input) -> Result<Self> {
         let (_, op) = all_consuming(le_u8)
             .parse(input)
             .map_err(FieldDataError::from)?;
@@ -27,20 +29,44 @@ impl Op {
     }
 }
 
-pub fn parse_u32(input: Input) -> Result<u32> {
+#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
+pub struct Time {
+    pub sec: u32,
+    pub nsec: u32,
+}
+
+impl Time {
+    pub fn from_packed(packed: u64) -> Time {
+        Time {
+            sec: packed as u32,
+            nsec: (packed >> 32) as u32,
+        }
+    }
+
+    pub fn parse(input: Input) -> IResult<Self> {
+        le_u64.map(Time::from_packed).parse(input)
+    }
+
+    pub fn read(input: Input) -> Result<Self> {
+        let (_, x) = Self::parse(input).map_err(FieldDataError::from)?;
+        Ok(x)
+    }
+}
+
+pub fn read_u32(input: Input) -> Result<u32> {
     let (_, x) = all_consuming(le_u32)
         .parse(input)
         .map_err(FieldDataError::from)?;
     Ok(x)
 }
 
-pub fn parse_u64(input: Input) -> Result<u64> {
+pub fn read_u64(input: Input) -> Result<u64> {
     let (_, x) = all_consuming(le_u64)
         .parse(input)
         .map_err(FieldDataError::from)?;
     Ok(x)
 }
 
-pub fn parse_string(input: Input) -> Result<String> {
+pub fn read_string(input: Input) -> Result<String> {
     Ok(String::from_utf8(input.to_owned())?)
 }
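A side note on the new Time type: because the fields are declared sec then nsec, the derived Ord/PartialOrd compare seconds first and use nanoseconds as the tie-break, so values already order chronologically without any extra code. The sketch below is standalone and illustrative only, with the unpacking helper repeated and made-up timestamps:

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Time {
    sec: u32,
    nsec: u32,
}

impl Time {
    // Same unpacking as the commit: low 32 bits are seconds, high 32 bits nanoseconds.
    fn from_packed(packed: u64) -> Time {
        Time { sec: packed as u32, nsec: (packed >> 32) as u32 }
    }
}

fn main() {
    // Two made-up timestamps within the same second.
    let a = Time::from_packed((250_000_000u64 << 32) | 1_700_000_000);
    let b = Time::from_packed((750_000_000u64 << 32) | 1_700_000_000);
    assert!(a < b); // derived Ord: sec first, then nsec, i.e. chronological order

    // Seconds-as-float view, handy for logging (illustration only).
    let secs = a.sec as f64 + a.nsec as f64 * 1e-9;
    println!("{secs:.3}");
}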