use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use std::borrow::Cow;
use std::{fmt, io, mem};
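/// Errors encountered when reading or writing binary glTF.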
#[derive(Debug)]
pub enum Error {
    /// I/O error while reading or writing.
    Io(::std::io::Error),
    /// The GLB container version is unsupported (only version 2 is accepted).
    Version(u32),
    /// The first four bytes of the file were not the `glTF` magic.
    Magic([u8; 4]),
    /// The length declared in the file header does not match the data provided.
    Length {
        length: u32,
        length_read: usize,
    },
    /// A chunk's declared length exceeds the data that remains.
    ChunkLength {
        ty: ChunkType,
        length: u32,
        length_read: usize,
    },
    /// A chunk of this type was not expected at this position.
    ChunkType(ChunkType),
    /// The chunk type tag was neither `JSON` nor `BIN\0`.
    UnknownChunkType([u8; 4]),
}
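/// Binary glTF contents: the file header plus the JSON chunk data and the
/// optional BIN chunk data.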
#[derive(Clone, Debug)]
pub struct Glb<'a> {
    /// The GLB file header.
    pub header: Header,
    /// The JSON chunk data.
    pub json: Cow<'a, [u8]>,
    /// The optional BIN chunk data.
    pub bin: Option<Cow<'a, [u8]>>,
}
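/// The 12-byte header at the start of a binary glTF file.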
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct Header {
    /// Must be `b"glTF"`.
    pub magic: [u8; 4],
    /// The container version; only version 2 is supported.
    pub version: u32,
    /// The total length of the file in bytes, including this header and all
    /// chunk padding.
    pub length: u32,
}
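/// The type of a GLB chunk.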
#[derive(Copy, Clone, Debug)]
pub enum ChunkType {
    /// The `JSON` chunk.
    Json,
    /// The `BIN\0` chunk.
    Bin,
}
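/// The 8-byte header preceding each chunk: a little-endian length followed by
/// a four-byte chunk type tag.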
#[derive(Copy, Clone, Debug)]
#[repr(C)]
struct ChunkHeader {
    /// The length of the chunk data in bytes, excluding this header.
    length: u32,
    /// The chunk type.
    ty: ChunkType,
}
impl Header {
    fn from_reader<R: io::Read>(mut reader: R) -> Result<Self, Error> {
        use self::Error::Io;
        let mut magic = [0; 4];
        reader.read_exact(&mut magic).map_err(Io)?;
        // The magic must be `glTF` before the rest of the header is trusted.
        if &magic == b"glTF" {
            Ok(Self {
                magic,
                version: reader.read_u32::<LittleEndian>().map_err(Io)?,
                length: reader.read_u32::<LittleEndian>().map_err(Io)?,
            })
        } else {
            Err(Error::Magic(magic))
        }
    }

    /// The on-disk size of the header in bytes.
    fn size_of() -> usize {
        12
    }
}
impl ChunkHeader {
    fn from_reader<R: io::Read>(mut reader: R) -> Result<Self, Error> {
        use self::Error::Io;
        let length = reader.read_u32::<LittleEndian>().map_err(Io)?;
        let mut ty = [0; 4];
        reader.read_exact(&mut ty).map_err(Io)?;
        let ty = match &ty {
            b"JSON" => Ok(ChunkType::Json),
            b"BIN\0" => Ok(ChunkType::Bin),
            _ => Err(Error::UnknownChunkType(ty)),
        }?;
        Ok(Self { length, ty })
    }
}
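/// Rounds `n` up to the nearest multiple of four, the alignment required for
/// GLB chunks.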
fn align_to_multiple_of_four(n: &mut usize) {
    *n = (*n + 3) & !3;
}
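/// Splits the chunk contents (everything after the file header) into the JSON
/// chunk data and, if present, the BIN chunk data.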
fn split_binary_gltf(mut data: &[u8]) -> Result<(&[u8], Option<&[u8]>), Error> {
    // The first chunk must be the JSON chunk and must fit within the slice.
    let (json, mut data) = ChunkHeader::from_reader(&mut data)
        .and_then(|json_h| {
            if let ChunkType::Json = json_h.ty {
                Ok(json_h)
            } else {
                Err(Error::ChunkType(json_h.ty))
            }
        })
        .and_then(|json_h| {
            if json_h.length as usize <= data.len() {
                Ok(json_h)
            } else {
                Err(Error::ChunkLength {
                    ty: json_h.ty,
                    length: json_h.length,
                    length_read: data.len(),
                })
            }
        })
        .map(|json_h| data.split_at(json_h.length as usize))?;
    // Any remaining bytes must form a BIN chunk.
    let bin = if !data.is_empty() {
        ChunkHeader::from_reader(&mut data)
            .and_then(|bin_h| {
                if let ChunkType::Bin = bin_h.ty {
                    Ok(bin_h)
                } else {
                    Err(Error::ChunkType(bin_h.ty))
                }
            })
            .and_then(|bin_h| {
                if bin_h.length as usize <= data.len() {
                    Ok(bin_h)
                } else {
                    Err(Error::ChunkLength {
                        ty: bin_h.ty,
                        length: bin_h.length,
                        length_read: data.len(),
                    })
                }
            })
            .map(|bin_h| data.split_at(bin_h.length as usize))
            .map(|(x, _)| Some(x))?
    } else {
        None
    };
    Ok((json, bin))
}
impl<'a> Glb<'a> {
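    /// Writes the binary glTF to a writer, padding the JSON chunk with spaces
    /// and the BIN chunk with zeros so that every chunk is four-byte aligned.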
    pub fn to_writer<W>(&self, mut writer: W) -> Result<(), crate::Error>
    where
        W: io::Write,
    {
        // Write the GLB header with the total file length, including padding.
        {
            let magic = b"glTF";
            let version = 2;
            let mut length =
                mem::size_of::<Header>() + mem::size_of::<ChunkHeader>() + self.json.len();
            align_to_multiple_of_four(&mut length);
            if let Some(bin) = self.bin.as_ref() {
                length += mem::size_of::<ChunkHeader>() + bin.len();
                align_to_multiple_of_four(&mut length);
            }
            writer.write_all(&magic[..])?;
            writer.write_u32::<LittleEndian>(version)?;
            writer.write_u32::<LittleEndian>(length as u32)?;
        }

        // Write the JSON chunk, padded with trailing spaces to a multiple of four.
        {
            let magic = b"JSON";
            let mut length = self.json.len();
            align_to_multiple_of_four(&mut length);
            let padding = length - self.json.len();
            writer.write_u32::<LittleEndian>(length as u32)?;
            writer.write_all(&magic[..])?;
            writer.write_all(&self.json)?;
            for _ in 0..padding {
                writer.write_u8(0x20)?;
            }
        }

        // Write the optional BIN chunk, padded with trailing zeros.
        if let Some(bin) = self.bin.as_ref() {
            let magic = b"BIN\0";
            let mut length = bin.len();
            align_to_multiple_of_four(&mut length);
            let padding = length - bin.len();
            writer.write_u32::<LittleEndian>(length as u32)?;
            writer.write_all(&magic[..])?;
            writer.write_all(bin)?;
            for _ in 0..padding {
                writer.write_u8(0)?;
            }
        }
        Ok(())
    }
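    /// Serializes the binary glTF into a byte vector, pre-allocating the exact
    /// padded length.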
    pub fn to_vec(&self) -> Result<Vec<u8>, crate::Error> {
        // Compute the padded total length up front so the vector never reallocates.
        let mut length =
            mem::size_of::<Header>() + mem::size_of::<ChunkHeader>() + self.json.len();
        align_to_multiple_of_four(&mut length);
        if let Some(bin) = self.bin.as_ref() {
            length += mem::size_of::<ChunkHeader>() + bin.len();
            align_to_multiple_of_four(&mut length);
        }
        let mut vec = Vec::with_capacity(length);
        self.to_writer(&mut vec as &mut dyn io::Write)?;
        Ok(vec)
    }
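    /// Constructs a `Glb` from a byte slice, borrowing the JSON and BIN chunk
    /// data from it.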
    pub fn from_slice(mut data: &'a [u8]) -> Result<Self, crate::Error> {
        let header = Header::from_reader(&mut data)
            .and_then(|header| {
                // The declared length includes the 12-byte header itself;
                // `checked_sub` guards against a header that declares fewer
                // than 12 bytes.
                let contents_length = (header.length as usize)
                    .checked_sub(Header::size_of())
                    .ok_or(Error::Length {
                        length: header.length,
                        length_read: data.len(),
                    })?;
                if contents_length <= data.len() {
                    Ok(header)
                } else {
                    Err(Error::Length {
                        length: contents_length as u32,
                        length_read: data.len(),
                    })
                }
            })
            .map_err(crate::Error::Binary)?;
        match header.version {
            2 => split_binary_gltf(data)
                .map(|(json, bin)| Glb {
                    header,
                    json: json.into(),
                    bin: bin.map(Into::into),
                })
                .map_err(crate::Error::Binary),
            x => Err(crate::Error::Binary(Error::Version(x))),
        }
    }
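    /// Reads binary glTF from a reader, copying the JSON and BIN chunk data
    /// into owned buffers.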
    pub fn from_reader<R: io::Read>(mut reader: R) -> Result<Self, crate::Error> {
        let header = Header::from_reader(&mut reader).map_err(crate::Error::Binary)?;
        match header.version {
            2 => {
                // Guard against a declared length smaller than the header itself.
                let glb_len = header
                    .length
                    .checked_sub(Header::size_of() as u32)
                    .ok_or(crate::Error::Binary(Error::Length {
                        length: header.length,
                        length_read: 0,
                    }))?;
                let mut buf = vec![0; glb_len as usize];
                if let Err(e) = reader.read_exact(&mut buf).map_err(Error::Io) {
                    Err(crate::Error::Binary(e))
                } else {
                    split_binary_gltf(&buf)
                        .map(|(json, bin)| Glb {
                            header,
                            json: json.to_vec().into(),
                            bin: bin.map(<[u8]>::to_vec).map(Into::into),
                        })
                        .map_err(crate::Error::Binary)
                }
            }
            x => Err(crate::Error::Binary(Error::Version(x))),
        }
    }
}
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "{}",
            match *self {
                Error::Io(ref e) => return e.fmt(f),
                Error::Version(_) => "unsupported version",
                Error::Magic(_) => "not glTF magic",
                Error::Length { .. } => "could not completely read the object",
                Error::ChunkLength { ty, .. } => match ty {
                    ChunkType::Json => "JSON chunk length exceeds that of slice",
                    ChunkType::Bin => "BIN\\0 chunk length exceeds that of slice",
                },
                Error::ChunkType(ty) => match ty {
                    ChunkType::Json => "was not expecting JSON chunk",
                    ChunkType::Bin => "was not expecting BIN\\0 chunk",
                },
                Error::UnknownChunkType(_) => "unknown chunk type",
            }
        )
    }
}
impl ::std::error::Error for Error {}
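
// A minimal round-trip sketch, added for illustration rather than taken from
// the original module: it serializes a `Glb` with `to_vec` and parses it back
// with `from_slice`. The JSON payload is an arbitrary placeholder, and the
// assertions assume `crate::Error` implements `Debug` (required by `expect`).
#[cfg(test)]
mod tests {
    use super::{Glb, Header};
    use std::borrow::Cow;

    #[test]
    fn glb_round_trip() {
        let json = br#"{"asset":{"version":"2.0"}}"#.to_vec();
        let bin = vec![1u8, 2, 3, 4, 5];
        let glb = Glb {
            header: Header {
                magic: *b"glTF",
                version: 2,
                // `to_writer` recomputes the total length, so this field does
                // not need to be exact when writing.
                length: 0,
            },
            json: Cow::Owned(json.clone()),
            bin: Some(Cow::Owned(bin.clone())),
        };
        let bytes = glb.to_vec().expect("serialization should succeed");
        let parsed = Glb::from_slice(&bytes).expect("parsing should succeed");
        // Chunks are padded to a multiple of four bytes (JSON with spaces, BIN
        // with zeros), so compare only the original prefixes.
        assert_eq!(&parsed.json[..json.len()], json.as_slice());
        assert_eq!(&parsed.bin.as_ref().unwrap()[..bin.len()], bin.as_slice());
    }
}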