WIP - translations now really build WIP WIP: Python part WIP: core part WIP WIP - change_language.py WIP - python imports WIP - C side verification of write validity WIP - loader for (at least) USB transfer part WIP - tweaks WIP - fix compilation on older nightly, clippy lints & style WIP - translation data from upstream WIP - improve blob generation WIP - reorganize tests WIP - ignore generated translation blobs
parent
c5b76a90ae
commit
452633ef48
@ -0,0 +1,381 @@
|
||||
use core::{mem, str};
|
||||
|
||||
use crate::{
|
||||
crypto::{cosi, ed25519, merkle::merkle_root, sha256},
|
||||
error::Error,
|
||||
io::InputStream,
|
||||
};
|
||||
|
||||
use super::public_keys;
|
||||
|
||||
/// Maximum allowed length of a serialized translations header.
/// NOTE(review): not referenced in this chunk — presumably enforced by the
/// loader that receives the blob; confirm against callers.
pub const MAX_HEADER_LEN: u16 = 1024;
/// Filler byte expected in any unused space trailing the blob
/// (checked in `Translations::new`).
pub const EMPTY_BYTE: u8 = 0xFF;
/// Id of the terminating (sentinel) entry of an offsets table.
const SENTINEL_ID: u16 = 0xFFFF;

/// Minimum number of valid CoSi signers required when verifying the header
/// signature (see `TranslationsHeader::verify_with_keys`).
const SIGNATURE_THRESHOLD: u8 = 2;

// Maximum padding at the end of an offsets table (typically for alignment
// purposes). We allow at most 3 for alignment 4. In practice right now this
// should be max 1.
const MAX_TABLE_PADDING: usize = 3;

/// Catch-all parse error for a malformed blob.
/// NOTE(review): not referenced in this chunk — confirm it is used elsewhere
/// in the module, or remove.
const INVALID_TRANSLATIONS_BLOB: Error = value_error!("Invalid translations blob");
|
||||
|
||||
/// One record of an offsets table: maps an item id to the byte offset of its
/// data within the table's data section.
///
/// `repr(packed)` matches the on-wire layout exactly (4 bytes, no padding),
/// so a byte slice can be reinterpreted as `[OffsetEntry]` directly.
#[repr(packed)]
struct OffsetEntry {
    // item identifier; SENTINEL_ID (0xFFFF) marks the terminating entry
    pub id: u16,
    // byte offset into the data section
    pub offset: u16,
}
|
||||
|
||||
/// A read-only map from u16 ids to byte slices, backed by a serialized
/// offsets table followed by a data section.
pub struct Table<'a> {
    // (count + 1) entries; the last one is a sentinel whose offset marks the
    // end of `data` (see `validate_offset_table`)
    offsets: &'a [OffsetEntry],
    // data section the offsets point into
    data: &'a [u8],
}
|
||||
|
||||
fn validate_offset_table(
|
||||
data_len: usize,
|
||||
mut iter: impl Iterator<Item = u16>,
|
||||
) -> Result<(), Error> {
|
||||
// every offset table must have at least the sentinel
|
||||
let mut prev = iter.next().ok_or(value_error!("offset table too short"))?;
|
||||
for next in iter {
|
||||
// offsets must be in ascending order
|
||||
if prev > next {
|
||||
return Err(value_error!("offsets not in ascending order"));
|
||||
}
|
||||
prev = next;
|
||||
}
|
||||
// sentinel needs to be at least data_len - MAX_TABLE_PADDING, and at most
|
||||
// data_len
|
||||
let sentinel = prev as usize;
|
||||
if sentinel < data_len - MAX_TABLE_PADDING || sentinel > data_len {
|
||||
return Err(value_error!("invalid sentinel offset"));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
impl<'a> Table<'a> {
|
||||
pub fn new(mut reader: InputStream<'a>) -> Result<Self, Error> {
|
||||
let count = reader.read_u16_le()?;
|
||||
// The offsets table is (count + 1) entries long, the last entry is a sentinel.
|
||||
let offsets_data = reader.read((count + 1) as usize * mem::size_of::<OffsetEntry>())?;
|
||||
// SAFETY: OffsetEntry is repr(packed) of two u16 values, so any four bytes are
|
||||
// a valid OffsetEntry value.
|
||||
let (_prefix, offsets, _suffix) = unsafe { offsets_data.align_to::<OffsetEntry>() };
|
||||
if !_prefix.is_empty() || !_suffix.is_empty() {
|
||||
return Err(value_error!("misaligned offsets table"));
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
offsets,
|
||||
data: reader.rest(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn validate(&self) -> Result<(), Error> {
|
||||
validate_offset_table(self.data.len(), self.offsets.iter().map(|it| it.offset))?;
|
||||
if !matches!(
|
||||
self.offsets.iter().last().map(|it| it.id),
|
||||
Some(SENTINEL_ID)
|
||||
) {
|
||||
return Err(value_error!("invalid sentinel id"));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get(&self, id: u16) -> Option<&'a [u8]> {
|
||||
self.offsets
|
||||
.iter()
|
||||
.position(|it| it.id == id)
|
||||
.and_then(|idx| {
|
||||
let start = self.offsets[idx].offset as usize;
|
||||
let end = self.offsets[idx + 1].offset as usize;
|
||||
self.data.get(start..end)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> impl Iterator<Item = (u16, &'a [u8])> + '_ {
|
||||
let mut prev_offset = 0usize;
|
||||
self.offsets.iter().skip(1).map(move |entry| {
|
||||
let start = prev_offset;
|
||||
let end = entry.offset as usize;
|
||||
prev_offset = end;
|
||||
(entry.id, &self.data[start..end])
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A fully parsed and validated translations blob.
pub struct Translations<'a> {
    /// Parsed, signature-verified blob header.
    pub header: TranslationsHeader<'a>,
    // raw translated-string data; individual strings are delimited by
    // `translations_offsets`
    translations: &'a [u8],
    // (count + 1) offsets into `translations`; the last one is a sentinel
    translations_offsets: &'a [u16],
    // font table; each entry is itself a serialized `Table` of glyph data
    fonts: Table<'a>,
}
|
||||
|
||||
/// Read a u16-LE length prefix from `reader`, then return a sub-stream
/// covering exactly that many following bytes.
fn read_u16_prefixed_block<'a>(reader: &mut InputStream<'a>) -> Result<InputStream<'a>, Error> {
    let block_len = usize::from(reader.read_u16_le()?);
    reader.read_stream(block_len)
}
|
||||
|
||||
impl<'a> Translations<'a> {
    // NOTE(review): appears unused within this chunk — the magic check is
    // performed in `TranslationsHeader::parse_from` via `BLOB_MAGIC`.
    // Confirm it is unused and remove.
    const MAGIC: &'static [u8] = b"TRTR00";

    /// Parse and fully validate a translations blob: header (including its
    /// CoSi signature, via `TranslationsHeader::parse_from`), payload digest,
    /// the translated-strings offset table, and every font table.
    pub fn new(blob: &'a [u8]) -> Result<Self, Error> {
        let mut blob_reader = InputStream::new(blob);

        let (header, payload_reader) = TranslationsHeader::parse_from(&mut blob_reader)?;

        // validate that the trailing bytes, if any, are empty
        let remaining = blob_reader.rest();
        if !remaining.iter().all(|&b| b == EMPTY_BYTE) {
            // TODO optimize to quadwords?
            return Err(value_error!("Trailing data in translations blob"));
        }

        let payload_bytes = payload_reader.rest();

        // the header carries a digest of the payload; verify before parsing
        let payload_digest = sha256::digest(payload_bytes);
        if payload_digest != header.data_hash {
            return Err(value_error!("hash mismatch"));
        }

        let mut payload_reader = InputStream::new(payload_bytes);

        // payload layout: u16-length-prefixed translations block, then
        // u16-length-prefixed fonts block, with nothing after
        let mut translations_reader = read_u16_prefixed_block(&mut payload_reader)?;
        let fonts_reader = read_u16_prefixed_block(&mut payload_reader)?;

        if payload_reader.remaining() > 0 {
            return Err(value_error!("Trailing data in translations blob"));
        }

        // construct translations data
        let translations_count = translations_reader.read_u16_le()? as usize;
        let translations_offsets_bytes =
            translations_reader.read((translations_count + 1) * mem::size_of::<u16>())?;
        // SAFETY: any bytes are valid u16 values, so casting any data to
        // a sequence of u16 values is safe.
        // NOTE(review): the reinterpretation assumes a little-endian target —
        // TODO confirm that is guaranteed for all supported models.
        let (_prefix, translations_offsets, _suffix) =
            unsafe { translations_offsets_bytes.align_to::<u16>() };
        if !_prefix.is_empty() || !_suffix.is_empty() {
            return Err(value_error!("Invalid translations table"));
        }
        let translations = translations_reader.rest();
        validate_offset_table(translations.len(), translations_offsets.iter().copied())?;

        // construct and validate font table
        let fonts = Table::new(fonts_reader)?;
        fonts.validate()?;
        // every font entry is itself a serialized glyph table — validate each
        for (_, font_data) in fonts.iter() {
            let reader = InputStream::new(font_data);
            let font_table = Table::new(reader)?;
            font_table.validate()?;
        }

        Ok(Self {
            header,
            translations,
            translations_offsets,
            fonts,
        })
    }

    /// Returns the translation at the given index.
    ///
    /// Returns `None` (caller falls back to English) when the index is out of
    /// range or the stored string is empty.
    pub fn translation(&self, index: usize) -> Option<&str> {
        if index + 1 >= self.translations_offsets.len() {
            // The index is out of bounds.
            // (The last entry is a sentinel, so the last valid index is len - 2)
            // May happen when new firmware is using older translations and the string
            // is not defined yet.
            // Fallback to english.
            return None;
        }

        let start_offset = self.translations_offsets[index] as usize;
        let end_offset = self.translations_offsets[index + 1] as usize;

        // Construct the relevant slice
        // (offsets were validated against `translations.len()` in `new`)
        let string = &self.translations[start_offset..end_offset];

        if string.is_empty() {
            // The string is not defined in the blob.
            // May happen when old firmware is using newer translations and the string
            // was deleted in the newer version.
            // Fallback to english.
            return None;
        }

        str::from_utf8(string).ok()
    }

    /// Returns the glyph table for the font with the given id, or `None` if
    /// this blob does not contain it.
    pub fn font(&'a self, index: u16) -> Option<Table<'a>> {
        self.fonts
            .get(index)
            .and_then(|data| Table::new(InputStream::new(data)).ok())
    }
}
|
||||
|
||||
/// Parsed header of a translations blob, as produced by
/// `TranslationsHeader::parse_from`.
pub struct TranslationsHeader<'a> {
    /// Raw content of the header, for signature verification
    pub header_bytes: &'a [u8],
    /// Human readable language identifier (e.g. "cs" or "fr")
    pub language: &'a str,
    /// 4 bytes of version (major, minor, patch, build)
    pub version: [u8; 4],
    /// Length of the raw data, i.e. translations section + fonts section
    pub data_len: usize,
    /// Hash of the data blob (excluding the header)
    pub data_hash: sha256::Digest,
    /// Title to show user before changing the language
    pub change_language_title: &'a str,
    /// Text to show user before changing the language
    pub change_language_prompt: &'a str,
    /// Merkle proof items
    pub merkle_proof: &'a [sha256::Digest],
    /// CoSi signature
    pub signature: cosi::Signature,
    /// Expected total length of the blob
    /// (container length plus the magic and the u16 length prefix)
    pub total_len: usize,
}
|
||||
|
||||
fn read_fixedsize_str<'a>(reader: &mut InputStream<'a>, len: usize) -> Result<&'a str, Error> {
|
||||
let bytes = reader.read(len)?;
|
||||
core::str::from_utf8(bytes).map_err(|_| value_error!("invalid fixedsize string"))
|
||||
}
|
||||
|
||||
fn read_pascal_str<'a>(reader: &mut InputStream<'a>) -> Result<&'a str, Error> {
|
||||
let len = reader.read_byte()? as usize;
|
||||
read_fixedsize_str(reader, len)
|
||||
}
|
||||
|
||||
impl<'a> TranslationsHeader<'a> {
|
||||
const BLOB_MAGIC: &'static [u8] = b"TRTR00";
|
||||
const HEADER_MAGIC: &'static [u8] = b"TR";
|
||||
|
||||
/// Parse a translations header out of a stream.
|
||||
///
|
||||
/// The returned tuple consists of:
|
||||
/// (a) the parsed header and
|
||||
/// (b) reader of the payload section of the translations blob.
|
||||
/// The caller can use the returned reader to parse the payload.
|
||||
///
|
||||
/// The input stream is positioned at the end of the translations blob (or
|
||||
/// at the end of stream, whichever comes sooner). The caller can use this
|
||||
/// to verify that there is no unexpected trailing data in the input
|
||||
/// stream. (Also, you cannot make a mistake and read the payload out of
|
||||
/// the input stream).
|
||||
pub fn parse_from(reader: &mut InputStream<'a>) -> Result<(Self, InputStream<'a>), Error> {
|
||||
//
|
||||
// 1. parse outer container
|
||||
//
|
||||
let magic = reader.read(Self::BLOB_MAGIC.len())?;
|
||||
if magic != Self::BLOB_MAGIC {
|
||||
return Err(value_error!("invalid header magic"));
|
||||
}
|
||||
|
||||
// read length of contained data
|
||||
let container_length = reader.read_u16_le()? as usize;
|
||||
// continue working on the contained data (i.e., read beyond the bounds of
|
||||
// container_length will result in EOF).
|
||||
let mut reader = reader.read_stream(container_length.min(reader.remaining()))?;
|
||||
|
||||
//
|
||||
// 2. parse the header section
|
||||
//
|
||||
let header_bytes = read_u16_prefixed_block(&mut reader)?.rest();
|
||||
|
||||
let mut header_reader = InputStream::new(header_bytes);
|
||||
|
||||
let magic = header_reader.read(Self::HEADER_MAGIC.len())?;
|
||||
if magic != Self::HEADER_MAGIC {
|
||||
return Err(value_error!("bad header magic"));
|
||||
}
|
||||
|
||||
let language = read_fixedsize_str(&mut header_reader, 4)?;
|
||||
let model = read_fixedsize_str(&mut header_reader, 4)?;
|
||||
|
||||
if model != crate::trezorhal::model::INTERNAL_NAME {
|
||||
return Err(value_error!("Wrong Trezor model"));
|
||||
}
|
||||
|
||||
let version_bytes = header_reader.read(4)?;
|
||||
let version = unwrap!(version_bytes.try_into());
|
||||
|
||||
let data_len = header_reader.read_u16_le()? as usize;
|
||||
let data_hash: sha256::Digest =
|
||||
unwrap!(header_reader.read(sha256::DIGEST_SIZE)?.try_into());
|
||||
|
||||
let change_language_title = read_pascal_str(&mut header_reader)?;
|
||||
let change_language_prompt = read_pascal_str(&mut header_reader)?;
|
||||
|
||||
// ignore the rest of the header reader - this allows older firmware to
|
||||
// understand newer header if there are only added items
|
||||
_ = header_reader.rest();
|
||||
|
||||
//
|
||||
// 3. parse the proof section
|
||||
//
|
||||
let mut proof_reader = read_u16_prefixed_block(&mut reader)?;
|
||||
let proof_count = proof_reader.read_byte()? as usize;
|
||||
let proof_length = proof_count * sha256::DIGEST_SIZE;
|
||||
let proof_bytes = proof_reader.read(proof_length)?;
|
||||
|
||||
// create a list of the proof items
|
||||
// SAFETY: sha256::Digest is a plain array of u8, so any bytes are valid
|
||||
let (_prefix, merkle_proof, _suffix) = unsafe { proof_bytes.align_to::<sha256::Digest>() };
|
||||
if !_prefix.is_empty() || !_suffix.is_empty() {
|
||||
return Err(value_error!("misaligned proof table"));
|
||||
}
|
||||
let signature = cosi::Signature::new(
|
||||
proof_reader.read_byte()?,
|
||||
unwrap!(proof_reader.read(ed25519::SIGNATURE_SIZE)?.try_into()),
|
||||
);
|
||||
|
||||
// check that there is no trailing data in the proof section
|
||||
if proof_reader.remaining() > 0 {
|
||||
return Err(value_error!("trailing data in proof"));
|
||||
}
|
||||
|
||||
// check that the declared data section length matches the container size
|
||||
if container_length - reader.tell() != data_len {
|
||||
println!("container length: ", heapless::String::<10>::from(container_length as u32).as_str());
|
||||
println!("reader pos: ", heapless::String::<10>::from(reader.tell() as u32).as_str());
|
||||
println!("data_len: ", heapless::String::<10>::from(data_len as u32).as_str());
|
||||
return Err(value_error!("data length mismatch"));
|
||||
}
|
||||
|
||||
let new = Self {
|
||||
header_bytes,
|
||||
language,
|
||||
version,
|
||||
data_len,
|
||||
data_hash,
|
||||
change_language_title,
|
||||
change_language_prompt,
|
||||
merkle_proof,
|
||||
signature,
|
||||
total_len: container_length + Self::BLOB_MAGIC.len() + mem::size_of::<u16>(),
|
||||
};
|
||||
new.verify()?;
|
||||
Ok((new, reader))
|
||||
}
|
||||
|
||||
fn verify_with_keys(&self, public_keys: &[ed25519::PublicKey]) -> Result<(), Error> {
|
||||
let merkle_root = merkle_root(self.header_bytes, self.merkle_proof);
|
||||
Ok(cosi::verify(
|
||||
SIGNATURE_THRESHOLD,
|
||||
&merkle_root,
|
||||
public_keys,
|
||||
&self.signature,
|
||||
)?)
|
||||
}
|
||||
|
||||
pub fn verify(&self) -> Result<(), Error> {
|
||||
let mut result = self.verify_with_keys(&public_keys::PUBLIC_KEYS);
|
||||
#[cfg(feature = "debug")]
|
||||
if result.is_err() {
|
||||
// allow development keys
|
||||
result = self.verify_with_keys(&public_keys::PUBLIC_KEYS_DEVEL);
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
@ -1,241 +1,30 @@
|
||||
mod blob;
mod flash;
mod generated;
#[cfg(feature = "micropython")]
mod micropython;
mod obj;
mod public_keys;
mod translated_string;

pub use blob::MAX_HEADER_LEN;
pub use translated_string::TranslatedString as TR;
// Fallback language identifier used when no translation blob is active.
// NOTE(review): the Python side uses "en-US" (dashed) — confirm the
// difference in format between "enUS" and "en-US" is intentional.
pub const DEFAULT_LANGUAGE: &str = "enUS";
|
||||
|
||||
use crate::{error::Error, io::InputStream};
|
||||
use core::{ptr::null, str};
|
||||
|
||||
// Fixed header length of the pre-refactor blob format (the newer `blob`
// module replaces this with a variable-length, length-prefixed header).
const HEADER_LEN: usize = 256;

// TODO: somehow make it a singleton object, so it is loaded just once
// TODO: split it into header.rs, translations.rs and font.rs?
// TODO: --- put the {en,cs,fr}.* files to `languages` dir?
// TODO: not generate fr/cs.rs files not to confuse, rather do .txt?
// TODO: add some tests?
||||
|
||||
// NOTE(review): pre-refactor revision of these types, retained in this diff;
// the current versions live in the `blob` module.
#[repr(packed)]
struct OffsetEntry {
    // item identifier
    pub id: u16,
    // byte offset into the data section
    pub offset: u16,
}

struct Table<'a> {
    // (count + 1) entries; the last one is a sentinel
    offsets: &'a [OffsetEntry],
    // data section the offsets point into
    data: &'a [u8],
}
|
||||
|
||||
impl<'a> Table<'a> {
    /// Parse a table (u16-LE count, (count + 1) offset entries, then data)
    /// out of `data`.
    pub fn new(data: &'a [u8]) -> Result<Self, Error> {
        let mut reader = crate::io::InputStream::new(data);
        let count = reader.read_u16_le()?;
        // The offsets table is (count + 1) entries long, the last entry is a sentinel.
        // NOTE(review): `count + 1` is computed in u16 and can overflow for
        // count == 0xFFFF; the newer `blob` module should widen first.
        let offsets_data =
            reader.read((count + 1) as usize * core::mem::size_of::<OffsetEntry>())?;
        // SAFETY: OffsetEntry is repr(packed) of two u16 values, so any four bytes are
        // a valid OffsetEntry value.
        let (_prefix, offsets, _suffix) = unsafe { offsets_data.align_to::<OffsetEntry>() };
        if !_prefix.is_empty() || !_suffix.is_empty() {
            return Err(value_error!("Invalid offsets table"));
        }

        Ok(Self {
            offsets,
            data: reader.rest(),
        })
    }

    /// Return the data slice for `id`, or `None` if not found.
    // NOTE(review): `offsets[idx + 1]` is unchecked — panics if `id` matches
    // the last (sentinel) entry.
    pub fn get(&self, id: u16) -> Option<&'a [u8]> {
        self.offsets
            .iter()
            .position(|it| it.id == id)
            .and_then(|idx| {
                let start = self.offsets[idx].offset as usize;
                let end = self.offsets[idx + 1].offset as usize;
                self.data.get(start..end)
            })
    }
}
|
||||
|
||||
// Pre-refactor container for a parsed translations blob.
struct Translations<'a> {
    pub header: TranslationsHeader<'a>,
    // raw translated-string data
    translations: &'a [u8],
    // (count + 1) u16 offsets into `translations`; the last is a sentinel
    translation_offsets: &'a [u16],
    // serialized font table
    fonts: Table<'a>,
}
|
||||
|
||||
impl<'a> Translations<'a> {
    /// Parse a blob laid out as a fixed 256-byte header followed by
    /// `data_length` bytes of translations + fonts.
    pub fn new(blob: &'a [u8]) -> Result<Self, Error> {
        let header = TranslationsHeader::parse(&blob[..HEADER_LEN])?;
        let data_len = header.data_length as usize;
        if blob.len() < HEADER_LEN + data_len {
            return Err(Error::EOFError);
        }
        let data = &blob[HEADER_LEN..HEADER_LEN + data_len];
        if header.translations_length > data.len() as u16 {
            return Err(value_error!("Invalid translations length field"));
        }

        let translations = &data[..header.translations_length as usize];
        // length of offsets table is (count + 1) entries, the last one is a sentinel
        let table_len = (header.translations_count + 1) as usize * core::mem::size_of::<u16>();
        if table_len > translations.len() {
            return Err(value_error!("Invalid translations table"));
        }
        // SAFETY: any bytes are valid u16 values, so casting any data to
        // a sequence of u16 values is safe.
        let (_prefix, translation_offsets, _suffix) =
            unsafe { data[..table_len].align_to::<u16>() };
        if !_prefix.is_empty() || !_suffix.is_empty() {
            return Err(value_error!("Invalid translations table"));
        }

        // everything past the translations section is the font table
        let fonts_data = &data[header.translations_length as usize..];

        Ok(Self {
            header,
            translations,
            translation_offsets,
            fonts: Table::new(fonts_data)?,
        })
    }

    /// Returns the translation at the given index.
    pub fn translation(&self, index: usize) -> Option<&str> {
        // NOTE(review): off-by-one — this only rejects index >= len + 1, so
        // index == len and index == len - 1 pass and the offset reads below
        // can panic; the check should be `index + 1 >= len` (as in the newer
        // `blob` module version).
        if index >= self.translation_offsets.len() + 1 {
            // The index is out of bounds.
            // May happen when new firmware is using older translations and the string
            // is not defined yet.
            // Fallback to english.
            return None;
        }

        let start_offset = self.translation_offsets[index] as usize;
        let end_offset = self.translation_offsets[index + 1] as usize;

        // Construct the relevant slice
        let string = &self.translations[start_offset..end_offset];

        if string.is_empty() {
            // The string is not defined in the blob.
            // May happen when old firmware is using newer translations and the string
            // was deleted in the newer version.
            // Fallback to english.
            return None;
        }

        str::from_utf8(string).ok()
    }

    /// Returns the font sub-table for the given font id, if present.
    pub fn font(&'a self, index: u16) -> Option<Table<'a>> {
        self.fonts.get(index).and_then(|data| Table::new(data).ok())
    }
}
|
||||
|
||||
// Pre-refactor, fixed-layout translations header (256 bytes).
struct TranslationsHeader<'a> {
    /// Human readable language identifier (e.g. "cs" or "fr")
    pub language: &'a str,
    /// 4 bytes of version (major, minor, patch, build)
    pub version: [u8; 4],
    /// Overall length of the data blob (excluding the header)
    pub data_length: u16,
    /// Length of the header
    /// NOTE(review): `parse` always sets this to 0 — placeholder field.
    pub header_length: u16,
    /// Length of the translation data
    pub translations_length: u16,
    /// Number of translation items
    pub translations_count: u16,
    /// Hash of the data blob (excluding the header)
    pub data_hash: [u8; 32],
    /// Title to show user before changing the language
    pub change_language_title: &'a str,
    /// Text to show user before changing the language
    pub change_language_prompt: &'a str,
}
|
||||
|
||||
impl<'a> TranslationsHeader<'a> {
    const MAGIC: &'static [u8] = b"TRTR00";
    // field widths of the fixed 256-byte header layout
    const VERSION_LEN: usize = 16;
    const LANG_LEN: usize = 32;
    const DATA_HASH_LEN: usize = 32;
    const CHANGE_LANG_TITLE_LEN: usize = 20;
    const CHANGE_LANG_PROMPT_LEN: usize = 40;
    const SIGNATURE_LEN: usize = 65;

    /// Read a fixed-width field and return it as a str with trailing NUL
    /// padding stripped.
    fn read_fixedsize_str<'b, 'c: 'b>(reader: &'b mut InputStream<'c>, len: usize) -> Result<&'c str, Error> {
        let bytes = reader.read(len)?;
        let string = core::str::from_utf8(bytes)
            .map_err(|_| value_error!("Invalid string"))?
            .trim_end_matches(0 as char);
        Ok(string)
    }

    /// Parse the fixed-size (HEADER_LEN) header.
    // NOTE(review): the signature is only length-checked, never verified —
    // see the TODO below. Superseded by the `blob` module's `parse_from`,
    // which verifies a CoSi signature.
    pub fn parse(data: &'a [u8]) -> Result<Self, Error> {
        if data.len() != HEADER_LEN {
            return Err(value_error!("Invalid header length"));
        }

        let mut reader = crate::io::InputStream::new(data);

        let magic = reader.read(Self::MAGIC.len())?;
        if magic != Self::MAGIC {
            return Err(value_error!("Invalid header magic"));
        }

        let version_bytes = reader.read(4)?;
        let version = unwrap!(version_bytes.try_into());
        let language = Self::read_fixedsize_str(&mut reader, Self::LANG_LEN)?;
        let data_length = reader.read_u16_le()?;
        let translations_length = reader.read_u16_le()?;
        let translations_count = reader.read_u16_le()?;
        let data_hash = unwrap!(reader.read(32)?.try_into());
        let change_language_title =
            Self::read_fixedsize_str(&mut reader, Self::CHANGE_LANG_TITLE_LEN)?;
        let change_language_prompt =
            Self::read_fixedsize_str(&mut reader, Self::CHANGE_LANG_PROMPT_LEN)?;

        if translations_length > data_length {
            return Err(value_error!("Invalid translations length field"));
        }

        let trailing_data = reader.rest();
        if trailing_data.len() < Self::SIGNATURE_LEN {
            return Err(value_error!("Invalid header signature"));
        }

        // TODO check signature

        Ok(Self {
            language,
            version,
            data_length,
            // placeholder; header length is implied by HEADER_LEN
            header_length: 0,
            translations_length,
            translations_count,
            data_hash,
            change_language_title,
            change_language_prompt,
        })
    }
}
|
||||
|
||||
// SAFETY: Returned pointer will only point to valid font data for as long as the flash
|
||||
// content is not invalidated by `erase()` or `write()`.
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn get_utf8_glyph(codepoint: cty::uint16_t, font: cty::c_int) -> *const u8 {
|
||||
// C will send a negative number
|
||||
let font_abs = font.unsigned_abs() as u16;
|
||||
|
||||
// SAFETY:
|
||||
// We rely on the fact that the caller (`display_text_render` and friends) uses
|
||||
// and discards this data immediately.
|
||||
// SAFETY: Reference is discarded at the end of the function.
|
||||
// We do return a _pointer_ to the same memory location, but the pointer is always valid.
|
||||
let Some(tr) = (unsafe { flash::get() }) else {
|
||||
return null();
|
||||
return core::ptr::null();
|
||||
};
|
||||
if let Some(glyph) = tr.font(font_abs).and_then(|t| t.get(codepoint)) {
|
||||
glyph.as_ptr()
|
||||
} else {
|
||||
null()
|
||||
core::ptr::null()
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,14 @@
|
||||
use crate::crypto::ed25519;
|
||||
|
||||
/// Development signing keys — accepted only as a fallback on `debug` builds
/// (see `TranslationsHeader::verify`).
#[cfg(feature = "debug")]
pub const PUBLIC_KEYS_DEVEL: [ed25519::PublicKey; 3] = [
    *b"\x68\x46\x0e\xbe\xf3\xb1\x38\x16\x4e\xc7\xfd\x86\x10\xe9\x58\x00\xdf\x75\x98\xf7\x0f\x2f\x2e\xa7\xdb\x51\x72\xac\x74\xeb\xc1\x44",
    *b"\x8d\x4a\xbe\x07\x4f\xef\x92\x29\xd3\xb4\x41\xdf\xea\x4f\x98\xf8\x05\xb1\xa2\xb3\xa0\x6a\xe6\x45\x81\x0e\xfe\xce\x77\xfd\x50\x44",
    *b"\x97\xf7\x13\x5a\x9a\x26\x90\xe7\x3b\xeb\x26\x55\x6f\x1c\xb1\x63\xbe\xa2\x53\x2a\xff\xa1\xe7\x78\x24\x30\xbe\x98\xc0\xe5\x68\x12",
];

/// Production CoSi public keys used to verify translation blob signatures.
pub const PUBLIC_KEYS: [ed25519::PublicKey; 3] = [
    *b"\x43\x34\x99\x63\x43\x62\x3e\x46\x2f\x0f\xc9\x33\x11\xfe\xf1\x48\x4c\xa2\x3d\x2f\xf1\xee\xc6\xdf\x1f\xa8\xeb\x7e\x35\x73\xb3\xdb",
    *b"\xa9\xa2\x2c\xc2\x65\xa0\xcb\x1d\x6c\xb3\x29\xbc\x0e\x60\xbc\x45\xdf\x76\xb9\xab\x28\xfb\x87\xb6\x11\x36\xfe\xaf\x8d\x8f\xdc\x96",
    *b"\xb8\xd2\xb2\x1d\xe2\x71\x24\xf0\x51\x1f\x90\x3a\xe7\xe6\x0e\x07\x96\x18\x10\xa0\xb8\xf2\x8e\xa7\x55\xfa\x50\x36\x7a\x8a\x2b\x8b",
];
|
@ -1,2 +1,6 @@
|
||||
import trezorconfig as config  # noqa: F401
import trezorio as io  # noqa: F401

import trezortranslate as translations  # noqa: F401

# Re-export the translated-string table under a short alias for app code.
TR = translations.TR
|
||||
|
@ -1,29 +0,0 @@
|
||||
from trezor import config
|
||||
|
||||
# Fallback language tag returned when no translation blob reports a language.
DEFAULT_LANGUAGE = "en-US"
|
||||
|
||||
|
||||
def get_language() -> str:
    """Return the language of the active translation blob, or the default."""
    from trezortranslate import language_name

    # an empty/None language name means no blob is active
    return language_name() or DEFAULT_LANGUAGE
|
||||
|
||||
|
||||
def write(data: bytes | bytearray, offset: int) -> None:
    """Write translation data into config storage at the given offset.

    Raises wire.DataError if the write would exceed the reserved area
    (as reported by data_max_size()).
    """
    from trezor import wire

    if offset + len(data) > data_max_size():
        raise wire.DataError("Language data too long")

    config.translations_set(data, offset)
|
||||
|
||||
|
||||
def wipe() -> None:
    """Erase all stored translation data from config storage."""
    config.translations_wipe()
|
||||
|
||||
|
||||
def data_max_size() -> int:
    """Return the maximum translation-data size, as reported by config."""
    return config.translations_max_bytesize()
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue