Remove pt_EN
This commit is contained in:
parent 43523b58e5
commit f407176a5e
src/main.rs (71 changed lines)
@@ -4,27 +4,33 @@ use std::io::{BufWriter, Read, Seek, SeekFrom, Write};
 
 #[derive(Debug)]
 struct DatHeader {
-    pub total: u16, // 2621 - Para obrigar os dados começarem a partir do offset 65536
+    // Its always 2621
+    // To force the contents of files to start at offset 65536
+    pub total: u16,
     pub files: Vec<DatFile>
 }
 
 impl DatHeader {
     fn new(filename: &str) -> Self {
         let mut f = File::open(filename).unwrap();
-        // Read Total Files
+        // Read total files - always 2621
         let mut file_dat: [u8; 2] = [0; 2];
         let _ = f.read(&mut file_dat);
-        let mut result = DatHeader::from_bytes(file_dat);
-        for _ in 0..result.total {
+        // Create a new DatHeader
+        let mut dat = DatHeader::from_bytes(file_dat);
+        // Get parse info about every file
+        for _ in 0..dat.total {
             let mut file_dat: [u8; 25] = [0; 25];
             let _ = f.read(&mut file_dat);
+            // If the first char of the filename is empty, skip
             if file_dat[0] == 0 {
                 continue
             }
+            // Add a new DatFile to DatHeader
             let my_file = DatFile::from_bytes(file_dat);
-            result.files.push(my_file);
+            dat.files.push(my_file);
         }
-        result
+        dat
     }
 
     fn from_bytes(buffer: [u8; 2]) -> Self {
@@ -41,7 +47,8 @@ impl DatHeader {
 
 #[derive(Debug)]
 struct DatFile {
-    // 16 Bytes - Apenas 12 Bytes usados, resto com \0
+    // 16 bytes
+    // Only 12 bytes should be used
     pub name: String,
     pub size: u32,
     pub offset: u32,
@@ -54,29 +61,29 @@ impl DatFile {
         }
     }
 
-    fn from_bytes(file_bytes: [u8; 25]) -> Self {
+    fn from_bytes(bytes: [u8; 25]) -> Self {
         Self {
-            name: std::str::from_utf8(&file_bytes[0..16]).unwrap().to_string().replace('\0', ""),
-            size: u32::from_le_bytes(file_bytes[16..20].try_into().expect("Slice with incorrect length")),
-            offset: u32::from_le_bytes(file_bytes[20..24].try_into().expect("Slice with incorrect length")),
-            flag: file_bytes[24]
+            name: std::str::from_utf8(&bytes[0..16]).unwrap().to_string().replace('\0', ""),
+            size: u32::from_le_bytes(bytes[16..20].try_into().expect("Slice with incorrect length")),
+            offset: u32::from_le_bytes(bytes[20..24].try_into().expect("Slice with incorrect length")),
+            flag: bytes[24]
         }
     }
 
     fn to_bytes(&self) -> [u8; 25] {
-        let mut file_bytes: [u8; 25] = [0; 25];
+        let mut bytes: [u8; 25] = [0; 25];
         // name
         if self.name.len() > 12 {
-            panic!("Filename invalido");
+            panic!("Invalid filename: {}", self.name);
         }
-        file_bytes[0..self.name.as_bytes().len()].clone_from_slice(self.name.as_bytes());
+        bytes[0..self.name.as_bytes().len()].clone_from_slice(self.name.as_bytes());
         // size
-        file_bytes[16..20].clone_from_slice(&self.size.to_le_bytes());
+        bytes[16..20].clone_from_slice(&self.size.to_le_bytes());
         // offset
-        file_bytes[20..24].clone_from_slice(&self.offset.to_le_bytes());
-        // flag
-        file_bytes[24] = self.flag;
-        file_bytes
+        bytes[20..24].clone_from_slice(&self.offset.to_le_bytes());
+        // flag - not used
+        bytes[24] = self.flag;
+        bytes
     }
 }
 
@@ -109,40 +116,42 @@ fn create_json(dat: &DatHeader) {
 }
 
 fn read_json() -> Vec<String> {
-    let data = fs::read_to_string("list.json").expect("Unable to read file");
-    serde_json::from_str(&data).expect("Unable to parse")
+    let list = fs::read_to_string("list.json").expect("Unable to read file");
+    serde_json::from_str(&list).expect("Unable to parse")
 }
 
 fn create_dat(files: Vec<String>) {
     let mut f = File::create("NOVO.DAT").unwrap();
 
     let dat: DatHeader = DatHeader { total: 2621, files: Vec::new() };
     let _ = f.write(&dat.to_bytes());
 
     let mut jsonfiles : Vec<JsonFile> = Vec::new();
     let mut address: i32 = 65536;
     for file in files {
-        // Obter tamanho do ficheiro
+        // Get file size
        let file_size: i32 = fs::metadata(&file).unwrap().len() as i32;
-        // Separar o Nome da Path
+        // Split name from path
        let split: Vec<_> = file.split('/').collect();
        let file_name = split[1].clone();
-        // Gravar info da Header
+        // Save file info header
        let datfile = DatFile::new(file_name.to_string(), file_size as u32, address as u32, 0);
        let _ = f.write(&datfile.to_bytes());
-        // Ver se o final do ficheiro está alinhado a 16
+        // Check if end if aligned at 16 bytes
        address += file_size;
        address += (-address).checked_rem_euclid(16).unwrap();
        jsonfiles.push(JsonFile { name: file.to_string(), padding: address as u32 });
     }
 
-    // Preencher ate 65535 com 0
-    let new_position = f.seek(SeekFrom::Current(0)).unwrap();
-    if new_position < 65536 {
-        for _ in new_position..65536 {
+    // Fill with zeros until 65535
+    let current_position = f.seek(SeekFrom::Current(0)).unwrap();
+    if current_position < 65536 {
+        for _ in current_position..65536 {
            let _ = f.write(&[0]);
        }
     }
 
-    // Gravar conteudo dos ficheiros no DAT
+    // Save files content into DAT file
     for file in jsonfiles {
        let mut source = File::open(file.name).unwrap();
        let mut contents: Vec<u8> = Vec::new();
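For context (not part of the commit): the translated comment in create_dat refers to the 16-byte alignment applied after each file is written; (-address).rem_euclid(16) is the number of padding bytes needed to round address up to the next multiple of 16. A minimal sketch, with pad_to_16 as a hypothetical helper used only for illustration:

fn pad_to_16(address: i32) -> i32 {
    // rem_euclid always returns a non-negative remainder, so this adds
    // between 0 and 15 bytes, landing exactly on the next multiple of 16.
    address + (-address).rem_euclid(16)
}

fn main() {
    assert_eq!(pad_to_16(65536), 65536); // already aligned, nothing added
    assert_eq!(pad_to_16(65546), 65552); // 6 padding bytes reach 16 * 4097
}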