Bundle crate rewrite
This commit is contained in:
parent
ed60761808
commit
b6b3485a0a
|
@ -2782,6 +2782,7 @@ dependencies = [
|
||||||
"tectonic_geturl",
|
"tectonic_geturl",
|
||||||
"tectonic_io_base",
|
"tectonic_io_base",
|
||||||
"tectonic_status_base",
|
"tectonic_status_base",
|
||||||
|
"url",
|
||||||
"zip",
|
"zip",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
|
@ -106,13 +106,7 @@ pub trait DriverHooks {
|
||||||
/// argument specifies the cryptographic digest of the data that were
|
/// argument specifies the cryptographic digest of the data that were
|
||||||
/// written. Note that this function takes ownership of the name and
|
/// written. Note that this function takes ownership of the name and
|
||||||
/// digest.
|
/// digest.
|
||||||
fn event_output_closed(
|
fn event_output_closed(&mut self, _name: String, _digest: DigestData) {}
|
||||||
&mut self,
|
|
||||||
_name: String,
|
|
||||||
_digest: DigestData,
|
|
||||||
_status: &mut dyn StatusBackend,
|
|
||||||
) {
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This function is called when an input file is closed. The "digest"
|
/// This function is called when an input file is closed. The "digest"
|
||||||
/// argument specifies the cryptographic digest of the data that were
|
/// argument specifies the cryptographic digest of the data that were
|
||||||
|
@ -560,7 +554,7 @@ impl<'a> CoreBridgeState<'a> {
|
||||||
rv = true;
|
rv = true;
|
||||||
}
|
}
|
||||||
let (name, digest) = oh.into_name_digest();
|
let (name, digest) = oh.into_name_digest();
|
||||||
self.hooks.event_output_closed(name, digest, self.status);
|
self.hooks.event_output_closed(name, digest);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
|
|
||||||
[package]
|
[package]
|
||||||
name = "tectonic_bundles"
|
name = "tectonic_bundles"
|
||||||
version = "0.0.0-dev.0" # assigned with cranko (see README)
|
version = "0.0.0-dev.0" # assigned with cranko (see README)
|
||||||
authors = ["Peter Williams <peter@newton.cx>"]
|
authors = ["Peter Williams <peter@newton.cx>"]
|
||||||
description = """
|
description = """
|
||||||
Tectonic "bundle" (support file collection) implementations.
|
Tectonic "bundle" (support file collection) implementations.
|
||||||
|
@ -25,6 +25,7 @@ tectonic_geturl = { path = "../geturl", version = "0.0.0-dev.0", default-feature
|
||||||
tectonic_io_base = { path = "../io_base", version = "0.0.0-dev.0" }
|
tectonic_io_base = { path = "../io_base", version = "0.0.0-dev.0" }
|
||||||
tectonic_status_base = { path = "../status_base", version = "0.0.0-dev.0" }
|
tectonic_status_base = { path = "../status_base", version = "0.0.0-dev.0" }
|
||||||
zip = { version = "^0.6", default-features = false, features = ["deflate"] }
|
zip = { version = "^0.6", default-features = false, features = ["deflate"] }
|
||||||
|
url = "^2.0"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = ["geturl-reqwest"]
|
default = ["geturl-reqwest"]
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -5,11 +5,13 @@
|
||||||
|
|
||||||
use std::{
|
use std::{
|
||||||
fs,
|
fs,
|
||||||
|
io::Read,
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
|
str::FromStr,
|
||||||
};
|
};
|
||||||
use tectonic_errors::prelude::*;
|
use tectonic_errors::prelude::*;
|
||||||
use tectonic_io_base::{filesystem::FilesystemIo, InputHandle, IoProvider, OpenResult};
|
use tectonic_io_base::{digest, filesystem::FilesystemIo, InputHandle, IoProvider, OpenResult};
|
||||||
use tectonic_status_base::StatusBackend;
|
use tectonic_status_base::{NoopStatusBackend, StatusBackend};
|
||||||
|
|
||||||
use super::Bundle;
|
use super::Bundle;
|
||||||
|
|
||||||
|
@ -56,21 +58,34 @@ impl IoProvider for DirBundle {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Bundle for DirBundle {
|
impl Bundle for DirBundle {
|
||||||
fn all_files(&mut self, _status: &mut dyn StatusBackend) -> Result<Vec<String>> {
|
fn all_files(&self) -> Vec<String> {
|
||||||
let mut files = Vec::new();
|
fs::read_dir(self.0.root())
|
||||||
|
.unwrap()
|
||||||
|
.filter_map(|x| x.ok())
|
||||||
|
.filter(|x| !x.file_type().map(|x| x.is_dir()).unwrap_or(false))
|
||||||
|
.map(|x| x.file_name().to_str().unwrap_or("").to_owned())
|
||||||
|
.filter(|x| !x.is_empty())
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
// We intentionally do not explore the directory recursively.
|
fn get_digest(&mut self) -> Result<tectonic_io_base::digest::DigestData> {
|
||||||
for entry in fs::read_dir(self.0.root())? {
|
let digest_text = match self.input_open_name(digest::DIGEST_NAME, &mut NoopStatusBackend {})
|
||||||
let entry = entry?;
|
{
|
||||||
|
OpenResult::Ok(h) => {
|
||||||
// This catches both regular files and symlinks:`
|
let mut text = String::new();
|
||||||
if !entry.file_type()?.is_dir() {
|
h.take(64).read_to_string(&mut text)?;
|
||||||
if let Some(s) = entry.file_name().to_str() {
|
text
|
||||||
files.push(s.to_owned());
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
Ok(files)
|
OpenResult::NotAvailable => {
|
||||||
|
bail!("bundle does not provide needed SHA256SUM file");
|
||||||
|
}
|
||||||
|
|
||||||
|
OpenResult::Err(e) => {
|
||||||
|
return Err(e);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(atry!(digest::DigestData::from_str(&digest_text); ["corrupted SHA256 digest data"]))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,10 +3,11 @@
|
||||||
|
|
||||||
//! The web-friendly "indexed tar" bundle backend.
|
//! The web-friendly "indexed tar" bundle backend.
|
||||||
//!
|
//!
|
||||||
//! The main type offered by this module is the [`IndexedTarBackend`] struct,
|
//! The main type offered by this module is the [`ItarBundle`] struct,
|
||||||
//! which cannot be used directly as a [`tectonic_io_base::IoProvider`] but is
|
//! which can (but should not) be used directly as any other bundle.
|
||||||
//! the default backend for cached web-based bundle access through the
|
//!
|
||||||
//! [`crate::cache::CachingBundle`] framework.
|
//! Instead, wrap it in a [`crate::BundleCache`] for filesystem-backed
|
||||||
|
//! caching.
|
||||||
//!
|
//!
|
||||||
//! While the on-server file format backing the "indexed tar" backend is indeed
|
//! While the on-server file format backing the "indexed tar" backend is indeed
|
||||||
//! a standard `tar` file, as far as the client is concerned, this backend is
|
//! a standard `tar` file, as far as the client is concerned, this backend is
|
||||||
|
@ -14,187 +15,273 @@
|
||||||
//! resource, the index file merely contains a byte offset and length that are
|
//! resource, the index file merely contains a byte offset and length that are
|
||||||
//! then used to construct an HTTP Range request to obtain the file as needed.
|
//! then used to construct an HTTP Range request to obtain the file as needed.
|
||||||
|
|
||||||
|
use crate::{Bundle, CachableBundle, FileIndex, FileInfo, NET_RETRY_ATTEMPTS, NET_RETRY_SLEEP_MS};
|
||||||
use flate2::read::GzDecoder;
|
use flate2::read::GzDecoder;
|
||||||
use std::{convert::TryInto, io::Read, str::FromStr};
|
use std::{
|
||||||
|
collections::HashMap,
|
||||||
|
io::{BufRead, BufReader, Cursor, Read},
|
||||||
|
str::FromStr,
|
||||||
|
thread,
|
||||||
|
time::Duration,
|
||||||
|
};
|
||||||
use tectonic_errors::prelude::*;
|
use tectonic_errors::prelude::*;
|
||||||
use tectonic_geturl::{DefaultBackend, DefaultRangeReader, GetUrlBackend, RangeReader};
|
use tectonic_geturl::{DefaultBackend, DefaultRangeReader, GetUrlBackend, RangeReader};
|
||||||
use tectonic_io_base::digest::{self, DigestData};
|
use tectonic_io_base::{digest, InputHandle, InputOrigin, IoProvider, OpenResult};
|
||||||
use tectonic_status_base::{tt_note, tt_warning, StatusBackend};
|
use tectonic_status_base::{tt_note, tt_warning, NoopStatusBackend, StatusBackend};
|
||||||
|
|
||||||
use crate::cache::{BackendPullData, CacheBackend};
|
/// The internal file-information struct used by the [`ItarBundle`].
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
const MAX_HTTP_ATTEMPTS: usize = 4;
|
pub struct ItarFileInfo {
|
||||||
|
name: String,
|
||||||
/// The internal file-information struct used by the [`IndexedTarBackend`].
|
|
||||||
#[derive(Clone, Copy, Debug)]
|
|
||||||
pub struct FileInfo {
|
|
||||||
offset: u64,
|
offset: u64,
|
||||||
length: u64,
|
length: usize,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A simple web-based file backend based on HTTP Range requests.
|
impl FileInfo for ItarFileInfo {
|
||||||
///
|
fn name(&self) -> &str {
|
||||||
/// This type implements the [`CacheBackend`] trait and so can be used for
|
&self.name
|
||||||
/// web-based bundle access thorugh the [`crate::cache::CachingBundle`]
|
}
|
||||||
/// framework.
|
fn path(&self) -> &str {
|
||||||
#[derive(Debug)]
|
&self.name
|
||||||
pub struct IndexedTarBackend {
|
}
|
||||||
reader: DefaultRangeReader,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl CacheBackend for IndexedTarBackend {
|
/// A simple FileIndex for compatiblity with [`crate::BundleCache`]
|
||||||
type FileInfo = FileInfo;
|
#[derive(Default, Debug)]
|
||||||
|
pub struct ItarFileIndex {
|
||||||
|
content: HashMap<String, ItarFileInfo>,
|
||||||
|
}
|
||||||
|
|
||||||
fn open_with_pull(
|
impl<'this> FileIndex<'this> for ItarFileIndex {
|
||||||
start_url: &str,
|
type InfoType = ItarFileInfo;
|
||||||
status: &mut dyn StatusBackend,
|
|
||||||
) -> Result<(Self, BackendPullData)> {
|
|
||||||
// Step 1: resolve URL
|
|
||||||
let mut geturl_backend = DefaultBackend::default();
|
|
||||||
let resolved_url = geturl_backend.resolve_url(start_url, status)?;
|
|
||||||
|
|
||||||
// Step 2: fetch index
|
fn iter(&'this self) -> Box<dyn Iterator<Item = &'this ItarFileInfo> + 'this> {
|
||||||
let index = {
|
Box::new(self.content.values())
|
||||||
let mut index = String::new();
|
|
||||||
let index_url = format!("{}.index.gz", &resolved_url);
|
|
||||||
tt_note!(status, "downloading index {}", index_url);
|
|
||||||
GzDecoder::new(geturl_backend.get_url(&index_url, status)?)
|
|
||||||
.read_to_string(&mut index)?;
|
|
||||||
index
|
|
||||||
};
|
|
||||||
|
|
||||||
// Step 3: get digest, setting up instance as we go
|
|
||||||
|
|
||||||
let mut cache_backend = IndexedTarBackend {
|
|
||||||
reader: geturl_backend.open_range_reader(&resolved_url),
|
|
||||||
};
|
|
||||||
|
|
||||||
let digest_info = {
|
|
||||||
let mut digest_info = None;
|
|
||||||
|
|
||||||
for line in index.lines() {
|
|
||||||
if let Ok((name, info)) = Self::parse_index_line(line) {
|
|
||||||
if name == digest::DIGEST_NAME {
|
|
||||||
digest_info = Some(info);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
atry!(
|
|
||||||
digest_info;
|
|
||||||
["backend does not provide needed {} file", digest::DIGEST_NAME]
|
|
||||||
)
|
|
||||||
};
|
|
||||||
|
|
||||||
let digest_text =
|
|
||||||
String::from_utf8(cache_backend.get_file(digest::DIGEST_NAME, &digest_info, status)?)
|
|
||||||
.map_err(|e| e.utf8_error())?;
|
|
||||||
let digest = DigestData::from_str(&digest_text)?;
|
|
||||||
|
|
||||||
// All done.
|
|
||||||
Ok((
|
|
||||||
cache_backend,
|
|
||||||
BackendPullData {
|
|
||||||
resolved_url,
|
|
||||||
digest,
|
|
||||||
index,
|
|
||||||
},
|
|
||||||
))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn open_with_quick_check(
|
fn len(&self) -> usize {
|
||||||
resolved_url: &str,
|
self.content.len()
|
||||||
digest_file_info: &Self::FileInfo,
|
}
|
||||||
status: &mut dyn StatusBackend,
|
|
||||||
) -> Result<Option<(Self, DigestData)>> {
|
|
||||||
let mut cache_backend = IndexedTarBackend {
|
|
||||||
reader: DefaultBackend::default().open_range_reader(resolved_url),
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Ok(d) = cache_backend.get_file(digest::DIGEST_NAME, digest_file_info, status) {
|
fn initialize(&mut self, reader: &mut dyn Read) -> Result<()> {
|
||||||
if let Ok(d) = String::from_utf8(d) {
|
self.content.clear();
|
||||||
if let Ok(d) = DigestData::from_str(&d) {
|
|
||||||
return Ok(Some((cache_backend, d)));
|
for line in BufReader::new(reader).lines() {
|
||||||
}
|
let line = line?;
|
||||||
|
let mut bits = line.split_whitespace();
|
||||||
|
|
||||||
|
if let (Some(name), Some(offset), Some(length)) =
|
||||||
|
(bits.next(), bits.next(), bits.next())
|
||||||
|
{
|
||||||
|
self.content.insert(
|
||||||
|
name.to_owned(),
|
||||||
|
ItarFileInfo {
|
||||||
|
name: name.to_owned(),
|
||||||
|
offset: offset.parse::<u64>()?,
|
||||||
|
length: length.parse::<usize>()?,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
// TODO: preserve the warning info or something!
|
||||||
|
bail!("malformed index line");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Ok(())
|
||||||
Ok(None)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_index_line(line: &str) -> Result<(String, Self::FileInfo)> {
|
/// Find a file in this index
|
||||||
let mut bits = line.split_whitespace();
|
fn search(&'this mut self, name: &str) -> Option<ItarFileInfo> {
|
||||||
|
self.content.get(name).cloned()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if let (Some(name), Some(offset), Some(length)) = (bits.next(), bits.next(), bits.next()) {
|
/// The old-fashoned Tectonic web bundle format.
|
||||||
Ok((
|
pub struct ItarBundle {
|
||||||
name.to_owned(),
|
url: String,
|
||||||
FileInfo {
|
/// Maps all available file names to [`FileInfo`]s.
|
||||||
offset: offset.parse::<u64>()?,
|
/// This is empty after we create this bundle, so we don't need network
|
||||||
length: length.parse::<u64>()?,
|
/// to make an object. It is automatically filled by get_index when we need it.
|
||||||
},
|
index: ItarFileIndex,
|
||||||
))
|
|
||||||
} else {
|
/// RangeReader object, responsible for sending queries.
|
||||||
// TODO: preserve the warning info or something!
|
/// Will be None when the object is created, automatically
|
||||||
bail!("malformed index line");
|
/// replaced with Some(...) once needed.
|
||||||
|
reader: Option<DefaultRangeReader>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ItarBundle {
|
||||||
|
/// Make a new ItarBundle.
|
||||||
|
/// This method does not require network access.
|
||||||
|
/// It will succeed even in we can't connect to the bundle, or if we're given a bad url.
|
||||||
|
pub fn new(url: String) -> Result<ItarBundle> {
|
||||||
|
Ok(ItarBundle {
|
||||||
|
index: ItarFileIndex::default(),
|
||||||
|
reader: None,
|
||||||
|
url,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn connect_reader(&mut self) {
|
||||||
|
let geturl_backend = DefaultBackend::default();
|
||||||
|
// Connect reader if it is not already connected
|
||||||
|
if self.reader.is_none() {
|
||||||
|
self.reader = Some(geturl_backend.open_range_reader(&self.url));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_file(
|
/// Fill this bundle's index, if it is empty.
|
||||||
|
fn ensure_index(&mut self) -> Result<()> {
|
||||||
|
// Fetch index if it is empty
|
||||||
|
if self.index.is_initialized() {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
self.connect_reader();
|
||||||
|
|
||||||
|
let mut reader = self.get_index_reader()?;
|
||||||
|
self.index.initialize(&mut reader)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IoProvider for ItarBundle {
|
||||||
|
fn input_open_name(
|
||||||
&mut self,
|
&mut self,
|
||||||
name: &str,
|
name: &str,
|
||||||
info: &Self::FileInfo,
|
|
||||||
status: &mut dyn StatusBackend,
|
status: &mut dyn StatusBackend,
|
||||||
) -> Result<Vec<u8>> {
|
) -> OpenResult<InputHandle> {
|
||||||
tt_note!(status, "downloading {}", name);
|
if let Err(e) = self.ensure_index() {
|
||||||
|
return OpenResult::Err(e);
|
||||||
|
};
|
||||||
|
|
||||||
// Historically, sometimes our web service would drop connections when
|
let info = match self.index.search(name) {
|
||||||
// fetching a bunch of resource files (i.e., on the first invocation).
|
Some(a) => a,
|
||||||
// The error manifested itself in a way that has a not-so-nice user
|
None => return OpenResult::NotAvailable,
|
||||||
// experience. Our solution: retry the request a few times in case it
|
};
|
||||||
// was a transient problem.
|
|
||||||
|
|
||||||
let n = info.length.try_into().unwrap();
|
// Retries are handled in open_fileinfo,
|
||||||
let mut buf = Vec::with_capacity(n);
|
// since BundleCache never calls input_open_name.
|
||||||
let mut overall_failed = true;
|
self.open_fileinfo(&info, status)
|
||||||
let mut any_failed = false;
|
}
|
||||||
|
}
|
||||||
// Our HTTP implementation actually has problems with zero-sized ranged
|
|
||||||
// reads (Azure gives us a 200 response, which we don't properly
|
impl Bundle for ItarBundle {
|
||||||
// handle), but when the file is 0-sized we're all set anyway!
|
fn all_files(&self) -> Vec<String> {
|
||||||
if n > 0 {
|
self.index.iter().map(|x| x.path().to_owned()).collect()
|
||||||
for _ in 0..MAX_HTTP_ATTEMPTS {
|
}
|
||||||
let mut stream = match self.reader.read_range(info.offset, n) {
|
|
||||||
Ok(r) => r,
|
fn get_digest(&mut self) -> Result<tectonic_io_base::digest::DigestData> {
|
||||||
Err(e) => {
|
let digest_text = match self.input_open_name(digest::DIGEST_NAME, &mut NoopStatusBackend {})
|
||||||
tt_warning!(status, "failure requesting \"{}\" from network", name; e);
|
{
|
||||||
any_failed = true;
|
OpenResult::Ok(h) => {
|
||||||
continue;
|
let mut text = String::new();
|
||||||
}
|
h.take(64).read_to_string(&mut text)?;
|
||||||
};
|
text
|
||||||
|
}
|
||||||
if let Err(e) = stream.read_to_end(&mut buf) {
|
|
||||||
tt_warning!(status, "failure downloading \"{}\" from network", name; e.into());
|
OpenResult::NotAvailable => {
|
||||||
any_failed = true;
|
// Broken or un-cacheable backend.
|
||||||
continue;
|
bail!("bundle does not provide needed SHA256SUM file");
|
||||||
}
|
}
|
||||||
|
|
||||||
overall_failed = false;
|
OpenResult::Err(e) => {
|
||||||
break;
|
return Err(e);
|
||||||
}
|
}
|
||||||
|
};
|
||||||
if overall_failed {
|
|
||||||
bail!(
|
Ok(atry!(digest::DigestData::from_str(&digest_text); ["corrupted SHA256 digest data"]))
|
||||||
"failed to retrieve \"{}\" from the network; \
|
}
|
||||||
this most probably is not Tectonic's fault \
|
}
|
||||||
-- please check your network connection.",
|
|
||||||
name
|
impl<'this> CachableBundle<'this, ItarFileIndex> for ItarBundle {
|
||||||
);
|
fn get_location(&mut self) -> String {
|
||||||
} else if any_failed {
|
self.url.clone()
|
||||||
tt_note!(status, "download succeeded after retry");
|
}
|
||||||
}
|
|
||||||
}
|
fn initialize_index(&mut self, source: &mut dyn Read) -> Result<()> {
|
||||||
|
self.index.initialize(source)?;
|
||||||
Ok(buf)
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn index(&mut self) -> &mut ItarFileIndex {
|
||||||
|
&mut self.index
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search(&mut self, name: &str) -> Option<ItarFileInfo> {
|
||||||
|
self.index.search(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_index_reader(&mut self) -> Result<Box<dyn Read>> {
|
||||||
|
let mut geturl_backend = DefaultBackend::default();
|
||||||
|
let index_url = format!("{}.index.gz", &self.url);
|
||||||
|
let reader = GzDecoder::new(geturl_backend.get_url(&index_url)?);
|
||||||
|
Ok(Box::new(reader))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn open_fileinfo(
|
||||||
|
&mut self,
|
||||||
|
info: &ItarFileInfo,
|
||||||
|
status: &mut dyn StatusBackend,
|
||||||
|
) -> OpenResult<InputHandle> {
|
||||||
|
match self.ensure_index() {
|
||||||
|
Ok(_) => {}
|
||||||
|
Err(e) => return OpenResult::Err(e),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut v = Vec::with_capacity(info.length);
|
||||||
|
tt_note!(status, "downloading {}", info.name);
|
||||||
|
|
||||||
|
// Edge case for zero-sized reads
|
||||||
|
// (these cause errors on some web hosts)
|
||||||
|
if info.length == 0 {
|
||||||
|
return OpenResult::Ok(InputHandle::new_read_only(
|
||||||
|
info.name.to_owned(),
|
||||||
|
Cursor::new(v),
|
||||||
|
InputOrigin::Other,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get file with retries
|
||||||
|
for i in 0..NET_RETRY_ATTEMPTS {
|
||||||
|
let mut stream = match self
|
||||||
|
.reader
|
||||||
|
.as_mut()
|
||||||
|
.unwrap()
|
||||||
|
.read_range(info.offset, info.length)
|
||||||
|
{
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(e) => {
|
||||||
|
tt_warning!(status,
|
||||||
|
"failure fetching \"{}\" from network ({}/{NET_RETRY_ATTEMPTS})",
|
||||||
|
info.name, i+1; e
|
||||||
|
);
|
||||||
|
thread::sleep(Duration::from_millis(NET_RETRY_SLEEP_MS));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
match stream.read_to_end(&mut v) {
|
||||||
|
Ok(_) => {}
|
||||||
|
Err(e) => {
|
||||||
|
tt_warning!(status,
|
||||||
|
"failure downloading \"{}\" from network ({}/{NET_RETRY_ATTEMPTS})",
|
||||||
|
info.name, i+1; e.into()
|
||||||
|
);
|
||||||
|
thread::sleep(Duration::from_millis(NET_RETRY_SLEEP_MS));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return OpenResult::Ok(InputHandle::new_read_only(
|
||||||
|
info.name.to_owned(),
|
||||||
|
Cursor::new(v),
|
||||||
|
InputOrigin::Other,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
OpenResult::Err(anyhow!(
|
||||||
|
"failed to download \"{}\"; please check your network connection.",
|
||||||
|
info.name
|
||||||
|
))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -11,22 +11,82 @@
|
||||||
//!
|
//!
|
||||||
//! This crate provides the following bundle implementations:
|
//! This crate provides the following bundle implementations:
|
||||||
//!
|
//!
|
||||||
//! - [`cache::CachingBundle`] for access to remote bundles with local
|
//! - [`cache::BundleCache`] provides filesystem-backed caching for any bundle
|
||||||
//! filesystem caching.
|
//! - [`itar::ItarBundle`] provides filesystem-backed caching for any bundle
|
||||||
//! - [`dir::DirBundle`] turns a directory full of files into a bundle; it is
|
//! - [`dir::DirBundle`] turns a directory full of files into a bundle; it is
|
||||||
//! useful for testing and lightweight usage.
|
//! useful for testing and lightweight usage.
|
||||||
//! - [`zip::ZipBundle`] for a ZIP-format bundle.
|
//! - [`zip::ZipBundle`] for a ZIP-format bundle.
|
||||||
|
|
||||||
use std::{io::Read, str::FromStr};
|
use std::{fmt::Debug, io::Read, path::PathBuf};
|
||||||
use tectonic_errors::{anyhow::bail, atry, Result};
|
use tectonic_errors::{prelude::bail, Result};
|
||||||
use tectonic_io_base::{digest, digest::DigestData, IoProvider, OpenResult};
|
use tectonic_io_base::{digest::DigestData, InputHandle, IoProvider, OpenResult};
|
||||||
use tectonic_status_base::StatusBackend;
|
use tectonic_status_base::StatusBackend;
|
||||||
|
|
||||||
pub mod cache;
|
pub mod cache;
|
||||||
pub mod dir;
|
pub mod dir;
|
||||||
pub mod itar;
|
pub mod itar;
|
||||||
|
mod ttb;
|
||||||
|
pub mod ttb_fs;
|
||||||
|
pub mod ttb_net;
|
||||||
pub mod zip;
|
pub mod zip;
|
||||||
|
|
||||||
|
use cache::BundleCache;
|
||||||
|
use dir::DirBundle;
|
||||||
|
use itar::ItarBundle;
|
||||||
|
use ttb_fs::TTBFsBundle;
|
||||||
|
use ttb_net::TTBNetBundle;
|
||||||
|
use zip::ZipBundle;
|
||||||
|
|
||||||
|
// How many times network bundles should retry
|
||||||
|
// a download, and how long they should wait
|
||||||
|
// between attempts.
|
||||||
|
const NET_RETRY_ATTEMPTS: usize = 3;
|
||||||
|
const NET_RETRY_SLEEP_MS: u64 = 500;
|
||||||
|
|
||||||
|
/// Uniquely identifies a file in a bundle.
|
||||||
|
pub trait FileInfo: Clone + Debug {
|
||||||
|
/// Return a path to this file, relative to the bundle.
|
||||||
|
fn path(&self) -> &str;
|
||||||
|
|
||||||
|
/// Return the name of this file
|
||||||
|
fn name(&self) -> &str;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Keeps track of
|
||||||
|
pub trait FileIndex<'this>
|
||||||
|
where
|
||||||
|
Self: Sized + 'this + Debug,
|
||||||
|
{
|
||||||
|
/// The FileInfo this index handles
|
||||||
|
type InfoType: FileInfo;
|
||||||
|
|
||||||
|
/// Iterate over all [`FileInfo`]s in this index
|
||||||
|
fn iter(&'this self) -> Box<dyn Iterator<Item = &'this Self::InfoType> + 'this>;
|
||||||
|
|
||||||
|
/// Get the number of [`FileInfo`]s in this index
|
||||||
|
fn len(&self) -> usize;
|
||||||
|
|
||||||
|
/// Returns true if this index is empty
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.len() == 0
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Has this index been filled with bundle data?
|
||||||
|
/// This is always false until we call [`self.initialize()`],
|
||||||
|
/// and is always true afterwards.
|
||||||
|
fn is_initialized(&self) -> bool {
|
||||||
|
!self.is_empty()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Fill this index from a file
|
||||||
|
fn initialize(&mut self, reader: &mut dyn Read) -> Result<()>;
|
||||||
|
|
||||||
|
/// Search for a file in this index, obeying search order.
|
||||||
|
///
|
||||||
|
/// Returns a `Some(FileInfo)` if a file was found, and `None` otherwise.
|
||||||
|
fn search(&'this mut self, name: &str) -> Option<Self::InfoType>;
|
||||||
|
}
|
||||||
|
|
||||||
/// A trait for bundles of Tectonic support files.
|
/// A trait for bundles of Tectonic support files.
|
||||||
///
|
///
|
||||||
/// A "bundle" is an [`IoProvider`] with a few special properties. Bundles are
|
/// A "bundle" is an [`IoProvider`] with a few special properties. Bundles are
|
||||||
|
@ -39,59 +99,154 @@ pub mod zip;
|
||||||
/// of TeX support files, and that you can generate one or more TeX format files
|
/// of TeX support files, and that you can generate one or more TeX format files
|
||||||
/// using only the files contained in a bundle.
|
/// using only the files contained in a bundle.
|
||||||
pub trait Bundle: IoProvider {
|
pub trait Bundle: IoProvider {
|
||||||
/// Get a cryptographic digest summarizing this bundle’s contents.
|
/// Get a cryptographic digest summarizing this bundle’s contents,
|
||||||
///
|
/// which summarizes the exact contents of every file in the bundle.
|
||||||
/// The digest summarizes the exact contents of every file in the bundle. It
|
fn get_digest(&mut self) -> Result<DigestData>;
|
||||||
/// is computed from the sorted names and SHA256 digests of the component
|
|
||||||
/// files [as implemented in the TeXLive bundle builder][x].
|
|
||||||
///
|
|
||||||
/// [x]: https://github.com/tectonic-typesetting/tectonic-texlive-bundles/blob/master/scripts/ttb_utils.py#L321
|
|
||||||
///
|
|
||||||
/// The default implementation gets the digest from a file named
|
|
||||||
/// `SHA256SUM`, which is expected to contain the digest in hex-encoded
|
|
||||||
/// format.
|
|
||||||
fn get_digest(&mut self, status: &mut dyn StatusBackend) -> Result<DigestData> {
|
|
||||||
let digest_text = match self.input_open_name(digest::DIGEST_NAME, status) {
|
|
||||||
OpenResult::Ok(h) => {
|
|
||||||
let mut text = String::new();
|
|
||||||
h.take(64).read_to_string(&mut text)?;
|
|
||||||
text
|
|
||||||
}
|
|
||||||
|
|
||||||
OpenResult::NotAvailable => {
|
/// Iterate over all file paths in this bundle.
|
||||||
// Broken or un-cacheable backend.
|
/// This is used for the `bundle search` command
|
||||||
bail!("bundle does not provide needed SHA256SUM file");
|
fn all_files(&self) -> Vec<String>;
|
||||||
}
|
|
||||||
|
|
||||||
OpenResult::Err(e) => {
|
|
||||||
return Err(e);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(atry!(DigestData::from_str(&digest_text); ["corrupted SHA256 digest data"]))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Enumerate the files in this bundle.
|
|
||||||
///
|
|
||||||
/// This interface is intended to be used for diagnostics, not by anything
|
|
||||||
/// during actual execution of an engine. This should include meta-files
|
|
||||||
/// such as the `SHA256SUM` file. The ordering of the returned filenames is
|
|
||||||
/// unspecified.
|
|
||||||
///
|
|
||||||
/// To ease implementation, the filenames are returned in one big vector of
|
|
||||||
/// owned strings. For a large bundle, the memory consumed by this operation
|
|
||||||
/// might be fairly substantial (although we are talking megabytes, not
|
|
||||||
/// gigabytes).
|
|
||||||
fn all_files(&mut self, status: &mut dyn StatusBackend) -> Result<Vec<String>>;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<B: Bundle + ?Sized> Bundle for Box<B> {
|
impl<B: Bundle + ?Sized> Bundle for Box<B> {
|
||||||
fn get_digest(&mut self, status: &mut dyn StatusBackend) -> Result<DigestData> {
|
fn get_digest(&mut self) -> Result<DigestData> {
|
||||||
(**self).get_digest(status)
|
(**self).get_digest()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn all_files(&mut self, status: &mut dyn StatusBackend) -> Result<Vec<String>> {
|
fn all_files(&self) -> Vec<String> {
|
||||||
(**self).all_files(status)
|
(**self).all_files()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A bundle that may be cached.
|
||||||
|
///
|
||||||
|
/// These methods do not implement any new features.
|
||||||
|
/// Instead, they give the [`cache::BundleCache`] wrapper
|
||||||
|
/// more direct access to existing bundle functionality.
|
||||||
|
pub trait CachableBundle<'this, T>
|
||||||
|
where
|
||||||
|
Self: Bundle + 'this,
|
||||||
|
T: FileIndex<'this>,
|
||||||
|
{
|
||||||
|
/// Initialize this bundle's file index from an external reader
|
||||||
|
/// This allows us to retrieve the FileIndex from the cache WITHOUT
|
||||||
|
/// touching the network.
|
||||||
|
fn initialize_index(&mut self, _source: &mut dyn Read) -> Result<()> {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a `Read` instance to this bundle's index,
|
||||||
|
/// reading directly from the backend.
|
||||||
|
fn get_index_reader(&mut self) -> Result<Box<dyn Read>>;
|
||||||
|
|
||||||
|
/// Return a reference to this bundle's FileIndex.
|
||||||
|
fn index(&mut self) -> &mut T;
|
||||||
|
|
||||||
|
/// Open the file that `info` points to.
|
||||||
|
fn open_fileinfo(
|
||||||
|
&mut self,
|
||||||
|
info: &T::InfoType,
|
||||||
|
status: &mut dyn StatusBackend,
|
||||||
|
) -> OpenResult<InputHandle>;
|
||||||
|
|
||||||
|
/// Search for a file in this bundle.
|
||||||
|
/// This should foward the call to `self.index`
|
||||||
|
fn search(&mut self, name: &str) -> Option<T::InfoType>;
|
||||||
|
|
||||||
|
/// Return a string that corresponds to this bundle's location, probably a URL.
|
||||||
|
/// We should NOT need to do any network IO to get this value.
|
||||||
|
fn get_location(&mut self) -> String;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'this, T: FileIndex<'this>, B: CachableBundle<'this, T> + ?Sized> CachableBundle<'this, T>
|
||||||
|
for Box<B>
|
||||||
|
{
|
||||||
|
fn initialize_index(&mut self, source: &mut dyn Read) -> Result<()> {
|
||||||
|
(**self).initialize_index(source)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_location(&mut self) -> String {
|
||||||
|
(**self).get_location()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_index_reader(&mut self) -> Result<Box<dyn Read>> {
|
||||||
|
(**self).get_index_reader()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn index(&mut self) -> &mut T {
|
||||||
|
(**self).index()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn open_fileinfo(
|
||||||
|
&mut self,
|
||||||
|
info: &T::InfoType,
|
||||||
|
status: &mut dyn StatusBackend,
|
||||||
|
) -> OpenResult<InputHandle> {
|
||||||
|
(**self).open_fileinfo(info, status)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search(&mut self, name: &str) -> Option<T::InfoType> {
|
||||||
|
(**self).search(name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Try to open a bundle from a string,
|
||||||
|
/// detecting its type.
|
||||||
|
///
|
||||||
|
/// Returns None if auto-detection fails.
|
||||||
|
pub fn detect_bundle(
|
||||||
|
source: String,
|
||||||
|
only_cached: bool,
|
||||||
|
custom_cache_dir: Option<PathBuf>,
|
||||||
|
) -> Result<Option<Box<dyn Bundle>>> {
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
// Parse URL and detect bundle type
|
||||||
|
if let Ok(url) = Url::parse(&source) {
|
||||||
|
if url.scheme() == "https" || url.scheme() == "http" {
|
||||||
|
if source.ends_with("ttb") {
|
||||||
|
let bundle = BundleCache::new(
|
||||||
|
Box::new(TTBNetBundle::new(source)?),
|
||||||
|
only_cached,
|
||||||
|
custom_cache_dir,
|
||||||
|
)?;
|
||||||
|
return Ok(Some(Box::new(bundle)));
|
||||||
|
} else {
|
||||||
|
let bundle = BundleCache::new(
|
||||||
|
Box::new(ItarBundle::new(source)?),
|
||||||
|
only_cached,
|
||||||
|
custom_cache_dir,
|
||||||
|
)?;
|
||||||
|
return Ok(Some(Box::new(bundle)));
|
||||||
|
}
|
||||||
|
} else if url.scheme() == "file" {
|
||||||
|
let file_path = url.to_file_path().map_err(|_| {
|
||||||
|
std::io::Error::new(
|
||||||
|
std::io::ErrorKind::InvalidInput,
|
||||||
|
"failed to parse local path",
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
return bundle_from_path(file_path);
|
||||||
|
} else {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// If we couldn't parse the URL, this is probably a local path.
|
||||||
|
return bundle_from_path(PathBuf::from(source));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn bundle_from_path(p: PathBuf) -> Result<Option<Box<dyn Bundle>>> {
|
||||||
|
let ext = p.extension().map_or("", |x| x.to_str().unwrap_or(""));
|
||||||
|
|
||||||
|
if p.is_dir() {
|
||||||
|
Ok(Some(Box::new(DirBundle::new(p))))
|
||||||
|
} else if ext == "zip" {
|
||||||
|
Ok(Some(Box::new(ZipBundle::open(p)?)))
|
||||||
|
} else if ext == "ttb" {
|
||||||
|
Ok(Some(Box::new(TTBFsBundle::open(p)?)))
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -127,12 +282,11 @@ pub fn get_fallback_bundle_url(format_version: u32) -> String {
|
||||||
/// `tectonic` crate provides a configuration mechanism to allow the user to
|
/// `tectonic` crate provides a configuration mechanism to allow the user to
|
||||||
/// override the bundle URL setting, and that should be preferred if you’re in a
|
/// override the bundle URL setting, and that should be preferred if you’re in a
|
||||||
/// position to use it.
|
/// position to use it.
|
||||||
pub fn get_fallback_bundle(
|
pub fn get_fallback_bundle(format_version: u32, only_cached: bool) -> Result<Box<dyn Bundle>> {
|
||||||
format_version: u32,
|
|
||||||
only_cached: bool,
|
|
||||||
status: &mut dyn StatusBackend,
|
|
||||||
) -> Result<cache::CachingBundle<itar::IndexedTarBackend>> {
|
|
||||||
let url = get_fallback_bundle_url(format_version);
|
let url = get_fallback_bundle_url(format_version);
|
||||||
let mut cache = cache::Cache::get_user_default()?;
|
let bundle = detect_bundle(url, only_cached, None)?;
|
||||||
cache.open(&url, only_cached, status)
|
if bundle.is_none() {
|
||||||
|
bail!("could not open default bundle")
|
||||||
|
}
|
||||||
|
Ok(bundle.unwrap())
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,288 @@
|
||||||
|
// Copyright 2023-2024 the Tectonic Project
|
||||||
|
// Licensed under the MIT License.
|
||||||
|
|
||||||
|
//! Common tools for the ttbv1 format, used in both
|
||||||
|
//! network and filesystem bundles.
|
||||||
|
|
||||||
|
use crate::{FileIndex, FileInfo};
|
||||||
|
use std::{
|
||||||
|
collections::HashMap,
|
||||||
|
convert::{TryFrom, TryInto},
|
||||||
|
io::{BufRead, BufReader, Read},
|
||||||
|
str::FromStr,
|
||||||
|
};
|
||||||
|
use tectonic_errors::prelude::*;
|
||||||
|
use tectonic_io_base::digest::{self, DigestData};
|
||||||
|
|
||||||
|
/// Parsed header of a ttbv1 bundle.
///
/// The header occupies the first 70 bytes of the bundle file; see the
/// `TryFrom<[u8; 70]>` impl for the exact byte layout.
pub struct TTBv1Header {
    /// Byte offset of the (compressed) index inside the bundle.
    pub index_start: u64,
    /// Decompressed length of the index, in bytes.
    pub index_real_len: u32,
    /// Compressed (gzip) length of the index, in bytes.
    pub index_gzip_len: u32,
    /// Digest of the bundle, as recorded in the header (32 raw bytes).
    pub digest: DigestData,
}
|
||||||
|
|
||||||
|
impl TryFrom<[u8; 70]> for TTBv1Header {
|
||||||
|
type Error = Error;
|
||||||
|
|
||||||
|
fn try_from(header: [u8; 70]) -> Result<Self, Self::Error> {
|
||||||
|
let signature = &header[0..14];
|
||||||
|
let version = u32::from_le_bytes(header[14..18].try_into()?);
|
||||||
|
let index_start = u64::from_le_bytes(header[18..26].try_into()?);
|
||||||
|
let index_gzip_len = u32::from_le_bytes(header[26..30].try_into()?);
|
||||||
|
let index_real_len = u32::from_le_bytes(header[30..34].try_into()?);
|
||||||
|
let digest: DigestData = DigestData::from_str(&digest::bytes_to_hex(&header[34..66]))?;
|
||||||
|
|
||||||
|
if signature != b"tectonicbundle" {
|
||||||
|
bail!("this is not a bundle");
|
||||||
|
}
|
||||||
|
|
||||||
|
if version != 1 {
|
||||||
|
bail!("wrong ttb version");
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(TTBv1Header {
|
||||||
|
digest,
|
||||||
|
index_start,
|
||||||
|
index_real_len,
|
||||||
|
index_gzip_len,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// File info for one entry in a ttb bundle.
#[derive(Clone, Debug)]
pub struct TTBFileInfo {
    /// Byte offset of this file's compressed data in the bundle.
    pub start: u64,
    /// Decompressed length of the file, in bytes.
    pub real_len: u32,
    /// Compressed (gzip) length of the file, in bytes.
    pub gzip_len: u32,
    /// Full path of the file inside the bundle.
    pub path: String,
    /// Base name (final path component) of the file.
    pub name: String,
    /// Content hash from the index, or `None` if the entry was marked
    /// "nohash".
    pub hash: Option<String>,
}
|
||||||
|
|
||||||
|
impl FileInfo for TTBFileInfo {
    /// Base name of the file (final path component).
    fn name(&self) -> &str {
        &self.name
    }

    /// Full path of the file inside the bundle.
    fn path(&self) -> &str {
        &self.path
    }
}
|
||||||
|
|
||||||
|
/// In-memory index of a ttbv1 bundle, parsed from its index section.
#[derive(Default, Debug)]
pub struct TTBFileIndex {
    // Vector of fileinfos.
    // This MUST be sorted by path for search() to work properly!
    pub content: Vec<TTBFileInfo>,

    // Named search orders parsed from `[SEARCH:...]` sections; each maps
    // a name to an ordered list of path patterns.
    search_orders: HashMap<String, Vec<String>>,
    // Name of the order `search()` uses, from the `[DEFAULTSEARCH]` section.
    default_search_order: String,

    // Remember previous searches so we don't have to iterate over content again.
    search_cache: HashMap<String, Option<TTBFileInfo>>,
}
|
||||||
|
|
||||||
|
impl TTBFileIndex {
|
||||||
|
fn read_filelist_line(&mut self, line: String) -> Result<()> {
|
||||||
|
let mut bits = line.split_whitespace();
|
||||||
|
|
||||||
|
if let (Some(start), Some(gzip_len), Some(real_len), Some(hash)) =
|
||||||
|
(bits.next(), bits.next(), bits.next(), bits.next())
|
||||||
|
{
|
||||||
|
let path = bits.collect::<Vec<&str>>().join(" ");
|
||||||
|
let (_, name) = path.rsplit_once('/').unwrap_or(("", &path));
|
||||||
|
|
||||||
|
// Basic path validation.
|
||||||
|
// TODO: more robust checks
|
||||||
|
if path.starts_with('/')
|
||||||
|
|| path.contains("./") // Also catches "/../"
|
||||||
|
|| path.contains("//")
|
||||||
|
{
|
||||||
|
bail!("bad bundle file path `{path}`");
|
||||||
|
}
|
||||||
|
|
||||||
|
self.content.push(TTBFileInfo {
|
||||||
|
start: start.parse::<u64>()?,
|
||||||
|
gzip_len: gzip_len.parse::<u32>()?,
|
||||||
|
real_len: real_len.parse::<u32>()?,
|
||||||
|
path: path.to_owned(),
|
||||||
|
name: name.to_owned(),
|
||||||
|
hash: match hash {
|
||||||
|
"nohash" => None,
|
||||||
|
_ => Some(hash.to_owned()),
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// TODO: preserve the warning info or something!
|
||||||
|
bail!("malformed FILELIST line");
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_search_line(&mut self, name: String, line: String) -> Result<()> {
|
||||||
|
let stat = self.search_orders.entry(name).or_default();
|
||||||
|
stat.push(line);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_defaultsearch_line(&mut self, line: String) -> Result<()> {
|
||||||
|
self.default_search_order = line;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'this> FileIndex<'this> for TTBFileIndex {
|
||||||
|
type InfoType = TTBFileInfo;
|
||||||
|
|
||||||
|
fn iter(&'this self) -> Box<dyn Iterator<Item = &'this TTBFileInfo> + 'this> {
|
||||||
|
Box::new(self.content.iter())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn len(&self) -> usize {
|
||||||
|
self.content.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn initialize(&mut self, reader: &mut dyn Read) -> Result<()> {
|
||||||
|
self.content.clear();
|
||||||
|
self.search_orders.clear();
|
||||||
|
self.search_cache.clear();
|
||||||
|
self.default_search_order.clear();
|
||||||
|
|
||||||
|
let mut mode: String = String::new();
|
||||||
|
for line in BufReader::new(reader).lines() {
|
||||||
|
let line = line?;
|
||||||
|
|
||||||
|
if line.starts_with('[') {
|
||||||
|
mode = line[1..line.len() - 1].to_owned();
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if mode.is_empty() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let (cmd, arg) = mode.rsplit_once(':').unwrap_or((&mode[..], ""));
|
||||||
|
|
||||||
|
match cmd {
|
||||||
|
"DEFAULTSEARCH" => self.read_defaultsearch_line(line)?,
|
||||||
|
"FILELIST" => self.read_filelist_line(line)?,
|
||||||
|
"SEARCH" => self.read_search_line(arg.to_owned(), line)?,
|
||||||
|
_ => continue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search(&'this mut self, name: &str) -> Option<TTBFileInfo> {
|
||||||
|
match self.search_cache.get(name) {
|
||||||
|
None => {}
|
||||||
|
Some(r) => return r.clone(),
|
||||||
|
}
|
||||||
|
|
||||||
|
let search = self.search_orders.get(&self.default_search_order).unwrap();
|
||||||
|
|
||||||
|
// Edge case: absolute paths
|
||||||
|
if name.starts_with('/') {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get last element of path, since
|
||||||
|
// some packages reference a path to a file.
|
||||||
|
// `fithesis4` is one example.
|
||||||
|
let relative_parent: bool;
|
||||||
|
|
||||||
|
let n = match name.rsplit_once('/') {
|
||||||
|
Some(n) => {
|
||||||
|
relative_parent = true;
|
||||||
|
n.1
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
relative_parent = false;
|
||||||
|
name
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// If we don't have this path in the index, this file doesn't exist.
|
||||||
|
// The code below will clone these strings iff it has to.
|
||||||
|
let mut infos: Vec<&TTBFileInfo> = Vec::new();
|
||||||
|
for i in self.iter() {
|
||||||
|
if i.name() == n {
|
||||||
|
infos.push(i);
|
||||||
|
} else if !infos.is_empty() {
|
||||||
|
// infos is sorted, so we can stop searching now.
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if relative_parent {
|
||||||
|
// TODO: REWORK
|
||||||
|
let mut matching: Option<&TTBFileInfo> = None;
|
||||||
|
for info in &infos {
|
||||||
|
if info.path().ends_with(&name) {
|
||||||
|
match matching {
|
||||||
|
Some(_) => return None, // TODO: warning. This shouldn't happen.
|
||||||
|
None => matching = Some(info),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let matching = Some(matching?.clone());
|
||||||
|
self.search_cache.insert(name.to_owned(), matching.clone());
|
||||||
|
matching
|
||||||
|
} else {
|
||||||
|
// Even if paths.len() is 1, we don't return here.
|
||||||
|
// We need to make sure this file matches a search path:
|
||||||
|
// if it's in a directory we don't search, we shouldn't find it!
|
||||||
|
|
||||||
|
let mut picked: Vec<&TTBFileInfo> = Vec::new();
|
||||||
|
for rule in search {
|
||||||
|
// Remove leading slash from rule
|
||||||
|
// (search patterns start with slashes, but paths do not)
|
||||||
|
let rule = &rule[1..];
|
||||||
|
|
||||||
|
for info in &infos {
|
||||||
|
if rule.ends_with("//") {
|
||||||
|
// Match start of patent path
|
||||||
|
// (cutting off the last slash)
|
||||||
|
if info.path().starts_with(&rule[0..rule.len() - 1]) {
|
||||||
|
picked.push(info);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Match full parent path
|
||||||
|
if &info.path()[0..info.path().len() - name.len()] == rule {
|
||||||
|
picked.push(info);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !picked.is_empty() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let r = {
|
||||||
|
if picked.is_empty() {
|
||||||
|
// No file in our search dirs had this name.
|
||||||
|
None
|
||||||
|
} else if picked.len() == 1 {
|
||||||
|
// We found exactly one file with this name.
|
||||||
|
//
|
||||||
|
// This chain of functions is essentially picked[0],
|
||||||
|
// but takes ownership of the string without requiring
|
||||||
|
// a .clone().
|
||||||
|
Some(picked[0].clone())
|
||||||
|
} else {
|
||||||
|
// We found multiple files with this name, all of which
|
||||||
|
// have the same priority. Pick alphabetically to emulate
|
||||||
|
// an "alphabetic DFS" search order.
|
||||||
|
picked.sort_by(|a, b| a.path().cmp(b.path()));
|
||||||
|
Some(picked[0].clone())
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
self.search_cache.insert(name.to_owned(), r.clone());
|
||||||
|
r
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,131 @@
|
||||||
|
// Copyright 2023-2024 the Tectonic Project
|
||||||
|
// Licensed under the MIT License.
|
||||||
|
|
||||||
|
//! Read ttb v1 bundles on the filesystem.
|
||||||
|
//!
|
||||||
|
//! The main type offered by this module is the [`Ttbv1NetBundle`] struct.
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
ttb::{TTBFileIndex, TTBFileInfo, TTBv1Header},
|
||||||
|
Bundle, FileIndex, FileInfo,
|
||||||
|
};
|
||||||
|
use flate2::read::GzDecoder;
|
||||||
|
use std::{
|
||||||
|
convert::TryFrom,
|
||||||
|
fs::File,
|
||||||
|
io::{Cursor, Read, Seek, SeekFrom},
|
||||||
|
path::Path,
|
||||||
|
};
|
||||||
|
use tectonic_errors::prelude::*;
|
||||||
|
use tectonic_io_base::{digest::DigestData, InputHandle, InputOrigin, IoProvider, OpenResult};
|
||||||
|
use tectonic_status_base::StatusBackend;
|
||||||
|
|
||||||
|
/// Read a [`TTBFileInfo`] from this bundle.
|
||||||
|
/// We assume that `fileinfo` points to a valid file in this bundle.
|
||||||
|
fn read_fileinfo<'a>(fileinfo: &TTBFileInfo, reader: &'a mut File) -> Result<Box<dyn Read + 'a>> {
|
||||||
|
reader.seek(SeekFrom::Start(fileinfo.start))?;
|
||||||
|
Ok(Box::new(GzDecoder::new(
|
||||||
|
reader.take(fileinfo.gzip_len as u64),
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A ttbv1 bundle on the local filesystem.
pub struct TTBFsBundle<T>
where
    for<'a> T: FileIndex<'a>,
{
    // The bundle file we read from.
    file: File,
    // Index of the bundle's contents; empty until filled on first use.
    index: T,
}
|
||||||
|
|
||||||
|
/// The internal file-information struct used by the [`TTBFsBundle`].
|
||||||
|
|
||||||
|
impl TTBFsBundle<TTBFileIndex> {
|
||||||
|
/// Create a new ZIP bundle for a generic readable and seekable stream.
|
||||||
|
pub fn new(file: File) -> Result<Self> {
|
||||||
|
Ok(TTBFsBundle {
|
||||||
|
file,
|
||||||
|
index: TTBFileIndex::default(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_header(&mut self) -> Result<TTBv1Header> {
|
||||||
|
self.file.seek(SeekFrom::Start(0))?;
|
||||||
|
let mut header: [u8; 70] = [0u8; 70];
|
||||||
|
self.file.read_exact(&mut header)?;
|
||||||
|
self.file.seek(SeekFrom::Start(0))?;
|
||||||
|
let header = TTBv1Header::try_from(header)?;
|
||||||
|
Ok(header)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fill this bundle's search rules, fetching files from our backend.
|
||||||
|
fn fill_index(&mut self) -> Result<()> {
|
||||||
|
let header = self.get_header()?;
|
||||||
|
let info = TTBFileInfo {
|
||||||
|
start: header.index_start,
|
||||||
|
gzip_len: header.index_real_len,
|
||||||
|
real_len: header.index_gzip_len,
|
||||||
|
path: "/INDEX".to_owned(),
|
||||||
|
name: "INDEX".to_owned(),
|
||||||
|
hash: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut reader = read_fileinfo(&info, &mut self.file)?;
|
||||||
|
self.index.initialize(&mut reader)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Open a file on the filesystem as a zip bundle.
|
||||||
|
pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
|
||||||
|
Self::new(File::open(path)?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IoProvider for TTBFsBundle<TTBFileIndex> {
|
||||||
|
fn input_open_name(
|
||||||
|
&mut self,
|
||||||
|
name: &str,
|
||||||
|
_status: &mut dyn StatusBackend,
|
||||||
|
) -> OpenResult<InputHandle> {
|
||||||
|
// Fetch index if it is empty
|
||||||
|
if self.index.is_empty() {
|
||||||
|
if let Err(e) = self.fill_index() {
|
||||||
|
return OpenResult::Err(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let info = match self.index.search(name) {
|
||||||
|
None => return OpenResult::NotAvailable,
|
||||||
|
Some(s) => s,
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(info.real_len as usize);
|
||||||
|
|
||||||
|
match read_fileinfo(&info, &mut self.file) {
|
||||||
|
Err(e) => return OpenResult::Err(e),
|
||||||
|
Ok(mut b) => {
|
||||||
|
if let Err(e) = b.read_to_end(&mut v) {
|
||||||
|
return OpenResult::Err(e.into());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
OpenResult::Ok(InputHandle::new_read_only(
|
||||||
|
name,
|
||||||
|
Cursor::new(v),
|
||||||
|
InputOrigin::Other,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Bundle for TTBFsBundle<TTBFileIndex> {
|
||||||
|
fn all_files(&self) -> Vec<String> {
|
||||||
|
self.index.iter().map(|x| x.path().to_owned()).collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_digest(&mut self) -> Result<DigestData> {
|
||||||
|
let header = self.get_header()?;
|
||||||
|
Ok(header.digest)
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,222 @@
|
||||||
|
// Copyright 2023-2024 the Tectonic Project
|
||||||
|
// Licensed under the MIT License.
|
||||||
|
|
||||||
|
//! Read ttb v1 bundles on the internet.
|
||||||
|
//!
|
||||||
|
//! The main type offered by this module is the [`TTBNetBundle`] struct,
|
||||||
|
//! which can (but should not) be used directly as a [`tectonic_io_base::IoProvider`].
|
||||||
|
//!
|
||||||
|
//! Instead, wrap it in a [`crate::BundleCache`] for filesystem-backed caching.
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
ttb::{TTBFileIndex, TTBFileInfo, TTBv1Header},
|
||||||
|
Bundle, CachableBundle, FileIndex, FileInfo, NET_RETRY_ATTEMPTS, NET_RETRY_SLEEP_MS,
|
||||||
|
};
|
||||||
|
use flate2::read::GzDecoder;
|
||||||
|
use std::{
|
||||||
|
convert::TryFrom,
|
||||||
|
io::{Cursor, Read},
|
||||||
|
thread,
|
||||||
|
time::Duration,
|
||||||
|
};
|
||||||
|
use tectonic_errors::prelude::*;
|
||||||
|
use tectonic_geturl::{DefaultBackend, DefaultRangeReader, GetUrlBackend, RangeReader};
|
||||||
|
use tectonic_io_base::{InputHandle, InputOrigin, IoProvider, OpenResult};
|
||||||
|
use tectonic_status_base::{tt_note, tt_warning, StatusBackend};
|
||||||
|
|
||||||
|
/// Read a [`TTBFileInfo`] from this bundle.
|
||||||
|
/// We assume that `fileinfo` points to a valid file in this bundle.
|
||||||
|
fn read_fileinfo(fileinfo: &TTBFileInfo, reader: &mut DefaultRangeReader) -> Result<Box<dyn Read>> {
|
||||||
|
// fileinfo.length is a u32, so it must fit inside a usize (assuming 32/64-bit machine).
|
||||||
|
let stream = reader.read_range(fileinfo.start, fileinfo.gzip_len as usize)?;
|
||||||
|
Ok(Box::new(GzDecoder::new(stream)))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Access ttbv1 bundle hosted on the internet.
/// This struct provides NO caching. All files
/// are downloaded.
///
/// As such, this bundle should probably be wrapped in a [`crate::BundleCache`].
pub struct TTBNetBundle<T>
where
    for<'a> T: FileIndex<'a>,
{
    /// URL this bundle is hosted at.
    url: String,
    /// Index of the bundle's contents; empty until fetched over the network.
    index: T,

    // We need the network to load these.
    // They're None until absolutely necessary.
    reader: Option<DefaultRangeReader>,
}
|
||||||
|
|
||||||
|
/// Constructors and network-IO helpers for [`TTBNetBundle`].
|
||||||
|
|
||||||
|
impl TTBNetBundle<TTBFileIndex> {
    /// Create a new network-backed ttb bundle for the given URL.
    /// This method does not require network access.
    /// It will succeed even if we can't connect to the bundle, or if we're given a bad url.
    pub fn new(url: String) -> Result<Self> {
        Ok(TTBNetBundle {
            reader: None,
            index: TTBFileIndex::default(),
            url,
        })
    }

    /// Open the byte-range reader for our URL if that hasn't happened yet.
    /// No-op when a reader is already connected.
    fn connect_reader(&mut self) -> Result<()> {
        if self.reader.is_some() {
            return Ok(());
        }
        let geturl_backend = DefaultBackend::default();
        self.reader = Some(geturl_backend.open_range_reader(&self.url));
        Ok(())
    }

    /// Fetch and parse this bundle's 70-byte header over the network.
    fn get_header(&mut self) -> Result<TTBv1Header> {
        self.connect_reader()?;
        let mut header: [u8; 70] = [0u8; 70];
        // connect_reader() above guarantees `self.reader` is Some.
        self.reader
            .as_mut()
            .unwrap()
            .read_range(0, 70)?
            .read_exact(&mut header)?;
        let header = TTBv1Header::try_from(header)?;
        Ok(header)
    }

    // Fill this bundle's index if it is empty.
    fn ensure_index(&mut self) -> Result<()> {
        if self.index.is_initialized() {
            return Ok(());
        }

        let mut reader = self.get_index_reader()?;
        self.index.initialize(&mut reader)?;
        Ok(())
    }
}
|
||||||
|
|
||||||
|
impl IoProvider for TTBNetBundle<TTBFileIndex> {
|
||||||
|
fn input_open_name(
|
||||||
|
&mut self,
|
||||||
|
name: &str,
|
||||||
|
status: &mut dyn StatusBackend,
|
||||||
|
) -> OpenResult<InputHandle> {
|
||||||
|
if let Err(e) = self.ensure_index() {
|
||||||
|
return OpenResult::Err(e);
|
||||||
|
};
|
||||||
|
|
||||||
|
let info = match self.search(name) {
|
||||||
|
None => return OpenResult::NotAvailable,
|
||||||
|
Some(s) => s,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Retries are handled in open_fileinfo,
|
||||||
|
// since BundleCache never calls input_open_name.
|
||||||
|
self.open_fileinfo(&info, status)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Bundle for TTBNetBundle<TTBFileIndex> {
|
||||||
|
fn all_files(&self) -> Vec<String> {
|
||||||
|
self.index.iter().map(|x| x.path().to_owned()).collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_digest(&mut self) -> Result<tectonic_io_base::digest::DigestData> {
|
||||||
|
let header = self.get_header()?;
|
||||||
|
Ok(header.digest)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'this> CachableBundle<'this, TTBFileIndex> for TTBNetBundle<TTBFileIndex> {
|
||||||
|
fn get_location(&mut self) -> String {
|
||||||
|
self.url.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn initialize_index(&mut self, source: &mut dyn Read) -> Result<()> {
|
||||||
|
self.index.initialize(source)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn index(&mut self) -> &mut TTBFileIndex {
|
||||||
|
&mut self.index
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search(&mut self, name: &str) -> Option<TTBFileInfo> {
|
||||||
|
self.index.search(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_index_reader(&mut self) -> Result<Box<dyn Read>> {
|
||||||
|
self.connect_reader()?;
|
||||||
|
let header = self.get_header()?;
|
||||||
|
|
||||||
|
read_fileinfo(
|
||||||
|
&TTBFileInfo {
|
||||||
|
start: header.index_start,
|
||||||
|
gzip_len: header.index_gzip_len,
|
||||||
|
real_len: header.index_real_len,
|
||||||
|
path: "".to_owned(),
|
||||||
|
name: "".to_owned(),
|
||||||
|
hash: None,
|
||||||
|
},
|
||||||
|
self.reader.as_mut().unwrap(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn open_fileinfo(
|
||||||
|
&mut self,
|
||||||
|
info: &TTBFileInfo,
|
||||||
|
status: &mut dyn StatusBackend,
|
||||||
|
) -> OpenResult<InputHandle> {
|
||||||
|
let mut v: Vec<u8> = Vec::with_capacity(info.real_len as usize);
|
||||||
|
tt_note!(status, "downloading {}", info.name);
|
||||||
|
|
||||||
|
// Edge case for zero-sized reads
|
||||||
|
// (these cause errors on some web hosts)
|
||||||
|
if info.gzip_len == 0 {
|
||||||
|
return OpenResult::Ok(InputHandle::new_read_only(
|
||||||
|
info.name.to_owned(),
|
||||||
|
Cursor::new(v),
|
||||||
|
InputOrigin::Other,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get file with retries
|
||||||
|
for i in 0..NET_RETRY_ATTEMPTS {
|
||||||
|
let mut reader = match read_fileinfo(info, self.reader.as_mut().unwrap()) {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(e) => {
|
||||||
|
tt_warning!(status,
|
||||||
|
"failure fetching \"{}\" from network ({}/{NET_RETRY_ATTEMPTS})",
|
||||||
|
info.name, i+1; e
|
||||||
|
);
|
||||||
|
thread::sleep(Duration::from_millis(NET_RETRY_SLEEP_MS));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
match reader.read_to_end(&mut v) {
|
||||||
|
Ok(_) => {}
|
||||||
|
Err(e) => {
|
||||||
|
tt_warning!(status,
|
||||||
|
"failure downloading \"{}\" from network ({}/{NET_RETRY_ATTEMPTS})",
|
||||||
|
info.name, i+1; e.into()
|
||||||
|
);
|
||||||
|
thread::sleep(Duration::from_millis(NET_RETRY_SLEEP_MS));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return OpenResult::Ok(InputHandle::new_read_only(
|
||||||
|
info.name.to_owned(),
|
||||||
|
Cursor::new(v),
|
||||||
|
InputOrigin::Other,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
OpenResult::Err(anyhow!(
|
||||||
|
"failed to download \"{}\"; please check your network connection.",
|
||||||
|
info.name
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
|
@ -3,18 +3,18 @@
|
||||||
|
|
||||||
//! ZIP files as Tectonic bundles.
|
//! ZIP files as Tectonic bundles.
|
||||||
|
|
||||||
|
use crate::Bundle;
|
||||||
use std::{
|
use std::{
|
||||||
fs::File,
|
fs::File,
|
||||||
io::{Cursor, Read, Seek},
|
io::{Cursor, Read, Seek},
|
||||||
path::Path,
|
path::Path,
|
||||||
|
str::FromStr,
|
||||||
};
|
};
|
||||||
use tectonic_errors::prelude::*;
|
use tectonic_errors::prelude::*;
|
||||||
use tectonic_io_base::{InputHandle, InputOrigin, IoProvider, OpenResult};
|
use tectonic_io_base::{digest, InputHandle, InputOrigin, IoProvider, OpenResult};
|
||||||
use tectonic_status_base::StatusBackend;
|
use tectonic_status_base::{NoopStatusBackend, StatusBackend};
|
||||||
use zip::{result::ZipError, ZipArchive};
|
use zip::{result::ZipError, ZipArchive};
|
||||||
|
|
||||||
use crate::Bundle;
|
|
||||||
|
|
||||||
/// A bundle backed by a ZIP file.
|
/// A bundle backed by a ZIP file.
|
||||||
pub struct ZipBundle<R: Read + Seek> {
|
pub struct ZipBundle<R: Read + Seek> {
|
||||||
zip: ZipArchive<R>,
|
zip: ZipArchive<R>,
|
||||||
|
@ -57,7 +57,11 @@ impl<R: Read + Seek> IoProvider for ZipBundle<R> {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut buf = Vec::with_capacity(zipitem.size() as usize);
|
let s = zipitem.size();
|
||||||
|
if s >= u32::MAX as u64 {
|
||||||
|
return OpenResult::Err(anyhow!("Zip item too large."));
|
||||||
|
}
|
||||||
|
let mut buf = Vec::with_capacity(s as usize);
|
||||||
|
|
||||||
if let Err(e) = zipitem.read_to_end(&mut buf) {
|
if let Err(e) = zipitem.read_to_end(&mut buf) {
|
||||||
return OpenResult::Err(e.into());
|
return OpenResult::Err(e.into());
|
||||||
|
@ -72,7 +76,28 @@ impl<R: Read + Seek> IoProvider for ZipBundle<R> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<R: Read + Seek> Bundle for ZipBundle<R> {
|
impl<R: Read + Seek> Bundle for ZipBundle<R> {
|
||||||
fn all_files(&mut self, _status: &mut dyn StatusBackend) -> Result<Vec<String>> {
|
fn all_files(&self) -> Vec<String> {
|
||||||
Ok(self.zip.file_names().map(|s| s.to_owned()).collect())
|
self.zip.file_names().map(|x| x.to_owned()).collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_digest(&mut self) -> Result<tectonic_io_base::digest::DigestData> {
|
||||||
|
let digest_text = match self.input_open_name(digest::DIGEST_NAME, &mut NoopStatusBackend {})
|
||||||
|
{
|
||||||
|
OpenResult::Ok(h) => {
|
||||||
|
let mut text = String::new();
|
||||||
|
h.take(64).read_to_string(&mut text)?;
|
||||||
|
text
|
||||||
|
}
|
||||||
|
|
||||||
|
OpenResult::NotAvailable => {
|
||||||
|
bail!("bundle does not provide needed SHA256SUM file");
|
||||||
|
}
|
||||||
|
|
||||||
|
OpenResult::Err(e) => {
|
||||||
|
return Err(e);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(atry!(digest::DigestData::from_str(&digest_text); ["corrupted SHA256 digest data"]))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -154,7 +154,7 @@ impl Spx2HtmlEngine {
|
||||||
let mut output = hooks.io().output_open_name(asp).must_exist()?;
|
let mut output = hooks.io().output_open_name(asp).must_exist()?;
|
||||||
serde_json::to_writer_pretty(&mut output, &ser)?;
|
serde_json::to_writer_pretty(&mut output, &ser)?;
|
||||||
let (name, digest) = output.into_name_digest();
|
let (name, digest) = output.into_name_digest();
|
||||||
hooks.event_output_closed(name, digest, status);
|
hooks.event_output_closed(name, digest);
|
||||||
} else if !self.do_not_emit_assets {
|
} else if !self.do_not_emit_assets {
|
||||||
assets.emit(fonts, &mut common)?;
|
assets.emit(fonts, &mut common)?;
|
||||||
}
|
}
|
||||||
|
|
|
@ -6,7 +6,6 @@
|
||||||
use curl::easy::Easy;
|
use curl::easy::Easy;
|
||||||
use std::io::Cursor;
|
use std::io::Cursor;
|
||||||
use tectonic_errors::{anyhow::bail, Result};
|
use tectonic_errors::{anyhow::bail, Result};
|
||||||
use tectonic_status_base::StatusBackend;
|
|
||||||
|
|
||||||
use crate::{GetUrlBackend, RangeReader};
|
use crate::{GetUrlBackend, RangeReader};
|
||||||
|
|
||||||
|
@ -67,11 +66,11 @@ impl GetUrlBackend for CurlBackend {
|
||||||
type Response = Cursor<Vec<u8>>;
|
type Response = Cursor<Vec<u8>>;
|
||||||
type RangeReader = CurlRangeReader;
|
type RangeReader = CurlRangeReader;
|
||||||
|
|
||||||
fn get_url(&mut self, url: &str, _status: &mut dyn StatusBackend) -> Result<Self::Response> {
|
fn get_url(&mut self, url: &str) -> Result<Self::Response> {
|
||||||
get_url_generic(&mut self.handle, url, None)
|
get_url_generic(&mut self.handle, url, None)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn resolve_url(&mut self, url: &str, _status: &mut dyn StatusBackend) -> Result<String> {
|
fn resolve_url(&mut self, url: &str) -> Result<String> {
|
||||||
Ok(url.into())
|
Ok(url.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -25,7 +25,6 @@
|
||||||
use cfg_if::cfg_if;
|
use cfg_if::cfg_if;
|
||||||
use std::io::Read;
|
use std::io::Read;
|
||||||
use tectonic_errors::Result;
|
use tectonic_errors::Result;
|
||||||
use tectonic_status_base::StatusBackend;
|
|
||||||
|
|
||||||
/// A trait for reading byte ranges from an HTTP resource.
|
/// A trait for reading byte ranges from an HTTP resource.
|
||||||
pub trait RangeReader {
|
pub trait RangeReader {
|
||||||
|
@ -48,10 +47,10 @@ pub trait GetUrlBackend: Default {
|
||||||
///
|
///
|
||||||
/// But we attempt to detect redirects into CDNs/S3/etc and *stop* following
|
/// But we attempt to detect redirects into CDNs/S3/etc and *stop* following
|
||||||
/// before we get that deep.
|
/// before we get that deep.
|
||||||
fn resolve_url(&mut self, url: &str, status: &mut dyn StatusBackend) -> Result<String>;
|
fn resolve_url(&mut self, url: &str) -> Result<String>;
|
||||||
|
|
||||||
/// Perform an HTTP GET on a URL, returning a readable result.
|
/// Perform an HTTP GET on a URL, returning a readable result.
|
||||||
fn get_url(&mut self, url: &str, status: &mut dyn StatusBackend) -> Result<Self::Response>;
|
fn get_url(&mut self, url: &str) -> Result<Self::Response>;
|
||||||
|
|
||||||
/// Open a range reader that can perform byte-range reads on the specified URL.
|
/// Open a range reader that can perform byte-range reads on the specified URL.
|
||||||
fn open_range_reader(&self, url: &str) -> Self::RangeReader;
|
fn open_range_reader(&self, url: &str) -> Self::RangeReader;
|
||||||
|
|
|
@ -10,7 +10,6 @@ use std::{
|
||||||
result::Result as StdResult,
|
result::Result as StdResult,
|
||||||
};
|
};
|
||||||
use tectonic_errors::Result;
|
use tectonic_errors::Result;
|
||||||
use tectonic_status_base::StatusBackend;
|
|
||||||
|
|
||||||
use crate::{GetUrlBackend, RangeReader};
|
use crate::{GetUrlBackend, RangeReader};
|
||||||
|
|
||||||
|
@ -34,11 +33,11 @@ impl GetUrlBackend for NullBackend {
|
||||||
type Response = Empty;
|
type Response = Empty;
|
||||||
type RangeReader = NullRangeReader;
|
type RangeReader = NullRangeReader;
|
||||||
|
|
||||||
fn get_url(&mut self, _url: &str, _status: &mut dyn StatusBackend) -> Result<Empty> {
|
fn get_url(&mut self, _url: &str) -> Result<Empty> {
|
||||||
Err((NoGetUrlBackendError {}).into())
|
Err((NoGetUrlBackendError {}).into())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn resolve_url(&mut self, _url: &str, _status: &mut dyn StatusBackend) -> Result<String> {
|
fn resolve_url(&mut self, _url: &str) -> Result<String> {
|
||||||
Err((NoGetUrlBackendError {}).into())
|
Err((NoGetUrlBackendError {}).into())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -10,7 +10,6 @@ use reqwest::{
|
||||||
StatusCode, Url,
|
StatusCode, Url,
|
||||||
};
|
};
|
||||||
use tectonic_errors::{anyhow::bail, Result};
|
use tectonic_errors::{anyhow::bail, Result};
|
||||||
use tectonic_status_base::{tt_note, StatusBackend};
|
|
||||||
|
|
||||||
use crate::{GetUrlBackend, RangeReader};
|
use crate::{GetUrlBackend, RangeReader};
|
||||||
|
|
||||||
|
@ -24,7 +23,7 @@ impl GetUrlBackend for ReqwestBackend {
|
||||||
type Response = Response;
|
type Response = Response;
|
||||||
type RangeReader = ReqwestRangeReader;
|
type RangeReader = ReqwestRangeReader;
|
||||||
|
|
||||||
fn get_url(&mut self, url: &str, _status: &mut dyn StatusBackend) -> Result<Response> {
|
fn get_url(&mut self, url: &str) -> Result<Response> {
|
||||||
let res = Client::new().get(url).send()?;
|
let res = Client::new().get(url).send()?;
|
||||||
if !res.status().is_success() {
|
if !res.status().is_success() {
|
||||||
bail!(
|
bail!(
|
||||||
|
@ -36,9 +35,7 @@ impl GetUrlBackend for ReqwestBackend {
|
||||||
Ok(res)
|
Ok(res)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn resolve_url(&mut self, url: &str, status: &mut dyn StatusBackend) -> Result<String> {
|
fn resolve_url(&mut self, url: &str) -> Result<String> {
|
||||||
tt_note!(status, "connecting to {}", url);
|
|
||||||
|
|
||||||
let parsed = Url::parse(url)?;
|
let parsed = Url::parse(url)?;
|
||||||
let original_filename = parsed
|
let original_filename = parsed
|
||||||
.path_segments()
|
.path_segments()
|
||||||
|
@ -96,10 +93,6 @@ impl GetUrlBackend for ReqwestBackend {
|
||||||
}
|
}
|
||||||
|
|
||||||
let final_url: String = res.url().clone().into();
|
let final_url: String = res.url().clone().into();
|
||||||
if final_url != url {
|
|
||||||
tt_note!(status, "resolved to {}", final_url);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(final_url)
|
Ok(final_url)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -9,6 +9,7 @@
|
||||||
|
|
||||||
use app_dirs2::AppDataType;
|
use app_dirs2::AppDataType;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
use std::{env, fs};
|
||||||
use tectonic_errors::prelude::*;
|
use tectonic_errors::prelude::*;
|
||||||
|
|
||||||
/// The instance of the `app_dirs2` crate that this crate links to.
|
/// The instance of the `app_dirs2` crate that this crate links to.
|
||||||
|
@ -61,6 +62,27 @@ pub fn ensure_user_config() -> Result<PathBuf> {
|
||||||
/// - macOS: `$HOME/Library/Caches/Tectonic`
|
/// - macOS: `$HOME/Library/Caches/Tectonic`
|
||||||
/// - Others: `$XDG_CACHE_HOME/Tectonic` if defined, otherwise
|
/// - Others: `$XDG_CACHE_HOME/Tectonic` if defined, otherwise
|
||||||
/// `$HOME/.cache/Tectonic`
|
/// `$HOME/.cache/Tectonic`
|
||||||
pub fn ensure_user_cache_dir(path: &str) -> Result<PathBuf> {
|
///
|
||||||
Ok(app_dirs2::app_dir(AppDataType::UserCache, &APP_INFO, path)?)
|
///
|
||||||
|
/// The cache location defaults to the `AppDataType::UserCache`
|
||||||
|
/// provided by `app_dirs2` but can be overwritten using the
|
||||||
|
/// `TECTONIC_CACHE_DIR` environment variable.
|
||||||
|
///
|
||||||
|
/// This method may perform I/O to create the user cache directory, so it is
|
||||||
|
/// fallible. (Due to its `app_dirs2` implementation, it would have to be
|
||||||
|
/// fallible even if it didn't perform I/O.)
|
||||||
|
pub fn get_user_cache_dir(subdir: &str) -> Result<PathBuf> {
|
||||||
|
let env_cache_path = env::var_os("TECTONIC_CACHE_DIR");
|
||||||
|
|
||||||
|
let cache_path = match env_cache_path {
|
||||||
|
Some(env_cache_path) => {
|
||||||
|
let mut env_cache_path: PathBuf = env_cache_path.into();
|
||||||
|
env_cache_path.push(subdir);
|
||||||
|
fs::create_dir_all(&env_cache_path)?;
|
||||||
|
env_cache_path
|
||||||
|
}
|
||||||
|
None => app_dirs2::app_dir(AppDataType::UserCache, &APP_INFO, subdir)?,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(cache_path)
|
||||||
}
|
}
|
||||||
|
|
|
@ -10,7 +10,7 @@ use std::path::{Path, PathBuf};
|
||||||
use tectonic_bridge_core::{SecuritySettings, SecurityStance};
|
use tectonic_bridge_core::{SecuritySettings, SecurityStance};
|
||||||
|
|
||||||
use tectonic::{
|
use tectonic::{
|
||||||
config::PersistentConfig,
|
config::{maybe_return_test_bundle, PersistentConfig},
|
||||||
driver::{OutputFormat, PassSetting, ProcessingSession, ProcessingSessionBuilder},
|
driver::{OutputFormat, PassSetting, ProcessingSession, ProcessingSessionBuilder},
|
||||||
errmsg,
|
errmsg,
|
||||||
errors::{ErrorKind, Result},
|
errors::{ErrorKind, Result},
|
||||||
|
@ -19,6 +19,8 @@ use tectonic::{
|
||||||
unstable_opts::{UnstableArg, UnstableOptions},
|
unstable_opts::{UnstableArg, UnstableOptions},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use tectonic_bundles::detect_bundle;
|
||||||
|
|
||||||
#[derive(Debug, Parser)]
|
#[derive(Debug, Parser)]
|
||||||
pub struct CompileOptions {
|
pub struct CompileOptions {
|
||||||
/// The file to process, or "-" to process the standard input stream
|
/// The file to process, or "-" to process the standard input stream
|
||||||
|
@ -94,8 +96,6 @@ pub struct CompileOptions {
|
||||||
|
|
||||||
//impl TectonicCommand for CompileOptions {
|
//impl TectonicCommand for CompileOptions {
|
||||||
impl CompileOptions {
|
impl CompileOptions {
|
||||||
//fn customize(&self, _cc: &mut CommandCustomizations) {}
|
|
||||||
|
|
||||||
pub fn execute(self, config: PersistentConfig, status: &mut dyn StatusBackend) -> Result<i32> {
|
pub fn execute(self, config: PersistentConfig, status: &mut dyn StatusBackend) -> Result<i32> {
|
||||||
let unstable = UnstableOptions::from_unstable_args(self.unstable.into_iter());
|
let unstable = UnstableOptions::from_unstable_args(self.unstable.into_iter());
|
||||||
|
|
||||||
|
@ -185,16 +185,26 @@ impl CompileOptions {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let only_cached = self.only_cached;
|
if self.only_cached {
|
||||||
if only_cached {
|
|
||||||
tt_note!(status, "using only cached resource files");
|
tt_note!(status, "using only cached resource files");
|
||||||
}
|
}
|
||||||
if let Some(path) = self.bundle {
|
|
||||||
sess_builder.bundle(config.make_local_file_provider(path, status)?);
|
if let Some(bundle) = self.bundle {
|
||||||
} else if let Some(u) = self.web_bundle {
|
// TODO: this is ugly.
|
||||||
sess_builder.bundle(config.make_cached_url_provider(&u, only_cached, None, status)?);
|
// It's probably a good idea to re-design our code so we
|
||||||
|
// don't need special cases for tests our source.
|
||||||
|
if let Ok(bundle) = maybe_return_test_bundle(Some(bundle.clone())) {
|
||||||
|
sess_builder.bundle(bundle);
|
||||||
|
} else if let Some(bundle) = detect_bundle(bundle.clone(), self.only_cached, None)? {
|
||||||
|
sess_builder.bundle(bundle);
|
||||||
|
} else {
|
||||||
|
return Err(errmsg!("`{bundle}` doesn't specify a valid bundle."));
|
||||||
|
}
|
||||||
|
} else if let Ok(bundle) = maybe_return_test_bundle(None) {
|
||||||
|
// TODO: this is ugly too.
|
||||||
|
sess_builder.bundle(bundle);
|
||||||
} else {
|
} else {
|
||||||
sess_builder.bundle(config.default_bundle(only_cached, status)?);
|
sess_builder.bundle(config.default_bundle(self.only_cached)?);
|
||||||
}
|
}
|
||||||
sess_builder.build_date_from_env(deterministic_mode);
|
sess_builder.build_date_from_env(deterministic_mode);
|
||||||
|
|
||||||
|
|
|
@ -29,7 +29,7 @@ fn get_a_bundle(
|
||||||
let doc = ws.first_document();
|
let doc = ws.first_document();
|
||||||
let mut options: DocumentSetupOptions = Default::default();
|
let mut options: DocumentSetupOptions = Default::default();
|
||||||
options.only_cached(only_cached);
|
options.only_cached(only_cached);
|
||||||
doc.bundle(&options, status)
|
doc.bundle(&options)
|
||||||
}
|
}
|
||||||
|
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
|
@ -43,7 +43,6 @@ fn get_a_bundle(
|
||||||
Ok(Box::new(tectonic_bundles::get_fallback_bundle(
|
Ok(Box::new(tectonic_bundles::get_fallback_bundle(
|
||||||
tectonic_engine_xetex::FORMAT_SERIAL,
|
tectonic_engine_xetex::FORMAT_SERIAL,
|
||||||
only_cached,
|
only_cached,
|
||||||
status,
|
|
||||||
)?))
|
)?))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -131,8 +130,8 @@ impl BundleSearchCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn execute(self, config: PersistentConfig, status: &mut dyn StatusBackend) -> Result<i32> {
|
fn execute(self, config: PersistentConfig, status: &mut dyn StatusBackend) -> Result<i32> {
|
||||||
let mut bundle = get_a_bundle(config, self.only_cached, status)?;
|
let bundle = get_a_bundle(config, self.only_cached, status)?;
|
||||||
let files = bundle.all_files(status)?;
|
let files = bundle.all_files();
|
||||||
|
|
||||||
// Is there a better way to do this?
|
// Is there a better way to do this?
|
||||||
let filter: Box<dyn Fn(&str) -> bool> = if let Some(t) = self.term {
|
let filter: Box<dyn Fn(&str) -> bool> = if let Some(t) = self.term {
|
||||||
|
|
|
@ -30,7 +30,7 @@ impl TectonicCommand for InitCommand {
|
||||||
|
|
||||||
let wc = WorkspaceCreator::new(path);
|
let wc = WorkspaceCreator::new(path);
|
||||||
ctry!(
|
ctry!(
|
||||||
wc.create_defaulted(config, status, self.web_bundle);
|
wc.create_defaulted(&config, self.bundle);
|
||||||
"failed to create the new Tectonic workspace"
|
"failed to create the new Tectonic workspace"
|
||||||
);
|
);
|
||||||
Ok(0)
|
Ok(0)
|
||||||
|
@ -61,7 +61,7 @@ impl TectonicCommand for NewCommand {
|
||||||
|
|
||||||
let wc = WorkspaceCreator::new(self.path);
|
let wc = WorkspaceCreator::new(self.path);
|
||||||
ctry!(
|
ctry!(
|
||||||
wc.create_defaulted(config, status, self.web_bundle);
|
wc.create_defaulted(&config, self.bundle);
|
||||||
"failed to create the new Tectonic workspace"
|
"failed to create the new Tectonic workspace"
|
||||||
);
|
);
|
||||||
Ok(0)
|
Ok(0)
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
use clap::{CommandFactory, Parser};
|
use clap::{CommandFactory, Parser};
|
||||||
use tectonic::{config::PersistentConfig, errors::Result};
|
use tectonic::{config::PersistentConfig, errors::Result};
|
||||||
|
use tectonic_io_base::app_dirs;
|
||||||
use tectonic_status_base::StatusBackend;
|
use tectonic_status_base::StatusBackend;
|
||||||
|
|
||||||
use crate::v2cli::{CommandCustomizations, TectonicCommand, V2CliOptions};
|
use crate::v2cli::{CommandCustomizations, TectonicCommand, V2CliOptions};
|
||||||
|
@ -47,9 +48,7 @@ impl ShowUserCacheDirCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn execute(self, _config: PersistentConfig, _status: &mut dyn StatusBackend) -> Result<i32> {
|
fn execute(self, _config: PersistentConfig, _status: &mut dyn StatusBackend) -> Result<i32> {
|
||||||
use tectonic_bundles::cache::Cache;
|
println!("{}", app_dirs::get_user_cache_dir("bundles")?.display());
|
||||||
let cache = Cache::get_user_default()?;
|
|
||||||
println!("{}", cache.root().display());
|
|
||||||
Ok(0)
|
Ok(0)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -12,19 +12,13 @@
|
||||||
#[cfg(feature = "serde")]
|
#[cfg(feature = "serde")]
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::{
|
use std::{
|
||||||
path::{Path, PathBuf},
|
path::PathBuf,
|
||||||
sync::atomic::{AtomicBool, Ordering},
|
sync::atomic::{AtomicBool, Ordering},
|
||||||
};
|
};
|
||||||
use tectonic_bundles::{
|
use tectonic_bundles::{detect_bundle, Bundle};
|
||||||
cache::Cache, dir::DirBundle, itar::IndexedTarBackend, zip::ZipBundle, Bundle,
|
|
||||||
};
|
|
||||||
use tectonic_io_base::app_dirs;
|
use tectonic_io_base::app_dirs;
|
||||||
use url::Url;
|
|
||||||
|
|
||||||
use crate::{
|
use crate::errors::{ErrorKind, Result};
|
||||||
errors::{ErrorKind, Result},
|
|
||||||
status::StatusBackend,
|
|
||||||
};
|
|
||||||
|
|
||||||
/// Awesome hack time!!!
|
/// Awesome hack time!!!
|
||||||
///
|
///
|
||||||
|
@ -44,19 +38,19 @@ pub fn is_config_test_mode_activated() -> bool {
|
||||||
CONFIG_TEST_MODE_ACTIVATED.load(Ordering::SeqCst)
|
CONFIG_TEST_MODE_ACTIVATED.load(Ordering::SeqCst)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_test_bundle_wanted(web_bundle: Option<String>) -> bool {
|
pub fn is_test_bundle_wanted(bundle: Option<String>) -> bool {
|
||||||
if !is_config_test_mode_activated() {
|
if !is_config_test_mode_activated() {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
match web_bundle {
|
match bundle {
|
||||||
None => true,
|
None => true,
|
||||||
Some(x) if x.contains("test-bundle://") => true,
|
Some(x) if x.contains("test-bundle://") => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn maybe_return_test_bundle(web_bundle: Option<String>) -> Result<Box<dyn Bundle>> {
|
pub fn maybe_return_test_bundle(bundle: Option<String>) -> Result<Box<dyn Bundle>> {
|
||||||
if is_test_bundle_wanted(web_bundle) {
|
if is_test_bundle_wanted(bundle) {
|
||||||
Ok(Box::<crate::test_util::TestBundle>::default())
|
Ok(Box::<crate::test_util::TestBundle>::default())
|
||||||
} else {
|
} else {
|
||||||
Err(ErrorKind::Msg("not asking for the default test bundle".to_owned()).into())
|
Err(ErrorKind::Msg("not asking for the default test bundle".to_owned()).into())
|
||||||
|
@ -134,53 +128,14 @@ impl PersistentConfig {
|
||||||
Ok(PersistentConfig::default())
|
Ok(PersistentConfig::default())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn make_cached_url_provider(
|
|
||||||
&self,
|
|
||||||
url: &str,
|
|
||||||
only_cached: bool,
|
|
||||||
custom_cache_root: Option<&Path>,
|
|
||||||
status: &mut dyn StatusBackend,
|
|
||||||
) -> Result<Box<dyn Bundle>> {
|
|
||||||
if let Ok(test_bundle) = maybe_return_test_bundle(Some(url.to_owned())) {
|
|
||||||
return Ok(test_bundle);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut cache = if let Some(root) = custom_cache_root {
|
|
||||||
Cache::get_for_custom_directory(root)
|
|
||||||
} else {
|
|
||||||
Cache::get_user_default()?
|
|
||||||
};
|
|
||||||
|
|
||||||
let bundle = cache.open::<IndexedTarBackend>(url, only_cached, status)?;
|
|
||||||
Ok(Box::new(bundle) as _)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn make_local_file_provider(
|
|
||||||
&self,
|
|
||||||
file_path: PathBuf,
|
|
||||||
_status: &mut dyn StatusBackend,
|
|
||||||
) -> Result<Box<dyn Bundle>> {
|
|
||||||
let bundle: Box<dyn Bundle> = if file_path.is_dir() {
|
|
||||||
Box::new(DirBundle::new(file_path))
|
|
||||||
} else {
|
|
||||||
Box::new(ZipBundle::open(file_path)?)
|
|
||||||
};
|
|
||||||
Ok(bundle)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn default_bundle_loc(&self) -> &str {
|
pub fn default_bundle_loc(&self) -> &str {
|
||||||
&self.default_bundles[0].url
|
&self.default_bundles[0].url
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn default_bundle(
|
pub fn default_bundle(&self, only_cached: bool) -> Result<Box<dyn Bundle>> {
|
||||||
&self,
|
if CONFIG_TEST_MODE_ACTIVATED.load(Ordering::SeqCst) {
|
||||||
only_cached: bool,
|
let bundle = crate::test_util::TestBundle::default();
|
||||||
status: &mut dyn StatusBackend,
|
return Ok(Box::new(bundle));
|
||||||
) -> Result<Box<dyn Bundle>> {
|
|
||||||
use std::io;
|
|
||||||
|
|
||||||
if let Ok(test_bundle) = maybe_return_test_bundle(None) {
|
|
||||||
return Ok(test_bundle);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if self.default_bundles.len() != 1 {
|
if self.default_bundles.len() != 1 {
|
||||||
|
@ -190,25 +145,18 @@ impl PersistentConfig {
|
||||||
.into());
|
.into());
|
||||||
}
|
}
|
||||||
|
|
||||||
let url = Url::parse(&self.default_bundles[0].url)
|
Ok(
|
||||||
.map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "failed to parse url"))?;
|
detect_bundle(self.default_bundles[0].url.to_owned(), only_cached, None)
|
||||||
if url.scheme() == "file" {
|
.unwrap()
|
||||||
// load the local zip file.
|
.unwrap(),
|
||||||
let file_path = url.to_file_path().map_err(|_| {
|
)
|
||||||
io::Error::new(io::ErrorKind::InvalidInput, "failed to parse local path")
|
|
||||||
})?;
|
|
||||||
return self.make_local_file_provider(file_path, status);
|
|
||||||
}
|
|
||||||
let bundle =
|
|
||||||
self.make_cached_url_provider(&self.default_bundles[0].url, only_cached, None, status)?;
|
|
||||||
Ok(Box::new(bundle) as _)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn format_cache_path(&self) -> Result<PathBuf> {
|
pub fn format_cache_path(&self) -> Result<PathBuf> {
|
||||||
if is_config_test_mode_activated() {
|
if is_config_test_mode_activated() {
|
||||||
Ok(crate::test_util::test_path(&[]))
|
Ok(crate::test_util::test_path(&[]))
|
||||||
} else {
|
} else {
|
||||||
Ok(app_dirs::ensure_user_cache_dir("formats")?)
|
Ok(app_dirs::get_user_cache_dir("formats")?)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -7,28 +7,21 @@
|
||||||
//! `tectonic_docmodel` crate with the actual document-processing capabilities
|
//! `tectonic_docmodel` crate with the actual document-processing capabilities
|
||||||
//! provided by the processing engines.
|
//! provided by the processing engines.
|
||||||
|
|
||||||
use std::{
|
use std::{fmt::Write as FmtWrite, fs, io, path::PathBuf};
|
||||||
fmt::Write as FmtWrite,
|
|
||||||
fs, io,
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
};
|
|
||||||
use tectonic_bridge_core::SecuritySettings;
|
use tectonic_bridge_core::SecuritySettings;
|
||||||
use tectonic_bundles::{
|
use tectonic_bundles::{detect_bundle, Bundle};
|
||||||
cache::Cache, dir::DirBundle, itar::IndexedTarBackend, zip::ZipBundle, Bundle,
|
|
||||||
};
|
|
||||||
use tectonic_docmodel::{
|
use tectonic_docmodel::{
|
||||||
document::{BuildTargetType, Document, InputFile},
|
document::{BuildTargetType, Document, InputFile},
|
||||||
workspace::{Workspace, WorkspaceCreator},
|
workspace::{Workspace, WorkspaceCreator},
|
||||||
};
|
};
|
||||||
use tectonic_geturl::{DefaultBackend, GetUrlBackend};
|
use tectonic_geturl::{DefaultBackend, GetUrlBackend};
|
||||||
use url::Url;
|
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
config, ctry,
|
config, ctry,
|
||||||
driver::{OutputFormat, PassSetting, ProcessingSessionBuilder},
|
driver::{OutputFormat, PassSetting, ProcessingSessionBuilder},
|
||||||
errors::{ErrorKind, Result},
|
errors::{ErrorKind, Result},
|
||||||
status::StatusBackend,
|
status::StatusBackend,
|
||||||
tt_note,
|
test_util, tt_note,
|
||||||
unstable_opts::UnstableOptions,
|
unstable_opts::UnstableOptions,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -79,11 +72,7 @@ pub trait DocumentExt {
|
||||||
///
|
///
|
||||||
/// This parses [`Document::bundle_loc`] and turns it into the appropriate
|
/// This parses [`Document::bundle_loc`] and turns it into the appropriate
|
||||||
/// bundle backend.
|
/// bundle backend.
|
||||||
fn bundle(
|
fn bundle(&self, setup_options: &DocumentSetupOptions) -> Result<Box<dyn Bundle>>;
|
||||||
&self,
|
|
||||||
setup_options: &DocumentSetupOptions,
|
|
||||||
status: &mut dyn StatusBackend,
|
|
||||||
) -> Result<Box<dyn Bundle>>;
|
|
||||||
|
|
||||||
/// Set up a [`ProcessingSessionBuilder`] for one of the outputs.
|
/// Set up a [`ProcessingSessionBuilder`] for one of the outputs.
|
||||||
///
|
///
|
||||||
|
@ -98,38 +87,18 @@ pub trait DocumentExt {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DocumentExt for Document {
|
impl DocumentExt for Document {
|
||||||
fn bundle(
|
fn bundle(&self, setup_options: &DocumentSetupOptions) -> Result<Box<dyn Bundle>> {
|
||||||
&self,
|
// Load test bundle
|
||||||
setup_options: &DocumentSetupOptions,
|
if config::is_config_test_mode_activated() {
|
||||||
status: &mut dyn StatusBackend,
|
let bundle = test_util::TestBundle::default();
|
||||||
) -> Result<Box<dyn Bundle>> {
|
return Ok(Box::new(bundle));
|
||||||
fn bundle_from_path(p: PathBuf) -> Result<Box<dyn Bundle>> {
|
|
||||||
if p.is_dir() {
|
|
||||||
Ok(Box::new(DirBundle::new(p)))
|
|
||||||
} else {
|
|
||||||
Ok(Box::new(ZipBundle::open(p)?))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Ok(test_bundle) = config::maybe_return_test_bundle(None) {
|
let d = detect_bundle(self.bundle_loc.clone(), setup_options.only_cached, None)?;
|
||||||
Ok(test_bundle)
|
|
||||||
} else if let Ok(url) = Url::parse(&self.bundle_loc) {
|
match d {
|
||||||
if url.scheme() != "file" {
|
Some(b) => Ok(b),
|
||||||
let mut cache = Cache::get_user_default()?;
|
None => Err(io::Error::new(io::ErrorKind::InvalidInput, "Could not get bundle").into()),
|
||||||
let bundle = cache.open::<IndexedTarBackend>(
|
|
||||||
&self.bundle_loc,
|
|
||||||
setup_options.only_cached,
|
|
||||||
status,
|
|
||||||
)?;
|
|
||||||
Ok(Box::new(bundle))
|
|
||||||
} else {
|
|
||||||
let file_path = url.to_file_path().map_err(|_| {
|
|
||||||
io::Error::new(io::ErrorKind::InvalidInput, "failed to parse local path")
|
|
||||||
})?;
|
|
||||||
bundle_from_path(file_path)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
bundle_from_path(Path::new(&self.bundle_loc).to_owned())
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -198,7 +167,7 @@ impl DocumentExt for Document {
|
||||||
if setup_options.only_cached {
|
if setup_options.only_cached {
|
||||||
tt_note!(status, "using only cached resource files");
|
tt_note!(status, "using only cached resource files");
|
||||||
}
|
}
|
||||||
sess_builder.bundle(self.bundle(setup_options, status)?);
|
sess_builder.bundle(self.bundle(setup_options)?);
|
||||||
|
|
||||||
let mut tex_dir = self.src_dir().to_owned();
|
let mut tex_dir = self.src_dir().to_owned();
|
||||||
tex_dir.push("src");
|
tex_dir.push("src");
|
||||||
|
@ -225,25 +194,23 @@ pub trait WorkspaceCreatorExt {
|
||||||
/// for the main document.
|
/// for the main document.
|
||||||
fn create_defaulted(
|
fn create_defaulted(
|
||||||
self,
|
self,
|
||||||
config: config::PersistentConfig,
|
config: &config::PersistentConfig,
|
||||||
status: &mut dyn StatusBackend,
|
bundle: Option<String>,
|
||||||
web_bundle: Option<String>,
|
|
||||||
) -> Result<Workspace>;
|
) -> Result<Workspace>;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl WorkspaceCreatorExt for WorkspaceCreator {
|
impl WorkspaceCreatorExt for WorkspaceCreator {
|
||||||
fn create_defaulted(
|
fn create_defaulted(
|
||||||
self,
|
self,
|
||||||
config: config::PersistentConfig,
|
config: &config::PersistentConfig,
|
||||||
status: &mut dyn StatusBackend,
|
bundle: Option<String>,
|
||||||
web_bundle: Option<String>,
|
|
||||||
) -> Result<Workspace> {
|
) -> Result<Workspace> {
|
||||||
let bundle_loc = if config::is_test_bundle_wanted(web_bundle.clone()) {
|
let bundle_loc = if config::is_test_bundle_wanted(bundle.clone()) {
|
||||||
"test-bundle://".to_owned()
|
"test-bundle://".to_owned()
|
||||||
} else {
|
} else {
|
||||||
let unresolved_loc = web_bundle.unwrap_or(config.default_bundle_loc().to_owned());
|
let loc = bundle.unwrap_or(config.default_bundle_loc().to_owned());
|
||||||
let mut gub = DefaultBackend::default();
|
let mut gub = DefaultBackend::default();
|
||||||
gub.resolve_url(&unresolved_loc, status)?
|
gub.resolve_url(&loc)?
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(self.create(bundle_loc, Vec::new())?)
|
Ok(self.create(bundle_loc, Vec::new())?)
|
||||||
|
|
|
@ -641,12 +641,7 @@ impl DriverHooks for BridgeState {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
fn event_output_closed(
|
fn event_output_closed(&mut self, name: String, digest: DigestData) {
|
||||||
&mut self,
|
|
||||||
name: String,
|
|
||||||
digest: DigestData,
|
|
||||||
_status: &mut dyn StatusBackend,
|
|
||||||
) {
|
|
||||||
let summ = self
|
let summ = self
|
||||||
.events
|
.events
|
||||||
.get_mut(&name)
|
.get_mut(&name)
|
||||||
|
@ -1166,7 +1161,7 @@ impl ProcessingSessionBuilder {
|
||||||
let format_cache_path = self
|
let format_cache_path = self
|
||||||
.format_cache_path
|
.format_cache_path
|
||||||
.unwrap_or_else(|| filesystem_root.clone());
|
.unwrap_or_else(|| filesystem_root.clone());
|
||||||
let format_cache = FormatCache::new(bundle.get_digest(status)?, format_cache_path);
|
let format_cache = FormatCache::new(bundle.get_digest()?, format_cache_path);
|
||||||
|
|
||||||
let genuine_stdout = if self.print_stdout {
|
let genuine_stdout = if self.print_stdout {
|
||||||
Some(GenuineStdoutIo::new())
|
Some(GenuineStdoutIo::new())
|
||||||
|
|
|
@ -149,7 +149,7 @@ pub fn latex_to_pdf<T: AsRef<str>>(latex: T) -> Result<Vec<u8>> {
|
||||||
"failed to open the default configuration file");
|
"failed to open the default configuration file");
|
||||||
|
|
||||||
let only_cached = false;
|
let only_cached = false;
|
||||||
let bundle = ctry!(config.default_bundle(only_cached, &mut status);
|
let bundle = ctry!(config.default_bundle(only_cached);
|
||||||
"failed to load the default resource bundle");
|
"failed to load the default resource bundle");
|
||||||
|
|
||||||
let format_cache_path = ctry!(config.format_cache_path();
|
let format_cache_path = ctry!(config.format_cache_path();
|
||||||
|
|
|
@ -126,11 +126,11 @@ impl IoProvider for TestBundle {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Bundle for TestBundle {
|
impl Bundle for TestBundle {
|
||||||
fn get_digest(&mut self, _status: &mut dyn StatusBackend) -> Result<DigestData> {
|
fn get_digest(&mut self) -> Result<DigestData> {
|
||||||
Ok(DigestData::zeros())
|
Ok(DigestData::zeros())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn all_files(&mut self, status: &mut dyn StatusBackend) -> Result<Vec<String>> {
|
fn all_files(&self) -> Vec<String> {
|
||||||
self.0.all_files(status)
|
self.0.all_files()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -117,12 +117,7 @@ impl<'a> DriverHooks for FormatTestDriver<'a> {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
fn event_output_closed(
|
fn event_output_closed(&mut self, name: String, digest: DigestData) {
|
||||||
&mut self,
|
|
||||||
name: String,
|
|
||||||
digest: DigestData,
|
|
||||||
_status: &mut dyn StatusBackend,
|
|
||||||
) {
|
|
||||||
let summ = self
|
let summ = self
|
||||||
.events
|
.events
|
||||||
.get_mut(&name)
|
.get_mut(&name)
|
||||||
|
|
Loading…
Reference in New Issue