Bundle rework (#1218)
commit 51e179f742
Cargo.lock

@@ -252,7 +252,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706"
 dependencies = [
  "memchr",
- "regex-automata",
+ "regex-automata 0.4.6",
  "serde",
 ]

@@ -1075,8 +1075,8 @@ dependencies = [
  "aho-corasick",
  "bstr",
  "log",
- "regex-automata",
- "regex-syntax",
+ "regex-automata 0.4.6",
+ "regex-syntax 0.8.2",
 ]

 [[package]]
@@ -1390,7 +1390,7 @@ dependencies = [
  "globset",
  "log",
  "memchr",
- "regex-automata",
+ "regex-automata 0.4.6",
  "same-file",
  "walkdir",
  "winapi-util",
@@ -1597,6 +1597,15 @@ version = "0.4.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"

+[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata 0.1.10",
+]
+
 [[package]]
 name = "md-5"
 version = "0.10.6"
@@ -1757,6 +1766,16 @@ dependencies = [
  "windows-sys 0.48.0",
 ]

+[[package]]
+name = "nu-ansi-term"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+dependencies = [
+ "overload",
+ "winapi",
+]
+
 [[package]]
 name = "num-conv"
 version = "0.1.0"
@@ -1871,6 +1890,12 @@ dependencies = [
  "vcpkg",
 ]

+[[package]]
+name = "overload"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+
 [[package]]
 name = "parking_lot"
 version = "0.12.1"
@@ -2193,8 +2218,17 @@ checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-automata",
- "regex-syntax",
+ "regex-automata 0.4.6",
+ "regex-syntax 0.8.2",
 ]

+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax 0.6.29",
+]
+
 [[package]]
@@ -2205,9 +2239,15 @@ checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-syntax",
+ "regex-syntax 0.8.2",
 ]

+[[package]]
+name = "regex-syntax"
+version = "0.6.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
+
 [[package]]
 name = "regex-syntax"
 version = "0.8.2"
@@ -2475,6 +2515,15 @@ dependencies = [
  "digest",
 ]

+[[package]]
+name = "sharded-slab"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
+dependencies = [
+ "lazy_static",
+]
+
 [[package]]
 name = "signal-hook-registry"
 version = "1.4.1"
@@ -2604,10 +2653,22 @@ version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"

+[[package]]
+name = "tar"
+version = "0.4.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb797dad5fb5b76fcf519e702f4a589483b5ef06567f160c392832c1f5e44909"
+dependencies = [
+ "filetime",
+ "libc",
+ "xattr",
+]
+
 [[package]]
 name = "tectonic"
 version = "0.0.0-dev.0"
 dependencies = [
+ "anyhow",
  "byte-unit",
  "cfg-if",
  "clap",
@@ -2626,8 +2687,10 @@ dependencies = [
  "md-5",
  "open",
  "quick-xml",
+ "regex",
  "serde",
  "sha2",
+ "tar",
  "tectonic_bridge_core",
  "tectonic_bundles",
  "tectonic_docmodel",
@@ -2646,7 +2709,10 @@ dependencies = [
  "time",
  "tokio",
  "toml",
+ "tracing",
+ "tracing-subscriber",
  "url",
+ "walkdir",
  "watchexec",
  "watchexec-filterer-globset",
  "watchexec-signals",
@@ -2716,6 +2782,7 @@ dependencies = [
  "tectonic_geturl",
  "tectonic_io_base",
  "tectonic_status_base",
  "url",
+ "zip",
 ]

@@ -2943,6 +3010,16 @@ dependencies = [
  "syn 2.0.52",
 ]

+[[package]]
+name = "thread_local"
+version = "1.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+]
+
 [[package]]
 name = "time"
 version = "0.3.36"
@@ -3136,6 +3213,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
 dependencies = [
  "once_cell",
+ "valuable",
 ]

+[[package]]
+name = "tracing-log"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
+dependencies = [
+ "log",
+ "once_cell",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
+dependencies = [
+ "matchers",
+ "nu-ansi-term",
+ "once_cell",
+ "regex",
+ "sharded-slab",
+ "smallvec",
+ "thread_local",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+]
+
 [[package]]
@@ -3268,6 +3375,12 @@ version = "1.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a"

+[[package]]
+name = "valuable"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+
 [[package]]
 name = "vcpkg"
 version = "0.2.15"
@@ -3749,6 +3862,17 @@ dependencies = [
  "tap",
 ]

+[[package]]
+name = "xattr"
+version = "1.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f"
+dependencies = [
+ "libc",
+ "linux-raw-sys",
+ "rustix",
+]
+
 [[package]]
 name = "xdg"
 version = "2.5.2"

Cargo.toml (46 changes)

@@ -16,7 +16,13 @@ documentation = "https://docs.rs/tectonic"
 repository = "https://github.com/tectonic-typesetting/tectonic/"
 readme = "CARGO_README.md"
 keywords = ["tex", "latex", "typesetting", "font"]
-categories = ["command-line-interface", "parser-implementations", "rendering", "science", "text-processing"]
+categories = [
+    "command-line-interface",
+    "parser-implementations",
+    "rendering",
+    "science",
+    "text-processing",
+]
 license = "MIT"
 edition = "2018"
 exclude = ["/dist/", "/reference_sources/"]
@@ -96,6 +102,12 @@ watchexec-supervisor = "1.0"
 zip = { version = "^0.6", default-features = false, features = ["deflate"] }
 time = "0.3.36"
 clap_complete = "4.5.1"
+walkdir = "2"
+regex = "1.10.2"
+anyhow = "1.0.80"
+tar = "0.4.40"
+tracing = "0.1"
+tracing-subscriber = { version = "0.3", features = ["env-filter"] }

 [features]
 default = ["geturl-reqwest", "serialization"]
@@ -113,7 +125,10 @@ external-harfbuzz = ["tectonic_engine_xetex/external-harfbuzz"]
 geturl-curl = ["tectonic_bundles/geturl-curl", "tectonic_geturl/curl"]
 geturl-reqwest = ["tectonic_bundles/geturl-reqwest", "tectonic_geturl/reqwest"]

-native-tls-vendored = ["tectonic_bundles/native-tls-vendored", "tectonic_geturl/native-tls-vendored"]
+native-tls-vendored = [
+    "tectonic_bundles/native-tls-vendored",
+    "tectonic_geturl/native-tls-vendored",
+]

 # developer feature to compile with the necessary flags for profiling tectonic.
 profile = []
@@ -124,7 +139,12 @@ futures = "0.3"
 headers = "0.4"
 http-body-util = "0.1.0"
 hyper = { version = "1.0.0", features = ["server", "http1", "http2"] }
-hyper-util = { version = "0.1", features = ["server", "http1", "http2", "tokio"] }
+hyper-util = { version = "0.1", features = [
+    "server",
+    "http1",
+    "http2",
+    "tokio",
+] }
 tempfile = "^3.1"

 [package.metadata.vcpkg]
@@ -137,9 +157,23 @@ overlay-triplets-path = "dist/vcpkg-triplets"
 # guidance if they might need to set $VCPKGRS_TRIPLET.
 [package.metadata.vcpkg.target]
 x86_64-apple-darwin = { install = ["freetype", "harfbuzz[graphite2]", "icu"] }
-aarch64-apple-darwin = { triplet = "arm64-osx", install = ["freetype", "harfbuzz[graphite2]", "icu"] }
-x86_64-unknown-linux-gnu = { install = ["fontconfig", "freetype", "harfbuzz[graphite2]", "icu"] }
-x86_64-pc-windows-msvc = { triplet = "x64-windows-static-release", install = ["fontconfig", "freetype", "harfbuzz[graphite2]", "icu"] }
+aarch64-apple-darwin = { triplet = "arm64-osx", install = [
+    "freetype",
+    "harfbuzz[graphite2]",
+    "icu",
+] }
+x86_64-unknown-linux-gnu = { install = [
+    "fontconfig",
+    "freetype",
+    "harfbuzz[graphite2]",
+    "icu",
+] }
+x86_64-pc-windows-msvc = { triplet = "x64-windows-static-release", install = [
+    "fontconfig",
+    "freetype",
+    "harfbuzz[graphite2]",
+    "icu",
+] }

 [package.metadata.internal_dep_versions]
 tectonic_bridge_core = "thiscommit:2023-06-11:PvhF7YB"

@@ -0,0 +1,58 @@
# Tectonic Bundles

This repository contains tools for building bundles for [Tectonic](https://tectonic-typesetting.github.io).
You should only need this if you're producing your own bundles. If you're just using Tectonic to compile LaTeX,
you should use the pre-generated bundles we distribute.


## Prerequisites
To use these tools, you will need:
- GNU `patch`. Patch is called by `tectonic bundle create`.
- A [TeXlive tarball](https://tug.org/texlive/acquire-tar.html).

The following bundles are available:
- [`texlive2023`](./bundles/texlive2023): based on `texlive2023-20230313`.


## Build Process
Before building any bundles, you'll need to download the prerequisite files.
Usually, this is a [TeXlive tarball](https://tug.org/texlive/acquire-tar.html) with a version that matches the bundle you want to build.
See `bundle.toml` in the bundle you want to build.


To build a bundle, run the following:
- `cd bundles`
- `tectonic -X bundle create --build-dir ./build texlive2023/bundle.toml v1`

This runs the following jobs, in order. Individual jobs may be run by specifying `--job <job name>`.
- `select`
- `pack`

The contents of `<build dir>/content` may be inspected and edited after running `select`; this should only be used to debug bundles.


## Extra Documentation
- Each directory in [`./bundles`](./bundles/) is a bundle specification, documented [here](./bundles/README.md).
- Only one bundle format is currently supported; it is described in [`./format-v1.md`](./format-v1.md).
- This repository includes legacy bundle [tests](./tests/README.md), which may be broken.


## Output files

The files that `tectonic bundle create` produces are listed below:
- `./build/output/<bundle>/content`: contains all bundle files. It is organized by source: files from the bundle's `include` dir will be under `./include`, texlive files will be under `./texlive`, and so on. See `builder/src/select.rs`.
  This directory also contains some metadata:
  - `content/FILELIST`: each line of this file is `<path> <hash>`, sorted by file name.\
    Files with identical names are included.\
    Files not in any search path are also included.\
    `<hash>` is either a hex sha256 of that file's contents, or `nohash` for a few special files.
  - `content/SHA256SUM`: The sha256sum of `content/FILELIST`. This string uniquely defines this bundle.
  - `content/SEARCH`: File search order for this bundle. See bundle spec documentation.
  - `search-report`: debug file. Lists all directories that will not be searched by the rules in `search-order`.\
    The entries in this file are non-recursive: If `search-report` contains a line with `/texlive`, this means that direct children of `/texlive` (like `/texlive/file.tex`) will not be found, but files in *subdirectories* (like `/texlive/tex/file.tex`) may be.

**Final output files are listed below:**
- `<bundle>.ttb`: the bundle. Note that the ttb version is *not* included in the extension.
  - Index location and length are printed once this job completes.
  - You can extract files from this bundle by running `dd if=file.ttb ibs=1 skip=<start> count=<len> | gunzip`

@@ -0,0 +1,3 @@
# Ignore build files & resources
/build
*.tar
@@ -0,0 +1,166 @@
# Bundle Specification

Every directory in this dir defines a Tectonic bundle.
The main configuration file is `bundle.toml`; its contents are documented below.

## Overview

```toml
[bundle]
# This bundle's name
name = "texlive2023"
# This bundle's expected final SHA256 hash. See the bundle specification to learn how it's generated.
# If this is left empty, the bundle hash will be printed once the bundle is built.
expected_hash = "c1bbb5f8498d2bb5471cc2b0790700ecb86fc992ec290f7eb718d8751f8ada2a"

# Bundle search order. This tells Tectonic how to search for files.
# The global `search_order` may have two kinds of values:
# - plain strings (which follow the usual search order rules)
# - `{ input = "" }` entries, which specify an input's search order.
# If an input is not listed here, it will NOT be searched!
# (unless one specifies "//", which is a bad idea.)
#
# This is used to generate SEARCH in the bundle.
search_order = ["/", { input = "include" }, { input = "texlive" }]

# Note the triple-quoted multiline strings:
# this is the best way to get raw strings in TOML.
ignore = [
    # Files and extensions we want to ignore.
    # These will be applied to ALL inputs.
    # If a file's relative path matches any of these patterns,
    # that file will be excluded from the bundle.
    '''.*/LICENSE\.md''',
    '''.*/Makefile''',
    '''.*/README''',
    '''.*/README.md''',
    '''.*/readme\.txt'''
]


# A simple directory input, with `path` relative to this toml file.
[inputs."include"]
source.dir.path = "include"


# A tarball input, usually used to add TeXlive files.
#
# Note that this MUST be a .tar file.
# You'll likely download a compressed tar file. Extract it.
#
# It's a good idea to add a comment with the TeXlive version
# and url of this file, so that others may find it.
[inputs."texlive"]

# Patch directory for this input. Optional.
# This should be a directory of `.diff` files in unified format,
# the first line of which specifies the path (relative to `root_dir` below)
# of the file that diff should be applied to.
#
# To make a patch file, you should...
# - Copy the original file and apply your changes.
# - Run `diff "original-file" "modified-file" > file.diff`. ORDER MATTERS!
# - Add **one** new line to the top of `file.diff` containing a path to the file this diff should be applied to. This path should be relative to the bundle's content dir, as shown below.
# - Place `file.diff` anywhere in your bundle's include dir. The file selection script should find and apply it.
#
# The line at the top is essential and must be added manually.
# We can't do without it, since we may have many files with the same name.
#
# Also note that the brace decorations used in `search_order` may also be used in this first line.
# For example, a patch marked `tex/{latex,latex-dev}/base/latex.ltx` will be applied to `latex.ltx` in both
# `texlive/tex/latex` and `texlive/tex/latex-dev`. This will only work if those files are identical.
patch_dir = "patches/texlive"


# Path to the tarball, relative to this toml file's parent directory.
source.tarball.path = "texlive-20230313-texmf.tar"

# Compute this hash by running `sha256 -b file.tar`
source.tarball.hash = "ac1683d4abeb7fd534851ad7ff0ec891da7da4729603506efd0245259dcdcc67"

# The directory inside this tarball to add. Optional.
# All paths below are relative to this.
source.tarball.root_dir = "texlive-20230313-texmf/texmf-dist"

# Regex ignore patterns. Any file whose path matches any of these patterns will not be added to the bundle.
# These are relative to `root_dir` and do NOT start with a slash.
ignore = [
    '''tex/luatex/.*''',
    '''tex/lualatex/.*'''
]


# Search order of this input.
# This is optional; omitting `search_order` is equivalent
# to setting `search_order = [ "//" ]`.
#
# As always, these paths are relative to `root_dir` and do NOT start with a slash.
#
# Lines may be decorated with braces: `/a/{b,c}/` will become `/a/b` and `/a/c`, in that order.
# (a short sketch of this expansion in code appears right after this example block)
# - Brace decorations may not be nested.
# - Paths may not contain braces. Escaping with `\{` will not work.
# - Multiple brace decorations in one line are allowed:
#   `/{a,b}/{1,2}` expands to `/a/1`, `/a/2`, `/b/1`, `/b/2`, in that order.
#
# Just like kpathsea search paths, each search pattern can end with one or two slashes.
# - If a line ends with two slashes (like `texlive/tex/latex//`), it will match all subdirectories of that path.
# - If a line ends with one slash (like `texlive/tex/latex/`), it will match only direct children of that path:
#   `texlive/tex/latex/a.tex` will be searched, `texlive/tex/latex/base/a.tex` will not.
# - If a line does not end with a slash, we pretend it ends with one.
# - If a line ends with three or more slashes, it won't be searched at all. Don't do that.
#
# This scheme lets us override the default "alphabetic depth-first search" by adding search paths as follows,
# which will look for direct children of `latex` before descending into subdirectories:
# ```
# texlive/tex/latex/
# texlive/tex/latex//
# ```
search_order = [
    "tex/{xelatex,latex,xetex,plain,generic}//",
    "bibtex/{bib,bst,csf}//",
    "web2c//",
    "fonts//",
    "biber//",
    "mft//",
    "dvips//",
    "makeindex//",
    "{web,cweb}//",
    "ttf2pk//",
    "dvipdfmx/",
]

```
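
The brace-decoration rules above are compact, so here is a minimal Rust sketch of the expansion they describe. This is only an illustration under the rules stated in the comments (no nesting, no escaping); `expand_braces` is an invented name, not Tectonic's implementation:

```rust
// Expand the first `{a,b,...}` group in `line`, then recurse on each result.
// Lines without braces expand to themselves.
fn expand_braces(line: &str) -> Vec<String> {
    if let (Some(open), Some(close)) = (line.find('{'), line.find('}')) {
        let mut out = Vec::new();
        for option in line[open + 1..close].split(',') {
            let candidate = format!("{}{}{}", &line[..open], option, &line[close + 1..]);
            out.extend(expand_braces(&candidate));
        }
        out
    } else {
        vec![line.to_string()]
    }
}

fn main() {
    // Matches the ordering described above: /a/1, /a/2, /b/1, /b/2.
    assert_eq!(
        expand_braces("/{a,b}/{1,2}"),
        ["/a/1", "/a/2", "/b/1", "/b/2"]
    );
}
```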

## Extra details: finding files

### Overview
Any TeX distribution needs a way to find files. This is necessary because files are usually included only by name: `\include{file}`, `\usepackage{package}`, etc. Where do we find `file.tex` and `package.sty`?

In a conventional TeXLive installation, kpathsea solves this problem. It defines an array of "search paths," and walks through them when you ask for a file. You can find an overview [here](https://www.overleaf.com/learn/latex/Articles/An_introduction_to_Kpathsea_and_how_TeX_engines_search_for_files) and more detailed information in the kpathsea docs.

Tectonic's supporting files are distributed in bundles, so we can't use the same approach.
Within Tectonic's *bundles*[^1], we use FILELIST and SEARCH files to map a filename to an input path. Note that this logic is implemented in Tectonic, not in the bundle build script.

[^1]: Tectonic searches for files on your disk separately. The information in this file only applies to bundles. I won't document this fully here; you'll have to read the Tectonic docs and source code.

- **Case 1:** Tectonic looks for `file.tex` and finds one path in `FILELIST`\
  Nothing fancy here, we just use the file we found.

- **Case 2:** Tectonic looks for `partial/path/to/file.tex`\
  This is an edge case caused by some packages (for example, `fithesis`). To handle this,
  we first find `file.tex` in `FILELIST` and look at its path. If its path ends with `partial/path/to/file.tex`, we use it;
  if it doesn't, we don't. If multiple files match, we print an error; that shouldn't ever happen.

- **Case 3:** Tectonic looks for `file.tex` and finds multiple paths in `FILELIST`\
  This is where things get interesting. First, we match all paths against each line of the bundle's `SEARCH` file with a simple `starts_with` (a sketch of this loop appears at the end of this section).
  - If *exactly one* path matches a certain line, we immediately stop checking and use that path. Search lines are ordered by priority, so if only one path matches the first line, it *must* be the right path to use.
  - If multiple paths match a certain line, we discard all others and resolve the conflict alphabetically.
  - If we've checked all lines of `SEARCH` and found no matches, we didn't find the file. Return an error.

"Resolving the conflict alphabetically" means we sort the paths in alphabetical order and pick the first. This emulates an alphabetically-ordered depth-first search on the file tree, which is a reasonable default.

Any filename conflicts which would be resolved alphabetically are listed in `search-report` after the `select` build step. These aren't errors, but we should look over that file to make sure everything is working as expected.
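To summarize Case 3 in code, here is a minimal Rust sketch of the resolution loop described above. It is not Tectonic's actual code; `resolve` is an invented name, `search` is assumed to hold the SEARCH lines in priority order, and `candidates` the FILELIST paths whose filename matched:

```rust
fn resolve<'a>(search: &[String], candidates: &[&'a str]) -> Option<&'a str> {
    for line in search {
        let mut matches: Vec<&str> = candidates
            .iter()
            .copied()
            .filter(|path| path.starts_with(line.as_str()))
            .collect();
        match matches.len() {
            0 => continue,                // nothing under this search line; try the next
            1 => return Some(matches[0]), // exactly one match: this must be the right file
            _ => {
                // Several matches: discard all other search lines and
                // resolve the conflict alphabetically.
                matches.sort();
                return Some(matches[0]);
            }
        }
    }
    None // no search line matched: the file is not findable in this bundle
}
```
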
@@ -0,0 +1,89 @@
[bundle]
name = "texlive2023"
expected_hash = "e2571849ee65f1c8cb6dc7e433a1ae6b97b47eb24d27074cd485ff2bb87a79ab"

search_order = ["/", { input = "include" }, { input = "texlive" }]

# Note the triple-quoted multiline strings:
# this is the best way to get raw strings in TOML.
ignore = [
    # Files and extensions we usually want to ignore.
    '''.*/00readme\.txt''',
    '''.*/LICENSE\.md''',
    '''.*/Makefile''',
    '''.*/README''',
    '''.*/README.md''',
    '''.*/readme\.txt''',
    '''.*/ls-R''',
    '''.*\.fmt''',
    '''.*\.log''',
    '''.*\.lua''',
    '''.*\.mf''',
    '''.*\.pl''',
    '''.*\.ps''',
]


[inputs."include"]
source.dir.path = "include"


# Requires texlive-20230313-texmf, which is available at
# https://ftp.math.utah.edu/pub/tex/historic/systems/texlive/2023/texlive-20230313-texmf.tar.xz
# Download and extract this file as a .tar in this directory.
[inputs."texlive"]
source.tarball.path = "texlive-20230313-texmf.tar"
source.tarball.hash = "ac1683d4abeb7fd534851ad7ff0ec891da7da4729603506efd0245259dcdcc67"
source.tarball.root_dir = "texlive-20230313-texmf/texmf-dist"
patch_dir = "patches/texlive"

ignore = [
    # I don't think tectonic has xindy support; ignore for now.
    '''xindy/.*''',

    # We may need this, but exclude for now.
    '''tex4ht/.*''',

    # These require pLaTeX2e; tectonic uses XeTeX.
    '''tex/uplatex/.*''',
    '''tex/uptex/.*''',

    # Other odd tex formats & unnecessary files
    '''tex/cslatex/.*''',
    '''tex/csplain/.*''',
    '''tex/ptex/.*''',
    '''tex/platex/.*''',
    '''tex/lollipop/.*''',
    '''tex/context/.*''',
    '''context/.*''',
    '''texdoc/.*''',
    '''texdoctk/.*''',
    '''texconfig/.*''',
    '''scripts/.*''',
    '''dvips/.*''',
    '''asymptote/.*''',
    '''makeindex/.*''',
    '''luatex-cache/.*''',
    '''hb2gf/.*''',
    '''chktex/.*''',
    '''source/.*''',
    '''doc/.*''',
    '''tex/luatex/.*''',
    '''tex/lualatex/.*''',
    '''tex/lambda/.*''',
    '''omega/.*''',
]

search_order = [
    "tex/{xelatex,latex,xetex,plain,generic}//",
    "bibtex/{bib,bst,csf}//",
    "web2c//",
    "fonts//",
    "biber//",
    "mft//",
    "dvips//",
    "makeindex//",
    "{web,cweb}//",
    "ttf2pk//",
    "dvipdfmx/",
]

@@ -0,0 +1 @@
\input xelatex.ini

@@ -0,0 +1 @@
\input plain \dump

@@ -0,0 +1,24 @@
tex/latex/fithesis/style/mu/fithesis-mu-base.sty
131c131,138
< \setmainfont[Ligatures=TeX]{TeX Gyre Pagella}
---
> \setmainfont{texgyrepagella}[
> Ligatures = TeX,
> Extension = .otf,
> UprightFont = *-regular,
> ItalicFont = *-italic,
> BoldFont = *-bold,
> BoldItalicFont = *-bolditalic,
> ]
136c143,151
< \setsansfont[Ligatures=TeX,Scale=MatchLowercase]{TeX Gyre Heros}
---
> \setmainfont{texgyreheros}[
> Ligatures = TeX,
> Scale = MatchLowercase,
> Extension = .otf,
> UprightFont = *-regular,
> ItalicFont = *-italic,
> BoldFont = *-bold,
> BoldItalicFont = *-bolditalic,
> ]

@@ -0,0 +1,5 @@
tex/latex/fontawesome/fontawesome.sty
45c45
< \newfontfamily{\FA}{FontAwesome}
---
> \newfontfamily{\FA}{FontAwesome.otf}

@@ -0,0 +1,32 @@
tex/{latex,latex-dev}/base/latex.ltx
7211,7237c7211,7212
< \typeout{^^J! LaTeX Error: File `#1.#2' not found.^^J^^J%
< Type X to quit or <RETURN> to proceed,^^J%
< or enter new name. (Default extension: #2)^^J}%
< \message{Enter file name: }%
< {\endlinechar\m@ne
< \global\read\m@ne to\@gtempa}%
< \ifx\@gtempa\@empty
< \let\@missingfile@area\@empty
< \let\@missingfile@base\@empty
< \def\@missingfile@ext{tex}%
< \else
< \def\reserved@b{\batchmode\read-1 to \reserved@a}%
< \def\reserved@a{x}\ifx\reserved@a\@gtempa\reserved@b\fi
< \def\reserved@a{X}\ifx\reserved@a\@gtempa\reserved@b\fi
< \filename@parse\@gtempa
< \edef\filename@ext{%
< \ifx\filename@ext\relax#2\else\filename@ext\fi}%
< \edef\reserved@a{%
< \noexpand\IfFileExists
< {\filename@area\filename@base.\filename@ext}%
< {\def\noexpand\@missingfile@area{\filename@area}%
< \def\noexpand\@missingfile@base{\filename@base}%
< \def\noexpand\@missingfile@ext {\filename@ext}}%
< {\noexpand\@missingfileerror
< {\filename@area\filename@base}{\filename@ext}}}%
< \reserved@a
< \fi
---
> % Tectonic: no terminal input allowed, so this is always a fatal error.
> \errmessage{! LaTeX Error: File `#1.#2' not found.}%

@@ -0,0 +1,25 @@
tex/latex/listings/listings.sty
2057,2075c2057,2059
< \typeout{^^J! Package Listings Error: File `#1(.#2)' not found.^^J%
< ^^JType X to quit or <RETURN> to proceed,^^J%
< or enter new name. (Default extension: #2)^^J}%
< \message{Enter file name: }%
< {\endlinechar\m@ne \global\read\m@ne to\@gtempa}%
< \ifx\@gtempa\@empty \else
< \def\reserved@a{x}\ifx\reserved@a\@gtempa\batchmode\@@end\fi
< \def\reserved@a{X}\ifx\reserved@a\@gtempa\batchmode\@@end\fi
< \filename@parse\@gtempa
< \edef\filename@ext{%
< \ifx\filename@ext\relax#2\else\filename@ext\fi}%
< \edef\reserved@a{\noexpand\IfFileExists %
< {\filename@area\filename@base.\filename@ext}%
< {\noexpand\lst@InputListing %
< {\filename@area\filename@base.\filename@ext}}%
< {\noexpand\lst@MissingFileError
< {\filename@area\filename@base}{\filename@ext}}}%
< \expandafter\reserved@a %
< \fi}
---
> % Tectonic: no terminal input allowed, so this is always a fatal error.
> \errmessage{! Package Listings Error: File `#1(.#2)' not found.}%
> }

@@ -0,0 +1,63 @@
# Tectonic Bundle Format V1 Specification

A single TTBv1 bundle may be used on the network or from a local filesystem. This bundle format contains two parts:
- A 66-byte header, documented below
- File data, a concatenated blob of gzipped files. One of these blobs is the bundle index.

Note that the extension for a Tectonic bundle is `ttb`, regardless of its version. Also note that a bundle's hash depends on its content: if we generate two bundles in different formats from one specification, the resulting bundle hashes should be identical.


### Header format
A TTBv1 header consists of the following fields, in order.
All numbers are stored with little-endian byte ordering.

- `14 bytes`: magic bytes. Always `tectonicbundle`, in any ttb version.
- ` 4 bytes`: bundle version, a `u32`. In this case, always 1.
- ` 8 bytes`: index location, a `u64`. This is the first byte of the bundle index file.
- ` 4 bytes`: gzipped index length, a `u32`. This is the length of the bundle index file.
- ` 4 bytes`: true index length, a `u32`. This is the decompressed length of the bundle index file.
- `32 bytes`: this bundle's hash.
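
As a cross-check of the layout above (14 + 4 + 8 + 4 + 4 + 32 = 66 bytes), here is a minimal Rust sketch that parses the header from a raw buffer. The struct and function names are invented for illustration; this is not Tectonic's code:

```rust
// All integers below are little-endian, per the spec above.
struct TtbV1Header {
    version: u32,        // always 1 for TTBv1
    index_start: u64,    // offset of the first byte of the gzipped index
    index_gzip_len: u32, // compressed index length
    index_real_len: u32, // decompressed index length
    hash: [u8; 32],      // this bundle's hash
}

fn parse_header(buf: &[u8; 66]) -> Result<TtbV1Header, &'static str> {
    if &buf[0..14] != b"tectonicbundle" {
        return Err("bad magic bytes");
    }
    Ok(TtbV1Header {
        version: u32::from_le_bytes(buf[14..18].try_into().unwrap()),
        index_start: u64::from_le_bytes(buf[18..26].try_into().unwrap()),
        index_gzip_len: u32::from_le_bytes(buf[26..30].try_into().unwrap()),
        index_real_len: u32::from_le_bytes(buf[30..34].try_into().unwrap()),
        hash: buf[34..66].try_into().unwrap(),
    })
}
```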

### Index
Bundle contents are stored as concatenated `gzip` blobs after the header. These are found using a special file called the index, whose location is stored in the header. The index is generated from the "meta-files" that the file selector produces, namely `FILELIST` and `SEARCH`. These are included in the bundle for consistency, but shouldn't ever be used.

The index may be retrieved from a bundle by running `dd if=file.ttb ibs=1 skip=<start> count=<len> | gunzip`.
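
The same extraction can be sketched in Rust. This assumes the `flate2` crate for gzip decoding (an assumption for illustration, not a statement about Tectonic's dependencies); `start` and `gzip_len` come from the header fields above:

```rust
use std::io::{Read, Seek, SeekFrom};

fn read_index(mut ttb: std::fs::File, start: u64, gzip_len: u32) -> std::io::Result<String> {
    // Seek to the first byte of the gzipped index, then read exactly
    // `gzip_len` compressed bytes through a gzip decoder.
    ttb.seek(SeekFrom::Start(start))?;
    let compressed = ttb.take(gzip_len as u64);
    let mut index = String::new();
    flate2::read::GzDecoder::new(compressed).read_to_string(&mut index)?;
    Ok(index)
}
```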

The index file comes in sections, each of which starts on a line marked with square braces. The following sections are currently used; all others are ignored.

- `[DEFAULTSEARCH]`: the default search order.
- `[SEARCH:<name>]`: a search specification. Tectonic will search these paths for files, in this order. See the [bundle spec documentation](../../bundles/README.md).
- `[FILELIST]`: a list of files in this bundle.
  - Each line contains the following: `<start_byte> <gzip_len> <real_len> <hash> <path>`
  - `<hash>` is either a sha256 hash, or `nohash` for certain special files.
  - `<start_byte>` and `<gzip_len>` are the location and length of files in the bundle.
  - `<real_len>` is the decompressed length of each file, used for efficient memory allocation.
  - `<path>` is the file's path. This is relative and doesn't start with a slash. It is intentionally last: paths may contain spaces (but not newlines!).
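
Because `<path>` comes last, a parser can split off exactly four leading fields and keep the remainder of the line verbatim, spaces and all. A minimal sketch, with invented names:

```rust
struct IndexEntry<'a> {
    start_byte: u64,
    gzip_len: u64,
    real_len: u64,
    hash: Option<&'a str>, // None when the field is the literal "nohash"
    path: &'a str,
}

fn parse_filelist_line(line: &str) -> Option<IndexEntry<'_>> {
    // splitn(5, ..) yields at most five pieces; the fifth keeps its spaces.
    let mut parts = line.splitn(5, ' ');
    let start_byte = parts.next()?.parse().ok()?;
    let gzip_len = parts.next()?.parse().ok()?;
    let real_len = parts.next()?.parse().ok()?;
    let hash = match parts.next()? {
        "nohash" => None,
        h => Some(h),
    };
    let path = parts.next()?;
    Some(IndexEntry { start_byte, gzip_len, real_len, hash, path })
}
```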

This index is generated by a script, so *very little error-checking is done inside Tectonic*. Keep the following in mind:
- Empty lines should not exist
- Any lines before a section specification are ignored
- Invalid sections are ignored
- Only the last line of `[DEFAULTSEARCH]` has any effect
- We assume that the default `[SEARCH:<name>]` exists.

An example index is below.
```
[DEFAULTSEARCH]
MAIN
[SEARCH:MAIN]
/
/include//
/texlive/tex/xelatex//
/texlive/tex/latex//
/texlive/tex/xetex//
/texlive/tex/plain//
[FILELIST]
70 6065990 17331559 nohash FILELIST
6066060 133 360 589b5c09a33c2655c76f9a6b9bbb6060674c896fbc5b4555af0e20c86f32ac13 SEARCH
6066193 72 65 nohash SHA256SUM
6066265 39 19 86d8d12cfdfbe74a81b9df7c5b38a766303a772d57e7cb0d228e1e7b9294cf34 include/tectonic/tectonic-format-latex.tex
... many more lines ...
```

@@ -0,0 +1,29 @@
# Testing Bundles
These are a work in progress, and may be broken.
All tests are run through `test.sh` as follows: `./test.sh <path-to-ttb> <test set>`.

Tests require the following:
- a `ttb` bundle (local or remote)
- a recent installation of Tectonic

## Test Sets
The following test sets are available:
- `files`, which tries to compile all files under `tests/files` and `tests/formats`
- `classes`, which tries to compile a simple document using `tests/classes.list`

Note that most test files contain comments explaining the reason and expected outcome of the test.


## Test Output
All test output ends up under `tests/build`.

**Output for `files`:**
- `files/logs`: log files for all builds (passed or failed)
- `files/*.{pdf,fmt,etc}`: output files for each build

**Output for `classes`:**
- `failed`: classes that failed to compile
- `passed`: classes that compiled without error
- `logs`: log files for all compile jobs

@@ -0,0 +1,628 @@
BHCexam xfail
BMSTU-IU8 xfail
ConcProg ok
FUpowerdot xfail
IEEEconf ok
IEEEtran ok
IMTEKda xfail
ReadableCV xfail
RecipeBook ok
SPhdThesis ok
TOPletter xfail
URbeamer xfail
URletter xfail
UoWthesis ok
a0poster ok
aalok xfail
aastex ok
aastex61 ok
aastex62 ok
aastex63 ok
aastex631 ok
abntex2 ok
abstbook ok
achemso ok,titleauth
acmart ok
acmconf xfail
active-conf ok
adfathesis ok
afparticle ok,titleauth
afthesis ok
agecon ok
aguplus xfail
aiaa-tc ok
ajae ok
akklecture ok,titleauth
akkscript ok
akktecdoc ok
akletter ok
ametsoc ok
amsart ok
amsbook ok
amsdtx ok
amsldoc ok
amsproc ok
aomart ok
apa ok
apa6 xfail - looks unmaintained; uses known option to flushend package
apa6e xfail
apa7 ok
apecon ok
arabart ok
arabbook ok
arabic-book xfail
arabrep ok
arabrep1 xfail
argetabelle ok
article ok
articleingud ok
articoletteracdp ok
artikel1 ok
artikel2 ok
artikel3 ok
asaetr ok
ascelike ok
asmeconf xfail
asmejour xfail - typo breaks build on XeTeX
assignment ok
aucklandthesis ok
bangorcsthesis xfail
bangorexam xfail
bankstatement ok
barticle xfail
basque-book ok
bbook xfail
beamer ok
beamer-rl xfail
beamerswitch xfail
beaulivre ok
beilstein ok
beletter ok
bewerbung ok
bfhbeamer xfail
bfhpub xfail
bfhsciposter xfail
bfhthesis xfail
bgteubner xfail
bidimoderncv xfail
bidipresentation ok
biditufte-book ok
biditufte-handout ok
bitart xfail
bitbook xfail
bjfuthesis xfail
bletter ok
bmstu xfail
boek ok
boek3 ok
book ok
bookcover ok
bookest ok
bookshelf xfail
br-lex ok
brandeis-dissertation ok
brandeis-problemset xfail
brandeis-thesis xfail
brief ok
buctcover xfail
buctthesis xfail
businesscard-qrcode ok
bxjsarticle ok
bxjsbook ok
bxjsreport ok
bxjsslide ok
caesar_book ok
cas-dc xfail
cas-sc xfail
cascadilla xfail
cassete ok
cc xfail
cd ok
cd-cover ok
cesenaexam xfail
cheatsheet ok
chletter ok
cje xfail
cnbwp xfail
cnltx-doc xfail
codedoc ok
colorart ok
colorbook ok
combine xfail
confproc xfail
contracard ok
cours ok
courseoutline ok
coursepaper ok
cquthesis xfail
csbulletin ok
csbulobalka xfail
csbulv1 xfail
ctexart ok
ctexbeamer ok
ctexbook ok
ctexrep ok
ctxdoc ok
curve ok
cv4tw xfail
cweb xfail
dfgproposal xfail
dfgreporting xfail
dinbrief ok
disser xfail
dithesis xfail
document-structure xfail
droit-fr xfail
dtk ok
dvdcoll ok
easybook ok
ebsthesis ok
ecca ok
ecv xfail
einfart ok
ejpecp xfail
elbioimp ok
elegantbook ok
elegantnote ok
elegantpaper ok
elpres ok
elsarticle ok
elteikthesis ok
emisa xfail
emulateapj ok
erae ok
erdc xfail
eskd xfail
eskdgraph xfail
eskdtab xfail
eskdtext xfail
estcpmm ok
etiketka ok
euproposal xfail
eureporting xfail
europasscv xfail
europecv ok
europroc ok
exam ok
exam-n ok
examdesign xfail
exesheet ok
extarticle ok
extbook ok
extletter ok
extproc ok
extreport ok
facsimile ok
factura xfail
facture xfail
fancyhandout ok
fancyslides ok
fbithesis xfail
fcavtex ok
fdudoc xfail
fduthesis xfail
fduthesis-en xfail
fei xfail
ffslides xfail
fiche ok
fithesis ok
fithesis2 ok
fithesis3 ok
fithesis4 ok
flacards ok
flashcard xfail
flashcards xfail
frletter ok
fsbispit xfail
g-brief ok
g-brief2 ok
gaceta ok
gammas xfail
geradwp ok
gmdocc ok
gost732 xfail
gradstudentresume xfail
grant xfail
grant-afosr xfail
grant-aro xfail
grant-darpa xfail
grant-doe xfail
grant-nih xfail
grant-nrl xfail
grant-nsf xfail
grant-onr xfail
graphpaper ok
gridslides ok
gsemthesis ok
guitartabs ok
gzt xfail
gztarticle ok
h2020proposal ok
harnon-cv xfail
hausarbeit-jura xfail
hcart xfail
hcletter xfail
hcreport xfail
hcslides xfail
hecthese ok
hepthesis ok
hgbarticle xfail
hgbreport xfail
hgbthesis xfail
hitec ok,titleauth
hithesis xfail
hitreport ok
hitszthesis xfail
hletter ok
hpsdiss xfail
hu-berlin-letter xfail
huawei ok
hustthesis xfail
hwexam xfail
iagproc ok
icsv ok
idcc xfail
ijdc-v14 xfail
ijdc-v9 ok
ijmart ok
ijsra xfail
image-gallery ok
imsproc ok
inkpaper xfail
invoice-class ok
iodhbwm ok
iscram ok
isodoc xfail
isov2 ok
itaxpf ok
iwhdp xfail
jacow xfail
jarticle xfail
jbook xfail
jlreq xfail
jltxdoc xfail
jmlr ok
jmlrbook xfail
jnuexam ok
journal ok
jpsj2 ok
jreport xfail
jrurstud ok
jsarticle xfail
jsbook xfail
jspf xfail
jsreport xfail
jura ok
jurabook ok
juraovw ok
juraurtl ok
kdgcoursetext ok,titleauth
kdgmasterthesis ok,titleauth
kdpcover ok
kerntest xfail
kiyou xfail
kluwer ok
knittingpattern ok
komacv ok
ksp-thesis xfail
l3doc ok
labbook ok
langscibook xfail
leadsheet ok
leaflet ok
lebhart ok
lectures xfail
legislation xfail
letgut xfail
letter ok
letteracdp ok
lettre ok
limap ok
limecv xfail
lion-msc xfail
llncs ok
lni ok,titleauth
lps ok
lt3graph-packagedoc xfail
ltjarticle xfail
ltjbook xfail
ltjltxdoc xfail
ltjreport xfail
ltjsarticle xfail
ltjsbook xfail
ltjskiyou xfail
ltjspf xfail
ltjsreport xfail
ltjtarticle xfail
ltjtbook xfail
ltjtreport xfail
ltnews ok
ltugboat ok
ltugproc ok
ltxdoc ok
ltxdockit ok
ltxguide ok
ltxguidex ok
ltxmdf xfail
matapli xfail
matc3mem ok
mcmthesis ok
medstarbeamer xfail
meetingmins ok
memoir ok
mensa-tex ok
mentis xfail
metanorma ok
milog ok
minimal ok
minimart ok
minimbook ok
mla ok
mluexercise xfail
mnras ok
moderncv xfail
modernposter ok
movie ok
msu-thesis ok
mucproc xfail
mugsthesis ok
muling ok
musuos ok
muthesis ok
mwart ok
mwbk ok
mwrep ok
my-thesis ok
mycv ok
myletter ok
mynsfc ok
nanicolle xfail
nature ok
ncc ok
nccproc ok
nddiss2e xfail
ndsu-thesis ok
newlfm xfail
nih ok
nihbiosketch xfail
njf ok
njurepo xfail
njustthesis xfail
njuthesis ok
nlctdoc ok
nostarch xfail
notesslides ok
novel xfail
nrc1 xfail
nrc2 xfail
nwafuthesis xfail
nwejm xfail
nwejmart xfail - assumes font "Latin Modern Mono" is available
oblivoir ok
oblivoir-utf xfail
oblivoir-xl xfail
octavo ok
oegatb xfail
onrannual ok
oup-authoring-template ok
paper ok
papertex xfail
pbsheet xfail
pdfArticle xfail
pecha xfail
petiteannonce ok
phfextendedabstract ok
philosophersimprint ok
pittetd ok
pkuthss xfail
plari ok
play ok
plnews xfail
pocoec ok
postcards xfail
powerdot xfail
powersem xfail
ppr-prv xfail
pracjourn ok
pressrelease ok
proc ok
proposal xfail
prosper xfail
protocol ok
prtec ok
ptptex ok
qcm ok
quantumarticle xfail
qyxf-book xfail
rapport1 ok
rapport3 ok
rbt-mathnotes xfail
rbt-mathnotes-formula-sheet xfail
rbt-mathnotes-hw xfail
recipe ok
recipecard xfail
refart ok
refrep ok
regstud ok
report ok
reporting xfail
resphilosophica ok
resumecls ok
revtex4 ok
revtex4-1 ok
revtex4-2 ok
rtklage xfail
ryersonSGSThesis xfail
ryethesis xfail
sageep ok
sapthesis ok
schuleab ok
schulein ok
schuleit ok
schulekl ok
schuleub xfail
schuleue ok
schullsg ok
schullzk ok
schulma-ab ok
schulma-gutachten ok
schulma-klausur xfail
schulma-komp ok
schulma-mdlprf ok
schulma-praes ok
sciposter ok
scrartcl ok
scrarticle ok
scrbook ok
scrdoc ok
screenplay ok
scrguide xfail
scrjrnl xfail
scrletter ok
scrlttr2 ok
scrreport ok
scrreprt ok
sdapsclassic xfail
sduthesis ok
seminar xfail
semproc ok
sesamanuel xfail
seu-ml-assign xfail
seuthesix xfail
sffms ok,titleauth
shtthesis xfail
sibjnm xfail
sides ok
simplecv ok
simpleresumecv xfail
simplethesisdissertation xfail
simplivre ok
simurgh-doc xfail
skbarticle ok
skbbeamer ok
skbbook ok
skblncsbeamer xfail
skblncsppt ok
skbmoderncv xfail
skdoc xfail
skeyval-testclass xfail
skrapport xfail
slides ok
smfart ok
smfbook ok
source2edoc ok
spie ok
sr-vorl ok
sslides ok
stage ok
standalone ok
stex xfail
subfiles xfail
suftesi xfail
sugconf titleauth,xfail - uses inputenc
tabriz-thesis ok
talk ok
tarticle xfail
tbook xfail
tcldoc ok
tclldoc ok
technionThesis xfail
thesis-ekf ok
thesis-gwu xfail
thesis-qom xfail
third-rep xfail
thuthesis ok
tikz-kalender ok
tikzposter ok
tlc-article ok,titleauth
toptesi ok
treport xfail
tudabeamer xfail
tudaexercise ok
tudaleaflet ok
tudaletter xfail
tudaposter xfail
tudapub xfail
tudasciposter ok
tudscrartcl ok
tudscrbook ok
tudscrdoc ok
tudscrmanual xfail
tudscrposter ok
tudscrreprt ok
tufte-book ok
tufte-handout ok
tui xfail
turabian xfail
turabian-researchpaper ok
turabian-thesis ok
ua-thesis ok
uafthesis ok
uantwerpenbamathesis ok
uantwerpencoursetext ok
uantwerpenexam ok
uantwerpenletter ok
uantwerpenphdthesis ok
uantwerpenreport ok
ucalgmthesis ok
ucbthesis ok
ucdavisthesis ok
ucsmonograph ok
ucthesis ok
udesoftec xfail
uebungsblatt xfail
uestcthesis xfail
uhhassignment ok
uiucredborder ok
uiucthesis ok
ujarticle xfail
ujbook xfail
ujreport xfail
ulthese xfail
umich-thesis ok
umthesis ok
unam-thesis xfail
unbtex ok
unifith ok
unitn-bimrep xfail
univie-ling-expose ok
univie-ling-paper ok
univie-ling-thesis ok
univie-ling-wlg xfail
unizgklasa ok
uothesis ok
upmethodology-document xfail
upmgr ok
uspatent ok
usthesis xfail
ut-thesis ok
utarticle xfail
utbook xfail
utexasthesis ok
utreport xfail
uwa-pcf xfail
uwa-pif xfail
uwmslide ok
uwthesis ok
verifica ok
wallcalendar xfail
webquiz ok
willowtreebook xfail
withargs-packagedoc xfail
wkmgr ok
worlddev ok
wsemclassic xfail
xduthesis xfail
xebaposter xfail
xepersian-magazine ok
xmuthesis xfail
xoblivoir ok
xsim-manual xfail
yaletter xfail
yathesis ok
yazd-thesis ok
yb-book xfail
ycbook ok
ydoc xfail
york-thesis ok
zbMATH ok

@@ -0,0 +1,94 @@
\documentclass[12pt]{article}
\usepackage[inline]{asymptote}

% This test compiles without error, but
% the output pdf is missing graphics.


\title{2D Graphics with Asymptote}
\author{The Asymptote Project}


\begin{document}
\maketitle

\begin{asydef}
//
// Global Asymptote definitions can be put here.
//
usepackage("bm");
texpreamble("\def\V#1{\bm{#1}}");
\end{asydef}

Here is a venn diagram produced with Asymptote, drawn to width 4cm:

\def\A{A}
\def\B{\V{B}}

\begin{center}
\begin{asy}
size(4cm,0);
pen colour1=red;
pen colour2=green;

pair z0=(0,0);
pair z1=(-1,0);
pair z2=(1,0);
real r=1.5;
path c1=circle(z1,r);
path c2=circle(z2,r);
fill(c1,colour1);
fill(c2,colour2);

picture intersection=new picture;
fill(intersection,c1,colour1+colour2);
clip(intersection,c2);

add(intersection);

draw(c1);
draw(c2);

//draw("$\A$",box,z1); // Requires [inline] package option.
//draw(Label("$\B$","$B$"),box,z2); // Requires [inline] package option.
draw("$A$",box,z1);
draw("$\V{B}$",box,z2);

pair z=(0,-2);
real m=3;
margin BigMargin=Margin(0,m*dot(unit(z1-z),unit(z0-z)));

draw(Label("$A\cap B$",0),conj(z)--z0,Arrow,BigMargin);
draw(Label("$A\cup B$",0),z--z0,Arrow,BigMargin);
draw(z--z1,Arrow,Margin(0,m));
draw(z--z2,Arrow,Margin(0,m));

shipout(bbox(0.25cm));
\end{asy}
\end{center}

Here are some graphs. The figure is scaled to line width.
\begin{center}
\begin{asy}[width=\the\linewidth,inline=true]
pair z0=(0,0);
pair z1=(2,0);
pair z2=(5,0);
pair zf=z1+0.75*(z2-z1);

draw(z1--z2);
dot(z1,red+0.15cm);
dot(z2,darkgreen+0.3cm);
label("$m$",z1,1.2N,red);
label("$M$",z2,1.5N,darkgreen);
label("$\hat{\ }$",zf,0.2*S,fontsize(24pt)+blue);

pair s=-0.2*I;
draw("$x$",z0+s--z1+s,N,red,Arrows,Bars,PenMargins);
s=-0.5*I;
draw("$\bar{x}$",z0+s--zf+s,blue,Arrows,Bars,PenMargins);
s=-0.95*I;
draw("$X$",z0+s--z2+s,darkgreen,Arrows,Bars,PenMargins);
\end{asy}
\end{center}

\end{document}

@@ -0,0 +1,14 @@
\documentclass{article}

\usepackage{chessfss}
\usepackage{chessboard}
\usepackage{xskak}

\begin{document}

\begin{center}
\newchessgame
\chessboard[normalboard, showmover=false]
\end{center}

\end{document}

@@ -0,0 +1,10 @@
\documentclass{article}
\usepackage{fontawesome}

% Fontawesome was patched to fix font path.
% It should now look for fonts in the bundle
% rather than system font dirs.

\begin{document}
\faStar
\end{document}

@@ -0,0 +1,44 @@
\documentclass[
    digital,
    twoside,
    nolof,
    nolot
]{fithesis4}
\usepackage[main=english]{babel}

% The fithesis document class uses tex paths in many odd ways:
% - it provides many files with the same name
% - it loads these files, depending on configuration, with a partial path
%
% This behavior makes fithesis a good test case for
% Tectonic's file search algorithm.


\thesissetup{
    date = \the\year/\the\month/\the\day,
    university = mu,
    faculty = sci,
    type = bc,
    programme = NA,
    field = Sport Management,
    department = Department of Social Sciences and Sport Management,
    author = Jane Doe,
    gender = f,
    advisor = {Prof. RNDr. John Smith, CSc.},
    title = The use of LaTeX for the Typesetting
        of Sports Tables,
    TeXtitle = The use of \LaTeX\ for the Typesetting
        of Sports Tables,
    keywords = {keyword1, keywords2, ...},
    TeXkeywords = {keyword1, keywords2, \ldots},
    abstract = {%
        This is the abstract of my thesis, which can
        span multiple paragraphs.
    }
}

\begin{document}

test

\end{document}

@@ -0,0 +1,19 @@
\documentclass{article}

\usepackage{fontspec}
\setmainfont{TeX Gyre Pagella}

% Try to load a font by name.
% At the moment, this searches the system font cache,
% and thus usually fails.
%
% Eventually we'll implement support for that in Tectonic:
% we should check the bundle for ALL fonts, even those loaded
% by name.

\begin{document}

\section*{Introduction}
This is Introduction

\end{document}

@@ -0,0 +1,20 @@
\documentclass{article}
\usepackage{markdown}

% The markdown package requires shell escapes,
% which are currently disabled.
% We expect this test to fail, for now.

\begin{document}
\begin{markdown}
# Grocery list

## Food
- baked beans
- spaghetti

## Stationery
- writing pad
- pencils
\end{markdown}
\end{document}

@@ -0,0 +1,11 @@
\documentclass{report}
\usepackage{pgfornament}

\begin{document}

\begin{center}
Below is a test ornament.\linebreak
\pgfornament[scale=0.125]{3}
\end{center}

\end{document}

@@ -0,0 +1,7 @@
\documentclass[a4paper,12pt]{article}
\usepackage{polyglossia}
\setmainlanguage[variant=brazilian]{portuguese}

\begin{document}
Test
\end{document}

@@ -0,0 +1 @@
\input xelatex.ini

@@ -0,0 +1 @@
\input plain \dump
@ -0,0 +1,302 @@
|
|||
#! /usr/bin/env python3
|
||||
# -*- mode: python; coding: utf-8 -*-
|
||||
# Copyright 2020-2021 the Tectonic Project.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
"""
|
||||
Test builds using some of the LaTeX package (style) files provided in a bundle.
|
||||
|
||||
There are thousands of these (about 5000 as of TeXLive 2020), so we use a
|
||||
reproducible-random scheme to skip most of them to keep the testing time
|
||||
reasonable. In particular:
|
||||
|
||||
- I did an initial run over all of the packages on the TeXLive 2020 bundle when
|
||||
setting this all up. All of the packages that failed were marked with a "skip"
|
||||
tag. These are always skipped.
|
||||
|
||||
- All of the packages were assigned a randomly-generated number between 0 and 99
|
||||
(inclusive), using a `rand=` key in the listing file. Of the remaining
|
||||
non-"skip" packages, only a fraction of them are tested, using the random key
|
||||
to select them. This program takes a `-S` option to specify the percentage of
|
||||
packages to test, and a `-K` option to specify which random subset to
|
||||
investigate. Packages where `(randkey + K) % 100 >= S` are skipped.
|
||||
|
||||
- Packages without a `rand=` setting are always tested.
|
||||
|
||||
- The default `-S` setting is 5%, which tests about 150 packages and takes about
|
||||
7 minutes to run. The default `-K` setting is random.
|
||||
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os.path
|
||||
import random
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from test_utils import *
|
||||
|
||||
# We use percent formatting since all the TeX braces would be super annoying to
|
||||
# escape in str.format() formatting.
|
||||
DOC_CLASS_TEMPLATE = r"\documentclass{%(class)s}"
|
||||
PACKAGE_TEMPLATE = r"\usepackage{%(package)s}"
|
||||
|
||||
DOCUMENT_BODY = r"""\begin{document}
|
||||
Hello, world.
|
||||
\end{document}"""
|
||||
|
||||
|
||||
def entrypoint(argv):
|
||||
settings = make_arg_parser().parse_args(argv[1:])
|
||||
bundle = Bundle.open_with_inferred_state(settings.bundle_dir)
|
||||
|
||||
packagedir = bundle.test_path("packages")
|
||||
n_errors = 0
|
||||
n_surprises = 0
|
||||
n_tested = 0
|
||||
n_skipped = 0
|
||||
n_missing = 0
|
||||
n_removed = 0
|
||||
n_xfail = 0
|
||||
|
||||
# Random sampling setup
|
||||
|
||||
if settings.sample_key is None:
|
||||
settings.sample_key = random.randint(0, 99)
|
||||
|
||||
if settings.update:
|
||||
print("note: update mode engaged - will rewrite packages.txt")
|
||||
print()
|
||||
|
||||
# Load the packages from the bundle
|
||||
|
||||
bundle_packages = set()
|
||||
|
||||
with open(bundle.listing_path()) as flist:
|
||||
for line in flist:
|
||||
base = line.strip()
|
||||
if base.endswith(".sty"):
|
||||
bundle_packages.add(base[:-4])
|
||||
|
||||
# Load the stored information
|
||||
|
||||
ref_packages = {}
|
||||
packages_path = bundle.path("packages.txt")
|
||||
|
||||
with open(packages_path) as fref:
|
||||
for line in fref:
|
||||
bits = line.split()
|
||||
classname = bits[0]
|
||||
info = {}
|
||||
|
||||
info["tags"] = set(bits[1].split(","))
|
||||
|
||||
for bit in bits[2:]:
|
||||
if bit.startswith("rand="):
|
||||
info["randkey"] = int(bit[5:])
|
||||
else:
|
||||
die(f"unexpected metadata item {bit!r} in packages.txt")
|
||||
|
||||
ref_packages[classname] = info
|
||||
|
||||
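# For reference, a packages.txt line is "name tags [rand=NN]", e.g.
# (hypothetical entries):
#   amsmath ok rand=42
#   somebrokenpackage skip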
# Cross-check the two lists
|
||||
|
||||
for p in bundle_packages:
|
||||
if p not in ref_packages:
|
||||
# `just_added` enables us to make sure to test new packages in
|
||||
# update mode
|
||||
print(f"MISSING {p} - not in packages.txt")
|
||||
ref_packages[p] = {
|
||||
"tags": set(["ok"]),
|
||||
"randkey": random.randint(0, 99),
|
||||
"just_added": settings.update,
|
||||
}
|
||||
|
||||
if not settings.update:
|
||||
n_missing += 1
|
||||
n_errors += 1
|
||||
|
||||
refkeys = list(ref_packages.keys())
|
||||
|
||||
for p in refkeys:
|
||||
if p not in bundle_packages:
|
||||
print(f"REMOVED {p} - in packages.txt but not bundle")
|
||||
del ref_packages[p]
|
||||
|
||||
if not settings.update:
|
||||
n_removed += 1
|
||||
n_errors += 1
|
||||
|
||||
if n_missing + n_removed > 0:
|
||||
print("NOTE: use --update to rebuild packages.txt if needed")
|
||||
|
||||
# Sampling setup.
|
||||
|
||||
if settings.sample_percentage is None:
|
||||
TARGET_N_PACKAGES = 100
|
||||
settings.sample_percentage = max(
|
||||
100 * TARGET_N_PACKAGES // len(ref_packages), 1
|
||||
)
|
||||
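# e.g. with 3000 candidate packages this yields max(10000 // 3000, 1) = 3 (%).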
n_eff = settings.sample_percentage * len(ref_packages) // 100
|
||||
print(
|
||||
f"note: targeting about {n_eff} randomized test cases ({settings.sample_percentage}% of corpus; actual number will vary)"
|
||||
)
|
||||
else:
|
||||
print(
|
||||
f"note: sampling {settings.sample_percentage}% of the randomized test cases"
|
||||
)
|
||||
|
||||
print(
|
||||
f"note: sample key is {settings.sample_key}; use argument `-K {settings.sample_key}` to reproduce this run`"
|
||||
)
|
||||
|
||||
# Run the tests
|
||||
|
||||
refkeys = sorted(ref_packages.keys())
|
||||
|
||||
for pkg in refkeys:
|
||||
info = ref_packages[pkg]
|
||||
tags = info["tags"]
|
||||
|
||||
if info.get("just_added", False):
|
||||
random_skipped = False
|
||||
elif "randkey" in info:
|
||||
effkey = (info["randkey"] + settings.sample_key) % 100
|
||||
random_skipped = effkey >= settings.sample_percentage
|
||||
else:
|
||||
random_skipped = False
|
||||
|
||||
if "skip" in tags or random_skipped:
|
||||
n_skipped += 1
|
||||
continue
|
||||
|
||||
print(pkg, "... ", end="")
|
||||
sys.stdout.flush()
|
||||
n_tested += 1
|
||||
|
||||
thisdir = os.path.join(packagedir, pkg)
|
||||
os.makedirs(thisdir, exist_ok=True)
|
||||
|
||||
texpath = os.path.join(thisdir, "index.tex")
|
||||
|
||||
params = {
|
||||
"class": "article",
|
||||
"package": pkg,
|
||||
}
|
||||
|
||||
with open(texpath, "wt") as f:
|
||||
print(DOC_CLASS_TEMPLATE % params, file=f)
|
||||
print(PACKAGE_TEMPLATE % params, file=f)
|
||||
print(DOCUMENT_BODY, file=f)
|
||||
|
||||
with open(os.path.join(thisdir, "log.txt"), "wb") as log:
|
||||
result = subprocess.call(
|
||||
[TECTONIC_PROGRAM, "-p", "-b", bundle.zip_path(), texpath],
|
||||
shell=False,
|
||||
stdout=log,
|
||||
stderr=subprocess.STDOUT,
|
||||
)
|
||||
|
||||
if result == 0:
|
||||
if "ok" in tags:
|
||||
print("pass", flush=True)
|
||||
else:
|
||||
# This test succeeded even though we didn't expect it to.
|
||||
# Not a bad thing, but worth noting!
|
||||
print("pass (unexpected)", flush=True)
|
||||
n_surprises += 1
|
||||
|
||||
try:
|
||||
tags.remove("xfail")
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
tags.add("ok")
|
||||
else:
|
||||
if "xfail" in tags:
|
||||
print("xfail", flush=True)
|
||||
n_xfail += 1
|
||||
else:
|
||||
# This test failed unexpectedly :-(
|
||||
print("FAIL", flush=True)
|
||||
n_errors += 1
|
||||
|
||||
if settings.update:
|
||||
try:
|
||||
tags.remove("ok")
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
tags.add("xfail")
|
||||
|
||||
print()
|
||||
print("Summary:")
|
||||
print(f"- Tested {n_tested} packages")
|
||||
if n_skipped:
|
||||
print(f"- {n_skipped} cases skipped")
|
||||
if n_missing:
|
||||
print(f"- {n_missing} packages missing from packages.txt")
|
||||
if n_removed:
|
||||
print(f"- {n_removed} packages in packages.txt removed from bundle")
|
||||
if n_xfail:
|
||||
print(f"- {n_xfail} expected failures")
|
||||
if n_surprises:
|
||||
print(f"- {n_surprises} surprise passes")
|
||||
if n_errors:
|
||||
print(
|
||||
f"- {n_errors} total errors: test failed (outputs stored in {packagedir})"
|
||||
)
|
||||
else:
|
||||
print(f"- no errors: test passed (outputs stored in {packagedir})")
|
||||
|
||||
# Update listing if needed
|
||||
|
||||
if settings.update:
|
||||
with open(packages_path, "wt") as f:
|
||||
for pkg in refkeys:
|
||||
info = ref_packages[pkg]
|
||||
tag_text = ",".join(sorted(info["tags"]))
|
||||
|
||||
randkey = info.get("randkey")
|
||||
if randkey is None:
|
||||
rest = ""
|
||||
else:
|
||||
rest = f" rand={randkey}"
|
||||
|
||||
print(pkg, " ", tag_text, rest, sep="", file=f)
|
||||
|
||||
# All done!
|
||||
|
||||
return 1 if n_errors and not settings.update else 0
|
||||
|
||||
|
||||
def make_arg_parser():
|
||||
p = argparse.ArgumentParser()
|
||||
p.add_argument(
|
||||
"--update",
|
||||
action="store_true",
|
||||
help="Update mode: sync packages.txt to bundle; may wish to use `-S 100` too",
|
||||
)
|
||||
p.add_argument(
|
||||
"-S",
|
||||
"--samp-pct",
|
||||
dest="sample_percentage",
|
||||
type=int,
|
||||
help="The percentage of test cases to sample",
|
||||
)
|
||||
p.add_argument(
|
||||
"-K",
|
||||
"--samp-key",
|
||||
dest="sample_key",
|
||||
type=int,
|
||||
help='The "key" determining which random subset of cases are sampled',
|
||||
)
|
||||
p.add_argument(
|
||||
"bundle_dir",
|
||||
help="The directory of the bundle specification",
|
||||
)
|
||||
return p
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(entrypoint(sys.argv))
|
|
@ -0,0 +1,212 @@
|
|||
#!/usr/bin/env bash
|
||||
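# Usage (inferred from the dispatch at the bottom of this script):
#   ./test.sh <bundle path> all|files|classes
#   ./test.sh <bundle path> class <class name> [flags]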
|
||||
this_dir="$(pwd)"
|
||||
|
||||
test_dir="${this_dir}"
|
||||
|
||||
bundle_path="$(realpath "${1}")"
|
||||
output_dir="${test_dir}/build"
|
||||
|
||||
rm -drf "${output_dir}"
|
||||
mkdir -p "${output_dir}"
|
||||
|
||||
|
||||
function relative() {
|
||||
echo "./$(realpath --relative-to="${this_dir}" "${1}")"
|
||||
}
|
||||
|
||||
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
NC='\033[0m'
|
||||
|
||||
|
||||
function test_files() {
|
||||
|
||||
rm -drf "${output_dir}/files"
|
||||
mkdir -p "${output_dir}/files"
|
||||
mkdir -p "${output_dir}/files/logs"
|
||||
|
||||
|
||||
|
||||
for f in "${test_dir}/files"/*; do
|
||||
echo -n "Testing file $(relative "${f}")..."
|
||||
|
||||
tectonic \
|
||||
--chatter minimal \
|
||||
--outdir "${output_dir}/files" \
|
||||
--bundle "${bundle_path}" \
|
||||
"${f}" \
|
||||
&> "${output_dir}/files/logs/$(basename "${f}").log"
|
||||
|
||||
if [[ $? == 0 ]]; then
|
||||
echo -en "\r${GREEN}PASS${NC}"
|
||||
else
|
||||
echo -en "\r${RED}FAIL${NC}"
|
||||
fi
|
||||
echo " Tested file $(relative "${f}")"
|
||||
done
|
||||
|
||||
|
||||
for f in "${test_dir}/formats"/*; do
|
||||
echo -n "Testing format $(relative "${f}")..."
|
||||
|
||||
tectonic \
|
||||
--chatter minimal \
|
||||
--outdir "${output_dir}/files" \
|
||||
-p --outfmt "fmt" \
|
||||
--bundle "${bundle_path}" \
|
||||
"${f}" \
|
||||
&> "${output_dir}/files/logs/$(basename "${f}").log"
|
||||
|
||||
if [[ $? == 0 ]]; then
|
||||
echo -en "\r${GREEN}PASS${NC}"
|
||||
else
|
||||
echo -en "\r${RED}FAIL${NC}"
|
||||
fi
|
||||
echo " Tested format $(relative "${f}")"
|
||||
done
|
||||
}
|
||||
|
||||
|
||||
|
||||
function test_class_single() {
|
||||
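# Reports the result by echoing 0 (pass) or 1 (fail) on stdout; callers
# capture it with command substitution. All tectonic output is redirected
# to a log file, so the captured value stays clean.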
local class="${1}"
|
||||
local flags="${2}"
|
||||
|
||||
mkdir -p "${output_dir}/classes/logs/failed"
|
||||
mkdir -p "${output_dir}/classes/logs/passed"
|
||||
local target="$(mktemp --tmpdir="${output_dir}/classes" "tmp.XXXX")"
|
||||
|
||||
(
|
||||
echo "\documentclass{${class}}"
|
||||
echo ""
|
||||
|
||||
if [[ $flags =~ "titleauth" ]]; then
|
||||
echo "title{Test Title}"
|
||||
echo "\author{An Author}"
|
||||
echo ""
|
||||
fi
|
||||
|
||||
echo "\begin{document}"
|
||||
echo "Hello, world"
|
||||
echo "\end{document}"
|
||||
) > "${target}"
|
||||
|
||||
|
||||
tectonic \
|
||||
--chatter minimal \
|
||||
--outdir "${output_dir}/classes" \
|
||||
--bundle "${bundle_path}" \
|
||||
"${target}" \
|
||||
&> "${output_dir}/classes/logs/${class}.log"
|
||||
|
||||
if [[ $? == 0 ]]; then
|
||||
echo "$class" >> "${output_dir}/classes/passed"
|
||||
mv "${output_dir}/classes/logs/${class}.log" "${output_dir}/classes/logs/passed"
|
||||
echo 0
|
||||
else
|
||||
echo "$class" >> "${output_dir}/classes/failed"
|
||||
mv "${output_dir}/classes/logs/${class}.log" "${output_dir}/classes/logs/failed"
|
||||
echo 1
|
||||
fi
|
||||
|
||||
rm "${target}"
|
||||
}
|
||||
|
||||
|
||||
function test_classes() {
|
||||
rm -drf "${output_dir}/classes"
|
||||
mkdir -p "${output_dir}/classes"
|
||||
|
||||
local fails=0
|
||||
local passes=0
|
||||
local skipped=0
|
||||
local total=$(wc -l < "${test_dir}/classes.list")
|
||||
|
||||
cat "${test_dir}/classes.list" | while read class flags; do
|
||||
|
||||
if [[ $flags =~ "xfail" ]]; then
|
||||
skipped=$(($skipped+1))
|
||||
continue
|
||||
fi
|
||||
|
||||
r=$(test_class_single "${class}" "${flags}")
|
||||
|
||||
if [[ $r == 0 ]]; then
|
||||
passes=$(($passes+1))
|
||||
else
|
||||
fails=$(($fails+1))
|
||||
fi
|
||||
|
||||
echo -en "\r"
|
||||
echo -en "$(($passes + $fails + $skipped))/${total} "
|
||||
echo -en "${GREEN}P:${passes}${NC} "
|
||||
echo -en "${RED}F:${fails}${NC} "
|
||||
echo -en "S:${skipped}${NC} "
|
||||
echo -en " Tested class ${class}"
|
||||
|
||||
# Delete remnant of previous class name
|
||||
# and move cursor back.
|
||||
echo -en " "
|
||||
echo -en "\033[22D"
|
||||
done
|
||||
|
||||
echo ""
|
||||
}
|
||||
|
||||
|
||||
function test_class() {
|
||||
class="$1"
|
||||
flags="$2"
|
||||
|
||||
exists=false;
|
||||
exists=$(
|
||||
cat "${test_dir}/classes.list" | while read tclass flags; do
|
||||
if [[ "${class}" == "${tclass}" ]]; then
|
||||
echo "${class}"
|
||||
break
|
||||
fi
|
||||
done
|
||||
)
|
||||
|
||||
if [[ -z $exists ]]; then
|
||||
echo "No such class "${class}""
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo -n "Testing class "${class}"..."
|
||||
r=$(test_class_single "${class}" "${flags}")
|
||||
|
||||
if [[ $r == 0 ]]; then
|
||||
echo -e " ${GREEN}Pass${NC}"
|
||||
else
|
||||
echo -e " ${RED}Fail${NC}"
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
case "${2}" in
|
||||
|
||||
"all")
|
||||
test_files
|
||||
test_classes
|
||||
;;
|
||||
|
||||
"files")
|
||||
test_files
|
||||
;;
|
||||
|
||||
"classes")
|
||||
test_classes
|
||||
;;
|
||||
|
||||
"class")
|
||||
test_class "${3}" "${4}"
|
||||
;;
|
||||
|
||||
*)
|
||||
echo "Unknown test suite `${1}`"
|
||||
echo "See README.md"
|
||||
;;
|
||||
esac
|
|
@ -106,13 +106,7 @@ pub trait DriverHooks {
|
|||
/// argument specifies the cryptographic digest of the data that were
|
||||
/// written. Note that this function takes ownership of the name and
|
||||
/// digest.
|
||||
fn event_output_closed(
|
||||
&mut self,
|
||||
_name: String,
|
||||
_digest: DigestData,
|
||||
_status: &mut dyn StatusBackend,
|
||||
) {
|
||||
}
|
||||
fn event_output_closed(&mut self, _name: String, _digest: DigestData) {}
|
||||
|
||||
/// This function is called when an input file is closed. The "digest"
|
||||
/// argument specifies the cryptographic digest of the data that were
|
||||
|
@ -560,7 +554,7 @@ impl<'a> CoreBridgeState<'a> {
|
|||
rv = true;
|
||||
}
|
||||
let (name, digest) = oh.into_name_digest();
|
||||
self.hooks.event_output_closed(name, digest, self.status);
|
||||
self.hooks.event_output_closed(name, digest);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
[package]
|
||||
name = "tectonic_bundles"
|
||||
version = "0.0.0-dev.0" # assigned with cranko (see README)
|
||||
version = "0.0.0-dev.0" # assigned with cranko (see README)
|
||||
authors = ["Peter Williams <peter@newton.cx>"]
|
||||
description = """
|
||||
Tectonic "bundle" (support file collection) implementations.
|
||||
|
@ -25,6 +25,7 @@ tectonic_geturl = { path = "../geturl", version = "0.0.0-dev.0", default-feature
|
|||
tectonic_io_base = { path = "../io_base", version = "0.0.0-dev.0" }
|
||||
tectonic_status_base = { path = "../status_base", version = "0.0.0-dev.0" }
|
||||
zip = { version = "^0.6", default-features = false, features = ["deflate"] }
|
||||
url = "^2.0"
|
||||
|
||||
[features]
|
||||
default = ["geturl-reqwest"]
|
||||
|
|
File diff suppressed because it is too large
|
@ -5,11 +5,13 @@
|
|||
|
||||
use std::{
|
||||
fs,
|
||||
io::Read,
|
||||
path::{Path, PathBuf},
|
||||
str::FromStr,
|
||||
};
|
||||
use tectonic_errors::prelude::*;
|
||||
use tectonic_io_base::{filesystem::FilesystemIo, InputHandle, IoProvider, OpenResult};
|
||||
use tectonic_status_base::StatusBackend;
|
||||
use tectonic_io_base::{digest, filesystem::FilesystemIo, InputHandle, IoProvider, OpenResult};
|
||||
use tectonic_status_base::{NoopStatusBackend, StatusBackend};
|
||||
|
||||
use super::Bundle;
|
||||
|
||||
|
@ -56,21 +58,34 @@ impl IoProvider for DirBundle {
|
|||
}
|
||||
|
||||
impl Bundle for DirBundle {
|
||||
fn all_files(&mut self, _status: &mut dyn StatusBackend) -> Result<Vec<String>> {
|
||||
let mut files = Vec::new();
|
||||
fn all_files(&self) -> Vec<String> {
|
||||
fs::read_dir(self.0.root())
|
||||
.unwrap()
|
||||
.filter_map(|x| x.ok())
|
||||
.filter(|x| !x.file_type().map(|x| x.is_dir()).unwrap_or(false))
|
||||
.map(|x| x.file_name().to_str().unwrap_or("").to_owned())
|
||||
.filter(|x| !x.is_empty())
|
||||
.collect()
|
||||
}
|
||||
|
||||
// We intentionally do not explore the directory recursively.
|
||||
for entry in fs::read_dir(self.0.root())? {
|
||||
let entry = entry?;
|
||||
|
||||
// This catches both regular files and symlinks.
|
||||
if !entry.file_type()?.is_dir() {
|
||||
if let Some(s) = entry.file_name().to_str() {
|
||||
files.push(s.to_owned());
|
||||
}
|
||||
fn get_digest(&mut self) -> Result<tectonic_io_base::digest::DigestData> {
|
||||
let digest_text = match self.input_open_name(digest::DIGEST_NAME, &mut NoopStatusBackend {})
|
||||
{
|
||||
OpenResult::Ok(h) => {
|
||||
let mut text = String::new();
|
||||
h.take(64).read_to_string(&mut text)?;
|
||||
text
|
||||
}
|
||||
}
|
||||
|
||||
Ok(files)
|
||||
OpenResult::NotAvailable => {
|
||||
bail!("bundle does not provide needed SHA256SUM file");
|
||||
}
|
||||
|
||||
OpenResult::Err(e) => {
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
Ok(atry!(digest::DigestData::from_str(&digest_text); ["corrupted SHA256 digest data"]))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,10 +3,11 @@
|
|||
|
||||
//! The web-friendly "indexed tar" bundle backend.
|
||||
//!
|
||||
//! The main type offered by this module is the [`IndexedTarBackend`] struct,
|
||||
//! which cannot be used directly as a [`tectonic_io_base::IoProvider`] but is
|
||||
//! the default backend for cached web-based bundle access through the
|
||||
//! [`crate::cache::CachingBundle`] framework.
|
||||
//! The main type offered by this module is the [`ItarBundle`] struct,
|
||||
//! which can (but should not) be used directly, like any other bundle.
|
||||
//!
|
||||
//! Instead, wrap it in a [`crate::BundleCache`] for filesystem-backed
|
||||
//! caching.
|
||||
//!
|
||||
//! While the on-server file format backing the "indexed tar" backend is indeed
|
||||
//! a standard `tar` file, as far as the client is concerned, this backend is
|
||||
|
@ -14,187 +15,273 @@
|
|||
//! resource, the index file merely contains a byte offset and length that are
|
||||
//! then used to construct an HTTP Range request to obtain the file as needed.
|
||||
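//!
//! For illustration, each index line has the whitespace-separated form
//! `name offset length`, e.g. (hypothetical values) `plain.tex 102400 3172`.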
|
||||
use crate::{Bundle, CachableBundle, FileIndex, FileInfo, NET_RETRY_ATTEMPTS, NET_RETRY_SLEEP_MS};
|
||||
use flate2::read::GzDecoder;
|
||||
use std::{convert::TryInto, io::Read, str::FromStr};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
io::{BufRead, BufReader, Cursor, Read},
|
||||
str::FromStr,
|
||||
thread,
|
||||
time::Duration,
|
||||
};
|
||||
use tectonic_errors::prelude::*;
|
||||
use tectonic_geturl::{DefaultBackend, DefaultRangeReader, GetUrlBackend, RangeReader};
|
||||
use tectonic_io_base::digest::{self, DigestData};
|
||||
use tectonic_status_base::{tt_note, tt_warning, StatusBackend};
|
||||
use tectonic_io_base::{digest, InputHandle, InputOrigin, IoProvider, OpenResult};
|
||||
use tectonic_status_base::{tt_note, tt_warning, NoopStatusBackend, StatusBackend};
|
||||
|
||||
use crate::cache::{BackendPullData, CacheBackend};
|
||||
|
||||
const MAX_HTTP_ATTEMPTS: usize = 4;
|
||||
|
||||
/// The internal file-information struct used by the [`IndexedTarBackend`].
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct FileInfo {
|
||||
/// The internal file-information struct used by the [`ItarBundle`].
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ItarFileInfo {
|
||||
name: String,
|
||||
offset: u64,
|
||||
length: u64,
|
||||
length: usize,
|
||||
}
|
||||
|
||||
/// A simple web-based file backend based on HTTP Range requests.
|
||||
///
|
||||
/// This type implements the [`CacheBackend`] trait and so can be used for
|
||||
/// web-based bundle access through the [`crate::cache::CachingBundle`]
|
||||
/// framework.
|
||||
#[derive(Debug)]
|
||||
pub struct IndexedTarBackend {
|
||||
reader: DefaultRangeReader,
|
||||
impl FileInfo for ItarFileInfo {
|
||||
fn name(&self) -> &str {
|
||||
&self.name
|
||||
}
|
||||
fn path(&self) -> &str {
|
||||
&self.name
|
||||
}
|
||||
}
|
||||
|
||||
impl CacheBackend for IndexedTarBackend {
|
||||
type FileInfo = FileInfo;
|
||||
/// A simple FileIndex for compatibility with [`crate::BundleCache`]
|
||||
#[derive(Default, Debug)]
|
||||
pub struct ItarFileIndex {
|
||||
content: HashMap<String, ItarFileInfo>,
|
||||
}
|
||||
|
||||
fn open_with_pull(
|
||||
start_url: &str,
|
||||
status: &mut dyn StatusBackend,
|
||||
) -> Result<(Self, BackendPullData)> {
|
||||
// Step 1: resolve URL
|
||||
let mut geturl_backend = DefaultBackend::default();
|
||||
let resolved_url = geturl_backend.resolve_url(start_url, status)?;
|
||||
impl<'this> FileIndex<'this> for ItarFileIndex {
|
||||
type InfoType = ItarFileInfo;
|
||||
|
||||
// Step 2: fetch index
|
||||
let index = {
|
||||
let mut index = String::new();
|
||||
let index_url = format!("{}.index.gz", &resolved_url);
|
||||
tt_note!(status, "downloading index {}", index_url);
|
||||
GzDecoder::new(geturl_backend.get_url(&index_url, status)?)
|
||||
.read_to_string(&mut index)?;
|
||||
index
|
||||
};
|
||||
|
||||
// Step 3: get digest, setting up instance as we go
|
||||
|
||||
let mut cache_backend = IndexedTarBackend {
|
||||
reader: geturl_backend.open_range_reader(&resolved_url),
|
||||
};
|
||||
|
||||
let digest_info = {
|
||||
let mut digest_info = None;
|
||||
|
||||
for line in index.lines() {
|
||||
if let Ok((name, info)) = Self::parse_index_line(line) {
|
||||
if name == digest::DIGEST_NAME {
|
||||
digest_info = Some(info);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
atry!(
|
||||
digest_info;
|
||||
["backend does not provide needed {} file", digest::DIGEST_NAME]
|
||||
)
|
||||
};
|
||||
|
||||
let digest_text =
|
||||
String::from_utf8(cache_backend.get_file(digest::DIGEST_NAME, &digest_info, status)?)
|
||||
.map_err(|e| e.utf8_error())?;
|
||||
let digest = DigestData::from_str(&digest_text)?;
|
||||
|
||||
// All done.
|
||||
Ok((
|
||||
cache_backend,
|
||||
BackendPullData {
|
||||
resolved_url,
|
||||
digest,
|
||||
index,
|
||||
},
|
||||
))
|
||||
fn iter(&'this self) -> Box<dyn Iterator<Item = &'this ItarFileInfo> + 'this> {
|
||||
Box::new(self.content.values())
|
||||
}
|
||||
|
||||
fn open_with_quick_check(
|
||||
resolved_url: &str,
|
||||
digest_file_info: &Self::FileInfo,
|
||||
status: &mut dyn StatusBackend,
|
||||
) -> Result<Option<(Self, DigestData)>> {
|
||||
let mut cache_backend = IndexedTarBackend {
|
||||
reader: DefaultBackend::default().open_range_reader(resolved_url),
|
||||
};
|
||||
fn len(&self) -> usize {
|
||||
self.content.len()
|
||||
}
|
||||
|
||||
if let Ok(d) = cache_backend.get_file(digest::DIGEST_NAME, digest_file_info, status) {
|
||||
if let Ok(d) = String::from_utf8(d) {
|
||||
if let Ok(d) = DigestData::from_str(&d) {
|
||||
return Ok(Some((cache_backend, d)));
|
||||
}
|
||||
fn initialize(&mut self, reader: &mut dyn Read) -> Result<()> {
|
||||
self.content.clear();
|
||||
|
||||
for line in BufReader::new(reader).lines() {
|
||||
let line = line?;
|
||||
let mut bits = line.split_whitespace();
|
||||
|
||||
if let (Some(name), Some(offset), Some(length)) =
|
||||
(bits.next(), bits.next(), bits.next())
|
||||
{
|
||||
self.content.insert(
|
||||
name.to_owned(),
|
||||
ItarFileInfo {
|
||||
name: name.to_owned(),
|
||||
offset: offset.parse::<u64>()?,
|
||||
length: length.parse::<usize>()?,
|
||||
},
|
||||
);
|
||||
} else {
|
||||
// TODO: preserve the warning info or something!
|
||||
bail!("malformed index line");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn parse_index_line(line: &str) -> Result<(String, Self::FileInfo)> {
|
||||
let mut bits = line.split_whitespace();
|
||||
/// Find a file in this index
|
||||
fn search(&'this mut self, name: &str) -> Option<ItarFileInfo> {
|
||||
self.content.get(name).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
if let (Some(name), Some(offset), Some(length)) = (bits.next(), bits.next(), bits.next()) {
|
||||
Ok((
|
||||
name.to_owned(),
|
||||
FileInfo {
|
||||
offset: offset.parse::<u64>()?,
|
||||
length: length.parse::<u64>()?,
|
||||
},
|
||||
))
|
||||
} else {
|
||||
// TODO: preserve the warning info or something!
|
||||
bail!("malformed index line");
|
||||
/// The old-fashioned Tectonic web bundle format.
|
||||
pub struct ItarBundle {
|
||||
url: String,
|
||||
/// Maps all available file names to [`FileInfo`]s.
|
||||
/// This is empty when the bundle is created, so constructing one needs no
|
||||
/// network access. It is filled automatically by `ensure_index` when needed.
|
||||
index: ItarFileIndex,
|
||||
|
||||
/// RangeReader object, responsible for sending queries.
|
||||
/// Will be None when the object is created, automatically
|
||||
/// replaced with Some(...) once needed.
|
||||
reader: Option<DefaultRangeReader>,
|
||||
}
|
||||
|
||||
impl ItarBundle {
|
||||
/// Make a new ItarBundle.
|
||||
/// This method does not require network access.
|
||||
/// It will succeed even if we can't connect to the bundle, or if we're given a bad URL.
|
||||
pub fn new(url: String) -> Result<ItarBundle> {
|
||||
Ok(ItarBundle {
|
||||
index: ItarFileIndex::default(),
|
||||
reader: None,
|
||||
url,
|
||||
})
|
||||
}
|
||||
|
||||
fn connect_reader(&mut self) {
|
||||
let geturl_backend = DefaultBackend::default();
|
||||
// Connect reader if it is not already connected
|
||||
if self.reader.is_none() {
|
||||
self.reader = Some(geturl_backend.open_range_reader(&self.url));
|
||||
}
|
||||
}
|
||||
|
||||
fn get_file(
|
||||
/// Fill this bundle's index, if it is empty.
|
||||
fn ensure_index(&mut self) -> Result<()> {
|
||||
// Fetch index if it is empty
|
||||
if self.index.is_initialized() {
|
||||
return Ok(());
|
||||
}
|
||||
self.connect_reader();
|
||||
|
||||
let mut reader = self.get_index_reader()?;
|
||||
self.index.initialize(&mut reader)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl IoProvider for ItarBundle {
|
||||
fn input_open_name(
|
||||
&mut self,
|
||||
name: &str,
|
||||
info: &Self::FileInfo,
|
||||
status: &mut dyn StatusBackend,
|
||||
) -> Result<Vec<u8>> {
|
||||
tt_note!(status, "downloading {}", name);
|
||||
) -> OpenResult<InputHandle> {
|
||||
if let Err(e) = self.ensure_index() {
|
||||
return OpenResult::Err(e);
|
||||
};
|
||||
|
||||
// Historically, sometimes our web service would drop connections when
|
||||
// fetching a bunch of resource files (i.e., on the first invocation).
|
||||
// The error manifested itself in a way that has a not-so-nice user
|
||||
// experience. Our solution: retry the request a few times in case it
|
||||
// was a transient problem.
|
||||
let info = match self.index.search(name) {
|
||||
Some(a) => a,
|
||||
None => return OpenResult::NotAvailable,
|
||||
};
|
||||
|
||||
let n = info.length.try_into().unwrap();
|
||||
let mut buf = Vec::with_capacity(n);
|
||||
let mut overall_failed = true;
|
||||
let mut any_failed = false;
|
||||
|
||||
// Our HTTP implementation actually has problems with zero-sized ranged
|
||||
// reads (Azure gives us a 200 response, which we don't properly
|
||||
// handle), but when the file is 0-sized we're all set anyway!
|
||||
if n > 0 {
|
||||
for _ in 0..MAX_HTTP_ATTEMPTS {
|
||||
let mut stream = match self.reader.read_range(info.offset, n) {
|
||||
Ok(r) => r,
|
||||
Err(e) => {
|
||||
tt_warning!(status, "failure requesting \"{}\" from network", name; e);
|
||||
any_failed = true;
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
if let Err(e) = stream.read_to_end(&mut buf) {
|
||||
tt_warning!(status, "failure downloading \"{}\" from network", name; e.into());
|
||||
any_failed = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
overall_failed = false;
|
||||
break;
|
||||
}
|
||||
|
||||
if overall_failed {
|
||||
bail!(
|
||||
"failed to retrieve \"{}\" from the network; \
|
||||
this most probably is not Tectonic's fault \
|
||||
-- please check your network connection.",
|
||||
name
|
||||
);
|
||||
} else if any_failed {
|
||||
tt_note!(status, "download succeeded after retry");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(buf)
|
||||
// Retries are handled in open_fileinfo,
|
||||
// since BundleCache never calls input_open_name.
|
||||
self.open_fileinfo(&info, status)
|
||||
}
|
||||
}
|
||||
|
||||
impl Bundle for ItarBundle {
|
||||
fn all_files(&self) -> Vec<String> {
|
||||
self.index.iter().map(|x| x.path().to_owned()).collect()
|
||||
}
|
||||
|
||||
fn get_digest(&mut self) -> Result<tectonic_io_base::digest::DigestData> {
|
||||
let digest_text = match self.input_open_name(digest::DIGEST_NAME, &mut NoopStatusBackend {})
|
||||
{
|
||||
OpenResult::Ok(h) => {
|
||||
let mut text = String::new();
|
||||
h.take(64).read_to_string(&mut text)?;
|
||||
text
|
||||
}
|
||||
|
||||
OpenResult::NotAvailable => {
|
||||
// Broken or un-cacheable backend.
|
||||
bail!("bundle does not provide needed SHA256SUM file");
|
||||
}
|
||||
|
||||
OpenResult::Err(e) => {
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
Ok(atry!(digest::DigestData::from_str(&digest_text); ["corrupted SHA256 digest data"]))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'this> CachableBundle<'this, ItarFileIndex> for ItarBundle {
|
||||
fn get_location(&mut self) -> String {
|
||||
self.url.clone()
|
||||
}
|
||||
|
||||
fn initialize_index(&mut self, source: &mut dyn Read) -> Result<()> {
|
||||
self.index.initialize(source)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn index(&mut self) -> &mut ItarFileIndex {
|
||||
&mut self.index
|
||||
}
|
||||
|
||||
fn search(&mut self, name: &str) -> Option<ItarFileInfo> {
|
||||
self.index.search(name)
|
||||
}
|
||||
|
||||
fn get_index_reader(&mut self) -> Result<Box<dyn Read>> {
|
||||
let mut geturl_backend = DefaultBackend::default();
|
||||
let index_url = format!("{}.index.gz", &self.url);
|
||||
let reader = GzDecoder::new(geturl_backend.get_url(&index_url)?);
|
||||
Ok(Box::new(reader))
|
||||
}
|
||||
|
||||
fn open_fileinfo(
|
||||
&mut self,
|
||||
info: &ItarFileInfo,
|
||||
status: &mut dyn StatusBackend,
|
||||
) -> OpenResult<InputHandle> {
|
||||
match self.ensure_index() {
|
||||
Ok(_) => {}
|
||||
Err(e) => return OpenResult::Err(e),
|
||||
};
|
||||
|
||||
let mut v = Vec::with_capacity(info.length);
|
||||
tt_note!(status, "downloading {}", info.name);
|
||||
|
||||
// Edge case for zero-sized reads
|
||||
// (these cause errors on some web hosts)
|
||||
if info.length == 0 {
|
||||
return OpenResult::Ok(InputHandle::new_read_only(
|
||||
info.name.to_owned(),
|
||||
Cursor::new(v),
|
||||
InputOrigin::Other,
|
||||
));
|
||||
}
|
||||
|
||||
// Get file with retries
|
||||
for i in 0..NET_RETRY_ATTEMPTS {
|
||||
let mut stream = match self
|
||||
.reader
|
||||
.as_mut()
|
||||
.unwrap()
|
||||
.read_range(info.offset, info.length)
|
||||
{
|
||||
Ok(r) => r,
|
||||
Err(e) => {
|
||||
tt_warning!(status,
|
||||
"failure fetching \"{}\" from network ({}/{NET_RETRY_ATTEMPTS})",
|
||||
info.name, i+1; e
|
||||
);
|
||||
thread::sleep(Duration::from_millis(NET_RETRY_SLEEP_MS));
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
match stream.read_to_end(&mut v) {
|
||||
Ok(_) => {}
|
||||
Err(e) => {
|
||||
tt_warning!(status,
|
||||
"failure downloading \"{}\" from network ({}/{NET_RETRY_ATTEMPTS})",
|
||||
info.name, i+1; e.into()
|
||||
);
|
||||
thread::sleep(Duration::from_millis(NET_RETRY_SLEEP_MS));
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
return OpenResult::Ok(InputHandle::new_read_only(
|
||||
info.name.to_owned(),
|
||||
Cursor::new(v),
|
||||
InputOrigin::Other,
|
||||
));
|
||||
}
|
||||
|
||||
OpenResult::Err(anyhow!(
|
||||
"failed to download \"{}\"; please check your network connection.",
|
||||
info.name
|
||||
))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,22 +11,82 @@
|
|||
//!
|
||||
//! This crate provides the following bundle implementations:
|
||||
//!
|
||||
//! - [`cache::CachingBundle`] for access to remote bundles with local
|
||||
//! filesystem caching.
|
||||
//! - [`cache::BundleCache`] provides filesystem-backed caching for any bundle
|
||||
//! - [`itar::ItarBundle`] provides access to web-hosted "indexed tar" bundles
|
||||
//! - [`dir::DirBundle`] turns a directory full of files into a bundle; it is
|
||||
//! useful for testing and lightweight usage.
|
||||
//! - [`zip::ZipBundle`] for a ZIP-format bundle.
|
||||
|
||||
use std::{io::Read, str::FromStr};
|
||||
use tectonic_errors::{anyhow::bail, atry, Result};
|
||||
use tectonic_io_base::{digest, digest::DigestData, IoProvider, OpenResult};
|
||||
use std::{fmt::Debug, io::Read, path::PathBuf};
|
||||
use tectonic_errors::{prelude::bail, Result};
|
||||
use tectonic_io_base::{digest::DigestData, InputHandle, IoProvider, OpenResult};
|
||||
use tectonic_status_base::StatusBackend;
|
||||
|
||||
pub mod cache;
|
||||
pub mod dir;
|
||||
pub mod itar;
|
||||
mod ttb;
|
||||
pub mod ttb_fs;
|
||||
pub mod ttb_net;
|
||||
pub mod zip;
|
||||
|
||||
use cache::BundleCache;
|
||||
use dir::DirBundle;
|
||||
use itar::ItarBundle;
|
||||
use ttb_fs::TTBFsBundle;
|
||||
use ttb_net::TTBNetBundle;
|
||||
use zip::ZipBundle;
|
||||
|
||||
// How many times network bundles should retry
|
||||
// a download, and how long they should wait
|
||||
// between attempts.
|
||||
const NET_RETRY_ATTEMPTS: usize = 3;
|
||||
const NET_RETRY_SLEEP_MS: u64 = 500;
|
||||
|
||||
/// Uniquely identifies a file in a bundle.
|
||||
pub trait FileInfo: Clone + Debug {
|
||||
/// Return a path to this file, relative to the bundle.
|
||||
fn path(&self) -> &str;
|
||||
|
||||
/// Return the name of this file
|
||||
fn name(&self) -> &str;
|
||||
}
|
||||
|
||||
/// Keeps track of the [`FileInfo`]s in a bundle.
|
||||
pub trait FileIndex<'this>
|
||||
where
|
||||
Self: Sized + 'this + Debug,
|
||||
{
|
||||
/// The FileInfo this index handles
|
||||
type InfoType: FileInfo;
|
||||
|
||||
/// Iterate over all [`FileInfo`]s in this index
|
||||
fn iter(&'this self) -> Box<dyn Iterator<Item = &'this Self::InfoType> + 'this>;
|
||||
|
||||
/// Get the number of [`FileInfo`]s in this index
|
||||
fn len(&self) -> usize;
|
||||
|
||||
/// Returns true if this index is empty
|
||||
fn is_empty(&self) -> bool {
|
||||
self.len() == 0
|
||||
}
|
||||
|
||||
/// Has this index been filled with bundle data?
|
||||
/// This is false until [`Self::initialize`] is called,
|
||||
/// and true afterwards (for any bundle that contains files).
|
||||
fn is_initialized(&self) -> bool {
|
||||
!self.is_empty()
|
||||
}
|
||||
|
||||
/// Fill this index from a file
|
||||
fn initialize(&mut self, reader: &mut dyn Read) -> Result<()>;
|
||||
|
||||
/// Search for a file in this index, obeying search order.
|
||||
///
|
||||
/// Returns a `Some(FileInfo)` if a file was found, and `None` otherwise.
|
||||
fn search(&'this mut self, name: &str) -> Option<Self::InfoType>;
|
||||
}
|
||||
|
||||
/// A trait for bundles of Tectonic support files.
|
||||
///
|
||||
/// A "bundle" is an [`IoProvider`] with a few special properties. Bundles are
|
||||
|
@ -39,59 +99,154 @@ pub mod zip;
|
|||
/// of TeX support files, and that you can generate one or more TeX format files
|
||||
/// using only the files contained in a bundle.
|
||||
pub trait Bundle: IoProvider {
|
||||
/// Get a cryptographic digest summarizing this bundle’s contents.
|
||||
///
|
||||
/// The digest summarizes the exact contents of every file in the bundle. It
|
||||
/// is computed from the sorted names and SHA256 digests of the component
|
||||
/// files [as implemented in the TeXLive bundle builder][x].
|
||||
///
|
||||
/// [x]: https://github.com/tectonic-typesetting/tectonic-texlive-bundles/blob/master/scripts/ttb_utils.py#L321
|
||||
///
|
||||
/// The default implementation gets the digest from a file named
|
||||
/// `SHA256SUM`, which is expected to contain the digest in hex-encoded
|
||||
/// format.
|
||||
fn get_digest(&mut self, status: &mut dyn StatusBackend) -> Result<DigestData> {
|
||||
let digest_text = match self.input_open_name(digest::DIGEST_NAME, status) {
|
||||
OpenResult::Ok(h) => {
|
||||
let mut text = String::new();
|
||||
h.take(64).read_to_string(&mut text)?;
|
||||
text
|
||||
}
|
||||
/// Get a cryptographic digest summarizing this bundle’s contents,
|
||||
/// which summarizes the exact contents of every file in the bundle.
|
||||
fn get_digest(&mut self) -> Result<DigestData>;
|
||||
|
||||
OpenResult::NotAvailable => {
|
||||
// Broken or un-cacheable backend.
|
||||
bail!("bundle does not provide needed SHA256SUM file");
|
||||
}
|
||||
|
||||
OpenResult::Err(e) => {
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
Ok(atry!(DigestData::from_str(&digest_text); ["corrupted SHA256 digest data"]))
|
||||
}
|
||||
|
||||
/// Enumerate the files in this bundle.
|
||||
///
|
||||
/// This interface is intended to be used for diagnostics, not by anything
|
||||
/// during actual execution of an engine. This should include meta-files
|
||||
/// such as the `SHA256SUM` file. The ordering of the returned filenames is
|
||||
/// unspecified.
|
||||
///
|
||||
/// To ease implementation, the filenames are returned in one big vector of
|
||||
/// owned strings. For a large bundle, the memory consumed by this operation
|
||||
/// might be fairly substantial (although we are talking megabytes, not
|
||||
/// gigabytes).
|
||||
fn all_files(&mut self, status: &mut dyn StatusBackend) -> Result<Vec<String>>;
|
||||
/// Iterate over all file paths in this bundle.
|
||||
/// This is used for the `bundle search` command
|
||||
fn all_files(&self) -> Vec<String>;
|
||||
}
|
||||
|
||||
impl<B: Bundle + ?Sized> Bundle for Box<B> {
|
||||
fn get_digest(&mut self, status: &mut dyn StatusBackend) -> Result<DigestData> {
|
||||
(**self).get_digest(status)
|
||||
fn get_digest(&mut self) -> Result<DigestData> {
|
||||
(**self).get_digest()
|
||||
}
|
||||
|
||||
fn all_files(&mut self, status: &mut dyn StatusBackend) -> Result<Vec<String>> {
|
||||
(**self).all_files(status)
|
||||
fn all_files(&self) -> Vec<String> {
|
||||
(**self).all_files()
|
||||
}
|
||||
}
|
||||
|
||||
/// A bundle that may be cached.
|
||||
///
|
||||
/// These methods do not implement any new features.
|
||||
/// Instead, they give the [`cache::BundleCache`] wrapper
|
||||
/// more direct access to existing bundle functionality.
|
||||
pub trait CachableBundle<'this, T>
|
||||
where
|
||||
Self: Bundle + 'this,
|
||||
T: FileIndex<'this>,
|
||||
{
|
||||
/// Initialize this bundle's file index from an external reader
|
||||
/// This allows us to retrieve the FileIndex from the cache WITHOUT
|
||||
/// touching the network.
|
||||
fn initialize_index(&mut self, _source: &mut dyn Read) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get a `Read` instance to this bundle's index,
|
||||
/// reading directly from the backend.
|
||||
fn get_index_reader(&mut self) -> Result<Box<dyn Read>>;
|
||||
|
||||
/// Return a reference to this bundle's FileIndex.
|
||||
fn index(&mut self) -> &mut T;
|
||||
|
||||
/// Open the file that `info` points to.
|
||||
fn open_fileinfo(
|
||||
&mut self,
|
||||
info: &T::InfoType,
|
||||
status: &mut dyn StatusBackend,
|
||||
) -> OpenResult<InputHandle>;
|
||||
|
||||
/// Search for a file in this bundle.
|
||||
/// This should forward the call to `self.index`
|
||||
fn search(&mut self, name: &str) -> Option<T::InfoType>;
|
||||
|
||||
/// Return a string that corresponds to this bundle's location, probably a URL.
|
||||
/// We should NOT need to do any network IO to get this value.
|
||||
fn get_location(&mut self) -> String;
|
||||
}
|
||||
|
||||
impl<'this, T: FileIndex<'this>, B: CachableBundle<'this, T> + ?Sized> CachableBundle<'this, T>
|
||||
for Box<B>
|
||||
{
|
||||
fn initialize_index(&mut self, source: &mut dyn Read) -> Result<()> {
|
||||
(**self).initialize_index(source)
|
||||
}
|
||||
|
||||
fn get_location(&mut self) -> String {
|
||||
(**self).get_location()
|
||||
}
|
||||
|
||||
fn get_index_reader(&mut self) -> Result<Box<dyn Read>> {
|
||||
(**self).get_index_reader()
|
||||
}
|
||||
|
||||
fn index(&mut self) -> &mut T {
|
||||
(**self).index()
|
||||
}
|
||||
|
||||
fn open_fileinfo(
|
||||
&mut self,
|
||||
info: &T::InfoType,
|
||||
status: &mut dyn StatusBackend,
|
||||
) -> OpenResult<InputHandle> {
|
||||
(**self).open_fileinfo(info, status)
|
||||
}
|
||||
|
||||
fn search(&mut self, name: &str) -> Option<T::InfoType> {
|
||||
(**self).search(name)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to open a bundle from a string,
|
||||
/// detecting its type.
|
||||
///
|
||||
/// Returns None if auto-detection fails.
|
||||
pub fn detect_bundle(
|
||||
source: String,
|
||||
only_cached: bool,
|
||||
custom_cache_dir: Option<PathBuf>,
|
||||
) -> Result<Option<Box<dyn Bundle>>> {
|
||||
use url::Url;
|
||||
|
||||
// Parse URL and detect bundle type
|
||||
if let Ok(url) = Url::parse(&source) {
|
||||
if url.scheme() == "https" || url.scheme() == "http" {
|
||||
if source.ends_with("ttb") {
|
||||
let bundle = BundleCache::new(
|
||||
Box::new(TTBNetBundle::new(source)?),
|
||||
only_cached,
|
||||
custom_cache_dir,
|
||||
)?;
|
||||
return Ok(Some(Box::new(bundle)));
|
||||
} else {
|
||||
let bundle = BundleCache::new(
|
||||
Box::new(ItarBundle::new(source)?),
|
||||
only_cached,
|
||||
custom_cache_dir,
|
||||
)?;
|
||||
return Ok(Some(Box::new(bundle)));
|
||||
}
|
||||
} else if url.scheme() == "file" {
|
||||
let file_path = url.to_file_path().map_err(|_| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"failed to parse local path",
|
||||
)
|
||||
})?;
|
||||
return bundle_from_path(file_path);
|
||||
} else {
|
||||
return Ok(None);
|
||||
}
|
||||
} else {
|
||||
// If we couldn't parse the URL, this is probably a local path.
|
||||
return bundle_from_path(PathBuf::from(source));
|
||||
}
|
||||
|
||||
fn bundle_from_path(p: PathBuf) -> Result<Option<Box<dyn Bundle>>> {
|
||||
let ext = p.extension().map_or("", |x| x.to_str().unwrap_or(""));
|
||||
|
||||
if p.is_dir() {
|
||||
Ok(Some(Box::new(DirBundle::new(p))))
|
||||
} else if ext == "zip" {
|
||||
Ok(Some(Box::new(ZipBundle::open(p)?)))
|
||||
} else if ext == "ttb" {
|
||||
Ok(Some(Box::new(TTBFsBundle::open(p)?)))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
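A minimal usage sketch of the detection logic above (the URL is hypothetical,
and we assume a caller that returns `Result` and has `anyhow!` from the
tectonic_errors prelude in scope):

    // A ".ttb" https URL takes the TTBNetBundle path; any other http(s)
    // URL falls back to the ItarBundle path, both wrapped in BundleCache.
    let bundle = detect_bundle("https://example.org/bundle.ttb".to_owned(), false, None)?
        .ok_or_else(|| anyhow!("unrecognized bundle source"))?;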
|
||||
|
@ -127,12 +282,11 @@ pub fn get_fallback_bundle_url(format_version: u32) -> String {
|
|||
/// `tectonic` crate provides a configuration mechanism to allow the user to
|
||||
/// override the bundle URL setting, and that should be preferred if you’re in a
|
||||
/// position to use it.
|
||||
pub fn get_fallback_bundle(
|
||||
format_version: u32,
|
||||
only_cached: bool,
|
||||
status: &mut dyn StatusBackend,
|
||||
) -> Result<cache::CachingBundle<itar::IndexedTarBackend>> {
|
||||
pub fn get_fallback_bundle(format_version: u32, only_cached: bool) -> Result<Box<dyn Bundle>> {
|
||||
let url = get_fallback_bundle_url(format_version);
|
||||
let mut cache = cache::Cache::get_user_default()?;
|
||||
cache.open(&url, only_cached, status)
|
||||
let bundle = detect_bundle(url, only_cached, None)?;
|
||||
if bundle.is_none() {
|
||||
bail!("could not open default bundle")
|
||||
}
|
||||
Ok(bundle.unwrap())
|
||||
}
|
||||
|
|
|
@ -0,0 +1,288 @@
|
|||
// Copyright 2023-2024 the Tectonic Project
|
||||
// Licensed under the MIT License.
|
||||
|
||||
//! Common tools for the ttbv1 format, used in both
|
||||
//! network and filesystem bundles.
|
||||
|
||||
use crate::{FileIndex, FileInfo};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
convert::{TryFrom, TryInto},
|
||||
io::{BufRead, BufReader, Read},
|
||||
str::FromStr,
|
||||
};
|
||||
use tectonic_errors::prelude::*;
|
||||
use tectonic_io_base::digest::{self, DigestData};
|
||||
|
||||
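/// Header of a ttbv1 bundle. Per the `TryFrom` impl below, the 70-byte
/// header lays out as: bytes 0..14 hold the ASCII signature
/// "tectonicbundle", 14..18 a little-endian u32 version (must be 1),
/// 18..26 a u64 index start offset, 26..30 a u32 gzipped index length,
/// 30..34 a u32 uncompressed index length, and 34..66 a 32-byte SHA256
/// digest; bytes 66..70 are not read here.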
pub struct TTBv1Header {
|
||||
pub index_start: u64,
|
||||
pub index_real_len: u32,
|
||||
pub index_gzip_len: u32,
|
||||
pub digest: DigestData,
|
||||
}
|
||||
|
||||
impl TryFrom<[u8; 70]> for TTBv1Header {
|
||||
type Error = Error;
|
||||
|
||||
fn try_from(header: [u8; 70]) -> Result<Self, Self::Error> {
|
||||
let signature = &header[0..14];
|
||||
let version = u32::from_le_bytes(header[14..18].try_into()?);
|
||||
let index_start = u64::from_le_bytes(header[18..26].try_into()?);
|
||||
let index_gzip_len = u32::from_le_bytes(header[26..30].try_into()?);
|
||||
let index_real_len = u32::from_le_bytes(header[30..34].try_into()?);
|
||||
let digest: DigestData = DigestData::from_str(&digest::bytes_to_hex(&header[34..66]))?;
|
||||
|
||||
if signature != b"tectonicbundle" {
|
||||
bail!("this is not a bundle");
|
||||
}
|
||||
|
||||
if version != 1 {
|
||||
bail!("wrong ttb version");
|
||||
}
|
||||
|
||||
Ok(TTBv1Header {
|
||||
digest,
|
||||
index_start,
|
||||
index_real_len,
|
||||
index_gzip_len,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// File info for a ttb bundle.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct TTBFileInfo {
|
||||
pub start: u64,
|
||||
pub real_len: u32,
|
||||
pub gzip_len: u32,
|
||||
pub path: String,
|
||||
pub name: String,
|
||||
pub hash: Option<String>,
|
||||
}
|
||||
|
||||
impl FileInfo for TTBFileInfo {
|
||||
fn name(&self) -> &str {
|
||||
&self.name
|
||||
}
|
||||
|
||||
fn path(&self) -> &str {
|
||||
&self.path
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct TTBFileIndex {
|
||||
// Vector of fileinfos.
|
||||
// This MUST be sorted by path for search() to work properly!
|
||||
pub content: Vec<TTBFileInfo>,
|
||||
|
||||
search_orders: HashMap<String, Vec<String>>,
|
||||
default_search_order: String,
|
||||
|
||||
// Remember previous searches so we don't have to iterate over content again.
|
||||
search_cache: HashMap<String, Option<TTBFileInfo>>,
|
||||
}
|
||||
|
||||
impl TTBFileIndex {
|
||||
fn read_filelist_line(&mut self, line: String) -> Result<()> {
|
||||
let mut bits = line.split_whitespace();
|
||||
|
||||
if let (Some(start), Some(gzip_len), Some(real_len), Some(hash)) =
|
||||
(bits.next(), bits.next(), bits.next(), bits.next())
|
||||
{
|
||||
let path = bits.collect::<Vec<&str>>().join(" ");
|
||||
let (_, name) = path.rsplit_once('/').unwrap_or(("", &path));
|
||||
|
||||
// Basic path validation.
|
||||
// TODO: more robust checks
|
||||
if path.starts_with('/')
|
||||
|| path.contains("./") // Also catches "/../"
|
||||
|| path.contains("//")
|
||||
{
|
||||
bail!("bad bundle file path `{path}`");
|
||||
}
|
||||
|
||||
self.content.push(TTBFileInfo {
|
||||
start: start.parse::<u64>()?,
|
||||
gzip_len: gzip_len.parse::<u32>()?,
|
||||
real_len: real_len.parse::<u32>()?,
|
||||
path: path.to_owned(),
|
||||
name: name.to_owned(),
|
||||
hash: match hash {
|
||||
"nohash" => None,
|
||||
_ => Some(hash.to_owned()),
|
||||
},
|
||||
});
|
||||
} else {
|
||||
// TODO: preserve the warning info or something!
|
||||
bail!("malformed FILELIST line");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn read_search_line(&mut self, name: String, line: String) -> Result<()> {
|
||||
let stat = self.search_orders.entry(name).or_default();
|
||||
stat.push(line);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn read_defaultsearch_line(&mut self, line: String) -> Result<()> {
|
||||
self.default_search_order = line;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'this> FileIndex<'this> for TTBFileIndex {
|
||||
type InfoType = TTBFileInfo;
|
||||
|
||||
fn iter(&'this self) -> Box<dyn Iterator<Item = &'this TTBFileInfo> + 'this> {
|
||||
Box::new(self.content.iter())
|
||||
}
|
||||
|
||||
fn len(&self) -> usize {
|
||||
self.content.len()
|
||||
}
|
||||
|
||||
fn initialize(&mut self, reader: &mut dyn Read) -> Result<()> {
|
||||
self.content.clear();
|
||||
self.search_orders.clear();
|
||||
self.search_cache.clear();
|
||||
self.default_search_order.clear();
|
||||
|
||||
let mut mode: String = String::new();
|
||||
for line in BufReader::new(reader).lines() {
|
||||
let line = line?;
|
||||
|
||||
if line.starts_with('[') {
|
||||
mode = line[1..line.len() - 1].to_owned();
|
||||
continue;
|
||||
}
|
||||
|
||||
if mode.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let (cmd, arg) = mode.rsplit_once(':').unwrap_or((&mode[..], ""));
|
||||
|
||||
match cmd {
|
||||
"DEFAULTSEARCH" => self.read_defaultsearch_line(line)?,
|
||||
"FILELIST" => self.read_filelist_line(line)?,
|
||||
"SEARCH" => self.read_search_line(arg.to_owned(), line)?,
|
||||
_ => continue,
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
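// For illustration, the index text parsed above looks like the following
// (hypothetical entries; a FILELIST line is `start gzip_len real_len hash path`,
// and a trailing `//` in a SEARCH rule matches a whole directory subtree):
//
//   [DEFAULTSEARCH]
//   MAIN
//   [SEARCH:MAIN]
//   /texmf-dist/tex//
//   [FILELIST]
//   1024 300 812 nohash texmf-dist/tex/plain/base/plain.tex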
|
||||
fn search(&'this mut self, name: &str) -> Option<TTBFileInfo> {
|
||||
match self.search_cache.get(name) {
|
||||
None => {}
|
||||
Some(r) => return r.clone(),
|
||||
}
|
||||
|
||||
let search = self.search_orders.get(&self.default_search_order).unwrap();
|
||||
|
||||
// Edge case: absolute paths
|
||||
if name.starts_with('/') {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Get last element of path, since
|
||||
// some packages reference a path to a file.
|
||||
// `fithesis4` is one example.
|
||||
let relative_parent: bool;
|
||||
|
||||
let n = match name.rsplit_once('/') {
|
||||
Some(n) => {
|
||||
relative_parent = true;
|
||||
n.1
|
||||
}
|
||||
None => {
|
||||
relative_parent = false;
|
||||
name
|
||||
}
|
||||
};
|
||||
|
||||
// If we don't have this path in the index, this file doesn't exist.
|
||||
// The code below will clone these strings iff it has to.
|
||||
let mut infos: Vec<&TTBFileInfo> = Vec::new();
|
||||
for i in self.iter() {
|
||||
if i.name() == n {
|
||||
infos.push(i);
|
||||
} else if !infos.is_empty() {
|
||||
// infos is sorted, so we can stop searching now.
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if relative_parent {
|
||||
// TODO: REWORK
|
||||
let mut matching: Option<&TTBFileInfo> = None;
|
||||
for info in &infos {
|
||||
if info.path().ends_with(&name) {
|
||||
match matching {
|
||||
Some(_) => return None, // TODO: warning. This shouldn't happen.
|
||||
None => matching = Some(info),
|
||||
}
|
||||
}
|
||||
}
|
||||
let matching = Some(matching?.clone());
|
||||
self.search_cache.insert(name.to_owned(), matching.clone());
|
||||
matching
|
||||
} else {
|
||||
// Even if infos.len() is 1, we don't return here.
|
||||
// We need to make sure this file matches a search path:
|
||||
// if it's in a directory we don't search, we shouldn't find it!
|
||||
|
||||
let mut picked: Vec<&TTBFileInfo> = Vec::new();
|
||||
for rule in search {
|
||||
// Remove leading slash from rule
|
||||
// (search patterns start with slashes, but paths do not)
|
||||
let rule = &rule[1..];
|
||||
|
||||
for info in &infos {
|
||||
if rule.ends_with("//") {
|
||||
// Match start of parent path
|
||||
// (cutting off the last slash)
|
||||
if info.path().starts_with(&rule[0..rule.len() - 1]) {
|
||||
picked.push(info);
|
||||
}
|
||||
} else {
|
||||
// Match full parent path
|
||||
if &info.path()[0..info.path().len() - name.len()] == rule {
|
||||
picked.push(info);
|
||||
}
|
||||
}
|
||||
}
|
||||
if !picked.is_empty() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let r = {
|
||||
if picked.is_empty() {
|
||||
// No file in our search dirs had this name.
|
||||
None
|
||||
} else if picked.len() == 1 {
|
||||
// We found exactly one file with this name.
|
||||
// Clone it so the search cache and the caller can each own a copy.
|
||||
Some(picked[0].clone())
|
||||
} else {
|
||||
// We found multiple files with this name, all of which
|
||||
// have the same priority. Pick alphabetically to emulate
|
||||
// an "alphabetic DFS" search order.
|
||||
picked.sort_by(|a, b| a.path().cmp(b.path()));
|
||||
Some(picked[0].clone())
|
||||
}
|
||||
};
|
||||
|
||||
self.search_cache.insert(name.to_owned(), r.clone());
|
||||
r
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,131 @@
|
|||
// Copyright 2023-2024 the Tectonic Project
|
||||
// Licensed under the MIT License.
|
||||
|
||||
//! Read ttb v1 bundles on the filesystem.
|
||||
//!
|
||||
//! The main type offered by this module is the [`TTBFsBundle`] struct.
|
||||
|
||||
use crate::{
|
||||
ttb::{TTBFileIndex, TTBFileInfo, TTBv1Header},
|
||||
Bundle, FileIndex, FileInfo,
|
||||
};
|
||||
use flate2::read::GzDecoder;
|
||||
use std::{
|
||||
convert::TryFrom,
|
||||
fs::File,
|
||||
io::{Cursor, Read, Seek, SeekFrom},
|
||||
path::Path,
|
||||
};
|
||||
use tectonic_errors::prelude::*;
|
||||
use tectonic_io_base::{digest::DigestData, InputHandle, InputOrigin, IoProvider, OpenResult};
|
||||
use tectonic_status_base::StatusBackend;
|
||||
|
||||
/// Read a [`TTBFileInfo`] from this bundle.
|
||||
/// We assume that `fileinfo` points to a valid file in this bundle.
|
||||
fn read_fileinfo<'a>(fileinfo: &TTBFileInfo, reader: &'a mut File) -> Result<Box<dyn Read + 'a>> {
|
||||
reader.seek(SeekFrom::Start(fileinfo.start))?;
|
||||
Ok(Box::new(GzDecoder::new(
|
||||
reader.take(fileinfo.gzip_len as u64),
|
||||
)))
|
||||
}
|
||||
|
||||
/// A ttb bundle on the filesystem.
|
||||
pub struct TTBFsBundle<T>
|
||||
where
|
||||
for<'a> T: FileIndex<'a>,
|
||||
{
|
||||
file: File,
|
||||
index: T,
|
||||
}
|
||||
|
||||
|
||||
impl TTBFsBundle<TTBFileIndex> {
|
||||
/// Create a new ttb bundle backed by the given file.
|
||||
pub fn new(file: File) -> Result<Self> {
|
||||
Ok(TTBFsBundle {
|
||||
file,
|
||||
index: TTBFileIndex::default(),
|
||||
})
|
||||
}
|
||||
|
||||
fn get_header(&mut self) -> Result<TTBv1Header> {
|
||||
self.file.seek(SeekFrom::Start(0))?;
|
||||
let mut header: [u8; 70] = [0u8; 70];
|
||||
self.file.read_exact(&mut header)?;
|
||||
self.file.seek(SeekFrom::Start(0))?;
|
||||
let header = TTBv1Header::try_from(header)?;
|
||||
Ok(header)
|
||||
}
|
||||
|
||||
// Fill this bundle's index, reading it from the bundle file.
|
||||
fn fill_index(&mut self) -> Result<()> {
|
||||
let header = self.get_header()?;
|
||||
let info = TTBFileInfo {
|
||||
start: header.index_start,
|
||||
gzip_len: header.index_gzip_len,
|
||||
real_len: header.index_real_len,
|
||||
path: "/INDEX".to_owned(),
|
||||
name: "INDEX".to_owned(),
|
||||
hash: None,
|
||||
};
|
||||
|
||||
let mut reader = read_fileinfo(&info, &mut self.file)?;
|
||||
self.index.initialize(&mut reader)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Open a file on the filesystem as a ttb bundle.
|
||||
pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
|
||||
Self::new(File::open(path)?)
|
||||
}
|
||||
}
|
||||
|
||||
impl IoProvider for TTBFsBundle<TTBFileIndex> {
|
||||
fn input_open_name(
|
||||
&mut self,
|
||||
name: &str,
|
||||
_status: &mut dyn StatusBackend,
|
||||
) -> OpenResult<InputHandle> {
|
||||
// Fetch index if it is empty
|
||||
if self.index.is_empty() {
|
||||
if let Err(e) = self.fill_index() {
|
||||
return OpenResult::Err(e);
|
||||
}
|
||||
}
|
||||
|
||||
let info = match self.index.search(name) {
|
||||
None => return OpenResult::NotAvailable,
|
||||
Some(s) => s,
|
||||
};
|
||||
|
||||
let mut v: Vec<u8> = Vec::with_capacity(info.real_len as usize);
|
||||
|
||||
match read_fileinfo(&info, &mut self.file) {
|
||||
Err(e) => return OpenResult::Err(e),
|
||||
Ok(mut b) => {
|
||||
if let Err(e) = b.read_to_end(&mut v) {
|
||||
return OpenResult::Err(e.into());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
OpenResult::Ok(InputHandle::new_read_only(
|
||||
name,
|
||||
Cursor::new(v),
|
||||
InputOrigin::Other,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl Bundle for TTBFsBundle<TTBFileIndex> {
|
||||
fn all_files(&self) -> Vec<String> {
|
||||
self.index.iter().map(|x| x.path().to_owned()).collect()
|
||||
}
|
||||
|
||||
fn get_digest(&mut self) -> Result<DigestData> {
|
||||
let header = self.get_header()?;
|
||||
Ok(header.digest)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,222 @@
|
|||
// Copyright 2023-2024 the Tectonic Project
|
||||
// Licensed under the MIT License.
|
||||
|
||||
//! Read ttb v1 bundles on the internet.
|
||||
//!
|
||||
//! The main type offered by this module is the [`TTBNetBundle`] struct,
|
||||
//! which can (but should not) be used directly as a [`tectonic_io_base::IoProvider`].
|
||||
//!
|
||||
//! Instead, wrap it in a [`crate::BundleCache`] for filesystem-backed caching.
|
||||
|
||||
use crate::{
|
||||
ttb::{TTBFileIndex, TTBFileInfo, TTBv1Header},
|
||||
Bundle, CachableBundle, FileIndex, FileInfo, NET_RETRY_ATTEMPTS, NET_RETRY_SLEEP_MS,
|
||||
};
|
||||
use flate2::read::GzDecoder;
|
||||
use std::{
|
||||
convert::TryFrom,
|
||||
io::{Cursor, Read},
|
||||
thread,
|
||||
time::Duration,
|
||||
};
|
||||
use tectonic_errors::prelude::*;
|
||||
use tectonic_geturl::{DefaultBackend, DefaultRangeReader, GetUrlBackend, RangeReader};
|
||||
use tectonic_io_base::{InputHandle, InputOrigin, IoProvider, OpenResult};
|
||||
use tectonic_status_base::{tt_note, tt_warning, StatusBackend};
|
||||
|
||||
/// Read a [`TTBFileInfo`] from this bundle.
|
||||
/// We assume that `fileinfo` points to a valid file in this bundle.
|
||||
fn read_fileinfo(fileinfo: &TTBFileInfo, reader: &mut DefaultRangeReader) -> Result<Box<dyn Read>> {
|
||||
// fileinfo.gzip_len is a u32, so it must fit inside a usize (assuming a 32/64-bit machine).
|
||||
let stream = reader.read_range(fileinfo.start, fileinfo.gzip_len as usize)?;
|
||||
Ok(Box::new(GzDecoder::new(stream)))
|
||||
}
|
||||
|
||||
/// Access a ttbv1 bundle hosted on the internet.
|
||||
/// This struct provides NO caching. All files
|
||||
/// are downloaded.
|
||||
///
|
||||
/// As such, this bundle should probably be wrapped in a [`crate::BundleCache`].
|
||||
pub struct TTBNetBundle<T>
|
||||
where
|
||||
for<'a> T: FileIndex<'a>,
|
||||
{
|
||||
url: String,
|
||||
index: T,
|
||||
|
||||
// We need the network to load these.
|
||||
// They're None until absolutely necessary.
|
||||
reader: Option<DefaultRangeReader>,
|
||||
}
|
||||
|
||||
|
||||
impl TTBNetBundle<TTBFileIndex> {
|
||||
/// Create a new ttb network bundle for the given URL.
|
||||
/// This method does not require network access.
|
||||
/// It will succeed even if we can't connect to the bundle, or if we're given a bad URL.
|
||||
pub fn new(url: String) -> Result<Self> {
|
||||
Ok(TTBNetBundle {
|
||||
reader: None,
|
||||
index: TTBFileIndex::default(),
|
||||
url,
|
||||
})
|
||||
}
|
||||
|
||||
fn connect_reader(&mut self) -> Result<()> {
|
||||
if self.reader.is_some() {
|
||||
return Ok(());
|
||||
}
|
||||
let geturl_backend = DefaultBackend::default();
|
||||
self.reader = Some(geturl_backend.open_range_reader(&self.url));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_header(&mut self) -> Result<TTBv1Header> {
|
||||
self.connect_reader()?;
|
||||
let mut header: [u8; 70] = [0u8; 70];
|
||||
self.reader
|
||||
.as_mut()
|
||||
.unwrap()
|
||||
.read_range(0, 70)?
|
||||
.read_exact(&mut header)?;
|
||||
let header = TTBv1Header::try_from(header)?;
|
||||
Ok(header)
|
||||
}
|
||||
|
||||
// Fill this bundle's index if it is empty.
|
||||
fn ensure_index(&mut self) -> Result<()> {
|
||||
if self.index.is_initialized() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut reader = self.get_index_reader()?;
|
||||
self.index.initialize(&mut reader)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl IoProvider for TTBNetBundle<TTBFileIndex> {
|
||||
fn input_open_name(
|
||||
&mut self,
|
||||
name: &str,
|
||||
status: &mut dyn StatusBackend,
|
||||
) -> OpenResult<InputHandle> {
|
||||
if let Err(e) = self.ensure_index() {
|
||||
return OpenResult::Err(e);
|
||||
};
|
||||
|
||||
let info = match self.search(name) {
|
||||
None => return OpenResult::NotAvailable,
|
||||
Some(s) => s,
|
||||
};
|
||||
|
||||
// Retries are handled in open_fileinfo,
|
||||
// since BundleCache never calls input_open_name.
|
||||
self.open_fileinfo(&info, status)
|
||||
}
|
||||
}
|
||||
|
||||
impl Bundle for TTBNetBundle<TTBFileIndex> {
|
||||
fn all_files(&self) -> Vec<String> {
|
||||
self.index.iter().map(|x| x.path().to_owned()).collect()
|
||||
}
|
||||
|
||||
fn get_digest(&mut self) -> Result<tectonic_io_base::digest::DigestData> {
|
||||
let header = self.get_header()?;
|
||||
Ok(header.digest)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'this> CachableBundle<'this, TTBFileIndex> for TTBNetBundle<TTBFileIndex> {
|
||||
fn get_location(&mut self) -> String {
|
||||
self.url.clone()
|
||||
}
|
||||
|
||||
fn initialize_index(&mut self, source: &mut dyn Read) -> Result<()> {
|
||||
self.index.initialize(source)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn index(&mut self) -> &mut TTBFileIndex {
|
||||
&mut self.index
|
||||
}
|
||||
|
||||
fn search(&mut self, name: &str) -> Option<TTBFileInfo> {
|
||||
self.index.search(name)
|
||||
}
|
||||
|
||||
fn get_index_reader(&mut self) -> Result<Box<dyn Read>> {
|
||||
self.connect_reader()?;
|
||||
let header = self.get_header()?;
|
||||
|
||||
read_fileinfo(
|
||||
&TTBFileInfo {
|
||||
start: header.index_start,
|
||||
gzip_len: header.index_gzip_len,
|
||||
real_len: header.index_real_len,
|
||||
path: "".to_owned(),
|
||||
name: "".to_owned(),
|
||||
hash: None,
|
||||
},
|
||||
self.reader.as_mut().unwrap(),
|
||||
)
|
||||
}
|
||||
|
||||
fn open_fileinfo(
|
||||
&mut self,
|
||||
info: &TTBFileInfo,
|
||||
status: &mut dyn StatusBackend,
|
||||
) -> OpenResult<InputHandle> {
|
||||
let mut v: Vec<u8> = Vec::with_capacity(info.real_len as usize);
|
||||
tt_note!(status, "downloading {}", info.name);
|
||||
|
||||
// Edge case for zero-sized reads
|
||||
// (these cause errors on some web hosts)
|
||||
if info.gzip_len == 0 {
|
||||
return OpenResult::Ok(InputHandle::new_read_only(
|
||||
info.name.to_owned(),
|
||||
Cursor::new(v),
|
||||
InputOrigin::Other,
|
||||
));
|
||||
}
|
||||
|
||||
// Get file with retries
|
||||
for i in 0..NET_RETRY_ATTEMPTS {
|
||||
let mut reader = match read_fileinfo(info, self.reader.as_mut().unwrap()) {
|
||||
Ok(r) => r,
|
||||
Err(e) => {
|
||||
tt_warning!(status,
|
||||
"failure fetching \"{}\" from network ({}/{NET_RETRY_ATTEMPTS})",
|
||||
info.name, i+1; e
|
||||
);
|
||||
thread::sleep(Duration::from_millis(NET_RETRY_SLEEP_MS));
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
match reader.read_to_end(&mut v) {
|
||||
Ok(_) => {}
|
||||
Err(e) => {
|
||||
tt_warning!(status,
|
||||
"failure downloading \"{}\" from network ({}/{NET_RETRY_ATTEMPTS})",
|
||||
info.name, i+1; e.into()
|
||||
);
|
||||
thread::sleep(Duration::from_millis(NET_RETRY_SLEEP_MS));
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
return OpenResult::Ok(InputHandle::new_read_only(
|
||||
info.name.to_owned(),
|
||||
Cursor::new(v),
|
||||
InputOrigin::Other,
|
||||
));
|
||||
}
|
||||
|
||||
OpenResult::Err(anyhow!(
|
||||
"failed to download \"{}\"; please check your network connection.",
|
||||
info.name
|
||||
))
|
||||
}
|
||||
}
|
|
@ -3,18 +3,18 @@
|
|||
|
||||
//! ZIP files as Tectonic bundles.
|
||||
|
||||
use crate::Bundle;
|
||||
use std::{
|
||||
fs::File,
|
||||
io::{Cursor, Read, Seek},
|
||||
path::Path,
|
||||
str::FromStr,
|
||||
};
|
||||
use tectonic_errors::prelude::*;
|
||||
use tectonic_io_base::{InputHandle, InputOrigin, IoProvider, OpenResult};
|
||||
use tectonic_status_base::StatusBackend;
|
||||
use tectonic_io_base::{digest, InputHandle, InputOrigin, IoProvider, OpenResult};
|
||||
use tectonic_status_base::{NoopStatusBackend, StatusBackend};
|
||||
use zip::{result::ZipError, ZipArchive};
|
||||
|
||||
use crate::Bundle;
|
||||
|
||||
/// A bundle backed by a ZIP file.
|
||||
pub struct ZipBundle<R: Read + Seek> {
|
||||
zip: ZipArchive<R>,
|
||||
|
@ -57,7 +57,11 @@ impl<R: Read + Seek> IoProvider for ZipBundle<R> {
|
|||
}
|
||||
};
|
||||
|
||||
let mut buf = Vec::with_capacity(zipitem.size() as usize);
|
||||
let s = zipitem.size();
|
||||
if s >= u32::MAX as u64 {
|
||||
return OpenResult::Err(anyhow!("Zip item too large."));
|
||||
}
|
||||
let mut buf = Vec::with_capacity(s as usize);
|
||||
|
||||
if let Err(e) = zipitem.read_to_end(&mut buf) {
|
||||
return OpenResult::Err(e.into());
|
||||
|
@ -72,7 +76,28 @@ impl<R: Read + Seek> IoProvider for ZipBundle<R> {
|
|||
}
|
||||
|
||||
impl<R: Read + Seek> Bundle for ZipBundle<R> {
|
||||
fn all_files(&mut self, _status: &mut dyn StatusBackend) -> Result<Vec<String>> {
|
||||
Ok(self.zip.file_names().map(|s| s.to_owned()).collect())
|
||||
fn all_files(&self) -> Vec<String> {
|
||||
self.zip.file_names().map(|x| x.to_owned()).collect()
|
||||
}
|
||||
|
||||
fn get_digest(&mut self) -> Result<tectonic_io_base::digest::DigestData> {
|
||||
let digest_text = match self.input_open_name(digest::DIGEST_NAME, &mut NoopStatusBackend {})
|
||||
{
|
||||
OpenResult::Ok(h) => {
|
||||
let mut text = String::new();
|
||||
h.take(64).read_to_string(&mut text)?;
|
||||
text
|
||||
}
|
||||
|
||||
OpenResult::NotAvailable => {
|
||||
bail!("bundle does not provide needed SHA256SUM file");
|
||||
}
|
||||
|
||||
OpenResult::Err(e) => {
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
Ok(atry!(digest::DigestData::from_str(&digest_text); ["corrupted SHA256 digest data"]))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -154,7 +154,7 @@ impl Spx2HtmlEngine {
|
|||
let mut output = hooks.io().output_open_name(asp).must_exist()?;
|
||||
serde_json::to_writer_pretty(&mut output, &ser)?;
|
||||
let (name, digest) = output.into_name_digest();
|
||||
hooks.event_output_closed(name, digest, status);
|
||||
hooks.event_output_closed(name, digest);
|
||||
} else if !self.do_not_emit_assets {
|
||||
assets.emit(fonts, &mut common)?;
|
||||
}
|
||||
|
|
|
@ -6,7 +6,6 @@
|
|||
use curl::easy::Easy;
|
||||
use std::io::Cursor;
|
||||
use tectonic_errors::{anyhow::bail, Result};
|
||||
use tectonic_status_base::StatusBackend;
|
||||
|
||||
use crate::{GetUrlBackend, RangeReader};
|
||||
|
||||
|
@ -67,11 +66,11 @@ impl GetUrlBackend for CurlBackend {
|
|||
type Response = Cursor<Vec<u8>>;
|
||||
type RangeReader = CurlRangeReader;
|
||||
|
||||
fn get_url(&mut self, url: &str, _status: &mut dyn StatusBackend) -> Result<Self::Response> {
|
||||
fn get_url(&mut self, url: &str) -> Result<Self::Response> {
|
||||
get_url_generic(&mut self.handle, url, None)
|
||||
}
|
||||
|
||||
fn resolve_url(&mut self, url: &str, _status: &mut dyn StatusBackend) -> Result<String> {
|
||||
fn resolve_url(&mut self, url: &str) -> Result<String> {
|
||||
Ok(url.into())
|
||||
}
|
||||
|
||||
|
|
|
@ -25,7 +25,6 @@
|
|||
use cfg_if::cfg_if;
|
||||
use std::io::Read;
|
||||
use tectonic_errors::Result;
|
||||
use tectonic_status_base::StatusBackend;
|
||||
|
||||
/// A trait for reading byte ranges from an HTTP resource.
|
||||
pub trait RangeReader {
|
||||
|
@ -48,10 +47,10 @@ pub trait GetUrlBackend: Default {
|
|||
///
|
||||
/// But we attempt to detect redirects into CDNs/S3/etc and *stop* following
|
||||
/// before we get that deep.
|
||||
fn resolve_url(&mut self, url: &str, status: &mut dyn StatusBackend) -> Result<String>;
|
||||
fn resolve_url(&mut self, url: &str) -> Result<String>;
|
||||
|
||||
/// Perform an HTTP GET on a URL, returning a readable result.
|
||||
fn get_url(&mut self, url: &str, status: &mut dyn StatusBackend) -> Result<Self::Response>;
|
||||
fn get_url(&mut self, url: &str) -> Result<Self::Response>;
|
||||
|
||||
/// Open a range reader that can perform byte-range reads on the specified URL.
|
||||
fn open_range_reader(&self, url: &str) -> Self::RangeReader;
|
||||
|
|
|
@ -10,7 +10,6 @@ use std::{
|
|||
result::Result as StdResult,
|
||||
};
|
||||
use tectonic_errors::Result;
|
||||
use tectonic_status_base::StatusBackend;
|
||||
|
||||
use crate::{GetUrlBackend, RangeReader};
|
||||
|
||||
|
@ -34,11 +33,11 @@ impl GetUrlBackend for NullBackend {
|
|||
type Response = Empty;
|
||||
type RangeReader = NullRangeReader;
|
||||
|
||||
fn get_url(&mut self, _url: &str, _status: &mut dyn StatusBackend) -> Result<Empty> {
|
||||
fn get_url(&mut self, _url: &str) -> Result<Empty> {
|
||||
Err((NoGetUrlBackendError {}).into())
|
||||
}
|
||||
|
||||
fn resolve_url(&mut self, _url: &str, _status: &mut dyn StatusBackend) -> Result<String> {
|
||||
fn resolve_url(&mut self, _url: &str) -> Result<String> {
|
||||
Err((NoGetUrlBackendError {}).into())
|
||||
}
|
||||
|
||||
|
|
|
@ -10,7 +10,6 @@ use reqwest::{
|
|||
StatusCode, Url,
|
||||
};
|
||||
use tectonic_errors::{anyhow::bail, Result};
|
||||
use tectonic_status_base::{tt_note, StatusBackend};
|
||||
|
||||
use crate::{GetUrlBackend, RangeReader};
|
||||
|
||||
|
@ -24,7 +23,7 @@ impl GetUrlBackend for ReqwestBackend {
|
|||
type Response = Response;
|
||||
type RangeReader = ReqwestRangeReader;
|
||||
|
||||
fn get_url(&mut self, url: &str, _status: &mut dyn StatusBackend) -> Result<Response> {
|
||||
fn get_url(&mut self, url: &str) -> Result<Response> {
|
||||
let res = Client::new().get(url).send()?;
|
||||
if !res.status().is_success() {
|
||||
bail!(
|
||||
|
@ -36,9 +35,7 @@ impl GetUrlBackend for ReqwestBackend {
|
|||
Ok(res)
|
||||
}
|
||||
|
||||
fn resolve_url(&mut self, url: &str, status: &mut dyn StatusBackend) -> Result<String> {
|
||||
tt_note!(status, "connecting to {}", url);
|
||||
|
||||
fn resolve_url(&mut self, url: &str) -> Result<String> {
|
||||
let parsed = Url::parse(url)?;
|
||||
let original_filename = parsed
|
||||
.path_segments()
|
||||
|
@ -96,10 +93,6 @@ impl GetUrlBackend for ReqwestBackend {
|
|||
}
|
||||
|
||||
let final_url: String = res.url().clone().into();
|
||||
if final_url != url {
|
||||
tt_note!(status, "resolved to {}", final_url);
|
||||
}
|
||||
|
||||
Ok(final_url)
|
||||
}
|
||||
|
||||
|
|
|
@ -9,6 +9,7 @@
|
|||
|
||||
use app_dirs2::AppDataType;
|
||||
use std::path::PathBuf;
|
||||
use std::{env, fs};
|
||||
use tectonic_errors::prelude::*;
|
||||
|
||||
/// The instance of the `app_dirs2` crate that this crate links to.
|
||||
|
@ -61,6 +62,27 @@ pub fn ensure_user_config() -> Result<PathBuf> {
|
|||
/// - macOS: `$HOME/Library/Caches/Tectonic`
|
||||
/// - Others: `$XDG_CACHE_HOME/Tectonic` if defined, otherwise
|
||||
/// `$HOME/.cache/Tectonic`
|
||||
pub fn ensure_user_cache_dir(path: &str) -> Result<PathBuf> {
|
||||
Ok(app_dirs2::app_dir(AppDataType::UserCache, &APP_INFO, path)?)
|
||||
///
|
||||
///
|
||||
/// The cache location defaults to the `AppDataType::UserCache`
|
||||
/// provided by `app_dirs2` but can be overwritten using the
|
||||
/// `TECTONIC_CACHE_DIR` environment variable.
|
||||
///
|
||||
/// This method may perform I/O to create the user cache directory, so it is
|
||||
/// fallible. (Due to its `app_dirs2` implementation, it would have to be
|
||||
/// fallible even if it didn't perform I/O.)
|
||||
pub fn get_user_cache_dir(subdir: &str) -> Result<PathBuf> {
|
||||
let env_cache_path = env::var_os("TECTONIC_CACHE_DIR");
|
||||
|
||||
let cache_path = match env_cache_path {
|
||||
Some(env_cache_path) => {
|
||||
let mut env_cache_path: PathBuf = env_cache_path.into();
|
||||
env_cache_path.push(subdir);
|
||||
fs::create_dir_all(&env_cache_path)?;
|
||||
env_cache_path
|
||||
}
|
||||
None => app_dirs2::app_dir(AppDataType::UserCache, &APP_INFO, subdir)?,
|
||||
};
|
||||
|
||||
Ok(cache_path)
|
||||
}
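
A quick sketch of how the override behaves (an editorial example; the subdirectory name is illustrative):

```rust
use std::{env, path::PathBuf};

// With TECTONIC_CACHE_DIR set, the requested subdirectory is created
// under the override and returned:
env::set_var("TECTONIC_CACHE_DIR", "/tmp/tectonic-cache");
let dir = get_user_cache_dir("bundles").unwrap();
assert_eq!(dir, PathBuf::from("/tmp/tectonic-cache/bundles"));

// With it unset, we fall back to the per-platform app_dirs2 location.
env::remove_var("TECTONIC_CACHE_DIR");
let _default = get_user_cache_dir("bundles").unwrap();
```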

@@ -15,7 +15,14 @@ the file are detailed below.
```toml
[doc]
name = <string> # the document name
bundle = <url or filesystem path> # the source of the TeX bundle

# A string identifying the location of the “bundle” of TeX support files
# underlying the processing of the document. The `tectonic -X new` command
# will populate this field with the current recommended default.
#
# This field should be a URL (a `.ttb` on the web, or a legacy indexed tar bundle),
# or a file on your computer (a `.ttb` or a legacy `.zip` bundle).
bundle = <url or filesystem path>

# Extra search paths for TeX sources, images, etc.
#

@@ -45,7 +45,7 @@ The following are the available flags.

| Short | Full                     | Explanation                                                                                          |
|:------|:-------------------------|:-----------------------------------------------------------------------------------------------------|
| `-b`  | `--bundle <file_path>`   | Use this directory or Zip-format bundle file to find resource files instead of the default          |
| `-b`  | `--bundle <path or url>` | Use this bundle instead of the default                                                               |
| `-c`  | `--chatter <level>`      | How much chatter to print when running [default: `default`] [possible values: `default`, `minimal`] |
|       | `--color <when>`         | Enable/disable colorful log output [default: `auto`] [possible values: `always`, `auto`, `never`]   |
| `-f`  | `--format <path>`        | The name of the "format" file used to initialize the TeX engine [default: `latex`]                  |

@@ -63,5 +63,4 @@ The following are the available flags.
|       | `--synctex`              | Generate SyncTeX data                                                                                |
|       | `--untrusted`            | Input is untrusted — disable all known-insecure features                                             |
| `-V`  | `--version`              | Prints version information                                                                           |
| `-w`  | `--web-bundle <url>`     | Use this URL to find resource files instead of the default                                           |
| `-Z`  | `-Z <option>...`         | Unstable options. Pass `-Zhelp` to show a list                                                       |

@@ -12,6 +12,7 @@ The `bundle` subcommands are:

- [`tectonic -X bundle cat`](#tectonic--x-bundle-cat)
- [`tectonic -X bundle search`](#tectonic--x-bundle-search)
- [`tectonic -X bundle create`](#tectonic--x-bundle-create)


## tectonic -X bundle cat

@@ -71,3 +72,8 @@ be added in the future, activated by additional options.

If this command is run outside of a [document workspace](../ref/workspaces.md),
the system default bundle will be used.


## tectonic -X bundle create

Create a new bundle. See `./bundles` in the `tectonic` repository.

@@ -36,7 +36,6 @@ tectonic -X compile  # full form
    [--reruns <count>] [-r <count>]
    [--synctex]
    [--untrusted]
    [--web-bundle <url>] [-w <url>]
    [-Z <option>...]
    <input>
```

@@ -92,9 +91,9 @@ The following are the available flags.
<!-- Keep alphabetized by full name: -->

| Short | Full                     | Explanation                                                                                          |
|:------|:-------------------------|:-----------------------------------------------------------------------------------------------------|
| `-b`  | `--bundle <file_path>`   | Use this directory or Zip-format bundle file to find resource files instead of the default          |
| `-f`  | `--format <path>`        | The name of the "format" file used to initialize the TeX engine [default: `latex`]                  |
| :---- | :----------------------- | :---------------------------------------------------------------------------------------------------- |
| `-b`  | `--bundle <path or url>` | Use this bundle instead of the default                                                               |
| `-f`  | `--format <path>`        | The name of the “format” file used to initialize the TeX engine [default: `latex`]                  |
| `-h`  | `--help`                 | Prints help information                                                                              |
|       | `--hide <hide_path>...`  | Tell the engine that no file at `<hide_path>` exists, if it tries to read it                        |
| `-k`  | `--keep-intermediates`   | Keep the intermediate files generated during processing                                             |

@@ -109,7 +108,6 @@ The following are the available flags.
|       | `--synctex`              | Generate SyncTeX data                                                                                |
|       | `--untrusted`            | Input is untrusted — disable all known-insecure features                                             |
| `-V`  | `--version`              | Prints version information                                                                           |
| `-w`  | `--web-bundle <url>`     | Use this URL to find resource files instead of the default                                           |
| `-Z`  | `-Z <option>...`         | Unstable options. Pass `-Zhelp` to show a list                                                       |

#### Unstable options

@@ -119,8 +117,8 @@ the set of unstable options is subject to change at any time.

<!-- Keep alphabetized: -->

| Expression                | Explanation                                                                                     |
|:--------------------------|:--------------------------------------------------------------------------------------------------|
| Expression                | Explanation                                                                                     |
| :------------------------ | :------------------------------------------------------------------------------------------------ |
| `-Z help`                 | List all unstable options                                                                       |
| `-Z continue-on-errors`   | Keep compiling even when severe errors occur                                                    |
| `-Z min-crossrefs=<num>`  | Equivalent to bibtex’s `-min-crossrefs` flag - "include after `<num>` crossrefs" [default: `2`] |

@@ -10,7 +10,7 @@ use std::path::{Path, PathBuf};
use tectonic_bridge_core::{SecuritySettings, SecurityStance};

use tectonic::{
    config::PersistentConfig,
    config::{maybe_return_test_bundle, PersistentConfig},
    driver::{OutputFormat, PassSetting, ProcessingSession, ProcessingSessionBuilder},
    errmsg,
    errors::{ErrorKind, Result},

@@ -19,19 +19,21 @@ use tectonic::{
    unstable_opts::{UnstableArg, UnstableOptions},
};

use tectonic_bundles::detect_bundle;

#[derive(Debug, Parser)]
pub struct CompileOptions {
    /// The file to process, or "-" to process the standard input stream
    #[arg(name = "input", value_hint = clap::ValueHint::FilePath)]
    #[arg(value_hint = clap::ValueHint::FilePath)]
    input: String,

    /// The name of the "format" file used to initialize the TeX engine
    #[arg(long, short, name = "path", default_value = "latex")]
    format: String,

    /// Use this directory or Zip-format bundle file to find resource files instead of the default
    #[arg(long, short, name = "file_path")]
    bundle: Option<PathBuf>,
    /// Use this URL or path to find resource files instead of the default
    #[arg(long, short)]
    bundle: Option<String>,

    /// Use only resource files cached locally
    #[arg(short = 'C', long)]

@@ -84,18 +86,12 @@ pub struct CompileOptions {
    /// Unstable options. Pass -Zhelp to show a list
    #[arg(name = "option", short = 'Z')]
    unstable: Vec<UnstableArg>,

    /// Use this URL to find resource files instead of the default
    #[arg(long, short, name = "url", overrides_with = "url", global(true))]
    web_bundle: Option<String>,
}

// TODO: deprecate v1 interface and move this to v2cli/commands

//impl TectonicCommand for CompileOptions {
impl CompileOptions {
    //fn customize(&self, _cc: &mut CommandCustomizations) {}

    pub fn execute(self, config: PersistentConfig, status: &mut dyn StatusBackend) -> Result<i32> {
        let unstable = UnstableOptions::from_unstable_args(self.unstable.into_iter());

@@ -185,16 +181,26 @@ impl CompileOptions {
            }
        }

        let only_cached = self.only_cached;
        if only_cached {
        if self.only_cached {
            tt_note!(status, "using only cached resource files");
        }
        if let Some(path) = self.bundle {
            sess_builder.bundle(config.make_local_file_provider(path, status)?);
        } else if let Some(u) = self.web_bundle {
            sess_builder.bundle(config.make_cached_url_provider(&u, only_cached, None, status)?);

        if let Some(bundle) = self.bundle {
            // TODO: this is ugly.
            // It's probably a good idea to re-design our code so we
            // don't need special cases for tests in our sources.
            if let Ok(bundle) = maybe_return_test_bundle(Some(bundle.clone())) {
                sess_builder.bundle(bundle);
            } else if let Some(bundle) = detect_bundle(bundle.clone(), self.only_cached, None)? {
                sess_builder.bundle(bundle);
            } else {
                return Err(errmsg!("`{bundle}` doesn't specify a valid bundle."));
            }
        } else if let Ok(bundle) = maybe_return_test_bundle(None) {
            // TODO: this is ugly too.
            sess_builder.bundle(bundle);
        } else {
            sess_builder.bundle(config.default_bundle(only_cached, status)?);
            sess_builder.bundle(config.default_bundle(self.only_cached)?);
        }
        sess_builder.build_date_from_env(deterministic_mode);

@@ -43,11 +43,11 @@ struct CliOptions {
    use_v2: bool,

    /// How much chatter to print when running
    #[arg(long = "chatter", short, name = "level", default_value = "default")]
    #[arg(long = "chatter", short, default_value = "default")]
    chatter_level: ChatterLevel,

    /// Enable/disable colorful log output
    #[arg(long = "color", name = "when", default_value = "auto")]
    #[arg(long = "color", default_value = "auto")]
    cli_color: CliColor,

    #[command(flatten)]

@@ -44,18 +44,18 @@ pub struct BuildCommand {
    target: Option<String>,

    /// Use this URL to find resource files instead of the default
    #[arg(long, short, name = "url", overrides_with = "url", global(true))]
    web_bundle: Option<String>,
    #[arg(long, short)]
    bundle: Option<String>,
}

impl TectonicCommand for BuildCommand {
    fn customize(&self, _cc: &mut CommandCustomizations) {}

    fn execute(self, config: PersistentConfig, status: &mut dyn StatusBackend) -> Result<i32> {
        // `--web-bundle` is not actually used for `-X build`,
        // so inform the user instead of ignoring silently.
        if let Some(url) = &self.web_bundle {
            tt_note!(status, "--web-bundle {} ignored", url);
        // `--bundle` is not used for `-X build`,
        // so tell the user instead of ignoring silently.
        if let Some(url) = &self.bundle {
            tt_note!(status, "--bundle {} ignored", url);
            tt_note!(status, "using workspace bundle configuration");
        }
        let ws = Workspace::open_from_environment()?;

@@ -0,0 +1,140 @@
use super::{
    create::{BundleCreateCommand, BundleFormat},
    pack::bundlev1::BundleV1,
    select::{picker::FilePicker, spec::BundleSpec},
};
use anyhow::{Context, Result};
use std::{
    cmp::Ordering,
    fs::{self, File},
    io::Read,
    thread,
    time::Duration,
};
use tracing::{error, info, warn};

pub(super) fn select(cli: &BundleCreateCommand) -> Result<()> {
    let bundle_dir = cli
        .bundle_spec
        .canonicalize()
        .unwrap()
        .parent()
        .unwrap()
        .to_path_buf();

    let mut file = File::open(&cli.bundle_spec)?;
    let mut file_str = String::new();
    file.read_to_string(&mut file_str)?;
    let bundle_config: BundleSpec = match toml::from_str(&file_str) {
        Ok(x) => x,
        Err(e) => {
            error!("failed to load bundle specification");
            return Err(e.into());
        }
    };

    if let Err(e) = bundle_config.validate() {
        error!("failed to validate bundle specification");
        return Err(e);
    };

    // Remove the build dir if it exists
    if cli.build_dir.exists() {
        warn!(
            "build dir {} already exists",
            cli.build_dir.to_str().unwrap()
        );

        for i in (1..=5).rev() {
            warn!(
                "recursively removing {} in {i} second{}",
                cli.build_dir.to_str().unwrap(),
                if i != 1 { "s" } else { "" }
            );
            thread::sleep(Duration::from_secs(1));
        }
        thread::sleep(Duration::from_secs(2));

        fs::remove_dir_all(&cli.build_dir)?;
    }
    fs::create_dir_all(&cli.build_dir).context("while creating build dir")?;

    let mut picker = FilePicker::new(
        bundle_config.clone(),
        cli.build_dir.clone(),
        bundle_dir.clone(),
    )?;

    // Run the selector
    let sources: Vec<String> = picker.iter_sources().map(|x| x.to_string()).collect();
    for source in sources {
        picker.add_source(cli, &source)?;
    }
    picker.finish(true)?;

    // Print statistics
    info!("summary is below:\n{}", picker.stats.make_string());

    match picker.stats.compare_patch_found_applied() {
        Ordering::Equal => {}
        Ordering::Greater => {
            warn!("some patches were not applied");
        }
        Ordering::Less => {
            warn!("some patches were applied multiple times");
        }
    }

    // Check the output hash
    {
        let mut file = File::open(cli.build_dir.join("content/SHA256SUM"))?;
        let mut hash = String::new();
        file.read_to_string(&mut hash)?;
        let hash = hash.trim();
        if hash != bundle_config.bundle.expected_hash {
            warn!("final bundle hash doesn't match bundle configuration:");
            warn!("bundle hash is {hash}");
            warn!("config hash is {}", bundle_config.bundle.expected_hash);
        } else {
            info!("final bundle hash matches configuration");
            info!("hash is {hash}");
        }
    }

    Ok(())
}

pub(super) fn pack(cli: &BundleCreateCommand) -> Result<()> {
    let mut file = File::open(&cli.bundle_spec)?;
    let mut file_str = String::new();
    file.read_to_string(&mut file_str)?;
    let bundle_config: BundleSpec = toml::from_str(&file_str)?;

    if !cli.build_dir.join("content").is_dir() {
        error!(
            "content directory `{}/content` doesn't exist, can't continue",
            cli.build_dir.to_str().unwrap()
        );
        return Ok(());
    }

    let target_name = format!("{}.ttb", &bundle_config.bundle.name);
    let target = cli.build_dir.join(&target_name);
    if target.exists() {
        if target.is_file() {
            warn!("target bundle `{target_name}` exists, removing");
            fs::remove_file(&target)?;
        } else {
            error!("target bundle `{target_name}` exists and isn't a file, can't continue");
            return Ok(());
        }
    }

    match cli.format {
        BundleFormat::BundleV1 => {
            BundleV1::make(Box::new(File::create(target)?), cli.build_dir.clone())?
        }
    }

    Ok(())
}

@@ -0,0 +1,118 @@
use clap::{Parser, ValueEnum};
use std::{fmt::Display, path::PathBuf};
use tectonic::{config::PersistentConfig, Result};
use tectonic_status_base::StatusBackend;
use tracing::error;

use crate::v2cli::{CommandCustomizations, TectonicCommand};

//
// MARK: Cli arguments
//

#[derive(Debug, Copy, Clone, ValueEnum)]
pub enum BundleJob {
    /// Run the following jobs in order
    #[value(name = "all")]
    All,

    /// (Stage 1) Select and patch all files in this bundle
    #[value(name = "select")]
    Select,

    /// (Stage 2) Pack selected files into a bundle
    #[value(name = "pack")]
    Pack,
}

impl Display for BundleJob {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::All => write!(f, "all"),
            Self::Select => write!(f, "select"),
            Self::Pack => write!(f, "pack"),
        }
    }
}

impl BundleJob {
    pub fn do_select(&self) -> bool {
        matches!(self, Self::All | Self::Select)
    }

    pub fn do_pack(&self) -> bool {
        matches!(self, Self::All | Self::Pack)
    }
}

#[derive(Parser, Debug)]
pub struct BundleCreateCommand {
    /// Which job we should run. `all` is the default,
    /// but single jobs can be run on their own for debugging.
    #[arg(long, default_value_t = BundleJob::All)]
    pub job: BundleJob,

    /// Bundle specification TOML file.
    pub bundle_spec: PathBuf,

    /// Build directory for this bundle.
    /// Will be removed.
    #[arg(long)]
    pub build_dir: PathBuf,

    /// What kind of bundle should we produce?
    /// This only has an effect when running the `all` or `pack` jobs.
    #[arg(default_value_t = BundleFormat::BundleV1)]
    pub format: BundleFormat,

    /// If this flag is set, don't fail when an input's hash doesn't match
    /// the hash specified in the bundle's configuration file.
    /// This only has an effect when running the `all` or `select` jobs.
    #[arg(long, default_value_t = false)]
    pub allow_hash_mismatch: bool,
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)]
pub enum BundleFormat {
    #[value(name = "v1")]
    BundleV1,
}

impl Display for BundleFormat {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::BundleV1 => write!(f, "v1")?,
        }
        Ok(())
    }
}

impl TectonicCommand for BundleCreateCommand {
    fn customize(&self, cc: &mut CommandCustomizations) {
        cc.always_stderr = true;
    }

    fn execute(self, _config: PersistentConfig, _status: &mut dyn StatusBackend) -> Result<i32> {
        if self.job.do_select() {
            match super::actions::select(&self) {
                Ok(_) => {}
                Err(e) => {
                    error!("select job failed with error: {e}");
                    return Err(e.into());
                }
            };
        }

        if self.job.do_pack() {
            match super::actions::pack(&self) {
                Ok(_) => {}
                Err(e) => {
                    error!("bundle packer failed with error: {e}");
                    return Err(e.into());
                }
            };
        }

        Ok(0)
    }
}
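
For reference, a hypothetical invocation of the command defined above (flag names follow the clap attributes shown; the paths are illustrative):

```sh
# Select, patch, and pack a v1 bundle in one run:
tectonic -X bundle create --job all --build-dir ./build my-bundle.toml

# Or run the stages separately while debugging:
tectonic -X bundle create --job select --build-dir ./build my-bundle.toml
tectonic -X bundle create --job pack --build-dir ./build my-bundle.toml
```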

@@ -1,4 +1,5 @@
use clap::{Parser, Subcommand};
use create::BundleCreateCommand;
use tectonic::{
    config::PersistentConfig,
    docmodel::{DocumentExt, DocumentSetupOptions},

@@ -11,6 +12,11 @@ use tectonic_status_base::StatusBackend;

use crate::v2cli::{CommandCustomizations, TectonicCommand};

mod actions;
mod create;
mod pack;
mod select;

fn get_a_bundle(
    _config: PersistentConfig,
    only_cached: bool,

@@ -23,7 +29,7 @@ fn get_a_bundle(
            let doc = ws.first_document();
            let mut options: DocumentSetupOptions = Default::default();
            options.only_cached(only_cached);
            doc.bundle(&options, status)
            doc.bundle(&options)
        }

        Err(e) => {

@@ -37,7 +43,6 @@ fn get_a_bundle(
            Ok(Box::new(tectonic_bundles::get_fallback_bundle(
                tectonic_engine_xetex::FORMAT_SERIAL,
                only_cached,
                status,
            )?))
        }
    }

@@ -45,13 +50,13 @@ fn get_a_bundle(
}

/// `bundle`: Commands relating to Tectonic bundles
#[derive(Debug, Eq, PartialEq, Parser)]
#[derive(Debug, Parser)]
pub struct BundleCommand {
    #[command(subcommand)]
    command: BundleCommands,
}

#[derive(Debug, Eq, PartialEq, Subcommand)]
#[derive(Debug, Subcommand)]
enum BundleCommands {
    #[command(name = "cat")]
    /// Dump the contents of a file in the bundle

@@ -60,6 +65,10 @@ enum BundleCommands {
    #[command(name = "search")]
    /// Filter the list of filenames contained in the bundle
    Search(BundleSearchCommand),

    #[command(name = "create")]
    /// Create a new bundle
    Create(BundleCreateCommand),
}

impl TectonicCommand for BundleCommand {

@@ -67,6 +76,7 @@ impl TectonicCommand for BundleCommand {
        match &self.command {
            BundleCommands::Cat(c) => c.customize(cc),
            BundleCommands::Search(c) => c.customize(cc),
            BundleCommands::Create(c) => c.customize(cc),
        }
    }

@@ -74,11 +84,12 @@ impl TectonicCommand for BundleCommand {
        match self.command {
            BundleCommands::Cat(c) => c.execute(config, status),
            BundleCommands::Search(c) => c.execute(config, status),
            BundleCommands::Create(c) => c.execute(config, status),
        }
    }
}

#[derive(Debug, Eq, PartialEq, Parser)]
#[derive(Debug, Parser)]
struct BundleCatCommand {
    /// Use only resource files cached locally
    #[arg(short = 'C', long)]

@@ -119,8 +130,8 @@ impl BundleSearchCommand {
    }

    fn execute(self, config: PersistentConfig, status: &mut dyn StatusBackend) -> Result<i32> {
        let mut bundle = get_a_bundle(config, self.only_cached, status)?;
        let files = bundle.all_files(status)?;
        let bundle = get_a_bundle(config, self.only_cached, status)?;
        let files = bundle.all_files();

        // Is there a better way to do this?
        let filter: Box<dyn Fn(&str) -> bool> = if let Some(t) = self.term {

@@ -0,0 +1,218 @@
use anyhow::{bail, Result};
use flate2::{write::GzEncoder, Compression};
use std::{
    fmt::Display,
    fs::{self, File},
    io::{stdout, BufRead, BufReader, Read, Seek, Write},
    num::ParseIntError,
    path::PathBuf,
};
use tracing::info;

pub trait WriteSeek: std::io::Write + Seek {}
impl<T: Write + Seek> WriteSeek for T {}

pub fn decode_hex(s: &str) -> Result<Vec<u8>, ParseIntError> {
    (0..s.len())
        .step_by(2)
        .map(|i| u8::from_str_radix(&s[i..i + 2], 16))
        .collect()
}
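// For example, decode_hex("6f1d") == Ok(vec![0x6f, 0x1d]). Note that an
// odd-length string makes the slice above run past the end and panic, so
// callers must pass an even number of hex digits.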

// Size of the ttbv1 header
const HEADER_SIZE: u64 = 66u64;

#[derive(Debug)]
struct FileListEntry {
    path: PathBuf,
    hash: String,
    start: u64,

    // We need the compressed length to build
    // a range request for this bundle. We also
    // keep the real length around for performance
    // (we'll only need to allocate vectors once)
    real_len: u32,
    gzip_len: u32,
}

impl Display for FileListEntry {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        format!(
            "{} {} {} {} {}",
            self.start,
            self.gzip_len,
            self.real_len,
            self.hash,
            self.path.to_str().unwrap()
        )
        .fmt(f)
    }
}

pub struct BundleV1 {
    filelist: Vec<FileListEntry>,
    target: Box<dyn WriteSeek>,
    content_dir: PathBuf,

    index_start: u64,
    index_real_len: u32,
    index_gzip_len: u32,
}

impl BundleV1 {
    pub fn make(target: Box<dyn WriteSeek>, build_dir: PathBuf) -> Result<()> {
        let mut bundle = BundleV1::new(target, build_dir)?;

        bundle.add_files()?;
        bundle.write_index()?;
        bundle.write_header()?;

        Ok(())
    }

    fn new(target: Box<dyn WriteSeek>, build_dir: PathBuf) -> Result<BundleV1> {
        Ok(BundleV1 {
            filelist: Vec::new(),
            target,
            content_dir: build_dir.join("content"),
            index_start: 0,
            index_gzip_len: 0,
            index_real_len: 0,
        })
    }

    fn add_files(&mut self) -> Result<u64> {
        let mut byte_count = HEADER_SIZE; // Start after the header
        let mut real_len_sum = 0; // Used to compute the average compression ratio

        self.target.seek(std::io::SeekFrom::Start(byte_count))?;

        let filelist_file = File::open(self.content_dir.join("FILELIST"))?;
        let reader = BufReader::new(filelist_file);

        info!("Building ttbv1 bundle...");

        for line in reader.lines() {
            stdout().flush()?;

            let line = line?;
            let mut bits = line.split_whitespace();

            if let Some(hash) = bits.next() {
                let path = bits.collect::<Vec<&str>>().join(" ");

                let mut file = fs::File::open(self.content_dir.join(&path))?;

                // Compress and write bytes
                let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
                let real_len = std::io::copy(&mut file, &mut encoder)?;
                let gzip_len = self.target.write(&encoder.finish()?)?;
                assert!(real_len < u32::MAX as u64);
                assert!(gzip_len < u32::MAX as usize);

                // Add to the index
                self.filelist.push(FileListEntry {
                    start: byte_count,
                    gzip_len: gzip_len as u32,
                    real_len: real_len as u32,
                    path: PathBuf::from(path),
                    hash: hash.to_owned(),
                });
                byte_count += gzip_len as u64;
                real_len_sum += real_len;
            } else {
                bail!("malformed filelist line");
            }
        }

        info!(
            "Average compression ratio: {:.2}",
            real_len_sum as f64 / byte_count as f64
        );

        Ok(byte_count)
    }

    fn write_index(&mut self) -> Result<()> {
        // Generate a ttbv1 index and write it to the bundle.
        //
        // This index is a replacement for FILELIST and SEARCH, containing everything in those files
        // (in addition to some ttbv1-specific information)
        //
        // The original FILELIST and SEARCH files are still included in the bundle.

        // Get the current position
        self.index_start = self.target.stream_position()?;

        info!("Writing index");

        let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
        let mut real_len = 0usize;

        real_len += encoder.write("[DEFAULTSEARCH]\n".as_bytes())?;
        real_len += encoder.write("MAIN\n".as_bytes())?;

        real_len += encoder.write("[SEARCH:MAIN]\n".as_bytes())?;
        for l in fs::read_to_string(self.content_dir.join("SEARCH"))?.lines() {
            real_len += encoder.write(l.as_bytes())?;
            real_len += encoder.write(b"\n")?;
        }

        real_len += encoder.write("[FILELIST]\n".as_bytes())?;
        for i in &self.filelist {
            let s = format!("{i}\n");
            real_len += encoder.write(s.as_bytes())?;
        }
        let gzip_len = self.target.write(&encoder.finish()?)?;
        assert!(gzip_len < u32::MAX as usize);
        assert!(real_len < u32::MAX as usize);
        self.index_gzip_len = gzip_len as u32;
        self.index_real_len = real_len as u32;

        info!(
            "index is at {} and has length {}",
            self.index_start, self.index_gzip_len
        );

        Ok(())
    }

    fn write_header(&mut self) -> Result<u64> {
        self.target.seek(std::io::SeekFrom::Start(0))?;

        info!("Writing header");

        // Parse the bundle hash
        let mut hash_file = File::open(self.content_dir.join("SHA256SUM")).unwrap();
        let mut hash_text = String::new();
        hash_file.read_to_string(&mut hash_text)?;
        let digest = decode_hex(hash_text.trim())?;

        let mut byte_count = 0u64;

        // 14 bytes: signature
        // Always "tectonicbundle", in any bundle version.
        //
        // This "magic sequence" lets us more easily distinguish between
        // random binary files and proper tectonic bundles.
        byte_count += self.target.write(b"tectonicbundle")? as u64;

        // 4 bytes: bundle version
        byte_count += self.target.write(&1u32.to_le_bytes())? as u64;

        // 8 + 4 + 4 = 16 bytes: location, compressed length, and real length of the index
        byte_count += self.target.write(&self.index_start.to_le_bytes())? as u64;
        byte_count += self.target.write(&self.index_gzip_len.to_le_bytes())? as u64;
        byte_count += self.target.write(&self.index_real_len.to_le_bytes())? as u64;

        // 32 bytes: bundle hash
        // We include this in the header so we don't need to load the index to get the hash.
        byte_count += self.target.write(&digest)? as u64;

        // Make sure we wrote the expected number of bytes
        assert!(byte_count == HEADER_SIZE);

        Ok(byte_count)
    }
}
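
To make the header layout above concrete, here is what the decompressed index produced by `write_index` looks like; each `[FILELIST]` line follows the `Display` impl of `FileListEntry` (`start gzip_len real_len hash path`; the concrete values below are made up):

```
[DEFAULTSEARCH]
MAIN
[SEARCH:MAIN]
/tex/latex//
[FILELIST]
66 1021 4096 3a7f... tex/latex/base/article.cls
```

And a sketch of the inverse of `write_header`: parsing the fields back out of a 66-byte buffer. This is written against the writer above, not against the `TTBv1Header` parser in `tectonic_bundles`, which may differ in detail:

```rust
use std::convert::TryInto;

/// Parse the ttbv1 header written by `write_header` above.
fn parse_ttbv1_header(h: &[u8; 66]) -> Option<(u64, u32, u32, [u8; 32])> {
    // 14 bytes: magic signature
    if h[0..14] != *b"tectonicbundle" {
        return None;
    }

    // 4 bytes: bundle version, little-endian
    let version = u32::from_le_bytes(h[14..18].try_into().unwrap());
    if version != 1 {
        return None;
    }

    // 8 + 4 + 4 = 16 bytes: index location, compressed length, real length
    let index_start = u64::from_le_bytes(h[18..26].try_into().unwrap());
    let index_gzip_len = u32::from_le_bytes(h[26..30].try_into().unwrap());
    let index_real_len = u32::from_le_bytes(h[30..34].try_into().unwrap());

    // 32 bytes: SHA256 digest of the bundle contents
    let digest: [u8; 32] = h[34..66].try_into().unwrap();

    Some((index_start, index_gzip_len, index_real_len, digest))
}
```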

@@ -0,0 +1 @@
pub mod bundlev1;

@@ -0,0 +1,56 @@
use super::BundleInput;
use anyhow::Result;
use std::{
    fs::{self},
    io::Read,
    path::PathBuf,
};
use walkdir::WalkDir;

pub struct DirBundleInput {
    dir: PathBuf,
}

impl DirBundleInput {
    pub fn new(dir: PathBuf) -> Self {
        Self {
            dir: dir.canonicalize().unwrap(),
        }
    }
}

impl BundleInput for DirBundleInput {
    fn iter_files(&mut self) -> impl Iterator<Item = Result<(String, Box<dyn Read + '_>)>> {
        WalkDir::new(&self.dir)
            .into_iter()
            .filter_map(|x| match x {
                Err(_) => Some(x),
                Ok(x) => {
                    if !x.file_type().is_file() {
                        None
                    } else {
                        Some(Ok(x))
                    }
                }
            })
            .map(move |x| match x {
                Ok(x) => {
                    let path = x
                        .into_path()
                        .canonicalize()
                        .unwrap()
                        .strip_prefix(&self.dir)
                        .unwrap()
                        .to_str()
                        .unwrap()
                        .to_string();

                    Ok((
                        path.clone(),
                        Box::new(fs::File::open(self.dir.join(path))?) as Box<dyn Read>,
                    ))
                }
                Err(e) => Err(anyhow::Error::from(e)),
            })
    }
}

@@ -0,0 +1,42 @@
mod dir;
mod tar;

use anyhow::Result;
use std::{io::Read, path::PathBuf};

trait BundleInput {
    #[allow(clippy::type_complexity)]
    fn iter_files(&mut self) -> impl Iterator<Item = Result<(String, Box<dyn Read + '_>)>>;
}

pub enum Input {
    Directory(dir::DirBundleInput),
    Tarball(tar::TarBundleInput),
}

impl<'a> Input {
    pub fn new_dir(path: PathBuf) -> Self {
        Self::Directory(dir::DirBundleInput::new(path))
    }

    pub fn new_tarball(path: PathBuf, root: Option<PathBuf>) -> Result<Self> {
        Ok(Self::Tarball(tar::TarBundleInput::new(path, root)?))
    }

    #[allow(clippy::type_complexity)]
    pub fn iter_files(
        &'a mut self,
    ) -> Box<dyn Iterator<Item = Result<(String, Box<dyn Read + 'a>)>> + 'a> {
        match self {
            Self::Directory(x) => Box::new(x.iter_files()),
            Self::Tarball(x) => Box::new(x.iter_files()),
        }
    }

    pub fn hash(&self) -> Option<&str> {
        match self {
            Self::Directory(_) => None,
            Self::Tarball(x) => Some(x.hash()),
        }
    }
}

@@ -0,0 +1,77 @@
use anyhow::Result;
use sha2::{Digest, Sha256};
use std::{
    fs::File,
    io::{Read, Seek},
    path::PathBuf,
};
use tar::Archive;
use tracing::info;

use super::BundleInput;

pub struct TarBundleInput {
    archive: Archive<File>,
    root: PathBuf,
    hash: String,
}

impl TarBundleInput {
    pub fn new(path: PathBuf, root: Option<PathBuf>) -> Result<Self> {
        let path = path.canonicalize()?;
        let mut file = File::open(&path)?;

        info!("computing hash of {}", path.to_str().unwrap());

        let hash = {
            let mut hasher = Sha256::new();
            let _ = std::io::copy(&mut file, &mut hasher)?;
            hasher
                .finalize()
                .iter()
                .map(|b| format!("{b:02x}"))
                .collect::<Vec<_>>()
                .concat()
        };

        file.seek(std::io::SeekFrom::Start(0))?;
        Ok(Self {
            archive: Archive::new(file),
            root: root.unwrap_or(PathBuf::from("")),
            hash,
        })
    }

    pub fn hash(&self) -> &str {
        &self.hash
    }
}

impl BundleInput for TarBundleInput {
    fn iter_files(&mut self) -> impl Iterator<Item = Result<(String, Box<dyn Read + '_>)>> {
        let root = self.root.clone();
        self.archive.entries().unwrap().filter_map(move |x| {
            // TODO: error handling
            let xr = x.as_ref().unwrap();

            if !xr.header().entry_type().is_file() {
                None
            } else {
                let path = xr.path().unwrap();

                if !path.starts_with(&root) {
                    None
                } else {
                    Some(Ok((
                        path.strip_prefix(&root)
                            .unwrap()
                            .to_str()
                            .unwrap()
                            .to_string(),
                        Box::new(x.unwrap()) as Box<dyn Read>,
                    )))
                }
            }
        })
    }
}

@@ -0,0 +1,3 @@
pub mod input;
pub mod picker;
pub mod spec;

@@ -0,0 +1,602 @@
use anyhow::{bail, Context, Result};
use regex::Regex;
use sha2::{Digest, Sha256};
use std::{
    cmp::Ordering,
    collections::HashMap,
    fmt::Display,
    fs::{self, File},
    io::{self, Cursor, Read, Write},
    iter::FromIterator,
    path::{Path, PathBuf},
    process::{Command, Stdio},
};
use tracing::{debug, error, info, trace, warn};
use walkdir::WalkDir;

use crate::v2cli::commands::bundle::create::BundleCreateCommand;

use super::{
    input::Input,
    spec::BundleSearchOrder,
    spec::{BundleInputSource, BundleSpec},
};

#[derive(Default)]
pub struct PickStatistics {
    /// Total number of files added from each source
    added: HashMap<String, usize>,

    /// Number of file conflicts
    conflicts: usize,

    /// Total number of files ignored
    ignored: usize,

    /// Total number of patches applied
    patch_applied: usize,

    /// Total number of patches found
    patch_found: usize,
}

impl PickStatistics {
    /// Returns a pretty status summary string
    pub fn make_string(&self) -> String {
        let mut output_string = format!(
            concat!(
                "=============== Summary ===============\n",
                "    file conflicts:      {}\n",
                "    files ignored:       {}\n",
                "    diffs applied/found: {}/{}\n",
                "    =============================\n",
            ),
            self.conflicts, self.ignored, self.patch_applied, self.patch_found,
        );

        let mut sum = 0;
        for (source, count) in &self.added {
            let s = format!("{source} files: ");
            output_string.push_str(&format!("    {s}{}{count}\n", " ".repeat(22 - s.len())));
            sum += count;
        }
        output_string.push_str(&format!("    total files:         {sum}\n\n"));

        output_string.push_str(&"=".repeat(39).to_string());
        output_string
    }

    /// Did we find as many, fewer, or more patches than we applied?
    pub fn compare_patch_found_applied(&self) -> Ordering {
        self.patch_found.cmp(&self.patch_applied)
    }
}

struct FileListEntry {
    /// Path relative to the content dir (does not start with a slash)
    path: PathBuf,
    hash: Option<String>,
}

impl Display for FileListEntry {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        format!(
            "{} {}",
            match &self.hash {
                Some(s) => &s,
                None => "nohash",
            },
            self.path.to_str().unwrap(),
        )
        .fmt(f)
    }
}

pub struct FilePicker {
    /// This bundle specification's root directory.
    /// (i.e., where we found bundle.toml)
    bundle_dir: PathBuf,

    /// Where to place this bundle's files
    build_dir: PathBuf,

    /// This file picker's statistics
    pub stats: PickStatistics,

    /// All files we've picked so far.
    /// This map's keys are the `path` value of `FileListEntry`.
    filelist: HashMap<PathBuf, FileListEntry>,

    bundle_spec: BundleSpec,
}

impl FilePicker {
    /// Transform a search order file with shortcuts
    /// (bash-like brace expansion, like `/a/b/{tex,latex}/c`)
    /// into a plain list of strings.
    fn expand_search_line(s: &str) -> Result<Vec<String>> {
        if !(s.contains('{') || s.contains('}')) {
            return Ok(vec![s.to_owned()]);
        }

        let first = match s.find('{') {
            Some(x) => x,
            None => bail!("Bad search path format"),
        };

        let last = match s.find('}') {
            Some(x) => x,
            None => bail!("Bad search path format"),
        };

        let head = &s[..first];
        let mid = &s[first + 1..last];

        if mid.contains('{') || mid.contains('}') {
            // Mismatched or nested braces
            bail!("Bad search path format");
        }

        // We matched the first brace, so only the tail may contain further expansions.
        let tail = Self::expand_search_line(&s[last + 1..s.len()])?;

        if mid.is_empty() {
            bail!("Bad search path format");
        }

        let mut output: Vec<String> = Vec::new();
        for m in mid.split(',') {
            for t in &tail {
                if m.is_empty() {
                    bail!("Bad search path format");
                }
                output.push(format!("{}{}{}", head, m, t));
            }
        }

        Ok(output)
    }
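    // For example (a hypothetical invocation; this helper is private):
    //   expand_search_line("/a/b/{tex,latex}/c")
    //     == Ok(vec!["/a/b/tex/c", "/a/b/latex/c"])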

    /// Patch a file in-place.
    /// This should be done after calling `add_file`.
    fn apply_patch(
        &mut self,
        path: &Path,
        path_in_source: &Path,
        diffs: &HashMap<PathBuf, PathBuf>,
    ) -> Result<bool> {
        // Is this file patched?
        if !diffs.contains_key(path_in_source) {
            return Ok(false);
        }

        info!("patching `{}`", path_in_source.to_str().unwrap());

        self.stats.patch_applied += 1;

        // Discard the first line of the diff
        let diff_file = fs::read_to_string(&diffs[path_in_source]).unwrap();
        let (_, diff) = diff_file.split_once('\n').unwrap();

        // TODO: don't require `patch`
        let mut child = Command::new("patch")
            .arg("--quiet")
            .arg("--no-backup")
            .arg(path)
            .stdin(Stdio::piped())
            .spawn()
            .context("while spawning `patch`")?;

        let mut stdin = child.stdin.take().unwrap();
        stdin
            .write_all(diff.as_bytes())
            .context("while passing diff to `patch`")?;
        drop(stdin);
        child.wait().context("while waiting for `patch`")?;

        Ok(true)
    }

    /// Add a file into the file list.
    fn add_to_filelist(&mut self, path: PathBuf, file: Option<&Path>) -> Result<()> {
        trace!("adding `{path:?}` to file list");

        self.filelist.insert(
            path.clone(),
            FileListEntry {
                path: path.clone(),
                hash: match file {
                    None => None,
                    Some(f) => {
                        let mut hasher = Sha256::new();
                        let _ = std::io::copy(
                            &mut fs::File::open(f)
                                .with_context(|| format!("while computing hash of {path:?}"))?,
                            &mut hasher,
                        )?;
                        Some(
                            hasher
                                .finalize()
                                .iter()
                                .map(|b| format!("{b:02x}"))
                                .collect::<Vec<_>>()
                                .concat(),
                        )
                    }
                },
            },
        );

        Ok(())
    }

    /// Add a file to this picker's content directory
    fn add_file(
        &mut self,
        path_in_source: &Path,
        source: &str,
        file_content: &mut dyn Read,
        diffs: &HashMap<PathBuf, PathBuf>,
    ) -> Result<()> {
        let target_path = self
            .build_dir
            .join("content")
            .join(source)
            .join(path_in_source);

        // Path to this file, relative to the content dir
        let rel = target_path
            .strip_prefix(self.build_dir.join("content"))
            .unwrap()
            .to_path_buf();

        trace!("adding {path_in_source:?} from source `{source}`");

        // Skip files that already exist
        if self.filelist.contains_key(&rel) {
            self.stats.conflicts += 1;
            warn!("{path_in_source:?} from source `{source}` already exists, skipping");
            return Ok(());
        }

        fs::create_dir_all(match target_path.parent() {
            Some(x) => x,
            None => bail!("couldn't get parent of target"),
        })
        .context("failed to create content directory")?;

        // Copy to the content dir.
        let mut file = fs::File::create(&target_path)?;
        io::copy(file_content, &mut file).with_context(|| {
            format!("while writing file `{path_in_source:?}` from source `{source}`")
        })?;

        // Apply a patch if one exists
        self.apply_patch(&target_path, path_in_source, diffs)
            .with_context(|| {
                format!("while patching `{path_in_source:?}` from source `{source}`")
            })?;

        self.add_to_filelist(rel, Some(&target_path))
            .with_context(|| {
                format!("while adding file `{path_in_source:?}` from source `{source}`")
            })?;

        Ok(())
    }
}

// Public methods
impl FilePicker {
    /// Create a new file picker working in build_dir
    pub fn new(bundle_spec: BundleSpec, build_dir: PathBuf, bundle_dir: PathBuf) -> Result<Self> {
        if !build_dir.is_dir() {
            bail!("build_dir is not a directory!")
        }

        if build_dir.read_dir()?.next().is_some() {
            bail!("build_dir is not empty!")
        }

        Ok(FilePicker {
            bundle_dir,
            build_dir,
            filelist: HashMap::new(),
            bundle_spec,
            stats: PickStatistics::default(),
        })
    }

    /// Iterate over this bundle's sources
    pub fn iter_sources(&self) -> impl Iterator<Item = &String> {
        self.bundle_spec.inputs.keys()
    }

    /// Add a directory of files to this bundle under `source_name`,
    /// applying patches and checking for replacements.
    pub fn add_source(&mut self, cli: &BundleCreateCommand, source: &str) -> Result<()> {
        info!("adding source `{source}`");

        let input = self.bundle_spec.inputs.get(source).unwrap().clone();
        let mut added = 0usize;

        // Load diff files
        let diffs = input
            .patch_dir
            .as_ref()
            .map(|x| -> Result<HashMap<PathBuf, PathBuf>> {
                let mut diffs = HashMap::new();

                for entry in WalkDir::new(self.bundle_dir.join(x)) {
                    // Only iterate files
                    let entry = entry?;
                    if !entry.file_type().is_file() {
                        continue;
                    }
                    let entry = entry.into_path();

                    // Only include files with a `.diff` extension
                    if entry.extension().map(|x| x != "diff").unwrap_or(true) {
                        continue;
                    }

                    // Read the first line of the diff to get the target path
                    let diff_file = fs::read_to_string(&entry).unwrap();
                    let (target, _) = diff_file.split_once('\n').unwrap();

                    trace!(tectonic_log_source = "select", "adding diff {entry:?}");

                    for t in Self::expand_search_line(target)?
                        .into_iter()
                        .map(PathBuf::from)
                    {
                        if diffs.contains_key(&t) {
                            warn!("the target of diff {entry:?} conflicts with another, ignoring");
                            continue;
                        }

                        diffs.insert(t, entry.clone());
                        self.stats.patch_found += 1;
                    }
                }

                Ok(diffs)
            })
            .unwrap_or(Ok(HashMap::new()))?;

        // Load and compile ignore patterns
        let ignore_patterns = {
            // Global patterns
            let mut ignore = self
                .bundle_spec
                .bundle
                .ignore
                .as_ref()
                .map(|v| {
                    v.iter()
                        .map(|x| Regex::new(&format!("^{x}$")))
                        .collect::<Result<Vec<Regex>, regex::Error>>()
                })
                .unwrap_or(Ok(Vec::new()))?;

            // Input patterns
            ignore.extend(
                input
                    .ignore
                    .as_ref()
                    .map(|v| {
                        v.iter()
                            .map(|x| Regex::new(&format!("^/{source}/{x}$")))
                            .collect::<Result<Vec<Regex>, regex::Error>>()
                    })
                    .unwrap_or(Ok(Vec::new()))?,
            );

            ignore
        };

        let mut source_backend = match &input.source {
            BundleInputSource::Directory { path, .. } => Input::new_dir(self.bundle_dir.join(path)),
            BundleInputSource::Tarball {
                path,
                root_dir,
                hash,
            } => {
                let x = match Input::new_tarball(self.bundle_dir.join(path), root_dir.clone()) {
                    Ok(x) => x,
                    Err(e) => {
                        error!("could not add source `{source}` from tarball");
                        return Err(e);
                    }
                };
                let hash = hash.clone();
                self.add_file(
                    Path::new("TAR-SHA256SUM"),
                    source,
                    &mut Cursor::new(format!("{}\n", x.hash().unwrap())),
                    &HashMap::new(),
                )?;

                if x.hash().unwrap() != hash {
                    if cli.allow_hash_mismatch {
                        warn!("hash of tarball for source `{source}` doesn't match expected value");
                        warn!("expected: {}", hash);
                        warn!("got: {}", x.hash().unwrap());
                    } else {
                        error!(
                            "hash of tarball for source `{source}` doesn't match expected value"
                        );
                        error!("expected: {}", hash);
                        error!("got: {}", x.hash().unwrap());
                        bail!("hash of tarball for source `{source}` doesn't match expected value")
                    }
                } else {
                    info!("OK, tar hash matches bundle config");
                }

                x
            }
        };

        for x in source_backend.iter_files() {
            let (rel_file_path, mut read) = x?;

            let ignore = {
                let f = format!("/{source}/{}", rel_file_path);
                let mut ignore = false;
                for pattern in &ignore_patterns {
                    if pattern.is_match(&f) {
                        ignore = true;
                        break;
                    }
                }
                ignore
            };

            // Skip ignored files
            if ignore {
                debug!(
                    "skipping file {rel_file_path:?} from source `{source}` because of ignore patterns"
                );
                self.stats.ignored += 1;
                continue;
            }

            // Debug info
            if self.filelist.len() % 1937 == 1936 {
                info!("selecting files ({source}, {})", self.filelist.len());
            }

            trace!("adding file {rel_file_path:?} from source `{source}`");

            self.add_file(Path::new(&rel_file_path), source, &mut read, &diffs)
                .with_context(|| format!("while adding file `{rel_file_path:?}`"))?;
            added += 1;
        }

        self.stats.added.insert(source.to_owned(), added);

        Ok(())
    }

    pub fn finish(&mut self, save_debug_files: bool) -> Result<()> {
        info!("writing auxiliary files");

        // Save the search specification
        let search = {
            let mut search = Vec::new();
            let path = self.build_dir.join("content/SEARCH");

            for s in &self.bundle_spec.bundle.search_order {
|
||||
match s {
|
||||
BundleSearchOrder::Plain(s) => {
|
||||
for i in Self::expand_search_line(s)? {
|
||||
search.push(i);
|
||||
}
|
||||
}
|
||||
BundleSearchOrder::Input { input } => {
|
||||
let s = &self.bundle_spec.inputs.get(input).unwrap().search_order;
|
||||
if let Some(s) = s {
|
||||
for line in s {
|
||||
for i in Self::expand_search_line(&format!("/{input}/{line}"))? {
|
||||
search.push(i);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for i in Self::expand_search_line(&format!("/{input}//"))? {
|
||||
search.push(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut file = File::create(&path).context("while writing SEARCH")?;
|
||||
for s in &search {
|
||||
writeln!(file, "{s}")?;
|
||||
}
|
||||
|
||||
self.add_to_filelist(PathBuf::from("SEARCH"), Some(&path))?;
|
||||
|
||||
search
|
||||
};
|
||||
|
||||
{
|
||||
// These aren't hashed, but must be listed anyway.
|
||||
// The hash is generated from the filelist, so we must add these before hashing.
|
||||
self.add_to_filelist(PathBuf::from("SHA256SUM"), None)?;
|
||||
self.add_to_filelist(PathBuf::from("FILELIST"), None)?;
|
||||
|
||||
let mut filelist_vec = Vec::from_iter(self.filelist.values());
|
||||
filelist_vec.sort_by(|a, b| a.path.cmp(&b.path));
|
||||
|
||||
let filelist_path = self.build_dir.join("content/FILELIST");
|
||||
|
||||
// Save FILELIST.
|
||||
let mut file = File::create(&filelist_path).context("while writing FILELIST")?;
|
||||
for entry in filelist_vec {
|
||||
writeln!(file, "{entry}")?;
|
||||
}
|
||||
|
||||
// Compute and save hash
|
||||
let mut file = File::create(self.build_dir.join("content/SHA256SUM"))
|
||||
.context("while writing SHA256SUM")?;
|
||||
|
||||
let mut hasher = Sha256::new();
|
||||
let _ = std::io::copy(&mut fs::File::open(&filelist_path)?, &mut hasher)?;
|
||||
let hash = hasher
|
||||
.finalize()
|
||||
.iter()
|
||||
.map(|b| format!("{b:02x}"))
|
||||
.collect::<Vec<_>>()
|
||||
.concat();
|
||||
|
||||
writeln!(file, "{hash}")?;
|
||||
}
|
||||
|
||||
if save_debug_files {
|
||||
// Generate search-report
|
||||
{
|
||||
let mut file = File::create(self.build_dir.join("search-report"))
|
||||
.context("while writing search-report")?;
|
||||
for entry in WalkDir::new(self.build_dir.join("content")) {
|
||||
let entry = entry?;
|
||||
if !entry.file_type().is_dir() {
|
||||
continue;
|
||||
}
|
||||
let entry = entry
|
||||
.into_path()
|
||||
.strip_prefix(self.build_dir.join("content"))
|
||||
.unwrap()
|
||||
.to_owned();
|
||||
let entry = PathBuf::from("/").join(entry);
|
||||
|
||||
// Will this directory be searched?
|
||||
let mut is_searched = false;
|
||||
for rule in &search {
|
||||
if rule.ends_with("//") {
|
||||
// Match start of patent path
|
||||
// (cutting off the last slash from)
|
||||
if entry.starts_with(&rule[0..rule.len() - 1]) {
|
||||
is_searched = true;
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
// Match full parent path
|
||||
if entry.to_str().unwrap() == rule {
|
||||
is_searched = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !is_searched {
|
||||
let s = entry.to_str().unwrap();
|
||||
let t = s.matches('/').count();
|
||||
writeln!(file, "{}{s}", "\t".repeat(t - 1))?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
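
A minimal usage sketch of the public API above — `bundle_spec`, `build_dir`, `bundle_dir`, and `cli` are assumed to be in scope, and the `?`s assume an `anyhow::Result` context; this is an illustration, not part of the commit:

// Collect source names first: `iter_sources` borrows the picker immutably,
// while `add_source` needs `&mut self`.
let mut picker = FilePicker::new(bundle_spec, build_dir, bundle_dir)?;
let sources: Vec<String> = picker.iter_sources().cloned().collect();
for source in &sources {
    picker.add_source(&cli, source)?;
}
picker.finish(true)?; // `true` also writes the `search-report` debug file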

@ -0,0 +1,72 @@
use anyhow::{bail, Result};
use serde::Deserialize;
use std::collections::HashMap;
use std::path::PathBuf;

#[derive(Debug, Deserialize, Clone)]
#[serde(deny_unknown_fields)]
pub struct BundleSpec {
    pub bundle: BundleConfig,
    pub inputs: HashMap<String, BundleInput>,
}

impl BundleSpec {
    /// Make sure this bundle specification is valid
    pub fn validate(&self) -> Result<()> {
        for i in &self.bundle.search_order {
            match i {
                BundleSearchOrder::Input { ref input } => {
                    if !self.inputs.contains_key(input) {
                        bail!("root search order contains unknown input `{input}`");
                    }
                }
                BundleSearchOrder::Plain(_) => {}
            }
        }

        Ok(())
    }
}

#[derive(Debug, Deserialize, Clone)]
pub struct BundleConfig {
    /// The bundle's name
    pub name: String,

    /// The hash of the resulting ttbv1 bundle
    pub expected_hash: String,

    /// Search paths for this bundle
    pub search_order: Vec<BundleSearchOrder>,

    /// Files to ignore from this bundle
    pub ignore: Option<Vec<String>>,
}

#[derive(Debug, Deserialize, Clone)]
#[serde(untagged)]
pub enum BundleSearchOrder {
    Plain(String),
    Input { input: String },
}

#[derive(Debug, Deserialize, Clone)]
pub struct BundleInput {
    pub source: BundleInputSource,
    pub ignore: Option<Vec<String>>,
    pub patch_dir: Option<PathBuf>,
    pub search_order: Option<Vec<String>>,
}

#[derive(Debug, Deserialize, Clone)]
pub enum BundleInputSource {
    #[serde(rename = "dir")]
    Directory { path: PathBuf },

    #[serde(rename = "tarball")]
    Tarball {
        hash: String,
        path: PathBuf,
        root_dir: Option<PathBuf>,
    },
}
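
A minimal sketch of a spec these types could deserialize — the TOML format, the `toml` crate, and every field value here are assumptions for illustration, not taken from this commit:

// Hypothetical example; assumes a `toml` dependency and an `anyhow::Result` context.
let spec: BundleSpec = toml::from_str(
    r#"
    [bundle]
    name = "minimal"
    expected_hash = "0000000000000000000000000000000000000000000000000000000000000000"
    # Untagged search entries: plain strings or `{ input = "..." }` tables.
    search_order = ["/main//", { input = "main" }]

    [inputs.main]
    source = { dir = { path = "content" } }
    "#,
)?;
spec.validate()?; // rejects a search_order entry naming an unknown input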

@ -13,8 +13,8 @@ use crate::v2cli::{CommandCustomizations, TectonicCommand};
#[derive(Debug, Eq, PartialEq, Parser)]
pub struct InitCommand {
    /// Use this URL to find resource files instead of the default
    #[arg(long, short, name = "url", overrides_with = "url", global(true))]
    web_bundle: Option<String>,
    #[arg(long, short)]
    bundle: Option<String>,
}

impl TectonicCommand for InitCommand {

@ -30,7 +30,7 @@ impl TectonicCommand for InitCommand {

        let wc = WorkspaceCreator::new(path);
        ctry!(
            wc.create_defaulted(config, status, self.web_bundle);
            wc.create_defaulted(&config, self.bundle);
            "failed to create the new Tectonic workspace"
        );
        Ok(0)

@ -45,8 +45,8 @@ pub struct NewCommand {
    path: PathBuf,

    /// Use this URL to find resource files instead of the default
    #[arg(long, short, name = "url", overrides_with = "url", global(true))]
    web_bundle: Option<String>,
    #[arg(long, short)]
    bundle: Option<String>,
}

impl TectonicCommand for NewCommand {

@ -61,7 +61,7 @@ impl TectonicCommand for NewCommand {

        let wc = WorkspaceCreator::new(self.path);
        ctry!(
            wc.create_defaulted(config, status, self.web_bundle);
            wc.create_defaulted(&config, self.bundle);
            "failed to create the new Tectonic workspace"
        );
        Ok(0)

@ -1,5 +1,6 @@
use clap::{CommandFactory, Parser};
use tectonic::{config::PersistentConfig, errors::Result};
use tectonic_io_base::app_dirs;
use tectonic_status_base::StatusBackend;

use crate::v2cli::{CommandCustomizations, TectonicCommand, V2CliOptions};

@ -47,9 +48,7 @@ impl ShowUserCacheDirCommand {
    }

    fn execute(self, _config: PersistentConfig, _status: &mut dyn StatusBackend) -> Result<i32> {
        use tectonic_bundles::cache::Cache;
        let cache = Cache::get_user_default()?;
        println!("{}", cache.root().display());
        println!("{}", app_dirs::get_user_cache_dir("bundles")?.display());
        Ok(0)
    }
}

@ -14,6 +14,7 @@ use tectonic::{
};
use tectonic_errors::prelude::anyhow;
use tectonic_status_base::plain::PlainStatusBackend;
use tracing::level_filters::LevelFilter;

use self::commands::{
    build::BuildCommand,

@ -36,25 +37,13 @@ mod commands;

struct V2CliOptions {
    /// How much chatter to print when running
    #[arg(long = "chatter", short, name = "level", default_value = "default")]
    #[arg(long = "chatter", short, default_value = "default")]
    chatter_level: ChatterLevel,

    /// Control colorization of output
    #[arg(long = "color", name = "when", default_value = "auto")]
    #[arg(long = "color", default_value = "auto")]
    cli_color: crate::CliColor,

    /// Use this URL to find resource files instead of the default
    // TODO add URL validation
    #[arg(
        long,
        short,
        name = "url",
        overrides_with = "url",
        // This is inherited by some subcommands
        global(true)
    )]
    web_bundle: Option<String>,

    /// The command to run
    #[command(subcommand)]
    command: Commands,

@ -91,6 +80,13 @@ pub fn v2_main(effective_args: &[OsString]) {

    let args = V2CliOptions::parse_from(effective_args);

    tracing_subscriber::fmt()
        .with_max_level(LevelFilter::INFO)
        .with_target(false)
        .without_time()
        .with_ansi(args.cli_color.should_enable())
        .init();

    // Command-specific customizations before we do our centralized setup.
    // This is a semi-hack so that we can set up certain commands to ensure
    // that status info is always printed to stderr.

@ -12,19 +12,13 @@
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use std::{
    path::{Path, PathBuf},
    path::PathBuf,
    sync::atomic::{AtomicBool, Ordering},
};
use tectonic_bundles::{
    cache::Cache, dir::DirBundle, itar::IndexedTarBackend, zip::ZipBundle, Bundle,
};
use tectonic_bundles::{detect_bundle, Bundle};
use tectonic_io_base::app_dirs;
use url::Url;

use crate::{
    errors::{ErrorKind, Result},
    status::StatusBackend,
};
use crate::errors::{ErrorKind, Result};

/// Awesome hack time!!!
///

@ -44,19 +38,19 @@ pub fn is_config_test_mode_activated() -> bool {
    CONFIG_TEST_MODE_ACTIVATED.load(Ordering::SeqCst)
}

pub fn is_test_bundle_wanted(web_bundle: Option<String>) -> bool {
pub fn is_test_bundle_wanted(bundle: Option<String>) -> bool {
    if !is_config_test_mode_activated() {
        return false;
    }
    match web_bundle {
    match bundle {
        None => true,
        Some(x) if x.contains("test-bundle://") => true,
        _ => false,
    }
}

pub fn maybe_return_test_bundle(web_bundle: Option<String>) -> Result<Box<dyn Bundle>> {
    if is_test_bundle_wanted(web_bundle) {
pub fn maybe_return_test_bundle(bundle: Option<String>) -> Result<Box<dyn Bundle>> {
    if is_test_bundle_wanted(bundle) {
        Ok(Box::<crate::test_util::TestBundle>::default())
    } else {
        Err(ErrorKind::Msg("not asking for the default test bundle".to_owned()).into())

@ -134,53 +128,14 @@ impl PersistentConfig {
        Ok(PersistentConfig::default())
    }

    pub fn make_cached_url_provider(
        &self,
        url: &str,
        only_cached: bool,
        custom_cache_root: Option<&Path>,
        status: &mut dyn StatusBackend,
    ) -> Result<Box<dyn Bundle>> {
        if let Ok(test_bundle) = maybe_return_test_bundle(Some(url.to_owned())) {
            return Ok(test_bundle);
        }

        let mut cache = if let Some(root) = custom_cache_root {
            Cache::get_for_custom_directory(root)
        } else {
            Cache::get_user_default()?
        };

        let bundle = cache.open::<IndexedTarBackend>(url, only_cached, status)?;
        Ok(Box::new(bundle) as _)
    }

    pub fn make_local_file_provider(
        &self,
        file_path: PathBuf,
        _status: &mut dyn StatusBackend,
    ) -> Result<Box<dyn Bundle>> {
        let bundle: Box<dyn Bundle> = if file_path.is_dir() {
            Box::new(DirBundle::new(file_path))
        } else {
            Box::new(ZipBundle::open(file_path)?)
        };
        Ok(bundle)
    }

    pub fn default_bundle_loc(&self) -> &str {
        &self.default_bundles[0].url
    }

    pub fn default_bundle(
        &self,
        only_cached: bool,
        status: &mut dyn StatusBackend,
    ) -> Result<Box<dyn Bundle>> {
        use std::io;

        if let Ok(test_bundle) = maybe_return_test_bundle(None) {
            return Ok(test_bundle);
    pub fn default_bundle(&self, only_cached: bool) -> Result<Box<dyn Bundle>> {
        if CONFIG_TEST_MODE_ACTIVATED.load(Ordering::SeqCst) {
            let bundle = crate::test_util::TestBundle::default();
            return Ok(Box::new(bundle));
        }

        if self.default_bundles.len() != 1 {

@ -190,25 +145,18 @@ impl PersistentConfig {
            .into());
        }

        let url = Url::parse(&self.default_bundles[0].url)
            .map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "failed to parse url"))?;
        if url.scheme() == "file" {
            // load the local zip file.
            let file_path = url.to_file_path().map_err(|_| {
                io::Error::new(io::ErrorKind::InvalidInput, "failed to parse local path")
            })?;
            return self.make_local_file_provider(file_path, status);
        }
        let bundle =
            self.make_cached_url_provider(&self.default_bundles[0].url, only_cached, None, status)?;
        Ok(Box::new(bundle) as _)
        Ok(
            detect_bundle(self.default_bundles[0].url.to_owned(), only_cached, None)
                .unwrap()
                .unwrap(),
        )
    }

    pub fn format_cache_path(&self) -> Result<PathBuf> {
        if is_config_test_mode_activated() {
            Ok(crate::test_util::test_path(&[]))
        } else {
            Ok(app_dirs::ensure_user_cache_dir("formats")?)
            Ok(app_dirs::get_user_cache_dir("formats")?)
        }
    }
}

@ -7,28 +7,21 @@
//! `tectonic_docmodel` crate with the actual document-processing capabilities
//! provided by the processing engines.

use std::{
    fmt::Write as FmtWrite,
    fs, io,
    path::{Path, PathBuf},
};
use std::{fmt::Write as FmtWrite, fs, io, path::PathBuf};
use tectonic_bridge_core::SecuritySettings;
use tectonic_bundles::{
    cache::Cache, dir::DirBundle, itar::IndexedTarBackend, zip::ZipBundle, Bundle,
};
use tectonic_bundles::{detect_bundle, Bundle};
use tectonic_docmodel::{
    document::{BuildTargetType, Document, InputFile},
    workspace::{Workspace, WorkspaceCreator},
};
use tectonic_geturl::{DefaultBackend, GetUrlBackend};
use url::Url;

use crate::{
    config, ctry,
    driver::{OutputFormat, PassSetting, ProcessingSessionBuilder},
    errors::{ErrorKind, Result},
    status::StatusBackend,
    tt_note,
    test_util, tt_note,
    unstable_opts::UnstableOptions,
};

@ -79,11 +72,7 @@ pub trait DocumentExt {
    ///
    /// This parses [`Document::bundle_loc`] and turns it into the appropriate
    /// bundle backend.
    fn bundle(
        &self,
        setup_options: &DocumentSetupOptions,
        status: &mut dyn StatusBackend,
    ) -> Result<Box<dyn Bundle>>;
    fn bundle(&self, setup_options: &DocumentSetupOptions) -> Result<Box<dyn Bundle>>;

    /// Set up a [`ProcessingSessionBuilder`] for one of the outputs.
    ///

@ -98,38 +87,18 @@ pub trait DocumentExt {
}

impl DocumentExt for Document {
    fn bundle(
        &self,
        setup_options: &DocumentSetupOptions,
        status: &mut dyn StatusBackend,
    ) -> Result<Box<dyn Bundle>> {
        fn bundle_from_path(p: PathBuf) -> Result<Box<dyn Bundle>> {
            if p.is_dir() {
                Ok(Box::new(DirBundle::new(p)))
            } else {
                Ok(Box::new(ZipBundle::open(p)?))
            }
    fn bundle(&self, setup_options: &DocumentSetupOptions) -> Result<Box<dyn Bundle>> {
        // Load test bundle
        if config::is_config_test_mode_activated() {
            let bundle = test_util::TestBundle::default();
            return Ok(Box::new(bundle));
        }

        if let Ok(test_bundle) = config::maybe_return_test_bundle(None) {
            Ok(test_bundle)
        } else if let Ok(url) = Url::parse(&self.bundle_loc) {
            if url.scheme() != "file" {
                let mut cache = Cache::get_user_default()?;
                let bundle = cache.open::<IndexedTarBackend>(
                    &self.bundle_loc,
                    setup_options.only_cached,
                    status,
                )?;
                Ok(Box::new(bundle))
            } else {
                let file_path = url.to_file_path().map_err(|_| {
                    io::Error::new(io::ErrorKind::InvalidInput, "failed to parse local path")
                })?;
                bundle_from_path(file_path)
            }
        } else {
            bundle_from_path(Path::new(&self.bundle_loc).to_owned())
        let d = detect_bundle(self.bundle_loc.clone(), setup_options.only_cached, None)?;

        match d {
            Some(b) => Ok(b),
            None => Err(io::Error::new(io::ErrorKind::InvalidInput, "Could not get bundle").into()),
        }
    }

@ -198,7 +167,7 @@ impl DocumentExt for Document {
        if setup_options.only_cached {
            tt_note!(status, "using only cached resource files");
        }
        sess_builder.bundle(self.bundle(setup_options, status)?);
        sess_builder.bundle(self.bundle(setup_options)?);

        let mut tex_dir = self.src_dir().to_owned();
        tex_dir.push("src");

@ -225,25 +194,23 @@ pub trait WorkspaceCreatorExt {
    /// for the main document.
    fn create_defaulted(
        self,
        config: config::PersistentConfig,
        status: &mut dyn StatusBackend,
        web_bundle: Option<String>,
        config: &config::PersistentConfig,
        bundle: Option<String>,
    ) -> Result<Workspace>;
}

impl WorkspaceCreatorExt for WorkspaceCreator {
    fn create_defaulted(
        self,
        config: config::PersistentConfig,
        status: &mut dyn StatusBackend,
        web_bundle: Option<String>,
        config: &config::PersistentConfig,
        bundle: Option<String>,
    ) -> Result<Workspace> {
        let bundle_loc = if config::is_test_bundle_wanted(web_bundle.clone()) {
        let bundle_loc = if config::is_test_bundle_wanted(bundle.clone()) {
            "test-bundle://".to_owned()
        } else {
            let unresolved_loc = web_bundle.unwrap_or(config.default_bundle_loc().to_owned());
            let loc = bundle.unwrap_or(config.default_bundle_loc().to_owned());
            let mut gub = DefaultBackend::default();
            gub.resolve_url(&unresolved_loc, status)?
            gub.resolve_url(&loc)?
        };

        Ok(self.create(bundle_loc, Vec::new())?)

@ -641,12 +641,7 @@ impl DriverHooks for BridgeState {
        self
    }

    fn event_output_closed(
        &mut self,
        name: String,
        digest: DigestData,
        _status: &mut dyn StatusBackend,
    ) {
    fn event_output_closed(&mut self, name: String, digest: DigestData) {
        let summ = self
            .events
            .get_mut(&name)

@ -1166,7 +1161,7 @@ impl ProcessingSessionBuilder {
        let format_cache_path = self
            .format_cache_path
            .unwrap_or_else(|| filesystem_root.clone());
        let format_cache = FormatCache::new(bundle.get_digest(status)?, format_cache_path);
        let format_cache = FormatCache::new(bundle.get_digest()?, format_cache_path);

        let genuine_stdout = if self.print_stdout {
            Some(GenuineStdoutIo::new())

@ -149,7 +149,7 @@ pub fn latex_to_pdf<T: AsRef<str>>(latex: T) -> Result<Vec<u8>> {
        "failed to open the default configuration file");

    let only_cached = false;
    let bundle = ctry!(config.default_bundle(only_cached, &mut status);
    let bundle = ctry!(config.default_bundle(only_cached);
        "failed to load the default resource bundle");

    let format_cache_path = ctry!(config.format_cache_path();

@ -126,11 +126,11 @@ impl IoProvider for TestBundle {
}

impl Bundle for TestBundle {
    fn get_digest(&mut self, _status: &mut dyn StatusBackend) -> Result<DigestData> {
    fn get_digest(&mut self) -> Result<DigestData> {
        Ok(DigestData::zeros())
    }

    fn all_files(&mut self, status: &mut dyn StatusBackend) -> Result<Vec<String>> {
        self.0.all_files(status)
    fn all_files(&self) -> Vec<String> {
        self.0.all_files()
    }
}

@ -1,547 +0,0 @@
use flate2::{write::GzEncoder, GzBuilder};
use futures::channel::oneshot::Canceled;
use headers::HeaderMapExt;
use http_body_util::{Either, Empty, Full};
use hyper::body::{Body, Bytes};
use hyper::header::{self, HeaderValue};
use hyper::server::conn::http1;
use hyper::service::service_fn;
use hyper::{Method, Request, Response, StatusCode};
use hyper_util::rt::TokioIo;
use std::collections::HashMap;
use std::convert::Infallible;
use std::error::Error;
use std::future::Future;
use std::io::{self, Write};
use std::net::SocketAddr;
use std::ops::Bound;
use std::path::Path;
use std::pin::Pin;
use std::sync::{Arc, Mutex};
use std::{env, fs, thread};
use tectonic::config::PersistentConfig;
use tectonic::driver::ProcessingSessionBuilder;
use tectonic::io::OpenResult;
use tectonic::status::termcolor::TermcolorStatusBackend;
use tectonic::status::ChatterLevel;
use tokio::net::TcpListener;
use tokio::runtime;

mod util;

/// Build a fake tar index by concatenating files.
struct TarIndexBuilder {
    tar: Vec<u8>,
    index: GzEncoder<Vec<u8>>,
    /// Map from (offset, length) to file name.
    map: HashMap<(u64, u64), String>,
}

impl TarIndexBuilder {
    fn new() -> TarIndexBuilder {
        let tar = Vec::new();
        let index = GzBuilder::new()
            .filename("bundle.tar.index.gz")
            .write(Vec::new(), flate2::Compression::default());
        let map = HashMap::new();

        TarIndexBuilder { tar, index, map }
    }

    /// Add a file.
    fn push(&mut self, name: &str, content: &[u8]) -> &mut Self {
        let offset = self.tar.len();
        let len = content.len();
        let _ = writeln!(&mut self.index, "{name} {offset} {len}");
        self.map
            .insert((offset as u64, len as u64), name.to_owned());
        self.tar.extend_from_slice(content);
        self
    }

    /// Create a tar index.
    fn finish(self) -> TarIndex {
        TarIndex {
            tar: self.tar,
            index: self.index.finish().unwrap(),
            map: self.map,
        }
    }
}

#[derive(Clone, Debug)]
struct TarIndex {
    tar: Vec<u8>,
    index: Vec<u8>,
    map: HashMap<(u64, u64), String>,
}

impl TarIndex {
    fn from_dir<P: AsRef<Path>>(path: P) -> io::Result<TarIndex> {
        let path = path.as_ref();
        let mut builder = TarIndexBuilder::new();
        for de in path.read_dir()? {
            let path = de?.path();
            let content = fs::read(&path)?;
            builder.push(path.file_name().unwrap().to_str().unwrap(), &content);
        }

        builder.push(
            tectonic::digest::DIGEST_NAME,
            b"0000000000000000000000000000000000000000000000000000000000000000",
        );

        Ok(builder.finish())
    }
}
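
For reference, `push` above records one plain-text line per file in the gzipped index — `name offset length`, where the offset is the byte position in the concatenated fake tar. The three files pushed in `test_cached_url_provider` below would therefore yield an uncompressed index like this (digest file name abbreviated, since its exact value is the `tectonic::digest::DIGEST_NAME` constant):

plain.tex 0 4
other.tex 4 13
<DIGEST_NAME> 17 64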

#[derive(Clone, Debug, PartialEq)]
enum TectonicRequest {
    Head(String),
    Index,
    File(String),
}

struct TarIndexService {
    tar_index: Mutex<TarIndex>,
    requests: Mutex<Vec<TectonicRequest>>,
    local_addr: Mutex<Option<SocketAddr>>,
}

type ResponseFuture<B> = Pin<Box<dyn Future<Output = Response<B>> + Send + Sync + 'static>>;

impl TarIndexService {
    fn new(tar_index: TarIndex) -> TarIndexService {
        TarIndexService {
            tar_index: Mutex::new(tar_index),
            requests: Mutex::new(Vec::new()),
            local_addr: Mutex::new(None),
        }
    }

    fn set_local_addr(&self, local_addr: SocketAddr) {
        *self.local_addr.lock().unwrap() = Some(local_addr);
    }

    fn set_tar_index(&self, tar_index: TarIndex) {
        *self.tar_index.lock().unwrap() = tar_index;
    }

    fn response(
        &self,
        req: Request<impl Body>,
    ) -> ResponseFuture<Either<Empty<Bytes>, Full<Bytes>>> {
        match (
            req.method(),
            req.uri().path(),
            req.headers().typed_get::<headers::Range>(),
        ) {
            (&Method::HEAD, "/tectonic-default", None) => {
                self.log_request(TectonicRequest::Head(req.uri().path().to_owned()));
                let mut resp = Response::builder().status(StatusCode::FOUND);
                resp.headers_mut().unwrap().insert(
                    header::LOCATION,
                    HeaderValue::from_str(&format!(
                        "http://{}/bundle.tar",
                        self.local_addr.lock().unwrap().unwrap()
                    ))
                    .unwrap(),
                );
                Box::pin(async move { resp.body(Either::Left(Empty::new())).unwrap() })
            }
            (&Method::HEAD, "/bundle.tar", None) => {
                self.log_request(TectonicRequest::Head(req.uri().path().to_owned()));
                Box::pin(async move { Response::new(Either::Left(Empty::new())) })
            }
            (&Method::GET, "/bundle.tar", Some(range)) => {
                if let Some((Bound::Included(l), Bound::Included(h))) =
                    range.satisfiable_ranges(u64::MAX).next()
                {
                    let tar_index = self.tar_index.lock().unwrap();
                    let name = tar_index
                        .map
                        .get(&(l, h - l + 1))
                        .expect("unknown file data requested");
                    self.log_request(TectonicRequest::File(name.to_owned()));
                    let mut resp = Response::builder().status(StatusCode::PARTIAL_CONTENT);
                    resp.headers_mut()
                        .unwrap()
                        .typed_insert(headers::ContentRange::bytes(l..=h, None).unwrap());
                    let body = tar_index.tar[l as usize..=h as usize].to_vec().into();
                    Box::pin(async move { resp.body(Either::Right(body)).unwrap() })
                } else {
                    panic!("unexpected");
                }
            }
            (&Method::GET, "/bundle.tar.index.gz", None) => {
                self.log_request(TectonicRequest::Index);
                let resp = self.tar_index.lock().unwrap().index.to_vec().into();
                Box::pin(async move { Response::new(Either::Right(resp)) })
            }
            _ => Box::pin(async move {
                Response::builder()
                    .status(StatusCode::NOT_FOUND)
                    .body(Either::Left(Empty::new()))
                    .unwrap()
            }),
        }
    }

    fn log_request(&self, request: TectonicRequest) {
        self.requests.lock().unwrap().push(request);
    }

    fn url(&self) -> String {
        format!(
            "http://{}/tectonic-default",
            self.local_addr.lock().unwrap().unwrap()
        )
    }
}

/// Run the provided closure while the HTTP service is running. Uses the tar index
/// given as the first argument, or a default one if `None`.
fn run_test<R>(tar_index: Option<TarIndex>, run: R) -> Vec<TectonicRequest>
where
    R: FnOnce(Arc<TarIndexService>, &str),
{
    // Automatically select a port
    let addr = SocketAddr::from(([127, 0, 0, 1], 0));

    let tar_service = Arc::new(TarIndexService::new(tar_index.unwrap_or_else(|| {
        let root = Path::new(&env!("CARGO_MANIFEST_DIR"))
            .join("tests")
            .join("assets");
        TarIndex::from_dir(root).unwrap()
    })));

    let (url_available_tx, url_available_rx) = std::sync::mpsc::channel();
    let (server_shutdown_tx, server_shutdown_rx) = futures::channel::oneshot::channel::<()>();
    let tar_service_clone = Arc::clone(&tar_service);

    let server_thread = thread::spawn(move || {
        let tar_service = tar_service_clone;

        let rt = runtime::Builder::new_current_thread()
            .enable_io()
            .build()
            .unwrap();

        let tar_service_clone = Arc::clone(&tar_service);
        async fn ignore_cancel(f: impl Future<Output = Result<(), Box<dyn Error>>>) {
            if let Err(err) = f.await {
                if err.downcast_ref::<Canceled>().is_none() {
                    panic!("{}", err);
                }
            }
        }

        rt.block_on(async move {
            let server = tokio::task::spawn(ignore_cancel(async move {
                let listener = TcpListener::bind(addr).await?;

                // server is listening now
                tar_service.set_local_addr(listener.local_addr().unwrap());
                let url = tar_service.url();
                url_available_tx.send(url).unwrap();

                loop {
                    let (stream, _) = listener.accept().await?;

                    let io = TokioIo::new(stream);

                    let tar_service_clone = Arc::clone(&tar_service_clone);
                    let service = service_fn(move |req| {
                        let tar_service = Arc::clone(&tar_service_clone);
                        async move { Ok::<_, Infallible>(tar_service.response(req).await) }
                    });

                    let conn = http1::Builder::new().serve_connection(io, service);

                    if let Err(err) = conn.await {
                        println!("Error serving connection: {:?}", err);
                    }
                }
            }));
            server_shutdown_rx.await.unwrap();
            server.abort();
        })
    });

    // Server running, run the provided test
    let url = url_available_rx.recv().unwrap();
    run(Arc::clone(&tar_service), &url);

    println!("Shutting down");

    // Shut down server
    let _ = server_shutdown_tx.send(());
    server_thread.join().unwrap();

    // Check tectonic's requests.
    let requests = tar_service.requests.lock().unwrap();

    requests.clone()
}

fn check_req_count(requests: &[TectonicRequest], request: TectonicRequest, expected_number: usize) {
    let number = requests.iter().filter(|r| **r == request).count();
    assert_eq!(
        number, expected_number,
        "Expected {expected_number} requests of {request:?}, got {number}"
    );
}

#[test]
fn test_full_session() {
    let requests = run_test(None, |_, url| {
        let tempdir = tempfile::tempdir().unwrap();

        let config = PersistentConfig::default();

        let run = |path| {
            let mut status = TermcolorStatusBackend::new(ChatterLevel::Minimal);
            let mut sess_builder = ProcessingSessionBuilder::default();
            sess_builder.bundle(Box::new(
                config
                    .make_cached_url_provider(url, false, Some(tempdir.path()), &mut status)
                    .unwrap(),
            ));
            let input_path = Path::new(path);
            sess_builder.primary_input_path(input_path);
            sess_builder.tex_input_name(&input_path.file_name().unwrap().to_string_lossy());
            sess_builder.output_dir(tempdir.path());
            sess_builder.format_name("plain");
            sess_builder.format_cache_path(tempdir.path());

            let mut sess = sess_builder.create(&mut status).unwrap();
            sess.run(&mut status).unwrap();
        };

        // Run tectonic twice
        run("tests/tex-outputs/the_letter_a.tex");
        // On this run everything should be cached.
        run("tests/tex-outputs/the_letter_a.tex");
        // Run tectonic with a file that needs a new resource
        run("tests/tex-outputs/redbox_png.tex");
    });

    check_req_count(&requests, TectonicRequest::Index, 1);
    check_req_count(
        &requests,
        TectonicRequest::File(tectonic::digest::DIGEST_NAME.into()),
        2,
    );
    // This file should be cached.
    check_req_count(&requests, TectonicRequest::File("plain.tex".into()), 1);
}

#[test]
fn test_cached_url_provider() {
    let tar_index = {
        let mut builder = TarIndexBuilder::new();
        builder
            .push("plain.tex", b"test")
            .push("other.tex", b"other content")
            .push(
                tectonic::digest::DIGEST_NAME,
                b"0000000000000000000000000000000000000000000000000000000000000000",
            );
        builder.finish()
    };

    let requests = run_test(Some(tar_index), |_, url| {
        let tempdir = tempfile::tempdir().unwrap();
        let mut status = TermcolorStatusBackend::new(ChatterLevel::Minimal);

        let config = PersistentConfig::default();

        {
            let mut cache = config
                .make_cached_url_provider(url, false, Some(tempdir.path()), &mut status)
                .unwrap();

            match cache.input_open_name("plain.tex", &mut status) {
                OpenResult::Ok(_) => {}
                _ => panic!("Failed to open plain.tex"),
            }
            match cache.input_open_name("plain.tex", &mut status) {
                OpenResult::Ok(_) => {}
                _ => panic!("Failed to open plain.tex"),
            }
        }
        {
            let mut cache = config
                .make_cached_url_provider(url, false, Some(tempdir.path()), &mut status)
                .unwrap();

            // should be cached
            match cache.input_open_name("plain.tex", &mut status) {
                OpenResult::Ok(_) => {}
                _ => panic!("Failed to open plain.tex"),
            }
        }
        {
            let mut cache = config
                .make_cached_url_provider(url, false, Some(tempdir.path()), &mut status)
                .unwrap();

            // should be cached
            match cache.input_open_name("plain.tex", &mut status) {
                OpenResult::Ok(_) => {}
                _ => panic!("Failed to open plain.tex"),
            }
            // in index, should check digest and download the file
            match cache.input_open_name("other.tex", &mut status) {
                OpenResult::Ok(_) => {}
                _ => panic!("Failed to open other.tex"),
            }
        }
        {
            let mut cache = config
                .make_cached_url_provider(url, false, Some(tempdir.path()), &mut status)
                .unwrap();

            // not in index
            match cache.input_open_name("my-favourite-file.tex", &mut status) {
                OpenResult::NotAvailable => {}
                _ => panic!("'my-favourite-file.tex' file exists?"),
            }
        }
    });

    check_req_count(&requests, TectonicRequest::Index, 1);
    check_req_count(
        &requests,
        TectonicRequest::File(tectonic::digest::DIGEST_NAME.into()),
        2,
    );
    // These files should be cached.
    check_req_count(&requests, TectonicRequest::File("plain.tex".into()), 1);
    check_req_count(&requests, TectonicRequest::File("other.tex".into()), 1);
}

#[test]
fn test_bundle_update() {
    let tempdir = tempfile::tempdir().unwrap();
    let tar_index = {
        let mut builder = TarIndexBuilder::new();
        builder
            .push("only-first.tex", b"test")
            .push("file-in-both.tex", b"in both")
            .push(
                tectonic::digest::DIGEST_NAME,
                b"0000000000000000000000000000000000000000000000000000000000000000",
            );
        builder.finish()
    };

    run_test(Some(tar_index), |service, url| {
        let mut status = TermcolorStatusBackend::new(ChatterLevel::Minimal);

        let config = PersistentConfig::default();

        {
            // Run with first tar index.
            {
                let mut cache = config
                    .make_cached_url_provider(url, false, Some(tempdir.path()), &mut status)
                    .unwrap();

                match cache.input_open_name("only-first.tex", &mut status) {
                    OpenResult::Ok(_) => {}
                    _ => panic!("Failed to open only-first.tex"),
                }
            }

            // Set a tar index with a different digest.
            let tar_index = {
                let mut builder = TarIndexBuilder::new();
                builder
                    .push("only-second.tex", b"test")
                    .push("file-in-both.tex", b"in both")
                    .push(
                        tectonic::digest::DIGEST_NAME,
                        b"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
                    );
                builder.finish()
            };
            service.set_tar_index(tar_index);

            // Run with the new tar index.
            {
                let mut status = TermcolorStatusBackend::new(ChatterLevel::Minimal);

                let config = PersistentConfig::default();

                {
                    let mut cache = config
                        .make_cached_url_provider(url, false, Some(tempdir.path()), &mut status)
                        .unwrap();

                    // This should be cached even though the bundle does not contain it.
                    match cache.input_open_name("only-first.tex", &mut status) {
                        OpenResult::Ok(_) => {}
                        _ => panic!("Failed to open only-first.tex"),
                    }

                    // Not in index of the first bundle and therefore no digest check.
                    match cache.input_open_name("only-second.tex", &mut status) {
                        OpenResult::NotAvailable => {}
                        _ => panic!("File should not be in the first bundle"),
                    }
                    // File in the first bundle and the second bundle, but not cached yet. Should
                    // trigger a digest check.
                    match cache.input_open_name("file-in-both.tex", &mut status) {
                        OpenResult::Err(_) => {}
                        _ => panic!("Bundle digest changed but no error"),
                    }
                }
            }
        }
    });
}

#[test]
fn test_cache_location_redirect() {
    const CACHE_DIR_KEY: &str = "TECTONIC_CACHE_DIR";
    let tempdir = tempfile::tempdir().unwrap();

    // In this test we intentionally set the environment variable and don't use the
    // custom cache root parameter, to test the internal mechanism for a custom cache
    // location based on an environment variable.
    env::set_var(CACHE_DIR_KEY, tempdir.path().as_os_str());

    let tar_index = {
        let mut builder = TarIndexBuilder::new();
        builder.push("plain.tex", b"simple").push(
            tectonic::digest::DIGEST_NAME,
            b"0000000000000000000000000000000000000000000000000000000000000000",
        );

        builder.finish()
    };

    run_test(Some(tar_index), |_, url| {
        let mut status = TermcolorStatusBackend::new(ChatterLevel::Minimal);
        let config = PersistentConfig::default();

        let mut cache = config
            .make_cached_url_provider(url, false, None, &mut status)
            .unwrap();

        match cache.input_open_name("plain.tex", &mut status) {
            OpenResult::Ok(_) => {}
            _ => panic!("Failed to open plain.tex"),
        }

        // the filename of the target location is the SHA256 hash of the file content "simple"
        let expected_file_path = tempdir
            .path()
            .join("files")
            .join("a7")
            .join("a39b72f29718e653e73503210fbb597057b7a1c77d1fe321a1afcff041d4e1");

        if !expected_file_path.exists() {
            panic!("Couldn't find the cached file in the expected location.");
        }
    });
}

@ -634,14 +634,14 @@ fn stdin_content() {

/// Test various web bundle overrides for the v1 CLI & `-X compile`
#[test]
fn web_bundle_overrides() {
fn bundle_overrides() {
    let filename = "subdirectory/content/1.tex";
    let fmt_arg: &str = &get_plain_format_arg();
    let tempdir = setup_and_copy_files(&[filename]);
    let temppath = tempdir.path().to_owned();

    let arg_bad_bundle = ["--web-bundle", "bad-bundle"];
    let arg_good_bundle = ["--web-bundle", "test-bundle://"];
    let arg_bad_bundle = ["--bundle", "bad-bundle"];
    let arg_good_bundle = ["--bundle", "test-bundle://"];

    // test with a bad bundle
    let output = run_tectonic(

@ -656,46 +656,36 @@ fn web_bundle_overrides() {
        [&arg_good_bundle[..], &[fmt_arg, filename]].concat(),
        [&[fmt_arg], &arg_good_bundle[..], &[filename]].concat(),
        [&[fmt_arg], &[filename], &arg_good_bundle[..]].concat(),
        // overriding vendor presets
        [
            &arg_bad_bundle[..],
            &arg_good_bundle[..],
            &[fmt_arg],
            &[filename],
        ]
        .concat(),
        // stress test
        [
            &arg_bad_bundle[..],
            &arg_bad_bundle[..],
            &[fmt_arg],
            &arg_bad_bundle[..],
            &arg_bad_bundle[..],
            &[filename],
            &arg_bad_bundle[..],
            &arg_good_bundle[..],
        ]
        .concat(),
    ];

    // test `-X compile`
    #[cfg(feature = "serialization")]
    valid_args.push(
    valid_args.extend([
        [
            &["-X"],
            &["compile"],
            &arg_good_bundle[..],
            &[fmt_arg],
            &[filename],
        ]
        .concat(),
        [
            &["-X"],
            &["compile"],
            &[fmt_arg],
            &arg_good_bundle[..],
            &[filename],
        ]
        .concat(),
        [
            &arg_bad_bundle[..],
            &arg_bad_bundle[..],
            &["-X"],
            &arg_bad_bundle[..],
            &["compile"],
            &arg_bad_bundle[..],
            &[fmt_arg],
            &arg_bad_bundle[..],
            &[filename],
            &arg_bad_bundle[..],
            &arg_good_bundle[..],
        ]
        .concat(),
    );
    ]);

    for args in valid_args {
        let output = run_tectonic(&temppath, &args);

@ -707,8 +697,8 @@ fn web_bundle_overrides() {
#[cfg(feature = "serialization")]
#[test]
fn v2_bundle_overrides() {
    let arg_bad_bundle = ["--web-bundle", "bad-bundle"];
    let arg_good_bundle = ["--web-bundle", "test-bundle://"];
    let arg_bad_bundle = ["--bundle", "bad-bundle"];
    let arg_good_bundle = ["--bundle", "test-bundle://"];

    // test `-X command`
    for command in ["new", "init"] {

@ -719,34 +709,7 @@ fn v2_bundle_overrides() {
        error_or_panic(&output);

        // test with a good bundle (override)
        let valid_args: Vec<Vec<&str>> = vec![
            // different positions
            [&arg_good_bundle[..], &["-X", command]].concat(),
            [&["-X"], &arg_good_bundle[..], &[command]].concat(),
            [&["-X", command], &arg_good_bundle[..]].concat(),
            // overriding vendor presets
            [&arg_bad_bundle[..], &arg_good_bundle[..], &["-X", command]].concat(),
            [
                &arg_bad_bundle[..],
                &["-X"],
                &arg_good_bundle[..],
                &[command],
            ]
            .concat(),
            [&arg_bad_bundle[..], &["-X", command], &arg_good_bundle[..]].concat(),
            // stress test
            [
                &arg_bad_bundle[..],
                &arg_bad_bundle[..],
                &["-X"],
                &arg_bad_bundle[..],
                &arg_bad_bundle[..],
                &[command],
                &arg_bad_bundle[..],
                &arg_good_bundle[..],
            ]
            .concat(),
        ];
        let valid_args: Vec<Vec<&str>> = vec![[&["-X", command], &arg_good_bundle[..]].concat()];

        for args in valid_args {
            let tempdir = setup_and_copy_files(&[]);

@ -759,10 +722,10 @@ fn v2_bundle_overrides() {
    // test `-X build`
    let (_tempdir, temppath) = setup_v2();

    // `--web-bundle` is ignored
    // `--bundle` is ignored
    let output = run_tectonic(
        &temppath,
        &[&arg_bad_bundle[..], &["-X"], &["build"]].concat(),
        &[&["-X"], &["build"], &arg_bad_bundle[..]].concat(),
    );
    success_or_panic(&output);
}

@ -117,12 +117,7 @@ impl<'a> DriverHooks for FormatTestDriver<'a> {
        self
    }

    fn event_output_closed(
        &mut self,
        name: String,
        digest: DigestData,
        _status: &mut dyn StatusBackend,
    ) {
    fn event_output_closed(&mut self, name: String, digest: DigestData) {
        let summ = self
            .events
            .get_mut(&name)