Compare commits


19 Commits

Author SHA1 Message Date
stelzo be849526b6 rm falsely classified docs 2025-02-25 21:40:38 +01:00
stelzo 91245311a8 bump needed rustc version for dep 2025-02-25 21:39:07 +01:00
stelzo 9cc920cc62 Merge remote-tracking branch 'typst/0.13' 2025-02-25 21:18:47 +01:00
stelzo e0246192ee merge 0.13-rc1 2025-02-25 21:18:14 +01:00
stelzo a935bf8fbb merge git packages 2025-01-22 20:12:18 +01:00
Christopher Sieh e29ea242bc Merge branch 'main' into vendor 2025-01-22 18:22:38 +01:00
stelzo c8e838036b vendor highest prio when searching 2025-01-22 18:05:54 +01:00
stelzo a87b426e66 comment irrelevant now 2025-01-22 01:24:24 +01:00
stelzo 476b79ddfc add vendor dir name param 2025-01-22 01:19:19 +01:00
stelzo 9256871d62 const vendor dir name 2025-01-21 23:49:47 +01:00
stelzo ddec8feab3 add vendoring 2025-01-21 23:13:33 +01:00
Stefano Fontana 3962be8ebf migrating from git2 to gitoxide crate for git downloads 2024-12-15 19:04:17 +01:00
Stefano Fontana a295495dc5 add git downloader default impl 2024-12-15 13:05:28 +01:00
Stefano Fontana 236cb0884f enhanced documentation 2024-12-15 12:56:07 +01:00
Stefano Fontana d687d23e5e cli updater adaptation and clippy fixes 2024-12-15 12:35:06 +01:00
Stefano Fontana b5689cfc72 documentation 2024-12-14 18:23:08 +01:00
Stefano Fontana 0a1df1ee67 fixed warinings 2024-12-14 18:05:35 +01:00
Stefano Fontana 3436f825f2 git download method 2024-12-14 17:59:55 +01:00
Stefano Fontana ec3bc7dd7e refactoring downloading system to accomodate multiple downloader types. Moved http downloads into respective implementation 2024-12-14 16:27:14 +01:00
19 changed files with 1895 additions and 231 deletions

View File

@ -73,7 +73,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@1.80.0
- uses: dtolnay/rust-toolchain@1.81.0
- uses: Swatinem/rust-cache@v2
- run: cargo check --workspace

Cargo.lock (generated): 1203 changed lines. File diff suppressed because it is too large.

View File

@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
version = "0.13.0"
rust-version = "1.80" # also change in ci.yml
rust-version = "1.81" # also change in ci.yml
authors = ["The Typst Project Developers"]
edition = "2021"
homepage = "https://typst.app"
@ -40,8 +40,13 @@ base64 = "0.22"
bitflags = { version = "2", features = ["serde"] }
bumpalo = { version = "3.15.4", features = ["boxed", "collections"] }
bytemuck = "1"
chinese-number = { version = "0.7.2", default-features = false, features = ["number-to-chinese"] }
chrono = { version = "0.4.24", default-features = false, features = ["clock", "std"] }
chinese-number = { version = "0.7.2", default-features = false, features = [
"number-to-chinese",
] }
chrono = { version = "0.4.24", default-features = false, features = [
"clock",
"std",
] }
ciborium = "0.2.1"
clap = { version = "4.4", features = ["derive", "env", "wrap_help"] }
clap_complete = "4.2.1"
@ -58,6 +63,7 @@ env_proxy = "0.4"
flate2 = "1"
fontdb = { version = "0.21", default-features = false }
fs_extra = "1.3"
gix = "0.68.0"
hayagriva = "0.8.1"
heck = "0.5"
hypher = "0.1.4"
@ -67,7 +73,11 @@ icu_provider_adapters = "1.4"
icu_provider_blob = "1.4"
icu_segmenter = { version = "1.4", features = ["serde"] }
if_chain = "1"
image = { version = "0.25.5", default-features = false, features = ["png", "jpeg", "gif"] }
image = { version = "0.25.5", default-features = false, features = [
"png",
"jpeg",
"gif",
] }
indexmap = { version = "2", features = ["serde"] }
kamadak-exif = "0.6"
kurbo = "0.11"
@ -80,8 +90,15 @@ notify = "8"
once_cell = "1"
open = "5.0.1"
openssl = "0.10"
oxipng = { version = "9.0", default-features = false, features = ["filetime", "parallel", "zopfli"] }
palette = { version = "0.7.3", default-features = false, features = ["approx", "libm"] }
oxipng = { version = "9.0", default-features = false, features = [
"filetime",
"parallel",
"zopfli",
] }
palette = { version = "0.7.3", default-features = false, features = [
"approx",
"libm",
] }
parking_lot = "0.12.1"
pathdiff = "0.2"
pdf-writer = "0.12.1"
@ -96,9 +113,13 @@ quote = "1"
rayon = "1.7.0"
regex = "1"
regex-syntax = "0.8"
resvg = { version = "0.43", default-features = false, features = ["raster-images"] }
resvg = { version = "0.43", default-features = false, features = [
"raster-images",
] }
roxmltree = "0.20"
rust_decimal = { version = "1.36.0", default-features = false, features = ["maths"] }
rust_decimal = { version = "1.36.0", default-features = false, features = [
"maths",
] }
rustybuzz = "0.18"
same-file = "1"
self-replace = "1.3.7"
@ -109,21 +130,35 @@ serde_yaml = "0.9"
shell-escape = "0.1.5"
sigpipe = "0.1"
siphasher = "1"
smallvec = { version = "1.11.1", features = ["union", "const_generics", "const_new"] }
smallvec = { version = "1.11.1", features = [
"union",
"const_generics",
"const_new",
] }
stacker = "0.1.15"
subsetter = "0.2"
svg2pdf = "0.12"
syn = { version = "2", features = ["full", "extra-traits"] }
syntect = { version = "5", default-features = false, features = ["parsing", "regex-fancy", "plist-load", "yaml-load"] }
syntect = { version = "5", default-features = false, features = [
"parsing",
"regex-fancy",
"plist-load",
"yaml-load",
] }
tar = "0.4"
tempfile = "3.7.0"
thin-vec = "0.2.13"
time = { version = "0.3.20", features = ["formatting", "macros", "parsing"] }
tiny_http = "0.12"
tiny-skia = "0.11"
toml = { version = "0.8", default-features = false, features = ["parse", "display"] }
toml = { version = "0.8", default-features = false, features = [
"parse",
"display",
] }
ttf-parser = "0.24.1"
two-face = { version = "0.4.3", default-features = false, features = ["syntect-fancy"] }
two-face = { version = "0.4.3", default-features = false, features = [
"syntect-fancy",
] }
typed-arena = "2"
unicode-bidi = "0.3.18"
unicode-ident = "1.0"
@ -131,7 +166,11 @@ unicode-math-class = "0.1"
unicode-script = "0.5"
unicode-segmentation = "1"
unscanny = "0.1"
ureq = { version = "2", default-features = false, features = ["native-tls", "gzip", "json"] }
ureq = { version = "2", default-features = false, features = [
"native-tls",
"gzip",
"json",
] }
usvg = { version = "0.43", default-features = false, features = ["text"] }
walkdir = "2"
wasmi = "0.40.0"

View File

@ -21,7 +21,7 @@ doc = false
typst = { workspace = true }
typst-eval = { workspace = true }
typst-html = { workspace = true }
typst-kit = { workspace = true }
typst-kit = { workspace = true, features = ["downloads_http"] }
typst-macros = { workspace = true }
typst-pdf = { workspace = true }
typst-render = { workspace = true }

View File

@ -75,6 +75,9 @@ pub enum Command {
/// Processes an input file to extract provided metadata.
Query(QueryCommand),
/// Create a vendor directory with all used packages.
Vendor(VendorCommand),
/// Lists all discovered fonts in system and custom font paths.
Fonts(FontsCommand),
@ -160,6 +163,22 @@ pub struct QueryCommand {
pub process: ProcessArgs,
}
/// Create a vendor directory with all used packages in the current directory.
#[derive(Debug, Clone, Parser)]
pub struct VendorCommand {
/// Path to input Typst file. Use `-` to read input from stdin.
#[clap(value_parser = input_value_parser(), value_hint = ValueHint::FilePath)]
pub input: Input,
/// World arguments.
#[clap(flatten)]
pub world: WorldArgs,
/// Processing arguments.
#[clap(flatten)]
pub process: ProcessArgs,
}
/// Lists all discovered fonts in system and custom font paths.
#[derive(Debug, Clone, Parser)]
pub struct FontsCommand {
@ -342,6 +361,14 @@ pub struct PackageArgs {
value_name = "DIR"
)]
pub package_cache_path: Option<PathBuf>,
/// Custom path to the vendor directory.
#[clap(
long = "package-vendor-path",
env = "TYPST_PACKAGE_VENDOR_PATH",
value_name = "DIR"
)]
pub vendor_path: Option<PathBuf>,
}
/// Common arguments to customize available fonts.

View File

@ -6,7 +6,7 @@ use std::time::{Duration, Instant};
use codespan_reporting::term;
use codespan_reporting::term::termcolor::WriteColor;
use typst::utils::format_duration;
use typst_kit::download::{DownloadState, Downloader, Progress};
use typst_kit::package_downloads::{DownloadState, Downloader, Progress};
use crate::terminal::{self, TermOut};
use crate::ARGS;
@ -43,11 +43,7 @@ impl<T: Display> Progress for PrintDownload<T> {
/// Returns a new downloader.
pub fn downloader() -> Downloader {
let user_agent = concat!("typst/", env!("CARGO_PKG_VERSION"));
match ARGS.cert.clone() {
Some(cert) => Downloader::with_path(user_agent, cert),
None => Downloader::new(user_agent),
}
Downloader::new(ARGS.cert.clone())
}
/// Compile and format several download statistics and make an attempt at

View File

@ -15,7 +15,7 @@ use crate::package;
/// Execute an initialization command.
pub fn init(command: &InitCommand) -> StrResult<()> {
let package_storage = package::storage(&command.package);
let package_storage = package::storage(&command.package, None);
// Parse the package specification. If the user didn't specify the version,
// we try to figure it out automatically by downloading the package index

View File

@ -12,6 +12,7 @@ mod terminal;
mod timings;
#[cfg(feature = "self-update")]
mod update;
mod vendor;
mod watch;
mod world;
@ -69,6 +70,7 @@ fn dispatch() -> HintedStrResult<()> {
Command::Watch(command) => crate::watch::watch(&mut timer, command)?,
Command::Init(command) => crate::init::init(command)?,
Command::Query(command) => crate::query::query(command)?,
Command::Vendor(command) => crate::vendor::vendor(command)?,
Command::Fonts(command) => crate::fonts::fonts(command),
Command::Update(command) => crate::update::update(command)?,
}

View File

@ -1,13 +1,17 @@
use std::path::PathBuf;
use typst_kit::package::PackageStorage;
use crate::args::PackageArgs;
use crate::download;
/// Returns a new package storage for the given args.
pub fn storage(args: &PackageArgs) -> PackageStorage {
pub fn storage(args: &PackageArgs, workdir: Option<PathBuf>) -> PackageStorage {
PackageStorage::new(
args.vendor_path.clone(),
args.package_cache_path.clone(),
args.package_path.clone(),
download::downloader(),
workdir,
)
}

View File

@ -7,12 +7,12 @@ use semver::Version;
use serde::Deserialize;
use tempfile::NamedTempFile;
use typst::diag::{bail, StrResult};
use typst_kit::download::Downloader;
use typst_kit::package_downloads::http::HttpDownloader;
use xz2::bufread::XzDecoder;
use zip::ZipArchive;
use crate::args::UpdateCommand;
use crate::download::{self, PrintDownload};
use crate::download::PrintDownload;
const TYPST_GITHUB_ORG: &str = "typst";
const TYPST_REPO: &str = "typst";
@ -91,7 +91,8 @@ pub fn update(command: &UpdateCommand) -> StrResult<()> {
fs::copy(current_exe, &backup_path)
.map_err(|err| eco_format!("failed to create backup ({err})"))?;
let downloader = download::downloader();
// No certificate is needed to download from GitHub.
let downloader = HttpDownloader::new(HttpDownloader::default_user_agent());
let release = Release::from_tag(command.version.as_ref(), &downloader)?;
if !update_needed(&release)? && !command.force {
@ -133,7 +134,7 @@ impl Release {
/// Typst repository.
pub fn from_tag(
tag: Option<&Version>,
downloader: &Downloader,
downloader: &HttpDownloader,
) -> StrResult<Release> {
let url = match tag {
Some(tag) => format!(
@ -144,7 +145,7 @@ impl Release {
),
};
match downloader.download(&url) {
match downloader.perform_download(&url) {
Ok(response) => response.into_json().map_err(|err| {
eco_format!("failed to parse release information ({err})")
}),
@ -161,7 +162,7 @@ impl Release {
pub fn download_binary(
&self,
asset_name: &str,
downloader: &Downloader,
downloader: &HttpDownloader,
) -> StrResult<Vec<u8>> {
let asset = self.assets.iter().find(|a| a.name.starts_with(asset_name)).ok_or(
eco_format!(

View File

@ -0,0 +1,109 @@
use std::{
fs::{create_dir, create_dir_all},
path::PathBuf,
};
use ecow::eco_format;
use typst::{
diag::{bail, HintedStrResult, Warned},
layout::PagedDocument,
};
use typst_kit::package::{DEFAULT_PACKAGES_SUBDIR, DEFAULT_VENDOR_SUBDIR};
use crate::{
args::VendorCommand, compile::print_diagnostics, set_failed, world::SystemWorld,
};
use typst::World;
/// Execute a vendor command.
pub fn vendor(command: &VendorCommand) -> HintedStrResult<()> {
let mut world = SystemWorld::new(&command.input, &command.world, &command.process)?;
// Reset everything and ensure that the main file is present.
world.reset();
world.source(world.main()).map_err(|err| err.to_string())?;
let Warned { output, warnings } = typst::compile::<PagedDocument>(&world);
match output {
Ok(_) => {
copy_deps(&mut world, &command.world.package.vendor_path)?;
print_diagnostics(&world, &[], &warnings, command.process.diagnostic_format)
.map_err(|err| eco_format!("failed to print diagnostics ({err})"))?;
}
// Print diagnostics.
Err(errors) => {
set_failed();
print_diagnostics(
&world,
&errors,
&warnings,
command.process.diagnostic_format,
)
.map_err(|err| eco_format!("failed to print diagnostics ({err})"))?;
}
}
Ok(())
}
fn copy_deps(
world: &mut SystemWorld,
vendor_path: &Option<PathBuf>,
) -> HintedStrResult<()> {
let vendor_dir = match vendor_path {
Some(path) => match path.canonicalize() {
Ok(path) => path,
Err(err) => {
if err.kind() == std::io::ErrorKind::NotFound {
if let Err(err) = create_dir(path) {
bail!("failed to create vendor directory: {:?}", err);
}
path.clone()
} else {
bail!("failed to canonicalize vendor directory path: {:?}", err);
}
}
},
None => world.workdir().join(DEFAULT_VENDOR_SUBDIR),
};
// Collect all dependencies before copying: as soon as a package's parent
// directory exists inside the vendor dir, the world resolves subsequent files
// of that package from the vendor directory, since it has the highest priority.
let all_deps = world
.dependencies()
.filter_map(|dep_path| {
let path = dep_path.to_str().unwrap();
path.find(DEFAULT_PACKAGES_SUBDIR).map(|pos| {
let dependency_path = &path[pos + DEFAULT_PACKAGES_SUBDIR.len() + 1..];
(dep_path.clone(), vendor_dir.join(dependency_path))
})
})
.collect::<Vec<_>>();
for (from_data_path, to_vendor_path) in all_deps {
if let Some(parent) = to_vendor_path.parent() {
match parent.try_exists() {
Ok(false) => {
if let Err(err) = create_dir_all(parent) {
bail!(
"failed to create package inside the vendor directory: {:?}",
err
);
}
}
Err(err) => {
bail!("failed to check existence of a package inside the vendor directory: {:?}", err);
}
_ => {}
}
}
if let Err(err) = std::fs::copy(from_data_path, to_vendor_path) {
bail!("failed to copy dependency to vendor directory: {:?}", err);
}
}
Ok(())
}
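
For reference, a minimal standalone sketch (not part of the diff; the helper name rebase_into_vendor and the paths in the trailing comment are illustrative) of the path rebase that copy_deps performs: everything after the "typst/packages" segment of a dependency path is re-rooted under the vendor directory.

use std::path::{Path, PathBuf};

const DEFAULT_PACKAGES_SUBDIR: &str = "typst/packages";

// Hypothetical helper mirroring the mapping inside `copy_deps`: keep only the
// part of the dependency path after "typst/packages" and join it onto the
// vendor directory.
fn rebase_into_vendor(dep_path: &Path, vendor_dir: &Path) -> Option<PathBuf> {
    let path = dep_path.to_str()?;
    let pos = path.find(DEFAULT_PACKAGES_SUBDIR)?;
    let relative = &path[pos + DEFAULT_PACKAGES_SUBDIR.len() + 1..];
    Some(vendor_dir.join(relative))
}

// e.g. ~/.cache/typst/packages/preview/example/0.1.0/lib.typ
//   -> <project>/vendor/preview/example/0.1.0/lib.typ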

View File

@ -29,7 +29,7 @@ static STDIN_ID: LazyLock<FileId> =
/// A world that provides access to the operating system.
pub struct SystemWorld {
/// The working directory.
workdir: Option<PathBuf>,
workdir: PathBuf,
/// The root relative to which absolute paths are resolved.
root: PathBuf,
/// The input path.
@ -132,15 +132,18 @@ impl SystemWorld {
None => Now::System(OnceLock::new()),
};
let env_workdir = std::env::current_dir().ok();
let workdir = env_workdir.unwrap_or(PathBuf::from("."));
Ok(Self {
workdir: std::env::current_dir().ok(),
workdir: workdir.clone(),
root,
main,
library: LazyHash::new(library),
book: LazyHash::new(fonts.book),
fonts: fonts.fonts,
slots: Mutex::new(HashMap::new()),
package_storage: package::storage(&world_args.package),
package_storage: package::storage(&world_args.package, Some(workdir)),
now,
})
}
@ -157,7 +160,7 @@ impl SystemWorld {
/// The current working directory.
pub fn workdir(&self) -> &Path {
self.workdir.as_deref().unwrap_or(Path::new("."))
self.workdir.as_path()
}
/// Return all paths the last compilation depended on.

View File

@ -27,6 +27,7 @@ serde = { workspace = true }
serde_json = { workspace = true }
tar = { workspace = true, optional = true }
ureq = { workspace = true, optional = true }
gix = { workspace = true, optional = true, features = ["worktree-mutation", "blocking-network-client"] }
# Explicitly depend on OpenSSL if applicable, so that we can add the
# `openssl/vendored` feature to it if `vendor-openssl` is enabled.
@ -40,7 +41,9 @@ default = ["fonts", "packages"]
fonts = ["dep:fontdb", "fontdb/memmap", "fontdb/fontconfig"]
# Add generic downloading utilities
downloads = ["dep:env_proxy", "dep:native-tls", "dep:ureq", "dep:openssl"]
downloads = ["downloads_http", "downloads_git"]
downloads_http = ["dep:env_proxy", "dep:native-tls", "dep:ureq", "dep:openssl"]
downloads_git = ["gix"]
# Add package downloading utilities, implies `downloads`
packages = ["downloads", "dep:dirs", "dep:flate2", "dep:tar"]

View File

@ -10,18 +10,15 @@
//! - For text: Libertinus Serif, New Computer Modern
//! - For math: New Computer Modern Math
//! - For code: Deja Vu Sans Mono
//! - [download] contains functionality for making simple web requests with
//! status reporting, useful for downloading packages from package registries.
//! It is enabled by the `downloads` feature flag, additionally the
//! `vendor-openssl` can be used on operating systems other than macOS and
//! Windows to vendor OpenSSL when building.
//! - [package_downloads] contains functionality for handling package downloading.
//! It is enabled by the `downloads` feature flag.
//! - [package] contains package storage and downloading functionality based on
//! [download]. It is enabled by the `packages` feature flag and implies the
//! [package_downloads]. It is enabled by the `packages` feature flag and implies the
//! `downloads` feature flag.
#[cfg(feature = "downloads")]
pub mod download;
#[cfg(feature = "fonts")]
pub mod fonts;
#[cfg(feature = "packages")]
pub mod package;
#[cfg(feature = "downloads")]
pub mod package_downloads;

View File

@ -1,29 +1,30 @@
//! Download and unpack packages and package indices.
use std::fs;
use std::path::{Path, PathBuf};
use crate::package_downloads::{Downloader, PackageDownloader, Progress};
use ecow::eco_format;
use once_cell::sync::OnceCell;
use serde::Deserialize;
use typst_library::diag::{bail, PackageError, PackageResult, StrResult};
use typst_syntax::package::{PackageSpec, PackageVersion, VersionlessPackageSpec};
use crate::download::{Downloader, Progress};
/// The default Typst registry.
pub const DEFAULT_REGISTRY: &str = "https://packages.typst.org";
/// The public namespace in the default Typst registry.
pub const DEFAULT_NAMESPACE: &str = "preview";
use typst_library::diag::{PackageError, PackageResult, StrResult};
use typst_syntax::package::{
PackageInfo, PackageSpec, PackageVersion, VersionlessPackageSpec,
};
/// The default packages sub directory within the package and package cache paths.
pub const DEFAULT_PACKAGES_SUBDIR: &str = "typst/packages";
/// The default vendor sub directory within the project root.
pub const DEFAULT_VENDOR_SUBDIR: &str = "vendor";
/// The public namespace in the default Typst registry.
pub const DEFAULT_NAMESPACE: &str = "preview";
/// Holds information about where packages should be stored and downloads them
/// on demand, if possible.
#[derive(Debug)]
pub struct PackageStorage {
/// The path at which packages are stored by the vendor command.
package_vendor_path: Option<PathBuf>,
/// The path at which non-local packages should be stored when downloaded.
package_cache_path: Option<PathBuf>,
/// The path at which local packages are stored.
@ -31,30 +32,22 @@ pub struct PackageStorage {
/// The downloader used for fetching the index and packages.
downloader: Downloader,
/// The cached index of the default namespace.
index: OnceCell<Vec<serde_json::Value>>,
index: OnceCell<Vec<PackageInfo>>,
}
impl PackageStorage {
/// Creates a new package storage for the given package paths. Falls back to
/// the recommended XDG directories if they are `None`.
pub fn new(
package_vendor_path: Option<PathBuf>,
package_cache_path: Option<PathBuf>,
package_path: Option<PathBuf>,
downloader: Downloader,
) -> Self {
Self::with_index(package_cache_path, package_path, downloader, OnceCell::new())
}
/// Creates a new package storage with a pre-defined index.
///
/// Useful for testing.
fn with_index(
package_cache_path: Option<PathBuf>,
package_path: Option<PathBuf>,
downloader: Downloader,
index: OnceCell<Vec<serde_json::Value>>,
workdir: Option<PathBuf>,
) -> Self {
Self {
package_vendor_path: package_vendor_path
.or_else(|| workdir.map(|workdir| workdir.join(DEFAULT_VENDOR_SUBDIR))),
package_cache_path: package_cache_path.or_else(|| {
dirs::cache_dir().map(|cache_dir| cache_dir.join(DEFAULT_PACKAGES_SUBDIR))
}),
@ -62,7 +55,7 @@ impl PackageStorage {
dirs::data_dir().map(|data_dir| data_dir.join(DEFAULT_PACKAGES_SUBDIR))
}),
downloader,
index,
index: OnceCell::new(),
}
}
@ -85,25 +78,37 @@ impl PackageStorage {
) -> PackageResult<PathBuf> {
let subdir = format!("{}/{}/{}", spec.namespace, spec.name, spec.version);
// Read from vendor dir if it exists.
if let Some(vendor_dir) = &self.package_vendor_path {
if let Ok(true) = vendor_dir.try_exists() {
let dir = vendor_dir.join(&subdir);
if dir.exists() {
return Ok(dir);
}
}
}
// check the package_path for the package directory.
if let Some(packages_dir) = &self.package_path {
let dir = packages_dir.join(&subdir);
if dir.exists() {
// no need to download, already in the path.
return Ok(dir);
}
}
// The package was not in the package path; check whether it has been cached.
if let Some(cache_dir) = &self.package_cache_path {
let dir = cache_dir.join(&subdir);
if dir.exists() {
// The package was cached, so return the cached directory.
return Ok(dir);
}
// Download from network if it doesn't exist yet.
if spec.namespace == DEFAULT_NAMESPACE {
self.download_package(spec, &dir, progress)?;
if dir.exists() {
return Ok(dir);
}
self.download_package(spec, &dir, progress)?;
if dir.exists() {
return Ok(dir);
}
}
@ -115,7 +120,7 @@ impl PackageStorage {
&self,
spec: &VersionlessPackageSpec,
) -> StrResult<PackageVersion> {
if spec.namespace == DEFAULT_NAMESPACE {
/*if spec.namespace == DEFAULT_NAMESPACE {
// For `DEFAULT_NAMESPACE`, download the package index and find the latest
// version.
self.download_index()?
@ -139,24 +144,23 @@ impl PackageStorage {
.filter_map(|path| path.file_name()?.to_string_lossy().parse().ok())
.max()
.ok_or_else(|| eco_format!("please specify the desired version"))
}
}*/
self.download_index(spec)?
.iter()
.filter(|package| package.name == spec.name)
.map(|package| package.version)
.max()
.ok_or_else(|| eco_format!("failed to find package {spec}"))
}
/// Download the package index. The result of this is cached for efficiency.
pub fn download_index(&self) -> StrResult<&[serde_json::Value]> {
pub fn download_index(
&self,
spec: &VersionlessPackageSpec,
) -> StrResult<&[PackageInfo]> {
self.index
.get_or_try_init(|| {
let url = format!("{DEFAULT_REGISTRY}/{DEFAULT_NAMESPACE}/index.json");
match self.downloader.download(&url) {
Ok(response) => response.into_json().map_err(|err| {
eco_format!("failed to parse package index: {err}")
}),
Err(ureq::Error::Status(404, _)) => {
bail!("failed to fetch package index (not found)")
}
Err(err) => bail!("failed to fetch package index ({err})"),
}
})
.get_or_try_init(|| self.downloader.download_index(spec))
.map(AsRef::as_ref)
}
@ -170,82 +174,15 @@ impl PackageStorage {
package_dir: &Path,
progress: &mut dyn Progress,
) -> PackageResult<()> {
assert_eq!(spec.namespace, DEFAULT_NAMESPACE);
let url = format!(
"{DEFAULT_REGISTRY}/{DEFAULT_NAMESPACE}/{}-{}.tar.gz",
spec.name, spec.version
);
let data = match self.downloader.download_with_progress(&url, progress) {
Ok(data) => data,
Err(ureq::Error::Status(404, _)) => {
match self.downloader.download(spec, package_dir, progress) {
Err(PackageError::NotFound(spec)) => {
if let Ok(version) = self.determine_latest_version(&spec.versionless()) {
return Err(PackageError::VersionNotFound(spec.clone(), version));
Err(PackageError::VersionNotFound(spec.clone(), version))
} else {
return Err(PackageError::NotFound(spec.clone()));
Err(PackageError::NotFound(spec.clone()))
}
}
Err(err) => {
return Err(PackageError::NetworkFailed(Some(eco_format!("{err}"))))
}
};
let decompressed = flate2::read::GzDecoder::new(data.as_slice());
tar::Archive::new(decompressed).unpack(package_dir).map_err(|err| {
fs::remove_dir_all(package_dir).ok();
PackageError::MalformedArchive(Some(eco_format!("{err}")))
})
}
}
/// Minimal information required about a package to determine its latest
/// version.
#[derive(Deserialize)]
struct MinimalPackageInfo {
name: String,
version: PackageVersion,
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn lazy_deser_index() {
let storage = PackageStorage::with_index(
None,
None,
Downloader::new("typst/test"),
OnceCell::with_value(vec![
serde_json::json!({
"name": "charged-ieee",
"version": "0.1.0",
"entrypoint": "lib.typ",
}),
serde_json::json!({
"name": "unequivocal-ams",
// This version number is currently not valid, so this package
// can't be parsed.
"version": "0.2.0-dev",
"entrypoint": "lib.typ",
}),
]),
);
let ieee_version = storage.determine_latest_version(&VersionlessPackageSpec {
namespace: "preview".into(),
name: "charged-ieee".into(),
});
assert_eq!(ieee_version, Ok(PackageVersion { major: 0, minor: 1, patch: 0 }));
let ams_version = storage.determine_latest_version(&VersionlessPackageSpec {
namespace: "preview".into(),
name: "unequivocal-ams".into(),
});
assert_eq!(
ams_version,
Err("failed to find package @preview/unequivocal-ams".into())
)
val => val,
}
}
}
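
A brief usage sketch of the new PackageStorage::new signature (the storage_for helper and the project_root argument are hypothetical, for illustration only). It reflects the lookup order the storage now uses: vendor directory, then the local package path, then the cache, then a download via the namespace-selected backend.

use std::path::PathBuf;
use typst_kit::package::PackageStorage;
use typst_kit::package_downloads::Downloader;

// All `None` paths fall back to their defaults: the vendor directory becomes
// `<workdir>/vendor`, while the cache and local package paths come from the
// platform cache and data directories.
fn storage_for(project_root: PathBuf) -> PackageStorage {
    PackageStorage::new(
        None,                  // vendor path (--package-vendor-path)
        None,                  // cache path (--package-cache-path)
        None,                  // local package path (--package-path)
        Downloader::new(None), // no custom TLS certificate
        Some(project_root),    // workdir, used for the vendor default
    )
}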

View File

@ -0,0 +1,116 @@
use crate::package_downloads::{DownloadState, PackageDownloader, Progress};
use ecow::{eco_format, EcoString};
use gix::remote::fetch::Shallow;
use std::fmt::Debug;
use std::num::NonZero;
use std::path::Path;
use std::time::Instant;
use typst_library::diag::{PackageError, PackageResult};
use typst_syntax::package::{PackageInfo, PackageSpec, VersionlessPackageSpec};
#[derive(Debug)]
pub struct GitDownloader;
impl Default for GitDownloader {
fn default() -> Self {
Self::new()
}
}
impl GitDownloader {
pub fn new() -> Self {
Self {}
}
pub fn download_with_progress(
&self,
repo: &str,
tag: &str,
dest: &Path,
progress: &mut dyn Progress,
) -> Result<(), EcoString> {
progress.print_start();
let state = DownloadState {
content_len: None,
total_downloaded: 0,
bytes_per_second: Default::default(),
start_time: Instant::now(),
};
std::fs::create_dir_all(dest).map_err(|x| eco_format!("{x}"))?;
let url = gix::url::parse(repo.into()).map_err(|x| eco_format!("{x}"))?;
let mut prepare_fetch =
gix::prepare_clone(url, dest).map_err(|x| eco_format!("{x}"))?;
prepare_fetch = prepare_fetch
.with_shallow(Shallow::DepthAtRemote(NonZero::new(1).unwrap()))
.with_ref_name(Some(tag))
.map_err(|x| eco_format!("{x}"))?;
let (mut prepare_checkout, _) = prepare_fetch
.fetch_then_checkout(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED)
.map_err(|x| eco_format!("{x}"))?;
if prepare_checkout.repo().work_dir().is_none() {
return Err(eco_format!(
"Cloned git repository but files are not available."
))?;
}
prepare_checkout
.main_worktree(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED)
.map_err(|x| eco_format!("{x}"))?;
progress.print_finish(&state);
Ok(())
}
/// Parses the package namespace into the git repository to clone.
/// The namespace format is the following:
///
/// @git:<git host and user>
///
/// The repository to clone is formed from the git host and the package name
/// with the `.git` extension appended, and the tag checked out is derived from
/// the package version in the format v<major>.<minor>.<patch>.
///
/// For example, the package
/// @git:git@github.com:typst/package:0.1.0
/// results in cloning the repository git@github.com:typst/package.git
/// and a detached-head checkout at tag v0.1.0.
///
/// NOTE: no index download is possible.
fn parse_namespace(ns: &str, name: &str) -> Result<String, EcoString> {
let mut parts = ns.splitn(2, ":");
let schema =
parts.next().ok_or_else(|| eco_format!("expected schema in {}", ns))?;
let repo = parts
.next()
.ok_or_else(|| eco_format!("invalid package repo {}", ns))?;
if !schema.eq("git") {
Err(eco_format!("invalid schema in {}", ns))?
}
Ok(format!("{repo}/{name}.git"))
}
}
impl PackageDownloader for GitDownloader {
fn download_index(
&self,
_spec: &VersionlessPackageSpec,
) -> Result<Vec<PackageInfo>, EcoString> {
Err(eco_format!("Downloading index is not supported for git repositories"))
}
fn download(
&self,
spec: &PackageSpec,
package_dir: &Path,
progress: &mut dyn Progress,
) -> PackageResult<()> {
let repo = Self::parse_namespace(spec.namespace.as_str(), spec.name.as_str())
.map_err(|x| PackageError::Other(Some(x)))?;
let tag = format!("refs/tags/v{}", spec.version);
self.download_with_progress(repo.as_str(), tag.as_str(), package_dir, progress)
.map_err(|x| PackageError::Other(Some(x)))
}
}
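
A hedged usage sketch (the repository, package name, target directory, and the Silent progress type are hypothetical; it assumes the downloads_git feature is enabled): a spec in the @git: namespace resolves to "<git host and user>/<name>.git" and the tag refs/tags/v<version>, which GitDownloader then shallow-clones.

use std::path::Path;
use typst_kit::package_downloads::git::GitDownloader;
use typst_kit::package_downloads::{DownloadState, PackageDownloader, Progress};
use typst_syntax::package::{PackageSpec, PackageVersion};

// A no-op progress reporter, just to satisfy the trait bound.
struct Silent;
impl Progress for Silent {
    fn print_start(&mut self) {}
    fn print_progress(&mut self, _: &DownloadState) {}
    fn print_finish(&mut self, _: &DownloadState) {}
}

fn fetch_example() -> typst::diag::PackageResult<()> {
    let spec = PackageSpec {
        namespace: "git:git@github.com:typst".into(),
        name: "package".into(),
        version: PackageVersion { major: 0, minor: 1, patch: 0 },
    };
    // Clones git@github.com:typst/package.git at refs/tags/v0.1.0.
    GitDownloader::new().download(
        &spec,
        Path::new("vendor/preview-git/package/0.1.0"),
        &mut Silent,
    )
}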

View File

@ -7,27 +7,24 @@
use std::collections::VecDeque;
use std::fmt::Debug;
use std::fs;
use std::io::{self, ErrorKind, Read};
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::{Duration, Instant};
use ecow::EcoString;
use crate::package_downloads::{
DownloadState, PackageDownloader, Progress, DEFAULT_NAMESPACE,
};
use ecow::{eco_format, EcoString};
use native_tls::{Certificate, TlsConnector};
use once_cell::sync::OnceCell;
use typst_library::diag::{bail, PackageError, PackageResult};
use typst_syntax::package::{PackageInfo, PackageSpec, VersionlessPackageSpec};
use ureq::Response;
/// Manages progress reporting for downloads.
pub trait Progress {
/// Invoked when a download is started.
fn print_start(&mut self);
/// Invoked repeatedly while a download is ongoing.
fn print_progress(&mut self, state: &DownloadState);
/// Invoked when a download is finished.
fn print_finish(&mut self, state: &DownloadState);
}
/// The default Typst registry.
pub const DEFAULT_REGISTRY: &str = "https://packages.typst.org";
/// An implementation of [`Progress`] with no-op reporting, i.e., reporting
/// events are swallowed.
@ -39,28 +36,18 @@ impl Progress for ProgressSink {
fn print_finish(&mut self, _: &DownloadState) {}
}
/// The current state of an in progress or finished download.
#[derive(Debug)]
pub struct DownloadState {
/// The expected amount of bytes to download, `None` if the response header
/// was not set.
pub content_len: Option<usize>,
/// The total amount of downloaded bytes until now.
pub total_downloaded: usize,
/// A backlog of the amount of downloaded bytes each second.
pub bytes_per_second: VecDeque<usize>,
/// The download starting instant.
pub start_time: Instant,
}
/// A minimal https client for downloading various resources.
pub struct Downloader {
pub struct HttpDownloader {
user_agent: EcoString,
cert_path: Option<PathBuf>,
cert: OnceCell<Certificate>,
}
impl Downloader {
impl HttpDownloader {
pub fn default_user_agent() -> String {
format!("typst-kit/{}", env!("CARGO_PKG_VERSION"))
}
/// Creates a new downloader with the given user agent and no certificate.
pub fn new(user_agent: impl Into<EcoString>) -> Self {
Self {
@ -81,15 +68,6 @@ impl Downloader {
}
}
/// Crates a new downloader with the given user agent and certificate.
pub fn with_cert(user_agent: impl Into<EcoString>, cert: Certificate) -> Self {
Self {
user_agent: user_agent.into(),
cert_path: None,
cert: OnceCell::with_value(cert),
}
}
/// Returns the certificate this client is using, if a custom certificate
/// is used it is loaded on first access.
///
@ -107,7 +85,7 @@ impl Downloader {
/// Download binary data from the given url.
#[allow(clippy::result_large_err)]
pub fn download(&self, url: &str) -> Result<ureq::Response, ureq::Error> {
pub fn perform_download(&self, url: &str) -> Result<ureq::Response, ureq::Error> {
let mut builder = ureq::AgentBuilder::new();
let mut tls = TlsConnector::builder();
@ -143,12 +121,47 @@ impl Downloader {
progress: &mut dyn Progress,
) -> Result<Vec<u8>, ureq::Error> {
progress.print_start();
let response = self.download(url)?;
let response = self.perform_download(url)?;
Ok(RemoteReader::from_response(response, progress).download()?)
}
/// Parses the package namespace into the correct registry and namespace.
/// The namespace format is the following:
///
/// @http[s]:<registry host>:<namespace>/<package-name>:<package-version>
///
/// resulting in the package archive being resolved as
/// http[s]://<registry host>/<namespace>/<package-name>-<package-version>.tar.gz
///
/// and the index as
/// http[s]://<registry host>/<namespace>/index.json
///
/// NOTE: the plain preview namespace is treated as if it were written as
/// @https:packages.typst.org:preview/<package-name>:<package-version>
fn parse_namespace(ns: &str) -> Result<(String, String), EcoString> {
if ns.eq(DEFAULT_NAMESPACE) {
return Ok((DEFAULT_REGISTRY.to_string(), DEFAULT_NAMESPACE.to_string()));
}
let mut parts = ns.splitn(3, ":");
let schema =
parts.next().ok_or_else(|| eco_format!("expected schema in {}", ns))?;
let registry = parts
.next()
.ok_or_else(|| eco_format!("invalid package registry in namespace {}", ns))?;
let ns = parts
.next()
.ok_or_else(|| eco_format!("invalid package namespace in {}", ns))?;
if !schema.eq("http") && !schema.eq("https") {
Err(eco_format!("invalid schema in {}", ns))?
}
Ok((format!("{schema}://{registry}"), ns.to_string()))
}
}
impl Debug for Downloader {
impl Debug for HttpDownloader {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Downloader")
.field("user_agent", &self.user_agent)
@ -257,3 +270,48 @@ impl<'p> RemoteReader<'p> {
Ok(data)
}
}
impl PackageDownloader for HttpDownloader {
fn download_index(
&self,
spec: &VersionlessPackageSpec,
) -> Result<Vec<PackageInfo>, EcoString> {
let (registry, namespace) = Self::parse_namespace(spec.namespace.as_str())?;
let url = format!("{registry}/{namespace}/index.json");
match self.perform_download(&url) {
Ok(response) => response
.into_json()
.map_err(|err| eco_format!("failed to parse package index: {err}")),
Err(ureq::Error::Status(404, _)) => {
bail!("failed to fetch package index (not found)")
}
Err(err) => bail!("failed to fetch package index ({err})"),
}
}
fn download(
&self,
spec: &PackageSpec,
package_dir: &Path,
progress: &mut dyn Progress,
) -> PackageResult<()> {
let (registry, namespace) = Self::parse_namespace(spec.namespace.as_str())
.map_err(|x| PackageError::Other(Some(x)))?;
let url =
format!("{}/{}/{}-{}.tar.gz", registry, namespace, spec.name, spec.version);
let data = match self.download_with_progress(&url, progress) {
Ok(data) => data,
Err(ureq::Error::Status(404, _)) => {
Err(PackageError::NotFound(spec.clone()))?
}
Err(err) => Err(PackageError::NetworkFailed(Some(eco_format!("{err}"))))?,
};
let decompressed = flate2::read::GzDecoder::new(data.as_slice());
tar::Archive::new(decompressed).unpack(package_dir).map_err(|err| {
fs::remove_dir_all(package_dir).ok();
PackageError::MalformedArchive(Some(eco_format!("{err}")))
})
}
}
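
A hedged sketch of the custom HTTP registry namespace in use (the registry host packages.example.org, the namespace myspace, the package mypkg, and the list_versions helper are all hypothetical; it assumes the downloads_http feature is enabled). The index is fetched from https://packages.example.org/myspace/index.json, and an archive for version 0.1.0 would resolve to https://packages.example.org/myspace/mypkg-0.1.0.tar.gz.

use ecow::EcoString;
use typst_kit::package_downloads::http::HttpDownloader;
use typst_kit::package_downloads::PackageDownloader;
use typst_syntax::package::VersionlessPackageSpec;

// Lists the published versions of a package on a custom HTTP registry.
fn list_versions() -> Result<(), EcoString> {
    let spec = VersionlessPackageSpec {
        namespace: "https:packages.example.org:myspace".into(),
        name: "mypkg".into(),
    };
    let downloader = HttpDownloader::new(HttpDownloader::default_user_agent());
    for info in downloader.download_index(&spec)? {
        if info.name == spec.name {
            println!("{} {}", info.name, info.version);
        }
    }
    Ok(())
}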

View File

@ -0,0 +1,203 @@
//! This module provides the package downloader abstraction needed
//! for remote package handling.
//!
//! # Content
//!
//! ## Traits
//! The [PackageDownloader] trait provides the abstraction needed to implement
//! multiple download method handlers.
//! Each implementation must support downloading a package to the local filesystem and should
//! provide a way to download the repository index, if one exists.
//!
//! The [Progress] trait allows for the implementation of a progress reporting struct.
//!
//! ## Module
//! [http] contains functionality for making simple web requests with status reporting,
//! useful for downloading packages from package registries.
//! It is enabled by the `downloads_http` feature flag.
//! Additionally, the `vendor-openssl` feature can be used on operating systems other than macOS
//! and Windows to vendor OpenSSL when building.
//!
//! [git] contains functionality for handling package downloads through git repositories.
use ecow::{eco_format, EcoString};
use std::collections::VecDeque;
use std::fmt::Debug;
use std::path::{Path, PathBuf};
use std::time::Instant;
use typst_library::diag::{PackageError, PackageResult};
use typst_syntax::package::{PackageInfo, PackageSpec, VersionlessPackageSpec};
/// The public namespace in the default Typst registry.
pub const DEFAULT_NAMESPACE: &str = "preview";
/*========BEGIN DOWNLOAD METHODS DECLARATION=========*/
#[cfg(feature = "downloads_http")]
pub mod http;
#[cfg(feature = "downloads_git")]
pub mod git;
/*========END DOWNLOAD METHODS DECLARATION===========*/
/// Trait abstraction for a package downloader.
pub trait PackageDownloader: Debug + Sync + Send {
/// Downloads the repository index and returns the
/// list of PackageInfo elements contained in it.
fn download_index(
&self,
spec: &VersionlessPackageSpec,
) -> Result<Vec<PackageInfo>, EcoString>;
/// Downloads a package from a remote repository/registry
/// and writes it to the file system cache directory.
fn download(
&self,
spec: &PackageSpec,
package_dir: &Path,
progress: &mut dyn Progress,
) -> PackageResult<()>;
}
/// The current state of an in progress or finished download.
#[derive(Debug)]
pub struct DownloadState {
/// The expected amount of bytes to download, `None` if the response header
/// was not set.
pub content_len: Option<usize>,
/// The total amount of downloaded bytes until now.
pub total_downloaded: usize,
/// A backlog of the amount of downloaded bytes each second.
pub bytes_per_second: VecDeque<usize>,
/// The download starting instant.
pub start_time: Instant,
}
/// Manages progress reporting for downloads.
pub trait Progress {
/// Invoked when a download is started.
fn print_start(&mut self);
/// Invoked repeatedly while a download is ongoing.
fn print_progress(&mut self, state: &DownloadState);
/// Invoked when a download is finished.
fn print_finish(&mut self, state: &DownloadState);
}
/// The downloader object used for downloading packages
#[derive(Debug)]
pub struct Downloader {
/// List of all available downloaders, which can be instantiated at runtime.
http_downloader: Option<Box<dyn PackageDownloader>>,
git_downloader: Option<Box<dyn PackageDownloader>>,
}
impl Downloader {
/// Constructs the Downloader object, instantiating all available download methods.
/// The methods can be selected at compile time via feature flags.
pub fn new(cert: Option<PathBuf>) -> Self {
Self {
http_downloader: Self::make_http_downloader(cert.clone()),
git_downloader: Self::make_git_downloader(cert),
}
}
/// Creation function for the HTTP(S) download method
fn make_http_downloader(cert: Option<PathBuf>) -> Option<Box<dyn PackageDownloader>> {
#[cfg(not(feature = "downloads_http"))]
{
None
}
#[cfg(feature = "downloads_http")]
{
match cert {
Some(cert_path) => Some(Box::new(http::HttpDownloader::with_path(
http::HttpDownloader::default_user_agent(),
cert_path,
))),
None => Some(Box::new(http::HttpDownloader::new(
http::HttpDownloader::default_user_agent(),
))),
}
}
}
fn get_http_downloader(&self) -> Result<&dyn PackageDownloader, PackageError> {
let reference = self.http_downloader.as_ref().ok_or_else(|| {
PackageError::Other(Some(EcoString::from(
"Http downloader has not been initialized correctly",
)))
})?;
Ok(&**reference)
}
/// Creation function for the GIT clone method
fn make_git_downloader(_cert: Option<PathBuf>) -> Option<Box<dyn PackageDownloader>> {
#[cfg(not(feature = "downloads_git"))]
{
None
}
#[cfg(feature = "downloads_git")]
{
Some(Box::new(git::GitDownloader::new()))
}
}
fn get_git_downloader(&self) -> Result<&dyn PackageDownloader, PackageError> {
let reference = self.git_downloader.as_ref().ok_or_else(|| {
PackageError::Other(Some(EcoString::from(
"Http downloader has not been initialized correctly",
)))
})?;
Ok(&**reference)
}
/// Returns the correct downloader based on the package namespace.
/// The remote location of a package is encoded in its namespace in the form
/// @<source type>:<source path>
///
/// It is the downloader implementation's job to parse the source path into any further substructure.
///
/// NOTE: @preview is treated as a special case of the https downloader.
fn get_downloader(&self, ns: &str) -> Result<&dyn PackageDownloader, PackageError> {
let download_type = ns.split(":").next();
match download_type {
#[cfg(feature = "downloads_http")]
Some("http") | Some("https") | Some("preview") => self.get_http_downloader(),
#[cfg(feature = "downloads_git")]
Some("git") => self.get_git_downloader(),
Some(dwld) => Err(PackageError::Other(Some(eco_format!(
"Unknown downloader type: {}",
dwld
)))),
None => Err(PackageError::Other(Some(EcoString::from(
"No downloader type specified",
)))),
}
}
}
impl PackageDownloader for Downloader {
fn download_index(
&self,
spec: &VersionlessPackageSpec,
) -> Result<Vec<PackageInfo>, EcoString> {
let downloader = self.get_downloader(spec.namespace.as_str())?;
downloader.download_index(spec)
}
fn download(
&self,
spec: &PackageSpec,
package_dir: &Path,
progress: &mut dyn Progress,
) -> PackageResult<()> {
let downloader = self.get_downloader(spec.namespace.as_str())?;
downloader.download(spec, package_dir, progress)
}
}
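
To summarize the dispatch, a minimal sketch (the namespace strings and the cli_downloader helper are illustrative, not part of the diff) of how the facade picks a backend from the namespace prefix.

use std::path::PathBuf;
use typst_kit::package_downloads::Downloader;

//   "preview"                        -> HTTP backend, default registry packages.typst.org
//   "https:packages.example.org:foo" -> HTTP backend, custom registry
//   "git:git@github.com:someuser"    -> git backend, shallow clone of a version tag
//   anything else                    -> PackageError::Other("Unknown downloader type: ...")
fn cli_downloader(cert: Option<PathBuf>) -> Downloader {
    // Mirrors the CLI's `download::downloader()`: the optional certificate
    // path is passed straight through; the backend is chosen per request.
    Downloader::new(cert)
}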

View File

@ -263,15 +263,37 @@ impl Display for VersionlessPackageSpec {
}
}
fn is_namespace_valid(namespace: &str) -> bool {
if is_ident(namespace) {
// Standard namespace.
return true;
}
// If not an ident, the namespace should be of the form @<package_remote_type>:<package_path>.
let mut tokenized = namespace.splitn(2, ":");
// Package type.
let package_remote_type = tokenized.next();
if package_remote_type.is_none() || !is_ident(package_remote_type.unwrap()) {
return false;
}
// Parsing of the package path is left to the downloader implementation.
true
}
fn parse_namespace<'s>(s: &mut Scanner<'s>) -> Result<&'s str, EcoString> {
if !s.eat_if('@') {
Err("package specification must start with '@'")?;
}
// TODO: allow multiple slashes in the namespace by eating until the last slash.
let namespace = s.eat_until('/');
if namespace.is_empty() {
Err("package specification is missing namespace")?;
} else if !is_ident(namespace) {
}
if !is_namespace_valid(namespace) {
Err(eco_format!("`{namespace}` is not a valid package namespace"))?;
}