mirror of https://github.com/stelzo/typst.git
Compare commits
29 Commits
| Author | SHA1 | Date |
|---|---|---|
| | e204a6cab1 | |
| | 269fa46eb3 | |
| | 7a02240264 | |
| | 8ace67d942 | |
| | 81e9bc7c8f | |
| | 381ff0cc2c | |
| | 393be881f8 | |
| | 74826fc6ec | |
| | fe94b2b54f | |
| | e0074dfc01 | |
| | d97967dd40 | |
| | 9c41234574 | |
| | 59569cbf61 | |
| | d04f014fc6 | |
| | a935bf8fbb | |
| | e29ea242bc | |
| | c8e838036b | |
| | a87b426e66 | |
| | 476b79ddfc | |
| | 9256871d62 | |
| | ddec8feab3 | |
| | 3962be8ebf | |
| | a295495dc5 | |
| | 236cb0884f | |
| | d687d23e5e | |
| | b5689cfc72 | |
| | 0a1df1ee67 | |
| | 3436f825f2 | |
| | ec3bc7dd7e | |
@@ -73,7 +73,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@1.80.0
+      - uses: dtolnay/rust-toolchain@1.81.0
       - uses: Swatinem/rust-cache@v2
      - run: cargo check --workspace
File diff suppressed because it is too large

Cargo.toml (41)
@@ -4,8 +4,8 @@ default-members = ["crates/typst-cli"]
 resolver = "2"
 
 [workspace.package]
-version = "0.13.0"
-rust-version = "1.80" # also change in ci.yml
+version = "0.13.1"
+rust-version = "1.81" # also change in ci.yml
 authors = ["The Typst Project Developers"]
 edition = "2021"
 homepage = "https://typst.app"

@@ -16,24 +16,24 @@ keywords = ["typst"]
 readme = "README.md"
 
 [workspace.dependencies]
-typst = { path = "crates/typst", version = "0.13.0" }
-typst-cli = { path = "crates/typst-cli", version = "0.13.0" }
-typst-eval = { path = "crates/typst-eval", version = "0.13.0" }
-typst-html = { path = "crates/typst-html", version = "0.13.0" }
-typst-ide = { path = "crates/typst-ide", version = "0.13.0" }
-typst-kit = { path = "crates/typst-kit", version = "0.13.0" }
-typst-layout = { path = "crates/typst-layout", version = "0.13.0" }
-typst-library = { path = "crates/typst-library", version = "0.13.0" }
-typst-macros = { path = "crates/typst-macros", version = "0.13.0" }
-typst-pdf = { path = "crates/typst-pdf", version = "0.13.0" }
-typst-realize = { path = "crates/typst-realize", version = "0.13.0" }
-typst-render = { path = "crates/typst-render", version = "0.13.0" }
-typst-svg = { path = "crates/typst-svg", version = "0.13.0" }
-typst-syntax = { path = "crates/typst-syntax", version = "0.13.0" }
-typst-timing = { path = "crates/typst-timing", version = "0.13.0" }
-typst-utils = { path = "crates/typst-utils", version = "0.13.0" }
-typst-assets = "0.13.0"
-typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", tag = "v0.13.0" }
+typst = { path = "crates/typst", version = "0.13.1" }
+typst-cli = { path = "crates/typst-cli", version = "0.13.1" }
+typst-eval = { path = "crates/typst-eval", version = "0.13.1" }
+typst-html = { path = "crates/typst-html", version = "0.13.1" }
+typst-ide = { path = "crates/typst-ide", version = "0.13.1" }
+typst-kit = { path = "crates/typst-kit", version = "0.13.1" }
+typst-layout = { path = "crates/typst-layout", version = "0.13.1" }
+typst-library = { path = "crates/typst-library", version = "0.13.1" }
+typst-macros = { path = "crates/typst-macros", version = "0.13.1" }
+typst-pdf = { path = "crates/typst-pdf", version = "0.13.1" }
+typst-realize = { path = "crates/typst-realize", version = "0.13.1" }
+typst-render = { path = "crates/typst-render", version = "0.13.1" }
+typst-svg = { path = "crates/typst-svg", version = "0.13.1" }
+typst-syntax = { path = "crates/typst-syntax", version = "0.13.1" }
+typst-timing = { path = "crates/typst-timing", version = "0.13.1" }
+typst-utils = { path = "crates/typst-utils", version = "0.13.1" }
+typst-assets = "0.13.1"
+typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", tag = "v0.13.1" }
 arrayvec = "0.7.4"
 az = "1.2"
 base64 = "0.22"

@@ -58,6 +58,7 @@ env_proxy = "0.4"
 flate2 = "1"
 fontdb = { version = "0.21", default-features = false }
 fs_extra = "1.3"
+gix = "0.68.0"
 hayagriva = "0.8.1"
 heck = "0.5"
 hypher = "0.1.4"
@@ -21,7 +21,7 @@ doc = false
 typst = { workspace = true }
 typst-eval = { workspace = true }
 typst-html = { workspace = true }
-typst-kit = { workspace = true }
+typst-kit = { workspace = true, features = ["downloads_http"] }
 typst-macros = { workspace = true }
 typst-pdf = { workspace = true }
 typst-render = { workspace = true }
@@ -75,6 +75,9 @@ pub enum Command
     /// Processes an input file to extract provided metadata.
     Query(QueryCommand),
 
+    /// Create a vendor directory with all used packages.
+    Vendor(VendorCommand),
+
     /// Lists all discovered fonts in system and custom font paths.
     Fonts(FontsCommand),
 
@@ -160,6 +163,22 @@ pub struct QueryCommand
     pub process: ProcessArgs,
 }
 
+/// Create a vendor directory with all used packages in the current directory.
+#[derive(Debug, Clone, Parser)]
+pub struct VendorCommand {
+    /// Path to input Typst file. Use `-` to read input from stdin.
+    #[clap(value_parser = input_value_parser(), value_hint = ValueHint::FilePath)]
+    pub input: Input,
+
+    /// World arguments.
+    #[clap(flatten)]
+    pub world: WorldArgs,
+
+    /// Processing arguments.
+    #[clap(flatten)]
+    pub process: ProcessArgs,
+}
+
 /// Lists all discovered fonts in system and custom font paths.
 #[derive(Debug, Clone, Parser)]
 pub struct FontsCommand {
@@ -342,6 +361,14 @@ pub struct PackageArgs
         value_name = "DIR"
     )]
     pub package_cache_path: Option<PathBuf>,
+
+    /// Custom vendor directory name.
+    #[clap(
+        long = "package-vendor-path",
+        env = "TYPST_PACKAGE_VENDOR_PATH",
+        value_name = "DIR"
+    )]
+    pub vendor_path: Option<PathBuf>,
 }
 
 /// Common arguments to customize available fonts.
@@ -6,7 +6,7 @@ use std::time::{Duration, Instant};
 use codespan_reporting::term;
 use codespan_reporting::term::termcolor::WriteColor;
 use typst::utils::format_duration;
-use typst_kit::download::{DownloadState, Downloader, Progress};
+use typst_kit::package_downloads::{DownloadState, Downloader, Progress};
 
 use crate::terminal::{self, TermOut};
 use crate::ARGS;
@@ -43,11 +43,7 @@ impl<T: Display> Progress for PrintDownload<T> {
 
 /// Returns a new downloader.
 pub fn downloader() -> Downloader {
-    let user_agent = concat!("typst/", env!("CARGO_PKG_VERSION"));
-    match ARGS.cert.clone() {
-        Some(cert) => Downloader::with_path(user_agent, cert),
-        None => Downloader::new(user_agent),
-    }
+    Downloader::new(ARGS.cert.clone())
 }
 
 /// Compile and format several download statistics and make and attempt at
@@ -15,7 +15,7 @@ use crate::package;
 
 /// Execute an initialization command.
 pub fn init(command: &InitCommand) -> StrResult<()> {
-    let package_storage = package::storage(&command.package);
+    let package_storage = package::storage(&command.package, None);
 
     // Parse the package specification. If the user didn't specify the version,
     // we try to figure it out automatically by downloading the package index
@@ -12,6 +12,7 @@ mod terminal;
 mod timings;
 #[cfg(feature = "self-update")]
 mod update;
+mod vendor;
 mod watch;
 mod world;
 
@@ -69,6 +70,7 @@ fn dispatch() -> HintedStrResult<()> {
         Command::Watch(command) => crate::watch::watch(&mut timer, command)?,
         Command::Init(command) => crate::init::init(command)?,
         Command::Query(command) => crate::query::query(command)?,
+        Command::Vendor(command) => crate::vendor::vendor(command)?,
         Command::Fonts(command) => crate::fonts::fonts(command),
         Command::Update(command) => crate::update::update(command)?,
     }
@@ -1,13 +1,17 @@
+use std::path::PathBuf;
+
 use typst_kit::package::PackageStorage;
 
 use crate::args::PackageArgs;
 use crate::download;
 
 /// Returns a new package storage for the given args.
-pub fn storage(args: &PackageArgs) -> PackageStorage {
+pub fn storage(args: &PackageArgs, workdir: Option<PathBuf>) -> PackageStorage {
     PackageStorage::new(
+        args.vendor_path.clone(),
         args.package_cache_path.clone(),
         args.package_path.clone(),
         download::downloader(),
+        workdir,
     )
 }
@@ -7,12 +7,12 @@ use semver::Version;
 use serde::Deserialize;
 use tempfile::NamedTempFile;
 use typst::diag::{bail, StrResult};
-use typst_kit::download::Downloader;
+use typst_kit::package_downloads::http::HttpDownloader;
 use xz2::bufread::XzDecoder;
 use zip::ZipArchive;
 
 use crate::args::UpdateCommand;
-use crate::download::{self, PrintDownload};
+use crate::download::PrintDownload;
 
 const TYPST_GITHUB_ORG: &str = "typst";
 const TYPST_REPO: &str = "typst";
@@ -91,7 +91,8 @@ pub fn update(command: &UpdateCommand) -> StrResult<()> {
     fs::copy(current_exe, &backup_path)
         .map_err(|err| eco_format!("failed to create backup ({err})"))?;
 
-    let downloader = download::downloader();
+    // no certificate is needed to download from GitHub
+    let downloader = HttpDownloader::new(HttpDownloader::default_user_agent());
 
     let release = Release::from_tag(command.version.as_ref(), &downloader)?;
     if !update_needed(&release)? && !command.force {
@@ -133,7 +134,7 @@ impl Release
     /// Typst repository.
     pub fn from_tag(
         tag: Option<&Version>,
-        downloader: &Downloader,
+        downloader: &HttpDownloader,
     ) -> StrResult<Release> {
         let url = match tag {
             Some(tag) => format!(
@@ -144,7 +145,7 @@ impl Release
             ),
         };
 
-        match downloader.download(&url) {
+        match downloader.perform_download(&url) {
             Ok(response) => response.into_json().map_err(|err| {
                 eco_format!("failed to parse release information ({err})")
             }),
@@ -161,7 +162,7 @@ impl Release
     pub fn download_binary(
         &self,
         asset_name: &str,
-        downloader: &Downloader,
+        downloader: &HttpDownloader,
     ) -> StrResult<Vec<u8>> {
         let asset = self.assets.iter().find(|a| a.name.starts_with(asset_name)).ok_or(
             eco_format!(
@@ -0,0 +1,109 @@
+use std::{
+    fs::{create_dir, create_dir_all},
+    path::PathBuf,
+};
+
+use ecow::eco_format;
+use typst::{
+    diag::{bail, HintedStrResult, Warned},
+    layout::PagedDocument,
+};
+use typst_kit::package::{DEFAULT_PACKAGES_SUBDIR, DEFAULT_VENDOR_SUBDIR};
+
+use crate::{
+    args::VendorCommand, compile::print_diagnostics, set_failed, world::SystemWorld,
+};
+use typst::World;
+
+/// Execute a vendor command.
+pub fn vendor(command: &VendorCommand) -> HintedStrResult<()> {
+    let mut world = SystemWorld::new(&command.input, &command.world, &command.process)?;
+
+    // Reset everything and ensure that the main file is present.
+    world.reset();
+    world.source(world.main()).map_err(|err| err.to_string())?;
+
+    let Warned { output, warnings } = typst::compile::<PagedDocument>(&world);
+
+    match output {
+        Ok(_) => {
+            copy_deps(&mut world, &command.world.package.vendor_path)?;
+            print_diagnostics(&world, &[], &warnings, command.process.diagnostic_format)
+                .map_err(|err| eco_format!("failed to print diagnostics ({err})"))?;
+        }
+
+        // Print diagnostics.
+        Err(errors) => {
+            set_failed();
+            print_diagnostics(
+                &world,
+                &errors,
+                &warnings,
+                command.process.diagnostic_format,
+            )
+            .map_err(|err| eco_format!("failed to print diagnostics ({err})"))?;
+        }
+    }
+
+    Ok(())
+}
+
+fn copy_deps(
+    world: &mut SystemWorld,
+    vendor_path: &Option<PathBuf>,
+) -> HintedStrResult<()> {
+    let vendor_dir = match vendor_path {
+        Some(path) => match path.canonicalize() {
+            Ok(path) => path,
+            Err(err) => {
+                if err.kind() == std::io::ErrorKind::NotFound {
+                    if let Err(err) = create_dir(path) {
+                        bail!("failed to create vendor directory: {:?}", err);
+                    }
+                    path.clone()
+                } else {
+                    bail!("failed to canonicalize vendor directory path: {:?}", err);
+                }
+            }
+        },
+        None => world.workdir().join(DEFAULT_VENDOR_SUBDIR),
+    };
+
+    // Must iterate two times in total. As soon as the parent directory is created,
+    // world tries to read the subsequent files from the same package
+    // from the vendor directory since it is higher priority.
+    let all_deps = world
+        .dependencies()
+        .filter_map(|dep_path| {
+            let path = dep_path.to_str().unwrap();
+            path.find(DEFAULT_PACKAGES_SUBDIR).map(|pos| {
+                let dependency_path = &path[pos + DEFAULT_PACKAGES_SUBDIR.len() + 1..];
+                (dep_path.clone(), vendor_dir.join(dependency_path))
+            })
+        })
+        .collect::<Vec<_>>();
+
+    for (from_data_path, to_vendor_path) in all_deps {
+        if let Some(parent) = to_vendor_path.parent() {
+            match parent.try_exists() {
+                Ok(false) => {
+                    if let Err(err) = create_dir_all(parent) {
+                        bail!(
+                            "failed to create package inside the vendor directory: {:?}",
+                            err
+                        );
+                    }
+                }
+                Err(err) => {
+                    bail!("failed to check existence of a package inside the vendor directory: {:?}", err);
+                }
+                _ => {}
+            }
+        }
+
+        if let Err(err) = std::fs::copy(from_data_path, to_vendor_path) {
+            bail!("failed to copy dependency to vendor directory: {:?}", err);
+        }
+    }
+    Ok(())
+}
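For illustration (not part of the diff): the heart of `copy_deps` above is the path rewrite that re-roots everything after the `typst/packages` segment of a cached dependency path under the vendor directory. A minimal standalone sketch of that mapping, using only `std` and a hypothetical cache path:

```rust
use std::path::{Path, PathBuf};

/// Re-roots a cached dependency path under the vendor directory, mirroring the
/// slicing done in `copy_deps` (everything after "typst/packages/" is kept).
fn vendor_target(dep_path: &Path, vendor_dir: &Path) -> Option<PathBuf> {
    let marker = "typst/packages";
    let path = dep_path.to_str()?;
    let pos = path.find(marker)?;
    // Skip the marker itself plus the following path separator.
    let relative = &path[pos + marker.len() + 1..];
    Some(vendor_dir.join(relative))
}

fn main() {
    // Hypothetical cache location, for illustration only.
    let dep = Path::new("/home/user/.cache/typst/packages/preview/example/0.1.0/lib.typ");
    let out = vendor_target(dep, Path::new("vendor")).unwrap();
    assert_eq!(out, PathBuf::from("vendor/preview/example/0.1.0/lib.typ"));
    println!("{}", out.display());
}
```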
@@ -29,7 +29,7 @@ static STDIN_ID: LazyLock<FileId> =
 /// A world that provides access to the operating system.
 pub struct SystemWorld {
     /// The working directory.
-    workdir: Option<PathBuf>,
+    workdir: PathBuf,
     /// The root relative to which absolute paths are resolved.
     root: PathBuf,
     /// The input path.
@@ -132,15 +132,18 @@ impl SystemWorld
             None => Now::System(OnceLock::new()),
         };
 
+        let env_workdir = std::env::current_dir().ok();
+        let workdir = env_workdir.unwrap_or(PathBuf::from("."));
+
         Ok(Self {
-            workdir: std::env::current_dir().ok(),
+            workdir: workdir.clone(),
             root,
             main,
             library: LazyHash::new(library),
             book: LazyHash::new(fonts.book),
             fonts: fonts.fonts,
             slots: Mutex::new(HashMap::new()),
-            package_storage: package::storage(&world_args.package),
+            package_storage: package::storage(&world_args.package, Some(workdir)),
             now,
         })
     }
@@ -157,7 +160,7 @@ impl SystemWorld
 
     /// The current working directory.
     pub fn workdir(&self) -> &Path {
-        self.workdir.as_deref().unwrap_or(Path::new("."))
+        self.workdir.as_path()
     }
 
     /// Return all paths the last compilation depended on.
@@ -27,6 +27,7 @@ serde = { workspace = true }
 serde_json = { workspace = true }
 tar = { workspace = true, optional = true }
 ureq = { workspace = true, optional = true }
+gix = { workspace = true, optional = true, features = ["worktree-mutation", "blocking-network-client"] }
 
 # Explicitly depend on OpenSSL if applicable, so that we can add the
 # `openssl/vendored` feature to it if `vendor-openssl` is enabled.
@@ -40,7 +41,9 @@ default = ["fonts", "packages"]
 fonts = ["dep:fontdb", "fontdb/memmap", "fontdb/fontconfig"]
 
 # Add generic downloading utilities
-downloads = ["dep:env_proxy", "dep:native-tls", "dep:ureq", "dep:openssl"]
+downloads = ["downloads_http", "downloads_git"]
+downloads_http = ["dep:env_proxy", "dep:native-tls", "dep:ureq", "dep:openssl"]
+downloads_git = ["gix"]
 
 # Add package downloading utilities, implies `downloads`
 packages = ["downloads", "dep:dirs", "dep:flate2", "dep:tar"]
@@ -10,18 +10,15 @@
 //! - For text: Libertinus Serif, New Computer Modern
 //! - For math: New Computer Modern Math
 //! - For code: Deja Vu Sans Mono
-//! - [download] contains functionality for making simple web requests with
-//!   status reporting, useful for downloading packages from package registries.
-//!   It is enabled by the `downloads` feature flag, additionally the
-//!   `vendor-openssl` can be used on operating systems other than macOS and
-//!   Windows to vendor OpenSSL when building.
+//! - [package_downloads] contains functionality for handling package downloading
+//!   It is enabled by the `downloads` feature flag.
 //! - [package] contains package storage and downloading functionality based on
-//!   [download]. It is enabled by the `packages` feature flag and implies the
+//!   [package_downloads]. It is enabled by the `packages` feature flag and implies the
 //!   `downloads` feature flag.
 
-#[cfg(feature = "downloads")]
-pub mod download;
 #[cfg(feature = "fonts")]
 pub mod fonts;
 #[cfg(feature = "packages")]
 pub mod package;
+#[cfg(feature = "downloads")]
+pub mod package_downloads;
@@ -1,29 +1,30 @@
 //! Download and unpack packages and package indices.
 
-use std::fs;
 use std::path::{Path, PathBuf};
 
+use crate::package_downloads::{Downloader, PackageDownloader, Progress};
 use ecow::eco_format;
 use once_cell::sync::OnceCell;
-use serde::Deserialize;
-use typst_library::diag::{bail, PackageError, PackageResult, StrResult};
-use typst_syntax::package::{PackageSpec, PackageVersion, VersionlessPackageSpec};
-
-use crate::download::{Downloader, Progress};
-
-/// The default Typst registry.
-pub const DEFAULT_REGISTRY: &str = "https://packages.typst.org";
-
-/// The public namespace in the default Typst registry.
-pub const DEFAULT_NAMESPACE: &str = "preview";
+use typst_library::diag::{PackageError, PackageResult, StrResult};
+use typst_syntax::package::{
+    PackageInfo, PackageSpec, PackageVersion, VersionlessPackageSpec,
+};
 
 /// The default packages sub directory within the package and package cache paths.
 pub const DEFAULT_PACKAGES_SUBDIR: &str = "typst/packages";
 
+/// The default vendor sub directory within the project root.
+pub const DEFAULT_VENDOR_SUBDIR: &str = "vendor";
+
+/// The public namespace in the default Typst registry.
+pub const DEFAULT_NAMESPACE: &str = "preview";
+
 /// Holds information about where packages should be stored and downloads them
 /// on demand, if possible.
 #[derive(Debug)]
 pub struct PackageStorage {
+    /// The path at which packages are stored by the vendor command.
+    package_vendor_path: Option<PathBuf>,
     /// The path at which non-local packages should be stored when downloaded.
     package_cache_path: Option<PathBuf>,
     /// The path at which local packages are stored.
@@ -31,30 +32,22 @@ pub struct PackageStorage {
     /// The downloader used for fetching the index and packages.
     downloader: Downloader,
     /// The cached index of the default namespace.
-    index: OnceCell<Vec<serde_json::Value>>,
+    index: OnceCell<Vec<PackageInfo>>,
 }
 
 impl PackageStorage {
     /// Creates a new package storage for the given package paths. Falls back to
     /// the recommended XDG directories if they are `None`.
     pub fn new(
+        package_vendor_path: Option<PathBuf>,
         package_cache_path: Option<PathBuf>,
         package_path: Option<PathBuf>,
         downloader: Downloader,
-    ) -> Self {
-        Self::with_index(package_cache_path, package_path, downloader, OnceCell::new())
-    }
-
-    /// Creates a new package storage with a pre-defined index.
-    ///
-    /// Useful for testing.
-    fn with_index(
-        package_cache_path: Option<PathBuf>,
-        package_path: Option<PathBuf>,
-        downloader: Downloader,
-        index: OnceCell<Vec<serde_json::Value>>,
+        workdir: Option<PathBuf>,
     ) -> Self {
         Self {
+            package_vendor_path: package_vendor_path
+                .or_else(|| workdir.map(|workdir| workdir.join(DEFAULT_VENDOR_SUBDIR))),
             package_cache_path: package_cache_path.or_else(|| {
                 dirs::cache_dir().map(|cache_dir| cache_dir.join(DEFAULT_PACKAGES_SUBDIR))
             }),
@@ -62,7 +55,7 @@ impl PackageStorage
                 dirs::data_dir().map(|data_dir| data_dir.join(DEFAULT_PACKAGES_SUBDIR))
             }),
             downloader,
-            index,
+            index: OnceCell::new(),
         }
     }
 
@@ -85,27 +78,39 @@ impl PackageStorage
     ) -> PackageResult<PathBuf> {
         let subdir = format!("{}/{}/{}", spec.namespace, spec.name, spec.version);
 
+        // Read from vendor dir if it exists.
+        if let Some(vendor_dir) = &self.package_vendor_path {
+            if let Ok(true) = vendor_dir.try_exists() {
+                let dir = vendor_dir.join(&subdir);
+                if dir.exists() {
+                    return Ok(dir);
+                }
+            }
+        }
+
+        // check the package_path for the package directory.
         if let Some(packages_dir) = &self.package_path {
             let dir = packages_dir.join(&subdir);
             if dir.exists() {
+                // no need to download, already in the path.
                 return Ok(dir);
             }
         }
 
+        // package was not in the package_path. check if it has been cached
         if let Some(cache_dir) = &self.package_cache_path {
             let dir = cache_dir.join(&subdir);
             if dir.exists() {
+                // package was cached, so return the cached directory
                 return Ok(dir);
            }
 
             // Download from network if it doesn't exist yet.
-            if spec.namespace == DEFAULT_NAMESPACE {
-                self.download_package(spec, &dir, progress)?;
-                if dir.exists() {
-                    return Ok(dir);
-                }
+            self.download_package(spec, &dir, progress)?;
+            if dir.exists() {
+                return Ok(dir);
             }
-        }
         }
 
         Err(PackageError::NotFound(spec.clone()))
     }
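For illustration (not part of the diff): with this change, `prepare_package` resolves a package by checking the vendor directory first, then the local package path, then the cache, and only then attempts a download into the cache. A small sketch of that precedence, with hypothetical paths:

```rust
use std::path::{Path, PathBuf};

/// Order in which a package subdirectory such as "preview/example/0.1.0" is
/// looked up after this change; the first existing directory wins, otherwise
/// a download into the cache is attempted. Paths are illustrative only.
fn lookup_candidates(subdir: &str, vendor: &Path, local: &Path, cache: &Path) -> Vec<PathBuf> {
    vec![vendor.join(subdir), local.join(subdir), cache.join(subdir)]
}

fn main() {
    let subdir = "preview/example/0.1.0";
    let candidates = lookup_candidates(
        subdir,
        Path::new("vendor"),                            // vendor dir in the project root
        Path::new("/usr/share/typst/packages"),         // hypothetical package_path
        Path::new("/home/user/.cache/typst/packages"),  // hypothetical cache
    );
    for candidate in candidates {
        println!("would check {}", candidate.display());
    }
}
```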
@@ -115,48 +120,21 @@ impl PackageStorage
         &self,
         spec: &VersionlessPackageSpec,
     ) -> StrResult<PackageVersion> {
-        if spec.namespace == DEFAULT_NAMESPACE {
-            // For `DEFAULT_NAMESPACE`, download the package index and find the latest
-            // version.
-            self.download_index()?
+        self.download_index(spec)?
             .iter()
-            .filter_map(|value| MinimalPackageInfo::deserialize(value).ok())
             .filter(|package| package.name == spec.name)
             .map(|package| package.version)
             .max()
             .ok_or_else(|| eco_format!("failed to find package {spec}"))
-        } else {
-            // For other namespaces, search locally. We only search in the data
-            // directory and not the cache directory, because the latter is not
-            // intended for storage of local packages.
-            let subdir = format!("{}/{}", spec.namespace, spec.name);
-            self.package_path
-                .iter()
-                .flat_map(|dir| std::fs::read_dir(dir.join(&subdir)).ok())
-                .flatten()
-                .filter_map(|entry| entry.ok())
-                .map(|entry| entry.path())
-                .filter_map(|path| path.file_name()?.to_string_lossy().parse().ok())
-                .max()
-                .ok_or_else(|| eco_format!("please specify the desired version"))
-        }
     }
 
     /// Download the package index. The result of this is cached for efficiency.
-    pub fn download_index(&self) -> StrResult<&[serde_json::Value]> {
+    pub fn download_index(
+        &self,
+        spec: &VersionlessPackageSpec,
+    ) -> StrResult<&[PackageInfo]> {
         self.index
-            .get_or_try_init(|| {
-                let url = format!("{DEFAULT_REGISTRY}/{DEFAULT_NAMESPACE}/index.json");
-                match self.downloader.download(&url) {
-                    Ok(response) => response.into_json().map_err(|err| {
-                        eco_format!("failed to parse package index: {err}")
-                    }),
-                    Err(ureq::Error::Status(404, _)) => {
-                        bail!("failed to fetch package index (not found)")
-                    }
-                    Err(err) => bail!("failed to fetch package index ({err})"),
-                }
-            })
+            .get_or_try_init(|| self.downloader.download_index(spec))
             .map(AsRef::as_ref)
     }
@@ -170,82 +148,15 @@ impl PackageStorage
         package_dir: &Path,
         progress: &mut dyn Progress,
     ) -> PackageResult<()> {
-        assert_eq!(spec.namespace, DEFAULT_NAMESPACE);
-
-        let url = format!(
-            "{DEFAULT_REGISTRY}/{DEFAULT_NAMESPACE}/{}-{}.tar.gz",
-            spec.name, spec.version
-        );
-
-        let data = match self.downloader.download_with_progress(&url, progress) {
-            Ok(data) => data,
-            Err(ureq::Error::Status(404, _)) => {
+        match self.downloader.download(spec, package_dir, progress) {
+            Err(PackageError::NotFound(spec)) => {
                 if let Ok(version) = self.determine_latest_version(&spec.versionless()) {
-                    return Err(PackageError::VersionNotFound(spec.clone(), version));
+                    Err(PackageError::VersionNotFound(spec.clone(), version))
                 } else {
-                    return Err(PackageError::NotFound(spec.clone()));
+                    Err(PackageError::NotFound(spec.clone()))
                 }
             }
-            Err(err) => {
-                return Err(PackageError::NetworkFailed(Some(eco_format!("{err}"))))
-            }
-        };
-
-        let decompressed = flate2::read::GzDecoder::new(data.as_slice());
-        tar::Archive::new(decompressed).unpack(package_dir).map_err(|err| {
-            fs::remove_dir_all(package_dir).ok();
-            PackageError::MalformedArchive(Some(eco_format!("{err}")))
-        })
-    }
-}
-
-/// Minimal information required about a package to determine its latest
-/// version.
-#[derive(Deserialize)]
-struct MinimalPackageInfo {
-    name: String,
-    version: PackageVersion,
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn lazy_deser_index() {
-        let storage = PackageStorage::with_index(
-            None,
-            None,
-            Downloader::new("typst/test"),
-            OnceCell::with_value(vec![
-                serde_json::json!({
-                    "name": "charged-ieee",
-                    "version": "0.1.0",
-                    "entrypoint": "lib.typ",
-                }),
-                serde_json::json!({
-                    "name": "unequivocal-ams",
-                    // This version number is currently not valid, so this package
-                    // can't be parsed.
-                    "version": "0.2.0-dev",
-                    "entrypoint": "lib.typ",
-                }),
-            ]),
-        );
-
-        let ieee_version = storage.determine_latest_version(&VersionlessPackageSpec {
-            namespace: "preview".into(),
-            name: "charged-ieee".into(),
-        });
-        assert_eq!(ieee_version, Ok(PackageVersion { major: 0, minor: 1, patch: 0 }));
-
-        let ams_version = storage.determine_latest_version(&VersionlessPackageSpec {
-            namespace: "preview".into(),
-            name: "unequivocal-ams".into(),
-        });
-        assert_eq!(
-            ams_version,
-            Err("failed to find package @preview/unequivocal-ams".into())
-        )
-    }
-}
+            val => val,
+        }
+    }
+}
@@ -0,0 +1,116 @@
+use crate::package_downloads::{DownloadState, PackageDownloader, Progress};
+use ecow::{eco_format, EcoString};
+use gix::remote::fetch::Shallow;
+use std::fmt::Debug;
+use std::num::NonZero;
+use std::path::Path;
+use std::time::Instant;
+use typst_library::diag::{PackageError, PackageResult};
+use typst_syntax::package::{PackageInfo, PackageSpec, VersionlessPackageSpec};
+
+#[derive(Debug)]
+pub struct GitDownloader;
+
+impl Default for GitDownloader {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl GitDownloader {
+    pub fn new() -> Self {
+        Self {}
+    }
+
+    pub fn download_with_progress(
+        &self,
+        repo: &str,
+        tag: &str,
+        dest: &Path,
+        progress: &mut dyn Progress,
+    ) -> Result<(), EcoString> {
+        progress.print_start();
+        let state = DownloadState {
+            content_len: None,
+            total_downloaded: 0,
+            bytes_per_second: Default::default(),
+            start_time: Instant::now(),
+        };
+
+        std::fs::create_dir_all(dest).map_err(|x| eco_format!("{x}"))?;
+        let url = gix::url::parse(repo.into()).map_err(|x| eco_format!("{x}"))?;
+        let mut prepare_fetch =
+            gix::prepare_clone(url, dest).map_err(|x| eco_format!("{x}"))?;
+        prepare_fetch = prepare_fetch
+            .with_shallow(Shallow::DepthAtRemote(NonZero::new(1).unwrap()))
+            .with_ref_name(Some(tag))
+            .map_err(|x| eco_format!("{x}"))?;
+
+        let (mut prepare_checkout, _) = prepare_fetch
+            .fetch_then_checkout(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED)
+            .map_err(|x| eco_format!("{x}"))?;
+        if prepare_checkout.repo().work_dir().is_none() {
+            return Err(eco_format!(
+                "Cloned git repository but files are not available."
+            ))?;
+        }
+
+        prepare_checkout
+            .main_worktree(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED)
+            .map_err(|x| eco_format!("{x}"))?;
+        progress.print_finish(&state);
+        Ok(())
+    }
+
+    /// Parses the namespace of the package into the correct registry and namespace.
+    /// The namespace format is the following:
+    ///
+    /// @git:<git host and user>
+    ///
+    /// The final repository cloned will be formed by the git host and the repository name
+    /// with the adequate extension, checking out to the tag specified by the version in the format
+    /// v<major>.<minor>.<patch>
+    ///
+    /// For example, the package
+    /// @git:git@github.com:typst/package:0.0
+    /// will result in the cloning of the repository git@github.com:typst/package.git
+    /// and the checkout and detached head state at tag v0.1.0
+    ///
+    /// NOTE: no index download is possible.
+    fn parse_namespace(ns: &str, name: &str) -> Result<String, EcoString> {
+        let mut parts = ns.splitn(2, ":");
+        let schema =
+            parts.next().ok_or_else(|| eco_format!("expected schema in {}", ns))?;
+        let repo = parts
+            .next()
+            .ok_or_else(|| eco_format!("invalid package repo {}", ns))?;
+
+        if !schema.eq("git") {
+            Err(eco_format!("invalid schema in {}", ns))?
+        }
+
+        Ok(format!("{repo}/{name}.git"))
+    }
+}
+
+impl PackageDownloader for GitDownloader {
+    fn download_index(
+        &self,
+        _spec: &VersionlessPackageSpec,
+    ) -> Result<Vec<PackageInfo>, EcoString> {
+        Err(eco_format!("Downloading index is not supported for git repositories"))
+    }
+
+    fn download(
+        &self,
+        spec: &PackageSpec,
+        package_dir: &Path,
+        progress: &mut dyn Progress,
+    ) -> PackageResult<()> {
+        let repo = Self::parse_namespace(spec.namespace.as_str(), spec.name.as_str())
+            .map_err(|x| PackageError::Other(Some(x)))?;
+        let tag = format!("refs/tags/v{}", spec.version);
+        self.download_with_progress(repo.as_str(), tag.as_str(), package_dir, progress)
+            .map_err(|x| PackageError::Other(Some(x)))
+    }
+}
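For illustration (not part of the diff): a standalone re-implementation of the `@git:` namespace mapping described in the doc comment above, showing which repository URL and tag a hypothetical spec would resolve to:

```rust
/// Mirrors the parsing in `GitDownloader`: a namespace "git:<host and user>"
/// plus the package name becomes "<host and user>/<name>.git", and the version
/// becomes a "refs/tags/v<version>" ref.
fn git_target(ns: &str, name: &str, version: &str) -> Result<(String, String), String> {
    let mut parts = ns.splitn(2, ':');
    let schema = parts.next().ok_or_else(|| format!("expected schema in {ns}"))?;
    let repo = parts.next().ok_or_else(|| format!("invalid package repo {ns}"))?;
    if schema != "git" {
        return Err(format!("invalid schema in {ns}"));
    }
    Ok((format!("{repo}/{name}.git"), format!("refs/tags/v{version}")))
}

fn main() {
    // Hypothetical package spec `@git:git@github.com:typst/example:0.1.0`.
    let (repo, tag) = git_target("git:git@github.com:typst", "example", "0.1.0").unwrap();
    assert_eq!(repo, "git@github.com:typst/example.git");
    assert_eq!(tag, "refs/tags/v0.1.0");
    println!("clone {repo} at {tag}");
}
```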
@@ -7,27 +7,24 @@
 
 use std::collections::VecDeque;
 use std::fmt::Debug;
+use std::fs;
 use std::io::{self, ErrorKind, Read};
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use std::time::{Duration, Instant};
 
-use ecow::EcoString;
+use crate::package_downloads::{
+    DownloadState, PackageDownloader, Progress, DEFAULT_NAMESPACE,
+};
+use ecow::{eco_format, EcoString};
 use native_tls::{Certificate, TlsConnector};
 use once_cell::sync::OnceCell;
+use typst_library::diag::{bail, PackageError, PackageResult};
+use typst_syntax::package::{PackageInfo, PackageSpec, VersionlessPackageSpec};
 use ureq::Response;
 
-/// Manages progress reporting for downloads.
-pub trait Progress {
-    /// Invoked when a download is started.
-    fn print_start(&mut self);
-
-    /// Invoked repeatedly while a download is ongoing.
-    fn print_progress(&mut self, state: &DownloadState);
-
-    /// Invoked when a download is finished.
-    fn print_finish(&mut self, state: &DownloadState);
-}
+/// The default Typst registry.
+pub const DEFAULT_REGISTRY: &str = "https://packages.typst.org";
 
 /// An implementation of [`Progress`] with no-op reporting, i.e., reporting
 /// events are swallowed.
@@ -39,28 +36,18 @@ impl Progress for ProgressSink {
     fn print_finish(&mut self, _: &DownloadState) {}
 }
 
-/// The current state of an in progress or finished download.
-#[derive(Debug)]
-pub struct DownloadState {
-    /// The expected amount of bytes to download, `None` if the response header
-    /// was not set.
-    pub content_len: Option<usize>,
-    /// The total amount of downloaded bytes until now.
-    pub total_downloaded: usize,
-    /// A backlog of the amount of downloaded bytes each second.
-    pub bytes_per_second: VecDeque<usize>,
-    /// The download starting instant.
-    pub start_time: Instant,
-}
-
 /// A minimal https client for downloading various resources.
-pub struct Downloader {
+pub struct HttpDownloader {
     user_agent: EcoString,
     cert_path: Option<PathBuf>,
     cert: OnceCell<Certificate>,
 }
 
-impl Downloader {
+impl HttpDownloader {
+    pub fn default_user_agent() -> String {
+        format!("typst-kit/{}", env!("CARGO_PKG_VERSION"))
+    }
+
     /// Crates a new downloader with the given user agent and no certificate.
     pub fn new(user_agent: impl Into<EcoString>) -> Self {
         Self {
@@ -81,15 +68,6 @@ impl Downloader {
         }
     }
 
-    /// Crates a new downloader with the given user agent and certificate.
-    pub fn with_cert(user_agent: impl Into<EcoString>, cert: Certificate) -> Self {
-        Self {
-            user_agent: user_agent.into(),
-            cert_path: None,
-            cert: OnceCell::with_value(cert),
-        }
-    }
-
     /// Returns the certificate this client is using, if a custom certificate
     /// is used it is loaded on first access.
     ///
@@ -107,7 +85,7 @@ impl Downloader {
 
     /// Download binary data from the given url.
     #[allow(clippy::result_large_err)]
-    pub fn download(&self, url: &str) -> Result<ureq::Response, ureq::Error> {
+    pub fn perform_download(&self, url: &str) -> Result<ureq::Response, ureq::Error> {
         let mut builder = ureq::AgentBuilder::new();
         let mut tls = TlsConnector::builder();
@@ -143,12 +121,47 @@ impl Downloader {
         progress: &mut dyn Progress,
     ) -> Result<Vec<u8>, ureq::Error> {
         progress.print_start();
-        let response = self.download(url)?;
+        let response = self.perform_download(url)?;
         Ok(RemoteReader::from_response(response, progress).download()?)
     }
 
+    /// Parses the namespace of the package into the correct registry and namespace.
+    /// The namespace format is the following:
+    ///
+    /// @http[s]:<registry host>:<namespace>/package-name>:package-version
+    ///
+    /// resulting in the package location to be resolved as
+    /// http[s]://<registry host>/<namespace>/<package-name>-<package-version>.tar.gz
+    ///
+    /// and the index to be resolved as
+    /// http[s]://<registry host>/<namespace>/index.json
+    ///
+    /// NOTE: preview namespace is treated as the namespace formed as
+    /// @https:packages.typst.org:preview/package-name>:package-version
+    fn parse_namespace(ns: &str) -> Result<(String, String), EcoString> {
+        if ns.eq(DEFAULT_NAMESPACE) {
+            return Ok((DEFAULT_REGISTRY.to_string(), DEFAULT_NAMESPACE.to_string()));
+        }
+        let mut parts = ns.splitn(3, ":");
+
+        let schema =
+            parts.next().ok_or_else(|| eco_format!("expected schema in {}", ns))?;
+        let registry = parts
+            .next()
+            .ok_or_else(|| eco_format!("invalid package registry in namespace {}", ns))?;
+        let ns = parts
+            .next()
+            .ok_or_else(|| eco_format!("invalid package namespace in {}", ns))?;
+
+        if !schema.eq("http") && !schema.eq("https") {
+            Err(eco_format!("invalid schema in {}", ns))?
+        }
+
+        Ok((format!("{schema}://{registry}"), ns.to_string()))
+    }
 }
 
-impl Debug for Downloader {
+impl Debug for HttpDownloader {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("Downloader")
             .field("user_agent", &self.user_agent)
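For illustration (not part of the diff): a standalone re-implementation of the HTTP namespace parsing documented above, showing how a hypothetical custom registry namespace resolves to a registry URL and archive location:

```rust
/// Mirrors the parsing in `HttpDownloader`: "preview" keeps the default
/// registry, while "http(s):<host>:<namespace>" selects a custom one.
fn http_target(ns: &str) -> Result<(String, String), String> {
    if ns == "preview" {
        return Ok(("https://packages.typst.org".into(), "preview".into()));
    }
    let mut parts = ns.splitn(3, ':');
    let schema = parts.next().ok_or_else(|| format!("expected schema in {ns}"))?;
    let registry = parts.next().ok_or_else(|| format!("invalid registry in {ns}"))?;
    let namespace = parts.next().ok_or_else(|| format!("invalid namespace in {ns}"))?;
    if schema != "http" && schema != "https" {
        return Err(format!("invalid schema in {ns}"));
    }
    Ok((format!("{schema}://{registry}"), namespace.into()))
}

fn main() {
    // Hypothetical spec `@https:packages.example.org:internal/pkg:0.1.0`.
    let (registry, ns) = http_target("https:packages.example.org:internal").unwrap();
    assert_eq!(registry, "https://packages.example.org");
    assert_eq!(ns, "internal");
    // The archive would then be fetched from
    // https://packages.example.org/internal/pkg-0.1.0.tar.gz
    println!("{registry}/{ns}/pkg-0.1.0.tar.gz");
}
```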
@@ -257,3 +270,48 @@ impl<'p> RemoteReader<'p> {
         Ok(data)
     }
 }
+
+impl PackageDownloader for HttpDownloader {
+    fn download_index(
+        &self,
+        spec: &VersionlessPackageSpec,
+    ) -> Result<Vec<PackageInfo>, EcoString> {
+        let (registry, namespace) = Self::parse_namespace(spec.namespace.as_str())?;
+        let url = format!("{registry}/{namespace}/index.json");
+        match self.perform_download(&url) {
+            Ok(response) => response
+                .into_json()
+                .map_err(|err| eco_format!("failed to parse package index: {err}")),
+            Err(ureq::Error::Status(404, _)) => {
+                bail!("failed to fetch package index (not found)")
+            }
+            Err(err) => bail!("failed to fetch package index ({err})"),
+        }
+    }
+
+    fn download(
+        &self,
+        spec: &PackageSpec,
+        package_dir: &Path,
+        progress: &mut dyn Progress,
+    ) -> PackageResult<()> {
+        let (registry, namespace) = Self::parse_namespace(spec.namespace.as_str())
+            .map_err(|x| PackageError::Other(Some(x)))?;
+
+        let url =
+            format!("{}/{}/{}-{}.tar.gz", registry, namespace, spec.name, spec.version);
+        let data = match self.download_with_progress(&url, progress) {
+            Ok(data) => data,
+            Err(ureq::Error::Status(404, _)) => {
+                Err(PackageError::NotFound(spec.clone()))?
+            }
+            Err(err) => Err(PackageError::NetworkFailed(Some(eco_format!("{err}"))))?,
+        };
+
+        let decompressed = flate2::read::GzDecoder::new(data.as_slice());
+        tar::Archive::new(decompressed).unpack(package_dir).map_err(|err| {
+            fs::remove_dir_all(package_dir).ok();
+            PackageError::MalformedArchive(Some(eco_format!("{err}")))
+        })
+    }
+}
@ -0,0 +1,203 @@
|
||||||
|
//! This module provides the package downloader abstraction needed
|
||||||
|
//! for remote package handling.
|
||||||
|
//!
|
||||||
|
//! # Content
|
||||||
|
//!
|
||||||
|
//! ## Traits
|
||||||
|
//! The [PackageDownloader] trait provides the abstraction needed to implement
|
||||||
|
//! multiple download method handlers.
|
||||||
|
//! Each method must allow for a package download to the local filesystem and it should provide a
|
||||||
|
//! method for downloading the repository index if it exists.
|
||||||
|
//!
|
||||||
|
//! The [Progress] trait allows for the implementation of a progress reporting struct.
|
||||||
|
//!
|
||||||
|
//! ## Module
|
||||||
|
//! [http] contains functionality for making simple web requests with status reporting,
|
||||||
|
//! useful for downloading packages from package registries.
|
||||||
|
//! It is enabled by the `downloads_http` feature flag.
|
||||||
|
//! Additionally the `vendor-openssl` can be used on operating systems other than macOS
|
||||||
|
//! and Windows to vendor OpenSSL when building.
|
||||||
|
//!
|
||||||
|
//! [git] contains functionality for handling package downloads through git repositories.
|
||||||
|
|
||||||
|
use ecow::{eco_format, EcoString};
|
||||||
|
use std::collections::VecDeque;
|
||||||
|
use std::fmt::Debug;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::time::Instant;
|
||||||
|
use typst_library::diag::{PackageError, PackageResult};
|
||||||
|
use typst_syntax::package::{PackageInfo, PackageSpec, VersionlessPackageSpec};
|
||||||
|
|
||||||
|
/// The public namespace in the default Typst registry.
|
||||||
|
pub const DEFAULT_NAMESPACE: &str = "preview";
|
||||||
|
|
||||||
|
/*========BEGIN DOWNLOAD METHODS DECLARATION=========*/
|
||||||
|
#[cfg(feature = "downloads_http")]
|
||||||
|
pub mod http;
|
||||||
|
|
||||||
|
#[cfg(feature = "downloads_git")]
|
||||||
|
pub mod git;
|
/*========END DOWNLOAD METHODS DECLARATION===========*/

/// Trait abstraction for a package downloader.
pub trait PackageDownloader: Debug + Sync + Send {
    /// Downloads the repository index and returns the
    /// list of PackageInfo elements contained in it.
    fn download_index(
        &self,
        spec: &VersionlessPackageSpec,
    ) -> Result<Vec<PackageInfo>, EcoString>;

    /// Downloads a package from a remote repository/registry
    /// and writes it into the file system cache directory.
    fn download(
        &self,
        spec: &PackageSpec,
        package_dir: &Path,
        progress: &mut dyn Progress,
    ) -> PackageResult<()>;
}

/// The current state of an in-progress or finished download.
#[derive(Debug)]
pub struct DownloadState {
    /// The expected amount of bytes to download, `None` if the response header
    /// was not set.
    pub content_len: Option<usize>,
    /// The total amount of downloaded bytes until now.
    pub total_downloaded: usize,
    /// A backlog of the amount of downloaded bytes each second.
    pub bytes_per_second: VecDeque<usize>,
    /// The download starting instant.
    pub start_time: Instant,
}

/// Manages progress reporting for downloads.
pub trait Progress {
    /// Invoked when a download is started.
    fn print_start(&mut self);

    /// Invoked repeatedly while a download is ongoing.
    fn print_progress(&mut self, state: &DownloadState);

    /// Invoked when a download is finished.
    fn print_finish(&mut self, state: &DownloadState);
}

/// The downloader object used for downloading packages.
#[derive(Debug)]
pub struct Downloader {
    /// List of all available downloaders, which can be instantiated at runtime.
    http_downloader: Option<Box<dyn PackageDownloader>>,
    git_downloader: Option<Box<dyn PackageDownloader>>,
}

impl Downloader {
    /// Constructs the Downloader object, instantiating all the available methods.
    /// The methods can be selected at compile time via features.
    pub fn new(cert: Option<PathBuf>) -> Self {
        Self {
            http_downloader: Self::make_http_downloader(cert.clone()),
            git_downloader: Self::make_git_downloader(cert),
        }
    }

    /// Creation function for the HTTP(S) download method.
    fn make_http_downloader(cert: Option<PathBuf>) -> Option<Box<dyn PackageDownloader>> {
        #[cfg(not(feature = "downloads_http"))]
        {
            None
        }

        #[cfg(feature = "downloads_http")]
        {
            match cert {
                Some(cert_path) => Some(Box::new(http::HttpDownloader::with_path(
                    http::HttpDownloader::default_user_agent(),
                    cert_path,
                ))),
                None => Some(Box::new(http::HttpDownloader::new(
                    http::HttpDownloader::default_user_agent(),
                ))),
            }
        }
    }

    fn get_http_downloader(&self) -> Result<&dyn PackageDownloader, PackageError> {
        let reference = self.http_downloader.as_ref().ok_or_else(|| {
            PackageError::Other(Some(EcoString::from(
                "HTTP downloader has not been initialized correctly",
            )))
        })?;
        Ok(&**reference)
    }

    /// Creation function for the Git clone method.
    fn make_git_downloader(_cert: Option<PathBuf>) -> Option<Box<dyn PackageDownloader>> {
        #[cfg(not(feature = "downloads_git"))]
        {
            None
        }

        #[cfg(feature = "downloads_git")]
        {
            Some(Box::new(git::GitDownloader::new()))
        }
    }

    fn get_git_downloader(&self) -> Result<&dyn PackageDownloader, PackageError> {
        let reference = self.git_downloader.as_ref().ok_or_else(|| {
            PackageError::Other(Some(EcoString::from(
                "Git downloader has not been initialized correctly",
            )))
        })?;
        Ok(&**reference)
    }

    /// Returns the correct downloader based on the package namespace.
    /// The remote location of a package is encoded in its namespace in the form
    /// @<source type>:<source path>
    ///
    /// It's the downloader instance's job to parse the source path into any substructure.
    ///
    /// NOTE: @preview is treated as a special case of the HTTPS downloader.
    fn get_downloader(&self, ns: &str) -> Result<&dyn PackageDownloader, PackageError> {
        let download_type = ns.split(":").next();

        match download_type {
            #[cfg(feature = "downloads_http")]
            Some("http") | Some("https") | Some("preview") => self.get_http_downloader(),

            #[cfg(feature = "downloads_git")]
            Some("git") => self.get_git_downloader(),

            Some(dwld) => Err(PackageError::Other(Some(eco_format!(
                "Unknown downloader type: {}",
                dwld
            )))),
            None => Err(PackageError::Other(Some(EcoString::from(
                "No downloader type specified",
            )))),
        }
    }
}

impl PackageDownloader for Downloader {
    fn download_index(
        &self,
        spec: &VersionlessPackageSpec,
    ) -> Result<Vec<PackageInfo>, EcoString> {
        let downloader = self.get_downloader(spec.namespace.as_str())?;
        downloader.download_index(spec)
    }

    fn download(
        &self,
        spec: &PackageSpec,
        package_dir: &Path,
        progress: &mut dyn Progress,
    ) -> PackageResult<()> {
        let downloader = self.get_downloader(spec.namespace.as_str())?;
        downloader.download(spec, package_dir, progress)
    }
}
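For orientation, here is a minimal usage sketch of the API above. It is not part of the diff: `SilentProgress` is a hypothetical no-op `Progress` implementation, the spec string assumes `PackageSpec`'s usual `FromStr` parsing (with an `EcoString` error), and the surrounding module's imports are assumed to be in scope.

// Hypothetical no-op progress reporter, assumed only for this sketch.
#[derive(Debug)]
struct SilentProgress;

impl Progress for SilentProgress {
    fn print_start(&mut self) {}
    fn print_progress(&mut self, _state: &DownloadState) {}
    fn print_finish(&mut self, _state: &DownloadState) {}
}

fn fetch_example(dir: &Path) -> PackageResult<()> {
    // Instantiate whichever download methods were compiled in via feature flags.
    let downloader = Downloader::new(None);
    // "@preview" routes to the HTTP downloader; a "@git:<path>" namespace would
    // route to the git downloader when the `downloads_git` feature is enabled.
    let spec: PackageSpec = "@preview/example:0.1.0"
        .parse()
        .map_err(|e| PackageError::Other(Some(e)))?;
    downloader.download(&spec, dir, &mut SilentProgress)
}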
@@ -284,6 +284,7 @@ impl<'a> CurveBuilder<'a> {
         self.last_point = point;
         self.last_control_from = point;
         self.is_started = true;
+        self.is_empty = true;
     }

     /// Add a line segment.
@@ -769,7 +769,7 @@ impl Array {
     ///
     /// ```example
     /// #let array = (1, 2, 3, 4, 5, 6, 7, 8)
-    /// #array.chunks(3)
+    /// #array.chunks(3) \
     /// #array.chunks(3, exact: true)
     /// ```
     #[func]
@@ -34,14 +34,14 @@ use crate::loading::{DataSource, Load, Readable};
///     let author = find-child(elem, "author")
///     let pars = find-child(elem, "content")
///
-///     heading(title.children.first())
+///     [= #title.children.first()]
///     text(10pt, weight: "medium")[
///       Published by
///       #author.children.first()
///     ]
///
///     for p in pars.children {
-///       if (type(p) == "dictionary") {
+///       if type(p) == dictionary {
///         parbreak()
///         p.children.first()
///       }
@@ -50,7 +50,7 @@ use crate::loading::{DataSource, Load, Readable};
///
/// #let data = xml("example.xml")
/// #for elem in data.first().children {
-///   if (type(elem) == "dictionary") {
+///   if type(elem) == dictionary {
///     article(elem)
///   }
/// }
@@ -326,7 +326,10 @@ fn visit_math_rules<'a>(
     // Symbols in non-math content transparently convert to `TextElem` so we
     // don't have to handle them in non-math layout.
     if let Some(elem) = content.to_packed::<SymbolElem>() {
-        let text = TextElem::packed(elem.text).spanned(elem.span());
+        let mut text = TextElem::packed(elem.text).spanned(elem.span());
+        if let Some(label) = elem.label() {
+            text.set_label(label);
+        }
         visit(s, s.store(text), styles)?;
         return Ok(true);
     }
@@ -263,15 +263,37 @@ impl Display for VersionlessPackageSpec {
     }
 }

+fn is_namespace_valid(namespace: &str) -> bool {
+    if is_ident(namespace) {
+        // Standard namespace.
+        return true;
+    }
+
+    // If not an ident, the namespace should be formed as @<package_remote_type>:<package_path>.
+    let mut tokenized = namespace.splitn(2, ":");
+
+    // Package type.
+    let package_remote_type = tokenized.next();
+    if package_remote_type.is_none() || !is_ident(package_remote_type.unwrap()) {
+        return false;
+    }
+
+    // The package_path parsing is left to the downloader implementation.
+    true
+}
+
 fn parse_namespace<'s>(s: &mut Scanner<'s>) -> Result<&'s str, EcoString> {
     if !s.eat_if('@') {
         Err("package specification must start with '@'")?;
     }

+    // TODO: allow for multiple slashes by eating until the last slash.
     let namespace = s.eat_until('/');
     if namespace.is_empty() {
         Err("package specification is missing namespace")?;
-    } else if !is_ident(namespace) {
+    }
+
+    if !is_namespace_valid(namespace) {
         Err(eco_format!("`{namespace}` is not a valid package namespace"))?;
     }
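Illustratively, a small test-style sketch (not from the diff) of how `is_namespace_valid` treats a few made-up namespace strings, assuming the usual `is_ident` identifier rules and access to the private helper from the same module:

#[test]
fn namespace_validation_sketch() {
    // Sketch only; these namespace strings are made-up examples.
    assert!(is_namespace_valid("preview"));         // plain identifier namespace
    assert!(is_namespace_valid("git:github.com"));  // <remote type>:<path> form
    assert!(!is_namespace_valid("1preview"));       // remote type is not an identifier
    assert!(!is_namespace_valid(":repo"));          // missing remote type
}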
@@ -294,20 +294,20 @@ feature flag.
     `errorbar.diamond.stroked`, `errorbar.diamond.filled`,
     `errorbar.circle.stroked`, `errorbar.circle.filled`
   - `numero`
-  - `Omega.inv`
-- Renamed
+- Renamed **(Breaking change)**
   - `ohm.inv` to `Omega.inv`
-- Changed codepoint
+- Changed codepoint **(Breaking change)**
   - `angle.l.double` from `《` to `⟪`
   - `angle.r.double` from `》` to `⟫`
   - `angstrom` from U+212B (`Å`) to U+00C5 (`Å`)
 - Deprecated
   - `sect` and all its variants in favor of `inter`
   - `integral.sect` in favor of `integral.inter`
-- Removed
+- Removed **(Breaking change)**
   - `degree.c` in favor of `°C` (`[$upright(°C)$]` or `[$upright(degree C)$]` in math)
   - `degree.f` in favor of `°F` (`[$upright(°F)$]` or `[$upright(degree F)$]` in math)
   - `kelvin` in favor of just K (`[$upright(K)$]` in math)
+  - `ohm` in favor of `Omega`

 ## Deprecations
 - The [`path`] function in favor of the [`curve`] function
@@ -0,0 +1,29 @@
+---
+title: 0.13.1
+description: Changes in Typst 0.13.1
+---
+
+# Version 0.13.1 (March 7, 2025)
+
+## Command Line Interface
+- Fixed high CPU usage for `typst watch` on Linux. Depending on the project
+  size, CPU usage would spike for varying amounts of time. This bug appeared
+  with 0.13.0 due to a behavioral change in the inotify file watching backend.
+
+## HTML export
+- Fixed export of tables with [gutters]($table.gutter)
+- Fixed usage of the `<html>` and `<body>` elements within [context]
+- Fixed querying of [metadata] next to the `<html>` and `<body>` elements
+
+## Visualization
+- Fixed [curves]($curve) with multiple non-closed components
+
+## Introspection
+- Fixed a regression where labelled [symbols]($symbol) could not be
+  [queried]($query) by label
+
+## Deprecations
+- Fixed false positives in deprecation warnings for type/str comparisons
+
+## Contributors
+<contributors from="v0.13.0" to="v0.13.1" />
@@ -10,6 +10,7 @@ forward. This section documents all changes to Typst since its initial public
 release.

 ## Versions
+- [Typst 0.13.1]($changelog/0.13.1)
 - [Typst 0.13.0]($changelog/0.13.0)
 - [Typst 0.12.0]($changelog/0.12.0)
 - [Typst 0.11.1]($changelog/0.11.1)
@@ -447,7 +447,7 @@ document.
 To let a function style your whole document, the show rule processes everything
 that comes after it and calls the function specified after the colon with the
 result as an argument. The `.with` part is a _method_ that takes the `conf`
-function and pre-configures some if its arguments before passing it on to the
+function and pre-configures some of its arguments before passing it on to the
 show rule.
 </div>
@@ -56,7 +56,7 @@ requirements with examples.
 Typst's default page size is A4 paper. Depending on your region and your use
 case, you will want to change this. You can do this by using the
 [`{page}`]($page) set rule and passing it a string argument to use a common page
-size. Options include the complete ISO 216 series (e.g. `"iso-a4"`, `"iso-c2"`),
+size. Options include the complete ISO 216 series (e.g. `"a4"` and `"iso-c2"`),
 customary US formats like `"us-legal"` or `"us-letter"`, and more. Check out the
 reference for the [page's paper argument]($page.paper) to learn about all
 available options.
|
||||||
let mut page = md_page(resolver, resolver.base(), load!("changelog/welcome.md"));
|
let mut page = md_page(resolver, resolver.base(), load!("changelog/welcome.md"));
|
||||||
let base = format!("{}changelog/", resolver.base());
|
let base = format!("{}changelog/", resolver.base());
|
||||||
page.children = vec![
|
page.children = vec![
|
||||||
|
md_page(resolver, &base, load!("changelog/0.13.1.md")),
|
||||||
md_page(resolver, &base, load!("changelog/0.13.0.md")),
|
md_page(resolver, &base, load!("changelog/0.13.0.md")),
|
||||||
md_page(resolver, &base, load!("changelog/0.12.0.md")),
|
md_page(resolver, &base, load!("changelog/0.12.0.md")),
|
||||||
md_page(resolver, &base, load!("changelog/0.11.1.md")),
|
md_page(resolver, &base, load!("changelog/0.11.1.md")),
|
||||||
|
|
|
||||||
|
|
@@ -13,11 +13,11 @@ your report using Typst's styling system.
 As we have seen in the previous chapter, Typst has functions that _insert_
 content (e.g. the [`image`] function) and others that _manipulate_ content that
 they received as arguments (e.g. the [`align`] function). The first impulse you
-might have when you want, for example, to justify the report, could be to look
+might have when you want, for example, to change the font, could be to look
 for a function that does that and wrap the complete document in it.

 ```example
-#par(justify: true)[
+#text(font: "New Computer Modern")[
 = Background
 In the case of glaciers, fluid
 dynamics principles can be used
@@ -37,9 +37,9 @@ do in Typst, there is special syntax for it: Instead of putting the content
 inside of the argument list, you can write it in square brackets directly after
 the normal arguments, saving on punctuation.

-As seen above, that works. The [`par`] function justifies all paragraphs within
-it. However, wrapping the document in countless functions and applying styles
-selectively and in-situ can quickly become cumbersome.
+As seen above, that works. With the [`text`] function, we can adjust the font
+for all text within it. However, wrapping the document in countless functions
+and applying styles selectively and in-situ can quickly become cumbersome.

 Fortunately, Typst has a more elegant solution. With _set rules,_ you can apply
 style properties to all occurrences of some kind of content. You write a set
@@ -47,7 +47,9 @@ rule by entering the `{set}` keyword, followed by the name of the function whose
 properties you want to set, and a list of arguments in parentheses.

 ```example
-#set par(justify: true)
+#set text(
+  font: "New Computer Modern"
+)
 = Background
 In the case of glaciers, fluid
Binary file not shown. (added, 85 B)
Binary file not shown. (added, 243 B)
@@ -151,3 +151,7 @@
 --- symbol-sect-deprecated ---
 // Warning: 5-9 `sect` is deprecated, use `inter` instead
 $ A sect B = A inter B $
+
+--- issue-5930-symbol-label ---
+#emoji.face<lab>
+#context test(query(<lab>).first().text, "😀")
@@ -38,6 +38,16 @@
   curve.close(mode: "smooth"),
 )

+--- curve-multiple-non-closed ---
+#curve(
+  stroke: 2pt,
+  curve.line((20pt, 0pt)),
+  curve.move((0pt, 10pt)),
+  curve.line((20pt, 10pt)),
+  curve.move((0pt, 20pt)),
+  curve.line((20pt, 20pt)),
+)
+
 --- curve-line ---
 #curve(
   fill: purple,