Mirror of https://github.com/tauri-apps/tauri.git (synced 2026-01-31 00:35:19 +01:00)
refactor(cli): improve errors (#14126)
* refactor(cli): improve errors
* update change files
* license
* add errorext with fs_context helper
* update linux
* lint
* fmt
* windows
* revert bundler breaking change
* fix ios mod
* ref
* reduce amount of enum variants
* fix macos build
* Fix windows build
* Clippy
* capitalize cargo [skip ci]

--------

Co-authored-by: Tony <legendmastertony@gmail.com>
committed by GitHub
parent eb60b9966b
commit b06b3bd091
.changes/improve-errors.md (new file, 8 lines added)
@@ -0,0 +1,8 @@
+---
+"@tauri-apps/cli": minor:enhance
+"tauri-cli": minor:enhance
+"tauri-bundler": minor:enhance
+---
+
+Improve error messages with more context.
+
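Most of the diff below swaps `anyhow`-style `.with_context(|| format!(...))` calls for a bundler-specific `Context` trait and an `ErrorExt::fs_context` helper that carries the failing path inside a dedicated `Error::Fs` variant. A minimal sketch of that pattern follows; the real definitions are in the `error.rs` hunks near the end of this diff, the `clean_bundle_dir` caller is illustrative rather than code from the PR, and the snippet assumes a crate with `thiserror` 2 as a dependency:

```rust
use std::path::{Path, PathBuf};

// Simplified copy of the bundler's new file-system error variant (see the error.rs hunks below).
#[derive(Debug, thiserror::Error)]
pub enum Error {
  #[error("{context} {path}: {error}")]
  Fs {
    context: &'static str,
    path: PathBuf,
    error: std::io::Error,
  },
}

pub type Result<T> = std::result::Result<T, Error>;

// `fs_context` converts an io::Result into the crate error, recording what was
// being attempted and which path it was attempted on.
pub trait ErrorExt<T> {
  fn fs_context(self, context: &'static str, path: impl Into<PathBuf>) -> Result<T>;
}

impl<T> ErrorExt<T> for std::result::Result<T, std::io::Error> {
  fn fs_context(self, context: &'static str, path: impl Into<PathBuf>) -> Result<T> {
    self.map_err(|error| Error::Fs {
      context,
      path: path.into(),
      error,
    })
  }
}

// Illustrative caller: instead of `.with_context(|| format!("failed to remove {dir:?}"))`,
// the path now travels inside the error value itself.
fn clean_bundle_dir(dir: &Path) -> Result<()> {
  std::fs::remove_dir_all(dir).fs_context("failed to remove old bundle directory", dir)
}
```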
Cargo.lock (generated, 4 lines changed)

@@ -8615,7 +8615,6 @@ dependencies = [
 name = "tauri-cli"
 version = "2.8.4"
 dependencies = [
-"anyhow",
 "ar",
 "axum",
 "base64 0.22.1",
@@ -8681,6 +8680,7 @@ dependencies = [
 "tauri-macos-sign",
 "tauri-utils",
 "tempfile",
+"thiserror 2.0.12",
 "tokio",
 "toml 0.9.4",
 "toml_edit 0.23.2",
@@ -8774,7 +8774,6 @@ dependencies = [
 name = "tauri-macos-sign"
 version = "2.2.0"
 dependencies = [
-"anyhow",
 "apple-codesign",
 "chrono",
 "dirs 6.0.0",
@@ -8787,6 +8786,7 @@ dependencies = [
 "serde",
 "serde_json",
 "tempfile",
+"thiserror 2.0.12",
 "x509-certificate 0.23.1",
 ]

@@ -375,7 +375,7 @@ fn main() -> Result<()> {
 if let Some(filename) = bench_file.to_str() {
 utils::write_json(filename, &serde_json::to_value(&new_data)?)
 .context("failed to write benchmark results to file")?;
-println!("Results written to: {}", filename);
+println!("Results written to: {filename}");
 } else {
 eprintln!("Cannot write bench.json, path contains invalid UTF-8");
 }
@@ -20,8 +20,8 @@ tauri-utils = { version = "2.7.0", path = "../tauri-utils", features = [
 ] }
 image = "0.25"
 flate2 = "1"
-anyhow = "1"
 thiserror = "2"
+anyhow = "1"
 serde_json = "1"
 serde = { version = "1", features = ["derive"] }
 strsim = "0.11"
@@ -49,8 +49,6 @@ pub use self::{
 Settings, SettingsBuilder, Size, UpdaterSettings,
 },
 };
-#[cfg(target_os = "macos")]
-use anyhow::Context;
 pub use settings::{NsisSettings, WindowsSettings, WixLanguage, WixLanguageConfig, WixSettings};

 use std::{
@@ -223,24 +221,24 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<Bundle>> {
 .map(|b| b.bundle_paths)
 {
 for app_bundle_path in &app_bundle_paths {
+use crate::error::ErrorExt;
+
 log::info!(action = "Cleaning"; "{}", app_bundle_path.display());
 match app_bundle_path.is_dir() {
 true => std::fs::remove_dir_all(app_bundle_path),
 false => std::fs::remove_file(app_bundle_path),
 }
-.with_context(|| {
-format!(
-"Failed to clean the app bundle at {}",
-app_bundle_path.display()
-)
-})?
+.fs_context(
+"failed to clean the app bundle",
+app_bundle_path.to_path_buf(),
+)?;
 }
 }
 }
 }

 if bundles.is_empty() {
-return Err(anyhow::anyhow!("No bundles were built").into());
+return Ok(bundles);
 }

 let bundles_wo_updater = bundles
@@ -6,10 +6,10 @@
 use super::debian;
 use crate::{
 bundle::settings::Arch,
+error::{Context, ErrorExt},
 utils::{fs_utils, http_utils::download, CommandExt},
 Settings,
 };
-use anyhow::Context;
 use std::{
 fs,
 path::{Path, PathBuf},
@@ -124,13 +124,13 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
 // xdg-open will be handled by the `files` config instead
 if settings.deep_link_protocols().is_some() && !app_dir_usr_bin.join("xdg-open").exists() {
 fs::copy("/usr/bin/xdg-mime", app_dir_usr_bin.join("xdg-mime"))
-.context("xdg-mime binary not found")?;
+.fs_context("xdg-mime binary not found", "/usr/bin/xdg-mime".to_string())?;
 }

 // we also check if the user may have provided their own copy already
 if settings.appimage().bundle_xdg_open && !app_dir_usr_bin.join("xdg-open").exists() {
 fs::copy("/usr/bin/xdg-open", app_dir_usr_bin.join("xdg-open"))
-.context("xdg-open binary not found")?;
+.fs_context("xdg-open binary not found", "/usr/bin/xdg-open".to_string())?;
 }

 let search_dirs = [
@@ -24,8 +24,12 @@
 // generate postinst or prerm files.

 use super::freedesktop;
-use crate::{bundle::settings::Arch, utils::fs_utils, Settings};
-use anyhow::Context;
+use crate::{
+bundle::settings::Arch,
+error::{Context, ErrorExt},
+utils::fs_utils,
+Settings,
+};
 use flate2::{write::GzEncoder, Compression};
 use tar::HeaderMode;
 use walkdir::WalkDir;
@@ -64,30 +68,32 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
 let base_dir = settings.project_out_directory().join("bundle/deb");
 let package_dir = base_dir.join(&package_base_name);
 if package_dir.exists() {
-fs::remove_dir_all(&package_dir)
-.with_context(|| format!("Failed to remove old {package_base_name}"))?;
+fs::remove_dir_all(&package_dir).fs_context(
+"Failed to Remove old package directory",
+package_dir.clone(),
+)?;
 }
 let package_path = base_dir.join(&package_name);

 log::info!(action = "Bundling"; "{} ({})", package_name, package_path.display());

-let (data_dir, _) = generate_data(settings, &package_dir)
-.with_context(|| "Failed to build data folders and files")?;
+let (data_dir, _) =
+generate_data(settings, &package_dir).context("Failed to build data folders and files")?;
 fs_utils::copy_custom_files(&settings.deb().files, &data_dir)
-.with_context(|| "Failed to copy custom files")?;
+.context("Failed to copy custom files")?;

 // Generate control files.
 let control_dir = package_dir.join("control");
 generate_control_file(settings, arch, &control_dir, &data_dir)
-.with_context(|| "Failed to create control file")?;
-generate_scripts(settings, &control_dir).with_context(|| "Failed to create control scripts")?;
-generate_md5sums(&control_dir, &data_dir).with_context(|| "Failed to create md5sums file")?;
+.context("Failed to create control file")?;
+generate_scripts(settings, &control_dir).context("Failed to create control scripts")?;
+generate_md5sums(&control_dir, &data_dir).context("Failed to create md5sums file")?;

 // Generate `debian-binary` file; see
 // http://www.tldp.org/HOWTO/Debian-Binary-Package-Building-HOWTO/x60.html#AEN66
 let debian_binary_path = package_dir.join("debian-binary");
 create_file_with_data(&debian_binary_path, "2.0\n")
-.with_context(|| "Failed to create debian-binary file")?;
+.context("Failed to create debian-binary file")?;

 // Apply tar/gzip/ar to create the final package file.
 let control_tar_gz_path =
@@ -21,12 +21,12 @@ use std::fs::{read_to_string, File};
 use std::io::BufReader;
 use std::path::{Path, PathBuf};

-use anyhow::Context;
 use handlebars::Handlebars;
 use image::{self, codecs::png::PngDecoder, ImageDecoder};
 use serde::Serialize;

 use crate::{
+error::Context,
 utils::{self, fs_utils},
 Settings,
 };
@@ -114,11 +114,13 @@ pub fn generate_desktop_file(
 if let Some(template) = custom_template_path {
 handlebars
 .register_template_string("main.desktop", read_to_string(template)?)
-.with_context(|| "Failed to setup custom handlebar template")?;
+.map_err(Into::into)
+.context("Failed to setup custom handlebar template")?;
 } else {
 handlebars
 .register_template_string("main.desktop", include_str!("./main.desktop"))
-.with_context(|| "Failed to setup default handlebar template")?;
+.map_err(Into::into)
+.context("Failed to setup default handlebar template")?;
 }

 #[derive(Serialize)]
@@ -3,9 +3,8 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT

-use crate::{bundle::settings::Arch, Settings};
+use crate::{bundle::settings::Arch, error::ErrorExt, Settings};

-use anyhow::Context;
 use rpm::{self, signature::pgp, Dependency, FileMode, FileOptions};
 use std::{
 env,
@@ -48,10 +47,13 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
 let base_dir = settings.project_out_directory().join("bundle/rpm");
 let package_dir = base_dir.join(&package_base_name);
 if package_dir.exists() {
-fs::remove_dir_all(&package_dir)
-.with_context(|| format!("Failed to remove old {package_base_name}"))?;
+fs::remove_dir_all(&package_dir).fs_context(
+"Failed to remove old package directory",
+package_dir.clone(),
+)?;
 }
-fs::create_dir_all(&package_dir)?;
+fs::create_dir_all(&package_dir)
+.fs_context("Failed to create package directory", package_dir.clone())?;
 let package_path = base_dir.join(&package_name);

 log::info!(action = "Bundling"; "{} ({})", package_name, package_path.display());
@@ -24,16 +24,15 @@

 use super::{
 icon::create_icns_file,
-sign::{notarize, notarize_auth, notarize_without_stapling, sign, NotarizeAuthError, SignTarget},
+sign::{notarize, notarize_auth, notarize_without_stapling, sign, SignTarget},
 };
 use crate::{
+error::{Context, ErrorExt, NotarizeAuthError},
 utils::{fs_utils, CommandExt},
 Error::GenericError,
 Settings,
 };

-use anyhow::Context;
-
 use std::{
 ffi::OsStr,
 fs,
@@ -65,12 +64,16 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
 log::info!(action = "Bundling"; "{} ({})", app_product_name, app_bundle_path.display());

 if app_bundle_path.exists() {
-fs::remove_dir_all(&app_bundle_path)
-.with_context(|| format!("Failed to remove old {app_product_name}"))?;
+fs::remove_dir_all(&app_bundle_path).fs_context(
+"failed to remove old app bundle",
+app_bundle_path.to_path_buf(),
+)?;
 }
 let bundle_directory = app_bundle_path.join("Contents");
-fs::create_dir_all(&bundle_directory)
-.with_context(|| format!("Failed to create bundle directory at {bundle_directory:?}"))?;
+fs::create_dir_all(&bundle_directory).fs_context(
+"failed to create bundle directory",
+bundle_directory.to_path_buf(),
+)?;

 let resources_dir = bundle_directory.join("Resources");
 let bin_dir = bundle_directory.join("MacOS");
@@ -134,7 +137,7 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
 }
 Err(e) => {
 if matches!(e, NotarizeAuthError::MissingTeamId) {
-return Err(anyhow::anyhow!("{e}").into());
+return Err(e.into());
 } else {
 log::warn!("skipping app notarization, {}", e.to_string());
 }
@@ -401,8 +404,10 @@ fn copy_frameworks_to_bundle(
 return Ok(paths);
 }
 let dest_dir = bundle_directory.join("Frameworks");
-fs::create_dir_all(bundle_directory)
-.with_context(|| format!("Failed to create Frameworks directory at {dest_dir:?}"))?;
+fs::create_dir_all(&dest_dir).fs_context(
+"failed to create Frameworks directory",
+dest_dir.to_path_buf(),
+)?;
 for framework in frameworks.iter() {
 if framework.ends_with(".framework") {
 let src_path = PathBuf::from(framework);
@@ -6,12 +6,11 @@
 use super::{app, icon::create_icns_file};
 use crate::{
 bundle::{settings::Arch, Bundle},
+error::{Context, ErrorExt},
 utils::CommandExt,
 PackageType, Settings,
 };

-use anyhow::Context;
-
 use std::{
 env,
 fs::{self, write},
@@ -68,10 +67,9 @@ pub fn bundle_project(settings: &Settings, bundles: &[Bundle]) -> crate::Result<

 for path in &[&support_directory_path, &output_path] {
 if path.exists() {
-fs::remove_dir_all(path).with_context(|| format!("Failed to remove old {dmg_name}"))?;
+fs::remove_dir_all(path).fs_context("failed to remove old dmg", path.to_path_buf())?;
 }
-fs::create_dir_all(path)
-.with_context(|| format!("Failed to create output directory at {path:?}"))?;
+fs::create_dir_all(path).fs_context("failed to create output directory", path.to_path_buf())?;
 }

 // create paths for script
@@ -14,11 +14,11 @@
 // explanation.

 use crate::{
+error::{Context, ErrorExt},
 utils::{self, fs_utils},
 Settings,
 };

-use anyhow::Context;
 use image::{codecs::png::PngDecoder, GenericImageView, ImageDecoder};

 use std::{
@@ -44,11 +44,15 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
 log::info!(action = "Bundling"; "{} ({})", app_product_name, app_bundle_path.display());

 if app_bundle_path.exists() {
-fs::remove_dir_all(&app_bundle_path)
-.with_context(|| format!("Failed to remove old {app_product_name}"))?;
+fs::remove_dir_all(&app_bundle_path).fs_context(
+"failed to remove old app bundle",
+app_bundle_path.to_path_buf(),
+)?;
 }
-fs::create_dir_all(&app_bundle_path)
-.with_context(|| format!("Failed to create bundle directory at {app_bundle_path:?}"))?;
+fs::create_dir_all(&app_bundle_path).fs_context(
+"failed to create bundle directory",
+app_bundle_path.to_path_buf(),
+)?;

 for src in settings.resource_files() {
 let src = src?;
@@ -9,7 +9,7 @@ use std::{
 path::{Path, PathBuf},
 };

-use crate::Settings;
+use crate::{error::NotarizeAuthError, Settings};

 pub struct SignTarget {
 pub path: PathBuf,
@@ -23,11 +23,14 @@ pub fn keychain(identity: Option<&str>) -> crate::Result<Option<tauri_macos_sign
 ) {
 // import user certificate - useful for for CI build
 let keychain =
-tauri_macos_sign::Keychain::with_certificate(&certificate_encoded, &certificate_password)?;
+tauri_macos_sign::Keychain::with_certificate(&certificate_encoded, &certificate_password)
+.map_err(Box::new)?;
 if let Some(identity) = identity {
 let certificate_identity = keychain.signing_identity();
 if !certificate_identity.contains(identity) {
-return Err(anyhow::anyhow!("certificate from APPLE_CERTIFICATE \"{certificate_identity}\" environment variable does not match provided identity \"{identity}\"").into());
+return Err(crate::Error::GenericError(format!(
+"certificate from APPLE_CERTIFICATE \"{certificate_identity}\" environment variable does not match provided identity \"{identity}\""
+)));
 }
 }
 Ok(Some(keychain))
@@ -53,11 +56,13 @@ pub fn sign(
 } else {
 None
 };
-keychain.sign(
-&target.path,
-entitlements_path,
-target.is_an_executable && settings.macos().hardened_runtime,
-)?;
+keychain
+.sign(
+&target.path,
+entitlements_path,
+target.is_an_executable && settings.macos().hardened_runtime,
+)
+.map_err(Box::new)?;
 }

 Ok(())
@@ -68,7 +73,9 @@ pub fn notarize(
 app_bundle_path: PathBuf,
 credentials: &tauri_macos_sign::AppleNotarizationCredentials,
 ) -> crate::Result<()> {
-tauri_macos_sign::notarize(keychain, &app_bundle_path, credentials).map_err(Into::into)
+tauri_macos_sign::notarize(keychain, &app_bundle_path, credentials)
+.map_err(Box::new)
+.map_err(Into::into)
 }

 pub fn notarize_without_stapling(
@@ -77,19 +84,10 @@ pub fn notarize_without_stapling(
 credentials: &tauri_macos_sign::AppleNotarizationCredentials,
 ) -> crate::Result<()> {
 tauri_macos_sign::notarize_without_stapling(keychain, &app_bundle_path, credentials)
+.map_err(Box::new)
 .map_err(Into::into)
 }

-#[derive(Debug, thiserror::Error)]
-pub enum NotarizeAuthError {
-#[error(
-"The team ID is now required for notarization with app-specific password as authentication. Please set the `APPLE_TEAM_ID` environment variable. You can find the team ID in https://developer.apple.com/account#MembershipDetailsCard."
-)]
-MissingTeamId,
-#[error(transparent)]
-Anyhow(#[from] anyhow::Error),
-}
-
 pub fn notarize_auth() -> Result<tauri_macos_sign::AppleNotarizationCredentials, NotarizeAuthError>
 {
 match (
@@ -106,10 +104,18 @@ pub fn notarize_auth() -> Result<tauri_macos_sign::AppleNotarizationCredentials,
 }
 (Some(_apple_id), Some(_password), None) => Err(NotarizeAuthError::MissingTeamId),
 _ => {
-match (var_os("APPLE_API_KEY"), var_os("APPLE_API_ISSUER"), var("APPLE_API_KEY_PATH")) {
+match (
+var_os("APPLE_API_KEY"),
+var_os("APPLE_API_ISSUER"),
+var("APPLE_API_KEY_PATH"),
+) {
 (Some(key_id), Some(issuer), Ok(key_path)) => {
-Ok(tauri_macos_sign::AppleNotarizationCredentials::ApiKey { key_id, key: tauri_macos_sign::ApiKey::Path( key_path.into()), issuer })
-},
+Ok(tauri_macos_sign::AppleNotarizationCredentials::ApiKey {
+key_id,
+key: tauri_macos_sign::ApiKey::Path(key_path.into()),
+issuer,
+})
+}
 (Some(key_id), Some(issuer), Err(_)) => {
 let mut api_key_file_name = OsString::from("AuthKey_");
 api_key_file_name.push(&key_id);
@@ -131,12 +137,18 @@ pub fn notarize_auth() -> Result<tauri_macos_sign::AppleNotarizationCredentials,
 }

 if let Some(key_path) = key_path {
-Ok(tauri_macos_sign::AppleNotarizationCredentials::ApiKey { key_id, key: tauri_macos_sign::ApiKey::Path(key_path), issuer })
+Ok(tauri_macos_sign::AppleNotarizationCredentials::ApiKey {
+key_id,
+key: tauri_macos_sign::ApiKey::Path(key_path),
+issuer,
+})
 } else {
-Err(anyhow::anyhow!("could not find API key file. Please set the APPLE_API_KEY_PATH environment variables to the path to the {api_key_file_name:?} file").into())
+Err(NotarizeAuthError::MissingApiKey {
+file_name: api_key_file_name.to_string_lossy().into_owned(),
+})
 }
 }
-_ => Err(anyhow::anyhow!("no APPLE_ID & APPLE_PASSWORD & APPLE_TEAM_ID or APPLE_API_KEY & APPLE_API_ISSUER & APPLE_API_KEY_PATH environment variables found").into())
+_ => Err(NotarizeAuthError::MissingCredentials),
 }
 }
 }
@@ -4,8 +4,7 @@
 // SPDX-License-Identifier: MIT

 use super::category::AppCategory;
-use crate::{bundle::platform::target_triple, utils::fs_utils};
-use anyhow::Context;
+use crate::{bundle::platform::target_triple, error::Context, utils::fs_utils};
 pub use tauri_utils::config::WebviewInstallMode;
 use tauri_utils::{
 config::{
@@ -969,7 +968,6 @@ impl Settings {
 .iter()
 .find(|bin| bin.main)
 .context("failed to find main binary, make sure you have a `package > default-run` in the Cargo.toml file")
-.map_err(Into::into)
 }

 /// Returns the file name of the binary being bundled.
@@ -979,7 +977,6 @@ impl Settings {
 .iter_mut()
 .find(|bin| bin.main)
 .context("failed to find main binary, make sure you have a `package > default-run` in the Cargo.toml file")
-.map_err(Into::into)
 }

 /// Returns the file name of the binary being bundled.
@@ -990,7 +987,6 @@ impl Settings {
 .find(|bin| bin.main)
 .context("failed to find main binary, make sure you have a `package > default-run` in the Cargo.toml file")
 .map(|b| b.name())
-.map_err(Into::into)
 }

 /// Returns the path to the specified binary.
@@ -11,6 +11,7 @@ use crate::{
 },
 Bundle,
 },
+error::{Context, ErrorExt},
 utils::fs_utils,
 Settings,
 };
@@ -22,7 +23,6 @@ use std::{
 path::{Path, PathBuf},
 };

-use anyhow::Context;
 use zip::write::SimpleFileOptions;

 // Build update
@@ -216,7 +216,9 @@ pub fn create_zip(src_file: &Path, dst_file: &Path) -> crate::Result<PathBuf> {
 .unix_permissions(0o755);

 zip.start_file(file_name.to_string_lossy(), options)?;
-let mut f = File::open(src_file)?;
+let mut f =
+File::open(src_file).fs_context("failed to open updater ZIP file", src_file.to_path_buf())?;

 let mut buffer = Vec::new();
 f.read_to_end(&mut buffer)?;
 zip.write_all(&buffer)?;
@@ -14,13 +14,13 @@ use crate::{
 },
 },
 },
+error::Context,
 utils::{
 fs_utils::copy_file,
 http_utils::{download_and_verify, extract_zip, HashAlgorithm},
 CommandExt,
 },
 };
-use anyhow::{bail, Context};
 use handlebars::{html_escape, to_json, Handlebars};
 use regex::Regex;
 use serde::{Deserialize, Serialize};
@@ -279,37 +279,40 @@ fn clear_env_for_wix(cmd: &mut Command) {
 }
 }

-fn validate_wix_version(version_str: &str) -> anyhow::Result<()> {
+fn validate_wix_version(version_str: &str) -> crate::Result<()> {
 let components = version_str
 .split('.')
 .flat_map(|c| c.parse::<u64>().ok())
 .collect::<Vec<_>>();

-anyhow::ensure!(
-components.len() >= 3,
+if components.len() < 3 {
+crate::error::bail!(
 "app wix version should be in the format major.minor.patch.build (build is optional)"
 );
+}

 if components[0] > 255 {
-bail!("app version major number cannot be greater than 255");
+crate::error::bail!("app version major number cannot be greater than 255");
 }
 if components[1] > 255 {
-bail!("app version minor number cannot be greater than 255");
+crate::error::bail!("app version minor number cannot be greater than 255");
 }
 if components[2] > 65535 {
-bail!("app version patch number cannot be greater than 65535");
+crate::error::bail!("app version patch number cannot be greater than 65535");
 }

 if components.len() == 4 && components[3] > 65535 {
-bail!("app version build number cannot be greater than 65535");
+crate::error::bail!("app version build number cannot be greater than 65535");
 }

 Ok(())
 }

 // WiX requires versions to be numeric only in a `major.minor.patch.build` format
-fn convert_version(version_str: &str) -> anyhow::Result<String> {
-let version = semver::Version::parse(version_str).context("invalid app version")?;
+fn convert_version(version_str: &str) -> crate::Result<String> {
+let version = semver::Version::parse(version_str)
+.map_err(Into::into)
+.context("invalid app version")?;
 if !version.build.is_empty() {
 let build = version.build.parse::<u64>();
 if build.map(|b| b <= 65535).unwrap_or_default() {
@@ -318,7 +321,7 @@ fn convert_version(version_str: &str) -> anyhow::Result<String> {
 version.major, version.minor, version.patch, version.build
 ));
 } else {
-bail!("optional build metadata in app version must be numeric-only and cannot be greater than 65535 for msi target");
+crate::error::bail!("optional build metadata in app version must be numeric-only and cannot be greater than 65535 for msi target");
 }
 }

@@ -330,7 +333,7 @@ fn convert_version(version_str: &str) -> anyhow::Result<String> {
 version.major, version.minor, version.patch, version.pre
 ));
 } else {
-bail!("optional pre-release identifier in app version must be numeric-only and cannot be greater than 65535 for msi target");
+crate::error::bail!("optional pre-release identifier in app version must be numeric-only and cannot be greater than 65535 for msi target");
 }
 }

@@ -387,11 +390,7 @@ fn run_candle(
 cmd.arg(ext);
 }
 clear_env_for_wix(&mut cmd);
-cmd
-.args(&args)
-.current_dir(cwd)
-.output_ok()
-.context("error running candle.exe")?;
+cmd.args(&args).current_dir(cwd).output_ok()?;

 Ok(())
 }
@@ -416,11 +415,7 @@ fn run_light(
 cmd.arg(ext);
 }
 clear_env_for_wix(&mut cmd);
-cmd
-.args(&args)
-.current_dir(build_path)
-.output_ok()
-.context("error running light.exe")?;
+cmd.args(&args).current_dir(build_path).output_ok()?;

 Ok(())
 }
@@ -472,8 +467,7 @@ pub fn build_wix_app_installer(
 // when we're performing code signing, we'll sign some WiX DLLs, so we make a local copy
 let wix_toolset_path = if settings.windows().can_sign() {
 let wix_path = output_path.join("wix");
-crate::utils::fs_utils::copy_dir(wix_toolset_path, &wix_path)
-.context("failed to copy wix directory")?;
+crate::utils::fs_utils::copy_dir(wix_toolset_path, &wix_path)?;
 wix_path
 } else {
 wix_toolset_path.to_path_buf()
@@ -13,15 +13,16 @@ use crate::{
 },
 },
 },
+error::ErrorExt,
 utils::{
 http_utils::{download_and_verify, verify_file_hash, HashAlgorithm},
 CommandExt,
 },
-Settings,
+Error, Settings,
 };
 use tauri_utils::display_path;

-use anyhow::Context;
+use crate::error::Context;
 use handlebars::{to_json, Handlebars};
 use tauri_utils::config::{NSISInstallerMode, NsisCompression, WebviewInstallMode};

@@ -105,8 +106,9 @@ pub fn bundle_project(settings: &Settings, updater: bool) -> crate::Result<Vec<P
 let data = download_and_verify(url, hash, *hash_algorithm)?;
 let out_path = nsis_toolset_path.join(path);
 std::fs::create_dir_all(out_path.parent().context("output path has no parent")?)
-.context("failed to create file output directory")?;
-fs::write(out_path, data).with_context(|| format!("failed to save {path}"))?;
+.fs_context("failed to create file output directory", out_path.clone())?;
+fs::write(&out_path, data)
+.fs_context("failed to save NSIS downloaded file", out_path.clone())?;
 }
 }
 }
@@ -142,8 +144,9 @@ fn get_and_extract_nsis(nsis_toolset_path: &Path, _tauri_tools_path: &Path) -> c
 Ok(())
 }

-fn try_add_numeric_build_number(version_str: &str) -> anyhow::Result<String> {
-let version = semver::Version::parse(version_str).context("invalid app version")?;
+fn try_add_numeric_build_number(version_str: &str) -> crate::Result<String> {
+let version = semver::Version::parse(version_str)
+.map_err(|error| Error::GenericError(format!("invalid app version: {error}")))?;
 if !version.build.is_empty() {
 let build = version.build.parse::<u64>();
 if build.is_ok() {
@@ -199,31 +202,39 @@ fn build_nsis_app_installer(
 .map(PathBuf::from)
 .unwrap_or_else(|| PathBuf::from("/usr/share/nsis"));
 #[cfg(target_os = "macos")]
 let system_nsis_toolset_path = std::env::var_os("NSIS_PATH")
 .map(PathBuf::from)
-.ok_or_else(|| anyhow::anyhow!("failed to resolve NSIS path"))
+.context("failed to resolve NSIS path")
 .or_else(|_| {
-let mut makensis_path =
-which::which("makensis").context("failed to resolve `makensis`; did you install nsis? See https://tauri.app/distribute/windows-installer/#install-nsis for more information")?;
-// homebrew installs it as a symlink
-if makensis_path.is_symlink() {
-// read_link might return a path relative to makensis_path so we must use join() and canonicalize
-makensis_path = makensis_path
-.parent()
-.context("missing makensis parent")?
-.join(std::fs::read_link(&makensis_path).context("failed to resolve makensis symlink")?)
-.canonicalize()
-.context("failed to resolve makensis path")?;
-}
-// file structure:
-// ├── bin
-// │ ├── makensis
-// ├── share
-// │ ├── nsis
-let bin_folder = makensis_path.parent().context("missing makensis parent")?;
-let root_folder = bin_folder.parent().context("missing makensis root")?;
-crate::Result::Ok(root_folder.join("share").join("nsis"))
+let mut makensis_path = which::which("makensis").map_err(|error| Error::CommandFailed {
+command: "makensis".to_string(),
+error: std::io::Error::other(format!("failed to find makensis: {error}")),
 })?;
+// homebrew installs it as a symlink
+if makensis_path.is_symlink() {
+// read_link might return a path relative to makensis_path so we must use join() and canonicalize
+makensis_path = makensis_path
+.parent()
+.context("missing makensis parent")?
+.join(
+std::fs::read_link(&makensis_path)
+.fs_context("failed to resolve makensis symlink", makensis_path.clone())?,
+)
+.canonicalize()
+.fs_context(
+"failed to canonicalize makensis path",
+makensis_path.clone(),
+)?;
+}
+// file structure:
+// ├── bin
+// │ ├── makensis
+// ├── share
+// │ ├── nsis
+let bin_folder = makensis_path.parent().context("missing makensis parent")?;
+let root_folder = bin_folder.parent().context("missing makensis root")?;
+crate::Result::Ok(root_folder.join("share").join("nsis"))
+})?;
 #[cfg(windows)]
 let system_nsis_toolset_path = nsis_toolset_path.to_path_buf();

@@ -636,7 +647,10 @@ fn build_nsis_app_installer(
 .env_remove("NSISCONFDIR")
 .current_dir(output_path)
 .piped()
-.context("error running makensis.exe")?;
+.map_err(|error| Error::CommandFailed {
+command: "makensis.exe".to_string(),
+error,
+})?;

 fs::rename(nsis_output_path, &nsis_installer_path)?;

@@ -808,7 +822,11 @@ fn generate_estimated_size(
 .chain(resources.keys())
 {
 size += std::fs::metadata(k)
-.with_context(|| format!("when getting size of {}", k.display()))?
+.map_err(|error| Error::Fs {
+context: "when getting size of",
+path: k.to_path_buf(),
+error,
+})?
 .len();
 }
 Ok(size / 1024)
@@ -27,17 +27,14 @@ pub fn webview2_guid_path(url: &str) -> crate::Result<(String, String)> {
 let response = agent.head(url).call().map_err(Box::new)?;
 let final_url = response.get_uri().to_string();
 let remaining_url = final_url.strip_prefix(WEBVIEW2_URL_PREFIX).ok_or_else(|| {
-anyhow::anyhow!(
-"WebView2 URL prefix mismatch. Expected `{}`, found `{}`.",
-WEBVIEW2_URL_PREFIX,
-final_url
-)
+crate::Error::GenericError(format!(
+"WebView2 URL prefix mismatch. Expected `{WEBVIEW2_URL_PREFIX}`, found `{final_url}`."
+))
 })?;
 let (guid, filename) = remaining_url.split_once('/').ok_or_else(|| {
-anyhow::anyhow!(
-"WebView2 URL format mismatch. Expected `<GUID>/<FILENAME>`, found `{}`.",
-remaining_url
-)
+crate::Error::GenericError(format!(
+"WebView2 URL format mismatch. Expected `<GUID>/<FILENAME>`, found `{remaining_url}`."
+))
 })?;
 Ok((guid.into(), filename.into()))
 }
@@ -3,17 +3,45 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT

-use std::{io, num, path};
+use std::{
+fmt::Display,
+io, num,
+path::{self, PathBuf},
+};
 use thiserror::Error as DeriveError;

 /// Errors returned by the bundler.
 #[derive(Debug, DeriveError)]
 #[non_exhaustive]
 pub enum Error {
+/// Error with context. Created by the [`Context`] trait.
+#[error("{0}: {1}")]
+Context(String, Box<Self>),
+/// File system error.
+#[error("{context} {path}: {error}")]
+Fs {
+/// Context of the error.
+context: &'static str,
+/// Path that was accessed.
+path: PathBuf,
+/// Error that occurred.
+error: io::Error,
+},
+/// Child process error.
+#[error("failed to run command {command}: {error}")]
+CommandFailed {
+/// Command that failed.
+command: String,
+/// Error that occurred.
+error: io::Error,
+},
 /// Error running tauri_utils API.
 #[error("{0}")]
 Resource(#[from] tauri_utils::Error),
 /// Bundler error.
+///
+/// This variant is no longer used as this crate no longer uses anyhow.
+// TODO(v3): remove this variant
 #[error("{0:#}")]
 BundlerError(#[from] anyhow::Error),
 /// I/O error.
@@ -133,7 +161,110 @@ pub enum Error {
 #[cfg(target_os = "linux")]
 #[error("{0}")]
 RpmError(#[from] rpm::Error),
+/// Failed to notarize application.
+#[cfg(target_os = "macos")]
+#[error("failed to notarize app: {0}")]
+AppleNotarization(#[from] NotarizeAuthError),
+/// Failed to codesign application.
+#[cfg(target_os = "macos")]
+#[error("failed codesign application: {0}")]
+AppleCodesign(#[from] Box<tauri_macos_sign::Error>),
+/// Handlebars template error.
+#[error(transparent)]
+Template(#[from] handlebars::TemplateError),
+/// Semver error.
+#[error("`{0}`")]
+SemverError(#[from] semver::Error),
+}
+
+#[cfg(target_os = "macos")]
+#[allow(clippy::enum_variant_names)]
+#[derive(Debug, thiserror::Error)]
+pub enum NotarizeAuthError {
+#[error(
+"The team ID is now required for notarization with app-specific password as authentication. Please set the `APPLE_TEAM_ID` environment variable. You can find the team ID in https://developer.apple.com/account#MembershipDetailsCard."
+)]
+MissingTeamId,
+#[error("could not find API key file. Please set the APPLE_API_KEY_PATH environment variables to the path to the {file_name} file")]
+MissingApiKey { file_name: String },
+#[error("no APPLE_ID & APPLE_PASSWORD & APPLE_TEAM_ID or APPLE_API_KEY & APPLE_API_ISSUER & APPLE_API_KEY_PATH environment variables found")]
+MissingCredentials,
 }

 /// Convenient type alias of Result type.
 pub type Result<T> = std::result::Result<T, Error>;
+
+pub trait Context<T> {
+// Required methods
+fn context<C>(self, context: C) -> Result<T>
+where
+C: Display + Send + Sync + 'static;
+fn with_context<C, F>(self, f: F) -> Result<T>
+where
+C: Display + Send + Sync + 'static,
+F: FnOnce() -> C;
+}
+
+impl<T> Context<T> for Result<T> {
+fn context<C>(self, context: C) -> Result<T>
+where
+C: Display + Send + Sync + 'static,
+{
+self.map_err(|e| Error::Context(context.to_string(), Box::new(e)))
+}
+
+fn with_context<C, F>(self, f: F) -> Result<T>
+where
+C: Display + Send + Sync + 'static,
+F: FnOnce() -> C,
+{
+self.map_err(|e| Error::Context(f().to_string(), Box::new(e)))
+}
+}
+
+impl<T> Context<T> for Option<T> {
+fn context<C>(self, context: C) -> Result<T>
+where
+C: Display + Send + Sync + 'static,
+{
+self.ok_or_else(|| Error::GenericError(context.to_string()))
+}
+
+fn with_context<C, F>(self, f: F) -> Result<T>
+where
+C: Display + Send + Sync + 'static,
+F: FnOnce() -> C,
+{
+self.ok_or_else(|| Error::GenericError(f().to_string()))
+}
+}
+
+pub trait ErrorExt<T> {
+fn fs_context(self, context: &'static str, path: impl Into<PathBuf>) -> Result<T>;
+}
+
+impl<T> ErrorExt<T> for std::result::Result<T, std::io::Error> {
+fn fs_context(self, context: &'static str, path: impl Into<PathBuf>) -> Result<T> {
+self.map_err(|error| Error::Fs {
+context,
+path: path.into(),
+error,
+})
+}
+}
+
+#[allow(unused)]
+macro_rules! bail {
+($msg:literal $(,)?) => {
+return Err(crate::Error::GenericError($msg.into()))
+};
+($err:expr $(,)?) => {
+return Err(crate::Error::GenericError($err))
+};
+($fmt:expr, $($arg:tt)*) => {
+return Err(crate::Error::GenericError(format!($fmt, $($arg)*)))
+};
+}
+
+#[allow(unused)]
+pub(crate) use bail;
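Taken together, the `Context` trait, `ErrorExt::fs_context`, and the `bail!` macro defined in the hunk above replace the previous `anyhow` imports throughout the bundler and CLI. A short usage sketch follows, assuming the `error` module from this hunk is in scope; the `read_manifest` function and the `manifest.json` file name are illustrative, not code from the PR:

```rust
use crate::error::{Context, ErrorExt, Result};
use std::path::Path;

// Illustrative only: shows how the new pieces are typically combined,
// mirroring the call sites in the hunks earlier in this diff.
fn read_manifest(dir: &Path) -> Result<String> {
  // `Context` is implemented for Option<T>, turning a None into Error::GenericError.
  let name = dir.file_name().context("bundle directory has no file name")?;

  if name.is_empty() {
    // `bail!` builds an Error::GenericError and returns early.
    crate::error::bail!("bundle directory name is empty");
  }

  // `ErrorExt::fs_context` wraps io::Error values with the operation and the path involved.
  std::fs::read_to_string(dir.join("manifest.json"))
    .fs_context("failed to read bundle manifest", dir.join("manifest.json"))
}
```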
@@ -46,7 +46,7 @@ jsonrpsee-ws-client = { version = "0.24", default-features = false }
 sublime_fuzzy = "0.7"
 clap_complete = "4"
 clap = { version = "4", features = ["derive", "env"] }
-anyhow = "1"
+thiserror = "2"
 tauri-bundler = { version = "2.6.1", default-features = false, path = "../tauri-bundler" }
 colored = "2"
 serde = { version = "1", features = ["derive"] }
@@ -9,6 +9,7 @@ use tauri_utils::acl::capability::{Capability, PermissionEntry};

 use crate::{
 acl::FileFormat,
+error::ErrorExt,
 helpers::{app_paths::tauri_dir, prompts},
 Result,
 };
@@ -106,7 +107,9 @@ pub fn command(options: Options) -> Result<()> {
 };

 let path = match options.out {
-Some(o) => o.canonicalize()?,
+Some(o) => o
+.canonicalize()
+.fs_context("failed to canonicalize capability file path", o.clone())?,
 None => {
 let dir = tauri_dir();
 let capabilities_dir = dir.join("capabilities");
@@ -125,17 +128,21 @@ pub fn command(options: Options) -> Result<()> {
 );
 let overwrite = prompts::confirm(&format!("{msg}, overwrite?"), Some(false))?;
 if overwrite {
-std::fs::remove_file(&path)?;
+std::fs::remove_file(&path).fs_context("failed to remove capability file", path.clone())?;
 } else {
-anyhow::bail!(msg);
+crate::error::bail!(msg);
 }
 }

 if let Some(parent) = path.parent() {
-std::fs::create_dir_all(parent)?;
+std::fs::create_dir_all(parent).fs_context(
+"failed to create capability directory",
+parent.to_path_buf(),
+)?;
 }

-std::fs::write(&path, options.format.serialize(&capability)?)?;
+std::fs::write(&path, options.format.serialize(&capability)?)
+.fs_context("failed to write capability file", path.clone())?;

 log::info!(action = "Created"; "capability at {}", dunce::simplified(&path).display());

@@ -2,6 +2,7 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT

+use crate::error::Context;
 use serde::Serialize;
 use std::fmt::Display;

@@ -33,8 +34,8 @@ impl FileFormat {

 pub fn serialize<S: Serialize>(&self, s: &S) -> crate::Result<String> {
 let contents = match self {
-Self::Json => serde_json::to_string_pretty(s)?,
-Self::Toml => toml_edit::ser::to_string_pretty(s)?,
+Self::Json => serde_json::to_string_pretty(s).context("failed to serialize JSON")?,
+Self::Toml => toml_edit::ser::to_string_pretty(s).context("failed to serialize TOML")?,
 };
 Ok(contents)
 }
@@ -7,6 +7,7 @@ use std::path::Path;
 use clap::Parser;

 use crate::{
+error::{Context, ErrorExt},
 helpers::{app_paths::resolve_tauri_dir, prompts},
 Result,
 };
@@ -100,7 +101,9 @@ impl TomlOrJson {
 fn to_string(&self) -> Result<String> {
 Ok(match self {
 TomlOrJson::Toml(t) => t.to_string(),
-TomlOrJson::Json(j) => serde_json::to_string_pretty(&j)?,
+TomlOrJson::Json(j) => {
+serde_json::to_string_pretty(&j).context("failed to serialize JSON")?
+}
 })
 }
 }
@@ -131,12 +134,12 @@ pub struct Options {
 pub fn command(options: Options) -> Result<()> {
 let dir = match resolve_tauri_dir() {
 Some(t) => t,
-None => std::env::current_dir()?,
+None => std::env::current_dir().context("failed to resolve current directory")?,
 };

 let capabilities_dir = dir.join("capabilities");
 if !capabilities_dir.exists() {
-anyhow::bail!(
+crate::error::bail!(
 "Couldn't find capabilities directory at {}",
 dunce::simplified(&capabilities_dir).display()
 );
@@ -148,7 +151,11 @@ pub fn command(options: Options) -> Result<()> {
 .split_once(':')
 .and_then(|(plugin, _permission)| known_plugins.get(&plugin));

-let capabilities_iter = std::fs::read_dir(&capabilities_dir)?
+let capabilities_iter = std::fs::read_dir(&capabilities_dir)
+.fs_context(
+"failed to read capabilities directory",
+capabilities_dir.clone(),
+)?
 .flatten()
 .filter(|e| e.file_type().map(|e| e.is_file()).unwrap_or_default())
 .filter_map(|e| {
@@ -240,7 +247,7 @@ pub fn command(options: Options) -> Result<()> {
 )?;

 if selections.is_empty() {
-anyhow::bail!("You did not select any capabilities to update");
+crate::error::bail!("You did not select any capabilities to update");
 }

 selections
@@ -252,7 +259,7 @@ pub fn command(options: Options) -> Result<()> {
 };

 if capabilities.is_empty() {
-anyhow::bail!("Could not find a capability to update");
+crate::error::bail!("Could not find a capability to update");
 }

 for (capability, path) in &mut capabilities {
@@ -265,7 +272,8 @@ pub fn command(options: Options) -> Result<()> {
 );
 } else {
 capability.insert_permission(options.identifier.clone());
-std::fs::write(&*path, capability.to_string()?)?;
+std::fs::write(&*path, capability.to_string()?)
+.fs_context("failed to write capability file", path.clone())?;
 log::info!(action = "Added"; "permission `{}` to `{}` at {}", options.identifier, capability.identifier(), dunce::simplified(path).display());
 }
 }
|
|||||||
@@ -4,7 +4,11 @@
 use clap::Parser;

-use crate::{helpers::app_paths::tauri_dir, Result};
+use crate::{
+  error::{Context, ErrorExt},
+  helpers::app_paths::tauri_dir,
+  Result,
+};
 use colored::Colorize;
 use tauri_utils::acl::{manifest::Manifest, APP_ACL_KEY};

@@ -29,8 +33,10 @@ pub fn command(options: Options) -> Result<()> {
     .join("acl-manifests.json");

   if acl_manifests_path.exists() {
-    let plugin_manifest_json = read_to_string(&acl_manifests_path)?;
-    let acl = serde_json::from_str::<BTreeMap<String, Manifest>>(&plugin_manifest_json)?;
+    let plugin_manifest_json = read_to_string(&acl_manifests_path)
+      .fs_context("failed to read plugin manifest", acl_manifests_path.clone())?;
+    let acl = serde_json::from_str::<BTreeMap<String, Manifest>>(&plugin_manifest_json)
+      .context("failed to parse plugin manifest as JSON")?;

     for (key, manifest) in acl {
       if options
@@ -147,6 +153,6 @@ pub fn command(options: Options) -> Result<()> {

     Ok(())
   } else {
-    anyhow::bail!("permission file not found, please build your application once first")
+    crate::error::bail!("permission file not found, please build your application once first")
   }
 }
@@ -8,6 +8,7 @@ use clap::Parser;
 use crate::{
   acl::FileFormat,
+  error::{Context, ErrorExt},
   helpers::{app_paths::resolve_tauri_dir, prompts},
   Result,
 };
@@ -67,11 +68,13 @@ pub fn command(options: Options) -> Result<()> {
   };

   let path = match options.out {
-    Some(o) => o.canonicalize()?,
+    Some(o) => o
+      .canonicalize()
+      .fs_context("failed to canonicalize permission file path", o.clone())?,
     None => {
       let dir = match resolve_tauri_dir() {
         Some(t) => t,
-        None => std::env::current_dir()?,
+        None => std::env::current_dir().context("failed to resolve current directory")?,
       };
       let permissions_dir = dir.join("permissions");
       permissions_dir.join(format!(
@@ -89,24 +92,31 @@ pub fn command(options: Options) -> Result<()> {
     );
     let overwrite = prompts::confirm(&format!("{msg}, overwrite?"), Some(false))?;
     if overwrite {
-      std::fs::remove_file(&path)?;
+      std::fs::remove_file(&path).fs_context("failed to remove permission file", path.clone())?;
     } else {
-      anyhow::bail!(msg);
+      crate::error::bail!(msg);
     }
   }

   if let Some(parent) = path.parent() {
-    std::fs::create_dir_all(parent)?;
+    std::fs::create_dir_all(parent).fs_context(
+      "failed to create permission directory",
+      parent.to_path_buf(),
+    )?;
   }

   std::fs::write(
     &path,
-    options.format.serialize(&PermissionFile {
-      default: None,
-      set: Vec::new(),
-      permission: vec![permission],
-    })?,
-  )?;
+    options
+      .format
+      .serialize(&PermissionFile {
+        default: None,
+        set: Vec::new(),
+        permission: vec![permission],
+      })
+      .context("failed to serialize permission")?,
+  )
+  .fs_context("failed to write permission file", path.clone())?;

   log::info!(action = "Created"; "permission at {}", dunce::simplified(&path).display());
@@ -7,11 +7,21 @@ use std::path::Path;
 use clap::Parser;
 use tauri_utils::acl::{manifest::PermissionFile, PERMISSION_SCHEMA_FILE_NAME};

-use crate::{acl::FileFormat, helpers::app_paths::resolve_tauri_dir, Result};
+use crate::{
+  acl::FileFormat,
+  error::{Context, ErrorExt},
+  helpers::app_paths::resolve_tauri_dir,
+  Result,
+};

 fn rm_permission_files(identifier: &str, dir: &Path) -> Result<()> {
-  for entry in std::fs::read_dir(dir)?.flatten() {
-    let file_type = entry.file_type()?;
+  for entry in std::fs::read_dir(dir)
+    .fs_context("failed to read permissions directory", dir.to_path_buf())?
+    .flatten()
+  {
+    let file_type = entry
+      .file_type()
+      .fs_context("failed to get permission file type", entry.path())?;
     let path = entry.path();
     if file_type.is_dir() {
       rm_permission_files(identifier, &path)?;
@@ -27,12 +37,21 @@ fn rm_permission_files(identifier: &str, dir: &Path) -> Result<()> {
     let (mut permission_file, format): (PermissionFile, FileFormat) =
       match path.extension().and_then(|o| o.to_str()) {
         Some("toml") => {
-          let content = std::fs::read_to_string(&path)?;
-          (toml::from_str(&content)?, FileFormat::Toml)
+          let content = std::fs::read_to_string(&path)
+            .fs_context("failed to read permission file", path.clone())?;
+          (
+            toml::from_str(&content).context("failed to deserialize permission file")?,
+            FileFormat::Toml,
+          )
         }
         Some("json") => {
-          let content = std::fs::read(&path)?;
-          (serde_json::from_slice(&content)?, FileFormat::Json)
+          let content =
+            std::fs::read(&path).fs_context("failed to read permission file", path.clone())?;
+          (
+            serde_json::from_slice(&content)
+              .context("failed to parse permission file as JSON")?,
+            FileFormat::Json,
+          )
         }
         _ => {
           continue;
@@ -63,10 +82,16 @@ fn rm_permission_files(identifier: &str, dir: &Path) -> Result<()> {
       && permission_file.set.is_empty()
       && permission_file.permission.is_empty()
     {
-      std::fs::remove_file(&path)?;
+      std::fs::remove_file(&path).fs_context("failed to remove permission file", path.clone())?;
       log::info!(action = "Removed"; "file {}", dunce::simplified(&path).display());
     } else if updated {
-      std::fs::write(&path, format.serialize(&permission_file)?)?;
+      std::fs::write(
+        &path,
+        format
+          .serialize(&permission_file)
+          .context("failed to serialize permission")?,
+      )
+      .fs_context("failed to write permission file", path.clone())?;
       log::info!(action = "Removed"; "permission {identifier} from {}", dunce::simplified(&path).display());
     }
   }
@@ -76,13 +101,19 @@ fn rm_permission_files(identifier: &str, dir: &Path) -> Result<()> {
 }

 fn rm_permission_from_capabilities(identifier: &str, dir: &Path) -> Result<()> {
-  for entry in std::fs::read_dir(dir)?.flatten() {
-    let file_type = entry.file_type()?;
+  for entry in std::fs::read_dir(dir)
+    .fs_context("failed to read capabilities directory", dir.to_path_buf())?
+    .flatten()
+  {
+    let file_type = entry
+      .file_type()
+      .fs_context("failed to get capability file type", entry.path())?;
     if file_type.is_file() {
       let path = entry.path();
       match path.extension().and_then(|o| o.to_str()) {
         Some("toml") => {
-          let content = std::fs::read_to_string(&path)?;
+          let content = std::fs::read_to_string(&path)
+            .fs_context("failed to read capability file", path.clone())?;
           if let Ok(mut value) = content.parse::<toml_edit::DocumentMut>() {
             if let Some(permissions) = value.get_mut("permissions").and_then(|p| p.as_array_mut()) {
               let prev_len = permissions.len();
@@ -98,14 +129,16 @@ fn rm_permission_from_capabilities(identifier: &str, dir: &Path) -> Result<()> {
                 _ => false,
               });
               if prev_len != permissions.len() {
-                std::fs::write(&path, value.to_string())?;
+                std::fs::write(&path, value.to_string())
+                  .fs_context("failed to write capability file", path.clone())?;
                 log::info!(action = "Removed"; "permission from capability at {}", dunce::simplified(&path).display());
               }
             }
           }
         }
         Some("json") => {
-          let content = std::fs::read(&path)?;
+          let content =
+            std::fs::read(&path).fs_context("failed to read capability file", path.clone())?;
           if let Ok(mut value) = serde_json::from_slice::<serde_json::Value>(&content) {
             if let Some(permissions) = value.get_mut("permissions").and_then(|p| p.as_array_mut()) {
               let prev_len = permissions.len();
@@ -121,7 +154,12 @@ fn rm_permission_from_capabilities(identifier: &str, dir: &Path) -> Result<()> {
                 _ => false,
               });
               if prev_len != permissions.len() {
-                std::fs::write(&path, serde_json::to_vec_pretty(&value)?)?;
+                std::fs::write(
+                  &path,
+                  serde_json::to_vec_pretty(&value)
+                    .context("failed to serialize capability JSON")?,
+                )
+                .fs_context("failed to write capability file", path.clone())?;
                 log::info!(action = "Removed"; "permission from capability at {}", dunce::simplified(&path).display());
               }
             }
@@ -152,7 +190,9 @@ pub struct Options {
 }

 pub fn command(options: Options) -> Result<()> {
-  let permissions_dir = std::env::current_dir()?.join("permissions");
+  let permissions_dir = std::env::current_dir()
+    .context("failed to resolve current directory")?
+    .join("permissions");
   if permissions_dir.exists() {
     rm_permission_files(&options.identifier, &permissions_dir)?;
   }
@@ -8,6 +8,7 @@ use regex::Regex;
 use crate::{
   acl,
+  error::ErrorExt,
   helpers::{
     app_paths::{resolve_frontend_dir, tauri_dir},
     cargo,
@@ -64,7 +65,7 @@ pub fn run(options: Options) -> Result<()> {
   };

   if !is_known && (options.tag.is_some() || options.rev.is_some() || options.branch.is_some()) {
-    anyhow::bail!(
+    crate::error::bail!(
       "Git options --tag, --rev and --branch can only be used with official Tauri plugins"
     );
   }
@@ -114,7 +115,7 @@ pub fn run(options: Options) -> Result<()> {
       format!("tauri-apps/tauri-plugin-{plugin}#{branch}")
     }
     (None, None, None, None) => npm_name,
-    _ => anyhow::bail!("Only one of --tag, --rev and --branch can be specified"),
+    _ => crate::error::bail!("Only one of --tag, --rev and --branch can be specified"),
   };
   manager.install(&[npm_spec], tauri_dir)?;
 }
@@ -141,9 +142,10 @@ pub fn run(options: Options) -> Result<()> {
   };
   let plugin_init = format!(".plugin(tauri_plugin_{plugin_snake_case}::{plugin_init_fn})");

-  let re = Regex::new(r"(tauri\s*::\s*Builder\s*::\s*default\(\))(\s*)")?;
+  let re = Regex::new(r"(tauri\s*::\s*Builder\s*::\s*default\(\))(\s*)").unwrap();
   for file in [tauri_dir.join("src/main.rs"), tauri_dir.join("src/lib.rs")] {
-    let contents = std::fs::read_to_string(&file)?;
+    let contents =
+      std::fs::read_to_string(&file).fs_context("failed to read Rust entry point", file.clone())?;

     if contents.contains(&plugin_init) {
       log::info!(
@@ -157,7 +159,7 @@ pub fn run(options: Options) -> Result<()> {
     let out = re.replace(&contents, format!("$1$2{plugin_init}$2"));

     log::info!("Adding plugin to {}", file.display());
-    std::fs::write(file, out.as_bytes())?;
+    std::fs::write(&file, out.as_bytes()).fs_context("failed to write plugin init code", file)?;

     if !options.no_fmt {
       // reformat code with rustfmt
@@ -4,6 +4,7 @@
 use crate::{
   bundle::BundleFormat,
+  error::{Context, ErrorExt},
   helpers::{
     self,
     app_paths::{frontend_dir, tauri_dir},
@@ -13,7 +14,6 @@ use crate::{
   interface::{rust::get_cargo_target_dir, AppInterface, Interface},
   ConfigValue, Result,
 };
-use anyhow::Context;
 use clap::{ArgAction, Parser};
 use std::env::set_current_dir;
 use tauri_utils::config::RunnerConfig;
@@ -160,7 +160,7 @@ pub fn setup(
-  set_current_dir(tauri_path).with_context(|| "failed to change current working directory")?;
+  set_current_dir(tauri_path).context("failed to set current directory")?;

   let config_guard = config.lock().unwrap();
   let config_ = config_guard.as_ref().unwrap();
@@ -170,7 +170,7 @@ pub fn setup(
     .unwrap_or_else(|| "tauri.conf.json".into());

   if config_.identifier == "com.tauri.dev" {
-    anyhow::bail!(
+    crate::error::bail!(
       "You must change the bundle identifier in `{bundle_identifier_source} identifier`. The default value `com.tauri.dev` is not allowed as it must be unique across applications.",
     );
   }
@@ -180,7 +180,7 @@ pub fn setup(
     .chars()
     .any(|ch| !(ch.is_alphanumeric() || ch == '-' || ch == '.'))
   {
-    anyhow::bail!(
+    crate::error::bail!(
       "The bundle identifier \"{}\" set in `{} identifier`. The bundle identifier string must contain only alphanumeric characters (A-Z, a-z, and 0-9), hyphens (-), and periods (.).",
       config_.identifier,
       bundle_identifier_source
@@ -206,15 +206,20 @@ pub fn setup(
       .and_then(|p| p.canonicalize().ok())
       .map(|p| p.join(web_asset_path.file_name().unwrap()))
       .unwrap_or_else(|| std::env::current_dir().unwrap().join(web_asset_path));
-    return Err(anyhow::anyhow!(
+    crate::error::bail!(
       "Unable to find your web assets, did you forget to build your web app? Your frontendDist is set to \"{}\" (which is `{}`).",
       web_asset_path.display(), absolute_path.display(),
-    ));
+    );
   }
-  if web_asset_path.canonicalize()?.file_name() == Some(std::ffi::OsStr::new("src-tauri")) {
-    return Err(anyhow::anyhow!(
+  if web_asset_path
+    .canonicalize()
+    .fs_context("failed to canonicalize path", web_asset_path.to_path_buf())?
+    .file_name()
+    == Some(std::ffi::OsStr::new("src-tauri"))
+  {
+    crate::error::bail!(
       "The configured frontendDist is the `src-tauri` folder. Please isolate your web assets on a separate folder and update `tauri.conf.json > build > frontendDist`.",
-    ));
+    );
   }

   // Issue #13287 - Allow the use of target dir inside frontendDist/distDir
@@ -238,11 +243,11 @@ pub fn setup(
   if !out_folders.is_empty() {
-    return Err(anyhow::anyhow!(
+    crate::error::bail!(
       "The configured frontendDist includes the `{:?}` {}. Please isolate your web assets on a separate folder and update `tauri.conf.json > build > frontendDist`.",
       out_folders,
       if out_folders.len() == 1 { "folder" } else { "folders" }
-    ));
+    );
   }
@@ -8,12 +8,12 @@ use std::{
   sync::OnceLock,
 };

-use anyhow::Context;
 use clap::{builder::PossibleValue, ArgAction, Parser, ValueEnum};
 use tauri_bundler::PackageType;
 use tauri_utils::platform::Target;

 use crate::{
+  error::{Context, ErrorExt},
   helpers::{
     self,
     app_paths::tauri_dir,
@@ -28,11 +28,11 @@ use crate::{
 pub struct BundleFormat(PackageType);

 impl FromStr for BundleFormat {
-  type Err = anyhow::Error;
+  type Err = crate::Error;
   fn from_str(s: &str) -> crate::Result<Self> {
     PackageType::from_short_name(s)
       .map(Self)
-      .ok_or_else(|| anyhow::anyhow!("unknown bundle format {s}"))
+      .with_context(|| format!("unknown bundle format {s}"))
   }
 }
@@ -139,8 +139,7 @@ pub fn command(options: Options, verbosity: u8) -> crate::Result<()> {
   let tauri_path = tauri_dir();
-  std::env::set_current_dir(tauri_path)
-    .with_context(|| "failed to change current working directory")?;
+  std::env::set_current_dir(tauri_path).context("failed to set current directory")?;

   let config_guard = config.lock().unwrap();
   let config_ = config_guard.as_ref().unwrap();
@@ -214,12 +213,7 @@ pub fn bundle<A: AppSettings>(
     _ => log::Level::Trace,
   });

-  let bundles = tauri_bundler::bundle_project(&settings)
-    .map_err(|e| match e {
-      tauri_bundler::Error::BundlerError(e) => e,
-      e => anyhow::anyhow!("{e:#}"),
-    })
-    .with_context(|| "failed to bundle project")?;
+  let bundles = tauri_bundler::bundle_project(&settings).map_err(Box::new)?;

   sign_updaters(settings, bundles, ci)?;
@@ -260,7 +254,8 @@ fn sign_updaters(
   // check if pubkey points to a file...
   let maybe_path = Path::new(pubkey);
   let pubkey = if maybe_path.exists() {
-    std::fs::read_to_string(maybe_path)?
+    std::fs::read_to_string(maybe_path)
+      .fs_context("failed to read pubkey from file", maybe_path.to_path_buf())?
   } else {
     pubkey.to_string()
   };
@@ -272,12 +267,15 @@ fn sign_updaters(
   // get the private key
   let private_key = std::env::var("TAURI_SIGNING_PRIVATE_KEY")
-    .map_err(|_| anyhow::anyhow!("A public key has been found, but no private key. Make sure to set `TAURI_SIGNING_PRIVATE_KEY` environment variable."))?;
+    .ok()
+    .context("A public key has been found, but no private key. Make sure to set `TAURI_SIGNING_PRIVATE_KEY` environment variable.")?;
   // check if private_key points to a file...
   let maybe_path = Path::new(&private_key);
   let private_key = if maybe_path.exists() {
-    std::fs::read_to_string(maybe_path)
-      .with_context(|| format!("faild to read {}", maybe_path.display()))?
+    std::fs::read_to_string(maybe_path).fs_context(
+      "failed to read private key from file",
+      maybe_path.to_path_buf(),
+    )?
   } else {
     private_key
   };
@@ -315,11 +313,11 @@ fn print_signed_updater_archive(output_paths: &[PathBuf]) -> crate::Result<()> {
   let mut printable_paths = String::new();
   for path in output_paths {
-    writeln!(
+    let _ = writeln!(
       printable_paths,
       "  {}",
       tauri_utils::display_path(path)
-    )?;
+    );
   }
   log::info!( action = "Finished"; "{finished_bundles} {pluralised} at:\n{printable_paths}");
@@ -2,8 +2,7 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT

-use crate::Result;
-use anyhow::Context;
+use crate::{error::ErrorExt, Result};
 use clap::{Command, Parser};
 use clap_complete::{generate, Shell};

@@ -95,7 +94,7 @@ pub fn command(options: Options, cmd: Command) -> Result<()> {
   let completions = get_completions(options.shell, cmd)?;
   if let Some(output) = options.output {
-    write(output, completions).context("failed to write to output path")?;
+    write(&output, completions).fs_context("failed to write to completions", output)?;
   } else {
     print!("{completions}");
   }
@@ -3,6 +3,7 @@
 // SPDX-License-Identifier: MIT

 use crate::{
+  error::{Context, ErrorExt},
   helpers::{
     app_paths::{frontend_dir, tauri_dir},
     command_env,
@@ -12,10 +13,9 @@ use crate::{
   },
   info::plugins::check_mismatched_packages,
   interface::{AppInterface, ExitReason, Interface},
-  CommandExt, ConfigValue, Result,
+  CommandExt, ConfigValue, Error, Result,
 };

-use anyhow::{bail, Context};
 use clap::{ArgAction, Parser};
 use shared_child::SharedChild;
 use tauri_utils::{config::RunnerConfig, platform::Target};
@@ -143,7 +143,7 @@ pub fn setup(interface: &AppInterface, options: &mut Options, config: ConfigHand
   });

-  set_current_dir(tauri_path).with_context(|| "failed to change current working directory")?;
+  set_current_dir(tauri_path).context("failed to set current directory")?;

   if let Some(before_dev) = config
     .lock()
@@ -190,15 +190,15 @@ pub fn setup(interface: &AppInterface, options: &mut Options, config: ConfigHand
   };

   if wait {
-    let status = command.piped().with_context(|| {
-      format!(
-        "failed to run `{}` with `{}`",
-        before_dev,
+    let status = command.piped().map_err(|error| Error::CommandFailed {
+      command: format!(
+        "`{before_dev}` with `{}`",
         if cfg!(windows) { "cmd /S /C" } else { "sh -c" }
-      )
+      ),
+      error,
     })?;
     if !status.success() {
-      bail!(
+      crate::error::bail!(
         "beforeDevCommand `{}` failed with exit code {}",
         before_dev,
         status.code().unwrap_or_default()
@@ -206,8 +206,8 @@ pub fn setup(interface: &AppInterface, options: &mut Options, config: ConfigHand
     }
   } else {
     command.stdin(Stdio::piped());
-    command.stdout(os_pipe::dup_stdout()?);
-    command.stderr(os_pipe::dup_stderr()?);
+    command.stdout(os_pipe::dup_stdout().unwrap());
+    command.stderr(os_pipe::dup_stderr().unwrap());

     let child = SharedChild::spawn(&mut command)
       .unwrap_or_else(|_| panic!("failed to run `{before_dev}`"));
@@ -278,13 +278,16 @@ pub fn setup(interface: &AppInterface, options: &mut Options, config: ConfigHand
   if !options.no_dev_server && dev_url.is_none() {
     if let Some(FrontendDist::Directory(path)) = &frontend_dist {
       if path.exists() {
-        let path = path.canonicalize()?;
+        let path = path
+          .canonicalize()
+          .fs_context("failed to canonicalize path", path.to_path_buf())?;

         let ip = options
           .host
           .unwrap_or_else(|| Ipv4Addr::new(127, 0, 0, 1).into());

-        let server_url = builtin_dev_server::start(path, ip, options.port)?;
+        let server_url = builtin_dev_server::start(path, ip, options.port)
+          .context("failed to start builtin dev server")?;
         let server_url = format!("http://{server_url}");
         dev_url = Some(server_url.parse().unwrap());
@@ -312,7 +315,7 @@ pub fn setup(interface: &AppInterface, options: &mut Options, config: ConfigHand
   let addrs = match host {
     url::Host::Domain(domain) => {
       use std::net::ToSocketAddrs;
-      addrs = (domain, port).to_socket_addrs()?;
+      addrs = (domain, port).to_socket_addrs().unwrap();
       addrs.as_slice()
     }
     url::Host::Ipv4(ip) => {
@@ -18,6 +18,8 @@ use std::{
 use tauri_utils::mime_type::MimeType;
 use tokio::sync::broadcast::{channel, Sender};

+use crate::error::ErrorExt;
+
 const RELOAD_SCRIPT: &str = include_str!("./auto-reload.js");

 #[derive(Clone)]
@@ -29,7 +31,8 @@ struct ServerState {

 pub fn start<P: AsRef<Path>>(dir: P, ip: IpAddr, port: Option<u16>) -> crate::Result<SocketAddr> {
   let dir = dir.as_ref();
-  let dir = dunce::canonicalize(dir)?;
+  let dir =
+    dunce::canonicalize(dir).fs_context("failed to canonicalize path", dir.to_path_buf())?;

   // bind port and tcp listener
   let auto_port = port.is_none();
@@ -37,12 +40,12 @@ pub fn start<P: AsRef<Path>>(dir: P, ip: IpAddr, port: Option<u16>) -> crate::Re
   let (tcp_listener, address) = loop {
     let address = SocketAddr::new(ip, port);
     if let Ok(tcp) = std::net::TcpListener::bind(address) {
-      tcp.set_nonblocking(true)?;
+      tcp.set_nonblocking(true).unwrap();
       break (tcp, address);
     }

     if !auto_port {
-      anyhow::bail!("Couldn't bind to {port} on {ip}");
+      crate::error::bail!("Couldn't bind to {port} on {ip}");
     }

     port += 1;
@@ -152,11 +155,11 @@ fn inject_address(html_bytes: Vec<u8>, address: &SocketAddr) -> Vec<u8> {
 }

 fn fs_read_scoped(path: PathBuf, scope: &Path) -> crate::Result<Vec<u8>> {
-  let path = dunce::canonicalize(path)?;
+  let path = dunce::canonicalize(&path).fs_context("failed to canonicalize path", path.clone())?;
   if path.starts_with(scope) {
-    std::fs::read(path).map_err(Into::into)
+    std::fs::read(&path).fs_context("failed to read file", path.clone())
   } else {
-    anyhow::bail!("forbidden path")
+    crate::error::bail!("forbidden path")
   }
 }
crates/tauri-cli/src/error.rs (new file, 105 lines)
@@ -0,0 +1,105 @@
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT

use std::{fmt::Display, path::PathBuf};

#[derive(Debug, thiserror::Error)]
pub enum Error {
  #[error("{0}: {1}")]
  Context(String, Box<dyn std::error::Error + Send + Sync + 'static>),
  #[error("{0}")]
  GenericError(String),
  #[error("failed to bundle project {0}")]
  Bundler(#[from] Box<tauri_bundler::Error>),
  #[error("{context} {path}: {error}")]
  Fs {
    context: &'static str,
    path: PathBuf,
    error: std::io::Error,
  },
  #[error("failed to run command {command}: {error}")]
  CommandFailed {
    command: String,
    error: std::io::Error,
  },
  #[cfg(target_os = "macos")]
  #[error(transparent)]
  MacosSign(#[from] Box<tauri_macos_sign::Error>),
}

/// Convenient type alias of Result type.
pub type Result<T> = std::result::Result<T, Error>;

pub trait Context<T> {
  // Required methods
  fn context<C>(self, context: C) -> Result<T>
  where
    C: Display + Send + Sync + 'static;
  fn with_context<C, F>(self, f: F) -> Result<T>
  where
    C: Display + Send + Sync + 'static,
    F: FnOnce() -> C;
}

impl<T, E: std::error::Error + Send + Sync + 'static> Context<T> for std::result::Result<T, E> {
  fn context<C>(self, context: C) -> Result<T>
  where
    C: Display + Send + Sync + 'static,
  {
    self.map_err(|e| Error::Context(context.to_string(), Box::new(e)))
  }

  fn with_context<C, F>(self, f: F) -> Result<T>
  where
    C: Display + Send + Sync + 'static,
    F: FnOnce() -> C,
  {
    self.map_err(|e| Error::Context(f().to_string(), Box::new(e)))
  }
}

impl<T> Context<T> for Option<T> {
  fn context<C>(self, context: C) -> Result<T>
  where
    C: Display + Send + Sync + 'static,
  {
    self.ok_or_else(|| Error::GenericError(context.to_string()))
  }

  fn with_context<C, F>(self, f: F) -> Result<T>
  where
    C: Display + Send + Sync + 'static,
    F: FnOnce() -> C,
  {
    self.ok_or_else(|| Error::GenericError(f().to_string()))
  }
}

pub trait ErrorExt<T> {
  fn fs_context(self, context: &'static str, path: impl Into<PathBuf>) -> Result<T>;
}

impl<T> ErrorExt<T> for std::result::Result<T, std::io::Error> {
  fn fs_context(self, context: &'static str, path: impl Into<PathBuf>) -> Result<T> {
    self.map_err(|error| Error::Fs {
      context,
      path: path.into(),
      error,
    })
  }
}

macro_rules! bail {
  ($msg:literal $(,)?) => {
    return Err(crate::Error::GenericError($msg.into()))
  };
  ($err:expr $(,)?) => {
    return Err(crate::Error::GenericError($err))
  };
  ($fmt:expr, $($arg:tt)*) => {
    return Err(crate::Error::GenericError(format!($fmt, $($arg)*)))
  };
}

pub(crate) use bail;
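The new error module above is the drop-in replacement for the CLI's previous anyhow-based plumbing. As a minimal sketch of how the pieces compose at a call site (the helper below is illustrative only and not part of this commit; it simply mirrors the pattern used throughout the diff): `ErrorExt::fs_context` attaches a static message plus the offending path to an `std::io::Error`, `Context::context` wraps any other error type (or a `None`) with a message, and `bail!` returns an `Error::GenericError` early.

// Illustrative sketch, assuming a hypothetical load_manifest helper inside the CLI crate.
use crate::error::{Context, ErrorExt};

fn load_manifest(path: &std::path::Path) -> crate::Result<serde_json::Value> {
  if !path.exists() {
    // bail! expands to an early return with Error::GenericError
    crate::error::bail!("manifest not found at {}", path.display());
  }
  // fs_context converts the io::Error into Error::Fs { context, path, error }
  let raw = std::fs::read_to_string(path)
    .fs_context("failed to read manifest", path.to_path_buf())?;
  // context wraps the serde_json::Error into Error::Context
  serde_json::from_str(&raw).context("failed to parse manifest as JSON")
}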
@@ -4,7 +4,7 @@
 use std::process::Command;

-use anyhow::Context;
+use crate::Error;

 #[derive(Debug, Default, Clone, Copy)]
 pub struct CargoInstallOptions<'a> {
@@ -41,7 +41,7 @@ pub fn install_one(options: CargoInstallOptions) -> crate::Result<()> {
       cargo.args(["--branch", branch]);
     }
     (None, None, None) => {}
-    _ => anyhow::bail!("Only one of --tag, --rev and --branch can be specified"),
+    _ => crate::error::bail!("Only one of --tag, --rev and --branch can be specified"),
   };
 }
@@ -54,9 +54,12 @@ pub fn install_one(options: CargoInstallOptions) -> crate::Result<()> {
   }

   log::info!("Installing Cargo dependency \"{}\"...", options.name);
-  let status = cargo.status().context("failed to run `cargo add`")?;
+  let status = cargo.status().map_err(|error| Error::CommandFailed {
+    command: "cargo add".to_string(),
+    error,
+  })?;
   if !status.success() {
-    anyhow::bail!("Failed to install Cargo dependency");
+    crate::error::bail!("Failed to install Cargo dependency");
   }

   Ok(())
@@ -84,9 +87,12 @@ pub fn uninstall_one(options: CargoUninstallOptions) -> crate::Result<()> {
   }

   log::info!("Uninstalling Cargo dependency \"{}\"...", options.name);
-  let status = cargo.status().context("failed to run `cargo remove`")?;
+  let status = cargo.status().map_err(|error| Error::CommandFailed {
+    command: "cargo remove".to_string(),
+    error,
+  })?;
   if !status.success() {
-    anyhow::bail!("Failed to remove Cargo dependency");
+    crate::error::bail!("Failed to remove Cargo dependency");
   }

   Ok(())
@@ -17,6 +17,8 @@ use std::{
   sync::{Arc, Mutex, OnceLock},
 };

+use crate::error::Context;
+
 pub const MERGE_CONFIG_EXTENSION_NAME: &str = "--config";

 pub struct ConfigMetadata {
@@ -156,7 +158,8 @@ fn get_internal(
   let tauri_dir = super::app_paths::tauri_dir();
   let (mut config, config_path) =
-    tauri_utils::config::parse::parse_value(target, tauri_dir.join("tauri.conf.json"))?;
+    tauri_utils::config::parse::parse_value(target, tauri_dir.join("tauri.conf.json"))
+      .context("failed to parse config")?;
   let config_file_name = config_path.file_name().unwrap().to_string_lossy();
   let mut extensions = HashMap::new();
@@ -167,7 +170,8 @@ fn get_internal(
     .map(ToString::to_string);

   if let Some((platform_config, config_path)) =
-    tauri_utils::config::parse::read_platform(target, tauri_dir)?
+    tauri_utils::config::parse::read_platform(target, tauri_dir)
+      .context("failed to parse platform config")?
   {
     merge(&mut config, &platform_config);
     extensions.insert(
@@ -191,7 +195,8 @@ fn get_internal(
   if config_path.extension() == Some(OsStr::new("json"))
     || config_path.extension() == Some(OsStr::new("json5"))
   {
-    let schema: JsonValue = serde_json::from_str(include_str!("../../config.schema.json"))?;
+    let schema: JsonValue = serde_json::from_str(include_str!("../../config.schema.json"))
+      .context("failed to parse config schema")?;
     let validator = jsonschema::validator_for(&schema).expect("Invalid schema");
     let mut errors = validator.iter_errors(&config).peekable();
     if errors.peek().is_some() {
@@ -211,11 +216,11 @@ fn get_internal(
   // the `Config` deserializer for `package > version` can resolve the version from a path relative to the config path
   // so we actually need to change the current working directory here
-  let current_dir = current_dir()?;
-  set_current_dir(config_path.parent().unwrap())?;
-  let config: Config = serde_json::from_value(config)?;
+  let current_dir = current_dir().context("failed to resolve current directory")?;
+  set_current_dir(config_path.parent().unwrap()).context("failed to set current directory")?;
+  let config: Config = serde_json::from_value(config).context("failed to parse config")?;
   // revert to previous working directory
-  set_current_dir(current_dir)?;
+  set_current_dir(current_dir).context("failed to set current directory")?;

   for (plugin, conf) in &config.plugins.0 {
     set_var(
@@ -223,7 +228,7 @@ fn get_internal(
       "TAURI_{}_PLUGIN_CONFIG",
       plugin.to_uppercase().replace('-', "_")
     ),
-      serde_json::to_string(&conf)?,
+      serde_json::to_string(&conf).context("failed to serialize config")?,
     );
   }
@@ -254,7 +259,7 @@ pub fn reload(merge_configs: &[&serde_json::Value]) -> crate::Result<ConfigHandl
   if let Some(target) = target {
     get_internal(merge_configs, true, target)
   } else {
-    Err(anyhow::anyhow!("config not loaded"))
+    crate::error::bail!("config not loaded");
   }
 }
@@ -275,13 +280,14 @@ pub fn merge_with(merge_configs: &[&serde_json::Value]) -> crate::Result<ConfigH
     let merge_config_str = serde_json::to_string(&merge_config).unwrap();
     set_var("TAURI_CONFIG", merge_config_str);

-    let mut value = serde_json::to_value(config_metadata.inner.clone())?;
+    let mut value =
+      serde_json::to_value(config_metadata.inner.clone()).context("failed to serialize config")?;
     merge(&mut value, &merge_config);
-    config_metadata.inner = serde_json::from_value(value)?;
+    config_metadata.inner = serde_json::from_value(value).context("failed to parse config")?;

     Ok(handle.clone())
   } else {
-    Err(anyhow::anyhow!("config not loaded"))
+    crate::error::bail!("config not loaded");
   }
 }
@@ -10,8 +10,7 @@ use std::io;
 use std::io::{Read, Seek, SeekFrom, Write};
 use std::path::{Path, PathBuf};

-use crate::Result;
-use anyhow::Context as _;
+use crate::{error::ErrorExt, Error, Result};
 use sys::*;

 #[derive(Debug)]
@@ -129,17 +128,25 @@ fn open(path: &Path, opts: &OpenOptions, state: State, msg: &str) -> Result<File
     // If we want an exclusive lock then if we fail because of NotFound it's
     // likely because an intermediate directory didn't exist, so try to
     // create the directory and then continue.
-    let f = opts
-      .open(path)
-      .or_else(|e| {
-        if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
-          create_dir_all(path.parent().unwrap())?;
-          Ok(opts.open(path)?)
-        } else {
-          Err(anyhow::Error::from(e))
-        }
-      })
-      .with_context(|| format!("failed to open: {}", path.display()))?;
+    let f = opts.open(path).or_else(|e| {
+      if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
+        create_dir_all(path.parent().unwrap()).fs_context(
+          "failed to create directory",
+          path.parent().unwrap().to_path_buf(),
+        )?;
+        Ok(
+          opts
+            .open(path)
+            .fs_context("failed to open file", path.to_path_buf())?,
+        )
+      } else {
+        Err(Error::Fs {
+          context: "failed to open file",
+          path: path.to_path_buf(),
+          error: e,
+        })
+      }
+    })?;
     match state {
       State::Exclusive => {
         acquire(msg, path, &|| try_lock_exclusive(&f), &|| {
@@ -203,16 +210,18 @@ fn acquire(
       Err(e) => {
         if !error_contended(&e) {
-          let e = anyhow::Error::from(e);
-          let cx = format!("failed to lock file: {}", path.display());
-          return Err(e.context(cx));
+          return Err(Error::Fs {
+            context: "failed to lock file",
+            path: path.to_path_buf(),
+            error: e,
+          });
         }
       }
     }
     let msg = format!("waiting for file lock on {msg}");
     log::info!(action = "Blocking"; "{}", &msg);

-    lock_block().with_context(|| format!("failed to lock file: {}", path.display()))?;
+    lock_block().fs_context("failed to lock file", path.to_path_buf())?;
     return Ok(());

     #[cfg(all(target_os = "linux", not(target_env = "musl")))]
@@ -2,20 +2,29 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT

-use anyhow::Result;
+use crate::{error::ErrorExt, Error};
 use std::path::Path;

-pub fn copy_file(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<()> {
+pub fn copy_file(from: impl AsRef<Path>, to: impl AsRef<Path>) -> crate::Result<()> {
   let from = from.as_ref();
   let to = to.as_ref();
   if !from.exists() {
-    return Err(anyhow::anyhow!("{:?} does not exist", from));
+    Err(Error::Fs {
+      context: "failed to copy file",
+      path: from.to_path_buf(),
+      error: std::io::Error::new(std::io::ErrorKind::NotFound, "source does not exist"),
+    })?;
   }
   if !from.is_file() {
-    return Err(anyhow::anyhow!("{:?} is not a file", from));
+    Err(Error::Fs {
+      context: "failed to copy file",
+      path: from.to_path_buf(),
+      error: std::io::Error::other("not a file"),
+    })?;
   }
   let dest_dir = to.parent().expect("No data in parent");
-  std::fs::create_dir_all(dest_dir)?;
-  std::fs::copy(from, to)?;
+  std::fs::create_dir_all(dest_dir)
+    .fs_context("failed to create directory", dest_dir.to_path_buf())?;
+  std::fs::copy(from, to).fs_context("failed to copy file", from.to_path_buf())?;
   Ok(())
 }
@@ -24,9 +24,10 @@ use std::{
   process::Command,
 };

-use anyhow::Context;
 use tauri_utils::config::HookCommand;

+#[cfg(not(target_os = "windows"))]
+use crate::Error;
 use crate::{
   interface::{AppInterface, Interface},
   CommandExt,
@@ -98,7 +99,10 @@ pub fn run_hook(
     .current_dir(cwd)
     .envs(env)
     .piped()
-    .with_context(|| format!("failed to run `{script}` with `cmd /C`"))?;
+    .map_err(|error| crate::error::Error::CommandFailed {
+      command: script.clone(),
+      error,
+    })?;
   #[cfg(not(target_os = "windows"))]
   let status = Command::new("sh")
     .arg("-c")
@@ -106,10 +110,13 @@ pub fn run_hook(
     .current_dir(cwd)
     .envs(env)
     .piped()
-    .with_context(|| format!("failed to run `{script}` with `sh -c`"))?;
+    .map_err(|error| Error::CommandFailed {
+      command: script.clone(),
+      error,
+    })?;

   if !status.success() {
-    anyhow::bail!(
+    crate::error::bail!(
       "{} `{}` failed with exit code {}",
       name,
       script,
@@ -123,6 +130,7 @@ pub fn run_hook(

 #[cfg(target_os = "macos")]
 pub fn strip_semver_prerelease_tag(version: &mut semver::Version) -> crate::Result<()> {
+  use crate::error::Context;
   if !version.pre.is_empty() {
     if let Some((_prerelease_tag, number)) = version.pre.as_str().to_string().split_once('.') {
       version.pre = semver::Prerelease::EMPTY;
@@ -134,7 +142,11 @@ pub fn strip_semver_prerelease_tag(version: &mut semver::Version) -> crate::Resu
           format!(".{}", version.build.as_str())
         }
       ))
-      .with_context(|| format!("bundle version {number:?} prerelease is invalid"))?;
+      .with_context(|| {
+        format!(
+          "failed to parse {version} as semver: bundle version {number:?} prerelease is invalid"
+        )
+      })?;
     }
   }
@@ -2,10 +2,12 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT

-use anyhow::Context;
 use serde::Deserialize;

-use crate::helpers::cross_command;
+use crate::{
+  error::{Context, Error},
+  helpers::cross_command,
+};
 use std::{collections::HashMap, fmt::Display, path::Path, process::Command};

 pub fn manager_version(package_manager: &str) -> Option<String> {
@@ -151,10 +153,13 @@ impl PackageManager {
     let status = command
       .current_dir(frontend_dir)
       .status()
-      .with_context(|| format!("failed to run {self}"))?;
+      .map_err(|error| Error::CommandFailed {
+        command: format!("failed to run {self}"),
+        error,
+      })?;

     if !status.success() {
-      anyhow::bail!("Failed to install NPM {dependencies_str}");
+      crate::error::bail!("Failed to install NPM {dependencies_str}");
     }

     Ok(())
@@ -189,10 +194,13 @@ impl PackageManager {
       .args(dependencies)
       .current_dir(frontend_dir)
       .status()
-      .with_context(|| format!("failed to run {self}"))?;
+      .map_err(|error| Error::CommandFailed {
+        command: format!("failed to run {self}"),
+        error,
+      })?;

     if !status.success() {
-      anyhow::bail!("Failed to remove NPM {dependencies_str}");
+      crate::error::bail!("Failed to remove NPM {dependencies_str}");
     }

     Ok(())
@@ -211,7 +219,11 @@ impl PackageManager {
         .arg(name)
         .args(["--depth", "0"])
         .current_dir(frontend_dir)
-        .output()?,
+        .output()
+        .map_err(|error| Error::CommandFailed {
+          command: "yarn list --pattern".to_string(),
+          error,
+        })?,
        None,
       ),
       PackageManager::YarnBerry => (
@@ -220,7 +232,11 @@ impl PackageManager {
         .arg(name)
         .arg("--json")
         .current_dir(frontend_dir)
-        .output()?,
+        .output()
+        .map_err(|error| Error::CommandFailed {
+          command: "yarn info --json".to_string(),
+          error,
+        })?,
         Some(regex::Regex::new("\"Version\":\"([\\da-zA-Z\\-\\.]+)\"").unwrap()),
       ),
       PackageManager::Pnpm => (
@@ -229,7 +245,11 @@ impl PackageManager {
         .arg(name)
         .args(["--parseable", "--depth", "0"])
         .current_dir(frontend_dir)
-        .output()?,
+        .output()
+        .map_err(|error| Error::CommandFailed {
+          command: "pnpm list --parseable --depth 0".to_string(),
+          error,
+        })?,
         None,
       ),
       // Bun and Deno don't support `list` command
@@ -239,7 +259,11 @@ impl PackageManager {
         .arg(name)
         .args(["version", "--depth", "0"])
         .current_dir(frontend_dir)
-        .output()?,
+        .output()
+        .map_err(|error| Error::CommandFailed {
+          command: "npm list --version --depth 0".to_string(),
|
||||||
|
error,
|
||||||
|
})?,
|
||||||
None,
|
None,
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
@@ -270,14 +294,22 @@ impl PackageManager {
|
|||||||
.args(packages)
|
.args(packages)
|
||||||
.args(["--json", "--depth", "0"])
|
.args(["--json", "--depth", "0"])
|
||||||
.current_dir(frontend_dir)
|
.current_dir(frontend_dir)
|
||||||
.output()?,
|
.output()
|
||||||
|
.map_err(|error| Error::CommandFailed {
|
||||||
|
command: "pnpm list --json --depth 0".to_string(),
|
||||||
|
error,
|
||||||
|
})?,
|
||||||
// Bun and Deno don't support `list` command
|
// Bun and Deno don't support `list` command
|
||||||
PackageManager::Npm | PackageManager::Bun | PackageManager::Deno => cross_command("npm")
|
PackageManager::Npm | PackageManager::Bun | PackageManager::Deno => cross_command("npm")
|
||||||
.arg("list")
|
.arg("list")
|
||||||
.args(packages)
|
.args(packages)
|
||||||
.args(["--json", "--depth", "0"])
|
.args(["--json", "--depth", "0"])
|
||||||
.current_dir(frontend_dir)
|
.current_dir(frontend_dir)
|
||||||
.output()?,
|
.output()
|
||||||
|
.map_err(|error| Error::CommandFailed {
|
||||||
|
command: "npm list --json --depth 0".to_string(),
|
||||||
|
error,
|
||||||
|
})?,
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut versions = HashMap::new();
|
let mut versions = HashMap::new();
|
||||||
@@ -300,7 +332,7 @@ impl PackageManager {
|
|||||||
version: String,
|
version: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
let json: ListOutput = serde_json::from_str(&stdout)?;
|
let json: ListOutput = serde_json::from_str(&stdout).context("failed to parse npm list")?;
|
||||||
for (package, dependency) in json.dependencies.into_iter().chain(json.dev_dependencies) {
|
for (package, dependency) in json.dependencies.into_iter().chain(json.dev_dependencies) {
|
||||||
let version = dependency.version;
|
let version = dependency.version;
|
||||||
if let Ok(version) = semver::Version::parse(&version) {
|
if let Ok(version) = semver::Version::parse(&version) {
|
||||||
@@ -322,7 +354,11 @@ fn yarn_package_versions(
|
|||||||
.args(packages)
|
.args(packages)
|
||||||
.args(["--json", "--depth", "0"])
|
.args(["--json", "--depth", "0"])
|
||||||
.current_dir(frontend_dir)
|
.current_dir(frontend_dir)
|
||||||
.output()?;
|
.output()
|
||||||
|
.map_err(|error| Error::CommandFailed {
|
||||||
|
command: "yarn list --json --depth 0".to_string(),
|
||||||
|
error,
|
||||||
|
})?;
|
||||||
|
|
||||||
let mut versions = HashMap::new();
|
let mut versions = HashMap::new();
|
||||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||||
@@ -371,7 +407,11 @@ fn yarn_berry_package_versions(
|
|||||||
let output = cross_command("yarn")
|
let output = cross_command("yarn")
|
||||||
.args(["info", "--json"])
|
.args(["info", "--json"])
|
||||||
.current_dir(frontend_dir)
|
.current_dir(frontend_dir)
|
||||||
.output()?;
|
.output()
|
||||||
|
.map_err(|error| Error::CommandFailed {
|
||||||
|
command: "yarn info --json".to_string(),
|
||||||
|
error,
|
||||||
|
})?;
|
||||||
|
|
||||||
let mut versions = HashMap::new();
|
let mut versions = HashMap::new();
|
||||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||||
|
|||||||
@@ -8,9 +8,12 @@ use std::{
|
|||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use crate::error::ErrorExt;
|
||||||
|
|
||||||
pub fn parse<P: AsRef<Path>>(path: P) -> crate::Result<Pbxproj> {
|
pub fn parse<P: AsRef<Path>>(path: P) -> crate::Result<Pbxproj> {
|
||||||
let path = path.as_ref();
|
let path = path.as_ref();
|
||||||
let pbxproj = std::fs::read_to_string(path)?;
|
let pbxproj =
|
||||||
|
std::fs::read_to_string(path).fs_context("failed to read pbxproj file", path.to_path_buf())?;
|
||||||
|
|
||||||
let mut proj = Pbxproj {
|
let mut proj = Pbxproj {
|
||||||
path: path.to_owned(),
|
path: path.to_owned(),
|
||||||
@@ -171,7 +174,7 @@ enum State {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub struct Pbxproj {
|
pub struct Pbxproj {
|
||||||
path: PathBuf,
|
pub path: PathBuf,
|
||||||
raw_lines: Vec<String>,
|
raw_lines: Vec<String>,
|
||||||
pub xc_build_configuration: BTreeMap<String, XCBuildConfiguration>,
|
pub xc_build_configuration: BTreeMap<String, XCBuildConfiguration>,
|
||||||
pub xc_configuration_list: BTreeMap<String, XCConfigurationList>,
|
pub xc_configuration_list: BTreeMap<String, XCConfigurationList>,
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
|
|
||||||
use std::{fmt::Display, str::FromStr};
|
use std::{fmt::Display, str::FromStr};
|
||||||
|
|
||||||
use crate::Result;
|
use crate::{error::Context, Result};
|
||||||
|
|
||||||
pub fn input<T>(
|
pub fn input<T>(
|
||||||
prompt: &str,
|
prompt: &str,
|
||||||
@@ -32,7 +32,7 @@ where
|
|||||||
builder
|
builder
|
||||||
.interact_text()
|
.interact_text()
|
||||||
.map(|t: T| if t.ne("") { Some(t) } else { None })
|
.map(|t: T| if t.ne("") { Some(t) } else { None })
|
||||||
.map_err(Into::into)
|
.context("failed to prompt input")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -42,7 +42,7 @@ pub fn confirm(prompt: &str, default: Option<bool>) -> Result<bool> {
|
|||||||
if let Some(default) = default {
|
if let Some(default) = default {
|
||||||
builder = builder.default(default);
|
builder = builder.default(default);
|
||||||
}
|
}
|
||||||
builder.interact().map_err(Into::into)
|
builder.interact().context("failed to prompt confirm")
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn multiselect<T: ToString>(
|
pub fn multiselect<T: ToString>(
|
||||||
@@ -57,5 +57,5 @@ pub fn multiselect<T: ToString>(
|
|||||||
if let Some(defaults) = defaults {
|
if let Some(defaults) = defaults {
|
||||||
builder = builder.defaults(defaults);
|
builder = builder.defaults(defaults);
|
||||||
}
|
}
|
||||||
builder.interact().map_err(Into::into)
|
builder.interact().context("failed to prompt multi-select")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,6 +13,8 @@ use include_dir::Dir;
|
|||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
use serde_json::value::{Map, Value as JsonValue};
|
use serde_json::value::{Map, Value as JsonValue};
|
||||||
|
|
||||||
|
use crate::error::ErrorExt;
|
||||||
|
|
||||||
/// Map of template variable names and values.
|
/// Map of template variable names and values.
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
#[repr(transparent)]
|
#[repr(transparent)]
|
||||||
@@ -74,13 +76,17 @@ pub fn render_with_generator<
|
|||||||
file_path.set_extension("toml");
|
file_path.set_extension("toml");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if let Some(mut output_file) = out_file_generator(file_path)? {
|
if let Some(mut output_file) = out_file_generator(file_path.clone())
|
||||||
|
.fs_context("failed to generate output file", file_path.clone())?
|
||||||
|
{
|
||||||
if let Some(utf8) = file.contents_utf8() {
|
if let Some(utf8) = file.contents_utf8() {
|
||||||
handlebars
|
handlebars
|
||||||
.render_template_to_write(utf8, &data, &mut output_file)
|
.render_template_to_write(utf8, &data, &mut output_file)
|
||||||
.expect("Failed to render template");
|
.expect("Failed to render template");
|
||||||
} else {
|
} else {
|
||||||
output_file.write_all(file.contents())?;
|
output_file
|
||||||
|
.write_all(file.contents())
|
||||||
|
.fs_context("failed to write template", file_path.clone())?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,7 +2,6 @@
|
|||||||
// SPDX-License-Identifier: Apache-2.0
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
use anyhow::Context;
|
|
||||||
use base64::Engine;
|
use base64::Engine;
|
||||||
use minisign::{
|
use minisign::{
|
||||||
sign, KeyPair as KP, PublicKey, PublicKeyBox, SecretKey, SecretKeyBox, SignatureBox,
|
sign, KeyPair as KP, PublicKey, PublicKeyBox, SecretKey, SecretKeyBox, SignatureBox,
|
||||||
@@ -15,6 +14,8 @@ use std::{
|
|||||||
time::{SystemTime, UNIX_EPOCH},
|
time::{SystemTime, UNIX_EPOCH},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use crate::error::{Context, ErrorExt};
|
||||||
|
|
||||||
/// A key pair (`PublicKey` and `SecretKey`).
|
/// A key pair (`PublicKey` and `SecretKey`).
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct KeyPair {
|
pub struct KeyPair {
|
||||||
@@ -24,9 +25,9 @@ pub struct KeyPair {
|
|||||||
|
|
||||||
fn create_file(path: &Path) -> crate::Result<BufWriter<File>> {
|
fn create_file(path: &Path) -> crate::Result<BufWriter<File>> {
|
||||||
if let Some(parent) = path.parent() {
|
if let Some(parent) = path.parent() {
|
||||||
fs::create_dir_all(parent)?;
|
fs::create_dir_all(parent).fs_context("failed to create directory", parent.to_path_buf())?;
|
||||||
}
|
}
|
||||||
let file = File::create(path)?;
|
let file = File::create(path).fs_context("failed to create file", path.to_path_buf())?;
|
||||||
Ok(BufWriter::new(file))
|
Ok(BufWriter::new(file))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -48,8 +49,12 @@ pub fn generate_key(password: Option<String>) -> crate::Result<KeyPair> {
|
|||||||
|
|
||||||
/// Transform a base64 String to readable string for the main signer
|
/// Transform a base64 String to readable string for the main signer
|
||||||
pub fn decode_key<S: AsRef<[u8]>>(base64_key: S) -> crate::Result<String> {
|
pub fn decode_key<S: AsRef<[u8]>>(base64_key: S) -> crate::Result<String> {
|
||||||
let decoded_str = &base64::engine::general_purpose::STANDARD.decode(base64_key)?[..];
|
let decoded_str = &base64::engine::general_purpose::STANDARD
|
||||||
Ok(String::from(str::from_utf8(decoded_str)?))
|
.decode(base64_key)
|
||||||
|
.context("failed to decode base64 key")?[..];
|
||||||
|
Ok(String::from(
|
||||||
|
str::from_utf8(decoded_str).context("failed to convert base64 to utf8")?,
|
||||||
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Save KeyPair to disk
|
/// Save KeyPair to disk
|
||||||
@@ -69,28 +74,43 @@ where
|
|||||||
|
|
||||||
if sk_path.exists() {
|
if sk_path.exists() {
|
||||||
if !force {
|
if !force {
|
||||||
return Err(anyhow::anyhow!(
|
crate::error::bail!(
|
||||||
"Key generation aborted:\n{} already exists\nIf you really want to overwrite the existing key pair, add the --force switch to force this operation.",
|
"Key generation aborted:\n{} already exists\nIf you really want to overwrite the existing key pair, add the --force switch to force this operation.",
|
||||||
sk_path.display()
|
sk_path.display()
|
||||||
));
|
);
|
||||||
} else {
|
} else {
|
||||||
std::fs::remove_file(sk_path)?;
|
std::fs::remove_file(sk_path)
|
||||||
|
.fs_context("failed to remove secret key file", sk_path.to_path_buf())?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if pk_path.exists() {
|
if pk_path.exists() {
|
||||||
std::fs::remove_file(pk_path)?;
|
std::fs::remove_file(pk_path)
|
||||||
|
.fs_context("failed to remove public key file", pk_path.to_path_buf())?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut sk_writer = create_file(sk_path)?;
|
let write_file = |mut writer: BufWriter<File>, contents: &str| -> std::io::Result<()> {
|
||||||
write!(sk_writer, "{key:}")?;
|
write!(writer, "{contents:}")?;
|
||||||
sk_writer.flush()?;
|
writer.flush()?;
|
||||||
|
Ok(())
|
||||||
|
};
|
||||||
|
|
||||||
let mut pk_writer = create_file(pk_path)?;
|
write_file(create_file(sk_path)?, key)
|
||||||
write!(pk_writer, "{pubkey:}")?;
|
.fs_context("failed to write secret key", sk_path.to_path_buf())?;
|
||||||
pk_writer.flush()?;
|
|
||||||
|
|
||||||
Ok((fs::canonicalize(sk_path)?, fs::canonicalize(pk_path)?))
|
write_file(create_file(pk_path)?, pubkey)
|
||||||
|
.fs_context("failed to write public key", pk_path.to_path_buf())?;
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
fs::canonicalize(sk_path).fs_context(
|
||||||
|
"failed to canonicalize secret key path",
|
||||||
|
sk_path.to_path_buf(),
|
||||||
|
)?,
|
||||||
|
fs::canonicalize(pk_path).fs_context(
|
||||||
|
"failed to canonicalize public key path",
|
||||||
|
pk_path.to_path_buf(),
|
||||||
|
)?,
|
||||||
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Sign files
|
/// Sign files
|
||||||
@@ -104,8 +124,6 @@ where
|
|||||||
extension.push(".sig");
|
extension.push(".sig");
|
||||||
let signature_path = bin_path.with_extension(extension);
|
let signature_path = bin_path.with_extension(extension);
|
||||||
|
|
||||||
let mut signature_box_writer = create_file(&signature_path)?;
|
|
||||||
|
|
||||||
let trusted_comment = format!(
|
let trusted_comment = format!(
|
||||||
"timestamp:{}\tfile:{}",
|
"timestamp:{}\tfile:{}",
|
||||||
unix_timestamp(),
|
unix_timestamp(),
|
||||||
@@ -120,13 +138,20 @@ where
|
|||||||
data_reader,
|
data_reader,
|
||||||
Some(trusted_comment.as_str()),
|
Some(trusted_comment.as_str()),
|
||||||
Some("signature from tauri secret key"),
|
Some("signature from tauri secret key"),
|
||||||
)?;
|
)
|
||||||
|
.context("failed to sign file")?;
|
||||||
|
|
||||||
let encoded_signature =
|
let encoded_signature =
|
||||||
base64::engine::general_purpose::STANDARD.encode(signature_box.to_string());
|
base64::engine::general_purpose::STANDARD.encode(signature_box.to_string());
|
||||||
signature_box_writer.write_all(encoded_signature.as_bytes())?;
|
std::fs::write(&signature_path, encoded_signature.as_bytes())
|
||||||
signature_box_writer.flush()?;
|
.fs_context("failed to write signature file", signature_path.clone())?;
|
||||||
Ok((fs::canonicalize(&signature_path)?, signature_box))
|
Ok((
|
||||||
|
fs::canonicalize(&signature_path).fs_context(
|
||||||
|
"failed to canonicalize signature file",
|
||||||
|
signature_path.clone(),
|
||||||
|
)?,
|
||||||
|
signature_box,
|
||||||
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Gets the updater secret key from the given private key and password.
|
/// Gets the updater secret key from the given private key and password.
|
||||||
@@ -148,7 +173,9 @@ pub fn pub_key<S: AsRef<[u8]>>(public_key: S) -> crate::Result<PublicKey> {
|
|||||||
let decoded_publick = decode_key(public_key).context("failed to decode base64 pubkey")?;
|
let decoded_publick = decode_key(public_key).context("failed to decode base64 pubkey")?;
|
||||||
let pk_box =
|
let pk_box =
|
||||||
PublicKeyBox::from_string(&decoded_publick).context("failed to load updater pubkey")?;
|
PublicKeyBox::from_string(&decoded_publick).context("failed to load updater pubkey")?;
|
||||||
let pk = pk_box.into_public_key()?;
|
let pk = pk_box
|
||||||
|
.into_public_key()
|
||||||
|
.context("failed to convert updater pubkey")?;
|
||||||
Ok(pk)
|
Ok(pk)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -168,7 +195,7 @@ where
|
|||||||
let file = OpenOptions::new()
|
let file = OpenOptions::new()
|
||||||
.read(true)
|
.read(true)
|
||||||
.open(data_path)
|
.open(data_path)
|
||||||
.map_err(|e| minisign::PError::new(minisign::ErrorKind::Io, e))?;
|
.fs_context("failed to open data file", data_path.to_path_buf())?;
|
||||||
Ok(BufReader::new(file))
|
Ok(BufReader::new(file))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -2,7 +2,11 @@
|
|||||||
// SPDX-License-Identifier: Apache-2.0
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
use crate::{helpers::app_paths::tauri_dir, Result};
|
use crate::{
|
||||||
|
error::{Context, Error, ErrorExt},
|
||||||
|
helpers::app_paths::tauri_dir,
|
||||||
|
Result,
|
||||||
|
};
|
||||||
|
|
||||||
use std::{
|
use std::{
|
||||||
collections::HashMap,
|
collections::HashMap,
|
||||||
@@ -13,7 +17,6 @@ use std::{
|
|||||||
sync::Arc,
|
sync::Arc,
|
||||||
};
|
};
|
||||||
|
|
||||||
use anyhow::Context;
|
|
||||||
use clap::Parser;
|
use clap::Parser;
|
||||||
use icns::{IconFamily, IconType};
|
use icns::{IconFamily, IconType};
|
||||||
use image::{
|
use image::{
|
||||||
@@ -162,12 +165,15 @@ fn read_source(path: PathBuf) -> Result<Source> {
|
|||||||
} else {
|
} else {
|
||||||
Ok(Source::DynamicImage(DynamicImage::ImageRgba8(
|
Ok(Source::DynamicImage(DynamicImage::ImageRgba8(
|
||||||
open(&path)
|
open(&path)
|
||||||
.context(format!("Can't read and decode source image: {:?}", path))?
|
.context(format!(
|
||||||
|
"failed to read and decode source image {}",
|
||||||
|
path.display()
|
||||||
|
))?
|
||||||
.into_rgba8(),
|
.into_rgba8(),
|
||||||
)))
|
)))
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
anyhow::bail!("Error loading image");
|
crate::error::bail!("Error loading image");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -181,7 +187,12 @@ fn parse_bg_color(bg_color_string: &String) -> Result<Rgba<u8>> {
|
|||||||
(color.alpha * 255.) as u8,
|
(color.alpha * 255.) as u8,
|
||||||
])
|
])
|
||||||
})
|
})
|
||||||
.map_err(|_| anyhow::anyhow!("failed to parse color {}", bg_color_string))?;
|
.map_err(|_e| {
|
||||||
|
Error::Context(
|
||||||
|
format!("failed to parse color {bg_color_string}"),
|
||||||
|
"invalid RGBA color".into(),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
Ok(bg_color)
|
Ok(bg_color)
|
||||||
}
|
}
|
||||||
@@ -194,7 +205,7 @@ pub fn command(options: Options) -> Result<()> {
|
|||||||
});
|
});
|
||||||
let png_icon_sizes = options.png.unwrap_or_default();
|
let png_icon_sizes = options.png.unwrap_or_default();
|
||||||
|
|
||||||
create_dir_all(&out_dir).context("Can't create output directory")?;
|
create_dir_all(&out_dir).fs_context("Can't create output directory", &out_dir)?;
|
||||||
|
|
||||||
let manifest = if input.extension().is_some_and(|ext| ext == "json") {
|
let manifest = if input.extension().is_some_and(|ext| ext == "json") {
|
||||||
parse_manifest(&input).map(Some)?
|
parse_manifest(&input).map(Some)?
|
||||||
@@ -220,7 +231,7 @@ pub fn command(options: Options) -> Result<()> {
|
|||||||
let source = read_source(default_icon)?;
|
let source = read_source(default_icon)?;
|
||||||
|
|
||||||
if source.height() != source.width() {
|
if source.height() != source.width() {
|
||||||
anyhow::bail!("Source image must be square");
|
crate::error::bail!("Source image must be square");
|
||||||
}
|
}
|
||||||
|
|
||||||
if png_icon_sizes.is_empty() {
|
if png_icon_sizes.is_empty() {
|
||||||
@@ -256,9 +267,12 @@ pub fn command(options: Options) -> Result<()> {
|
|||||||
fn parse_manifest(manifest_path: &Path) -> Result<Manifest> {
|
fn parse_manifest(manifest_path: &Path) -> Result<Manifest> {
|
||||||
let manifest: Manifest = serde_json::from_str(
|
let manifest: Manifest = serde_json::from_str(
|
||||||
&std::fs::read_to_string(manifest_path)
|
&std::fs::read_to_string(manifest_path)
|
||||||
.with_context(|| format!("cannot read manifest file {}", manifest_path.display()))?,
|
.fs_context("cannot read manifest file", manifest_path)?,
|
||||||
)
|
)
|
||||||
.with_context(|| format!("failed to parse manifest file {}", manifest_path.display()))?;
|
.context(format!(
|
||||||
|
"failed to parse manifest file {}",
|
||||||
|
manifest_path.display()
|
||||||
|
))?;
|
||||||
log::debug!("Read manifest file from {}", manifest_path.display());
|
log::debug!("Read manifest file from {}", manifest_path.display());
|
||||||
Ok(manifest)
|
Ok(manifest)
|
||||||
}
|
}
|
||||||
@@ -285,27 +299,34 @@ fn icns(source: &Source, out_dir: &Path) -> Result<()> {
|
|||||||
|
|
||||||
let mut family = IconFamily::new();
|
let mut family = IconFamily::new();
|
||||||
|
|
||||||
for (name, entry) in entries {
|
for (_name, entry) in entries {
|
||||||
let size = entry.size;
|
let size = entry.size;
|
||||||
let mut buf = Vec::new();
|
let mut buf = Vec::new();
|
||||||
|
|
||||||
let image = source.resize_exact(size)?;
|
let image = source.resize_exact(size)?;
|
||||||
|
|
||||||
write_png(image.as_bytes(), &mut buf, size)?;
|
write_png(image.as_bytes(), &mut buf, size).context("failed to write output file")?;
|
||||||
|
|
||||||
let image = icns::Image::read_png(&buf[..])?;
|
let image = icns::Image::read_png(&buf[..]).context("failed to read output file")?;
|
||||||
|
|
||||||
family
|
family
|
||||||
.add_icon_with_type(
|
.add_icon_with_type(
|
||||||
&image,
|
&image,
|
||||||
IconType::from_ostype(entry.ostype.parse().unwrap()).unwrap(),
|
IconType::from_ostype(entry.ostype.parse().unwrap()).unwrap(),
|
||||||
)
|
)
|
||||||
.with_context(|| format!("Can't add {name} to Icns Family"))?;
|
.context("failed to add icon to Icns Family")?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut out_file = BufWriter::new(File::create(out_dir.join("icon.icns"))?);
|
let icns_path = out_dir.join("icon.icns");
|
||||||
family.write(&mut out_file)?;
|
let mut out_file = BufWriter::new(
|
||||||
out_file.flush()?;
|
File::create(&icns_path).fs_context("failed to create output file", &icns_path)?,
|
||||||
|
);
|
||||||
|
family
|
||||||
|
.write(&mut out_file)
|
||||||
|
.fs_context("failed to write output file", &icns_path)?;
|
||||||
|
out_file
|
||||||
|
.flush()
|
||||||
|
.fs_context("failed to flush output file", &icns_path)?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -323,28 +344,30 @@ fn ico(source: &Source, out_dir: &Path) -> Result<()> {
|
|||||||
if size == 256 {
|
if size == 256 {
|
||||||
let mut buf = Vec::new();
|
let mut buf = Vec::new();
|
||||||
|
|
||||||
write_png(image.as_bytes(), &mut buf, size)?;
|
write_png(image.as_bytes(), &mut buf, size).context("failed to write output file")?;
|
||||||
|
|
||||||
frames.push(IcoFrame::with_encoded(
|
frames.push(
|
||||||
buf,
|
IcoFrame::with_encoded(buf, size, size, ExtendedColorType::Rgba8)
|
||||||
size,
|
.context("failed to create ico frame")?,
|
||||||
size,
|
);
|
||||||
ExtendedColorType::Rgba8,
|
|
||||||
)?)
|
|
||||||
} else {
|
} else {
|
||||||
frames.push(IcoFrame::as_png(
|
frames.push(
|
||||||
image.as_bytes(),
|
IcoFrame::as_png(image.as_bytes(), size, size, ExtendedColorType::Rgba8)
|
||||||
size,
|
.context("failed to create PNG frame")?,
|
||||||
size,
|
);
|
||||||
ExtendedColorType::Rgba8,
|
|
||||||
)?);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut out_file = BufWriter::new(File::create(out_dir.join("icon.ico"))?);
|
let ico_path = out_dir.join("icon.ico");
|
||||||
|
let mut out_file =
|
||||||
|
BufWriter::new(File::create(&ico_path).fs_context("failed to create output file", &ico_path)?);
|
||||||
let encoder = IcoEncoder::new(&mut out_file);
|
let encoder = IcoEncoder::new(&mut out_file);
|
||||||
encoder.encode_images(&frames)?;
|
encoder
|
||||||
out_file.flush()?;
|
.encode_images(&frames)
|
||||||
|
.context("failed to encode images")?;
|
||||||
|
out_file
|
||||||
|
.flush()
|
||||||
|
.fs_context("failed to flush output file", &ico_path)?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -399,7 +422,10 @@ fn android(
|
|||||||
let folder_name = format!("mipmap-{}", target.name);
|
let folder_name = format!("mipmap-{}", target.name);
|
||||||
let out_folder = out_dir.join(&folder_name);
|
let out_folder = out_dir.join(&folder_name);
|
||||||
|
|
||||||
create_dir_all(&out_folder).context("Can't create Android mipmap output directory")?;
|
create_dir_all(&out_folder).fs_context(
|
||||||
|
"failed to create Android mipmap output directory",
|
||||||
|
&out_folder,
|
||||||
|
)?;
|
||||||
|
|
||||||
fg_entries.push(PngEntry {
|
fg_entries.push(PngEntry {
|
||||||
name: format!("{}/{}", folder_name, "ic_launcher_foreground.png"),
|
name: format!("{}/{}", folder_name, "ic_launcher_foreground.png"),
|
||||||
@@ -445,18 +471,29 @@ fn android(
|
|||||||
}
|
}
|
||||||
fn create_color_file(out_dir: &Path, color: &String) -> Result<()> {
|
fn create_color_file(out_dir: &Path, color: &String) -> Result<()> {
|
||||||
let values_folder = out_dir.join("values");
|
let values_folder = out_dir.join("values");
|
||||||
create_dir_all(&values_folder).context("Can't create Android values output directory")?;
|
create_dir_all(&values_folder).fs_context(
|
||||||
let mut color_file = File::create(values_folder.join("ic_launcher_background.xml"))?;
|
"Can't create Android values output directory",
|
||||||
color_file.write_all(
|
&values_folder,
|
||||||
format!(
|
|
||||||
r#"<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<resources>
|
|
||||||
<color name="ic_launcher_background">{}</color>
|
|
||||||
</resources>"#,
|
|
||||||
color
|
|
||||||
)
|
|
||||||
.as_bytes(),
|
|
||||||
)?;
|
)?;
|
||||||
|
let launcher_background_xml_path = values_folder.join("ic_launcher_background.xml");
|
||||||
|
let mut color_file = File::create(&launcher_background_xml_path).fs_context(
|
||||||
|
"failed to create Android color file",
|
||||||
|
&launcher_background_xml_path,
|
||||||
|
)?;
|
||||||
|
color_file
|
||||||
|
.write_all(
|
||||||
|
format!(
|
||||||
|
r#"<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
<resources>
|
||||||
|
<color name="ic_launcher_background">{color}</color>
|
||||||
|
</resources>"#,
|
||||||
|
)
|
||||||
|
.as_bytes(),
|
||||||
|
)
|
||||||
|
.fs_context(
|
||||||
|
"failed to write Android color file",
|
||||||
|
&launcher_background_xml_path,
|
||||||
|
)?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -468,7 +505,7 @@ fn android(
|
|||||||
android_out
|
android_out
|
||||||
} else {
|
} else {
|
||||||
let out = out_dir.join("android");
|
let out = out_dir.join("android");
|
||||||
create_dir_all(&out).context("Can't create Android output directory")?;
|
create_dir_all(&out).fs_context("Can't create Android output directory", &out)?;
|
||||||
out
|
out
|
||||||
};
|
};
|
||||||
let entries = android_entries(&out)?;
|
let entries = android_entries(&out)?;
|
||||||
@@ -545,9 +582,14 @@ fn android(
|
|||||||
|
|
||||||
let image = apply_round_mask(&image, entry.size, margin, radius);
|
let image = apply_round_mask(&image, entry.size, margin, radius);
|
||||||
|
|
||||||
let mut out_file = BufWriter::new(File::create(entry.out_path)?);
|
let mut out_file = BufWriter::new(
|
||||||
write_png(image.as_bytes(), &mut out_file, entry.size)?;
|
File::create(&entry.out_path).fs_context("failed to create output file", &entry.out_path)?,
|
||||||
out_file.flush()?;
|
);
|
||||||
|
write_png(image.as_bytes(), &mut out_file, entry.size)
|
||||||
|
.context("failed to write output file")?;
|
||||||
|
out_file
|
||||||
|
.flush()
|
||||||
|
.fs_context("failed to flush output file", &entry.out_path)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut launcher_content = r#"<?xml version="1.0" encoding="utf-8"?>
|
let mut launcher_content = r#"<?xml version="1.0" encoding="utf-8"?>
|
||||||
@@ -570,10 +612,17 @@ fn android(
|
|||||||
launcher_content.push_str("\n</adaptive-icon>");
|
launcher_content.push_str("\n</adaptive-icon>");
|
||||||
|
|
||||||
let any_dpi_folder = out.join("mipmap-anydpi-v26");
|
let any_dpi_folder = out.join("mipmap-anydpi-v26");
|
||||||
create_dir_all(&any_dpi_folder)
|
create_dir_all(&any_dpi_folder).fs_context(
|
||||||
.context("Can't create Android mipmap-anydpi-v26 output directory")?;
|
"Can't create Android mipmap-anydpi-v26 output directory",
|
||||||
let mut launcher_file = File::create(any_dpi_folder.join("ic_launcher.xml"))?;
|
&any_dpi_folder,
|
||||||
launcher_file.write_all(launcher_content.as_bytes())?;
|
)?;
|
||||||
|
|
||||||
|
let launcher_xml_path = any_dpi_folder.join("ic_launcher.xml");
|
||||||
|
let mut launcher_file = File::create(&launcher_xml_path)
|
||||||
|
.fs_context("failed to create Android launcher file", &launcher_xml_path)?;
|
||||||
|
launcher_file
|
||||||
|
.write_all(launcher_content.as_bytes())
|
||||||
|
.fs_context("failed to write Android launcher file", &launcher_xml_path)?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -685,7 +734,7 @@ fn png(source: &Source, out_dir: &Path, ios_color: Rgba<u8>) -> Result<()> {
|
|||||||
ios_out
|
ios_out
|
||||||
} else {
|
} else {
|
||||||
let out = out_dir.join("ios");
|
let out = out_dir.join("ios");
|
||||||
create_dir_all(&out).context("Can't create iOS output directory")?;
|
create_dir_all(&out).fs_context("failed to create iOS output directory", &out)?;
|
||||||
out
|
out
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -758,13 +807,16 @@ fn resize_and_save_png(
|
|||||||
scale_percent: Option<f32>,
|
scale_percent: Option<f32>,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
let image = resize_png(source, size, bg, scale_percent)?;
|
let image = resize_png(source, size, bg, scale_percent)?;
|
||||||
let mut out_file = BufWriter::new(File::create(file_path)?);
|
let mut out_file =
|
||||||
write_png(image.as_bytes(), &mut out_file, size)?;
|
BufWriter::new(File::create(file_path).fs_context("failed to create output file", file_path)?);
|
||||||
Ok(out_file.flush()?)
|
write_png(image.as_bytes(), &mut out_file, size).context("failed to write output file")?;
|
||||||
|
out_file
|
||||||
|
.flush()
|
||||||
|
.fs_context("failed to save output file", file_path)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Encode image data as png with compression.
|
// Encode image data as png with compression.
|
||||||
fn write_png<W: Write>(image_data: &[u8], w: W, size: u32) -> Result<()> {
|
fn write_png<W: Write>(image_data: &[u8], w: W, size: u32) -> image::ImageResult<()> {
|
||||||
let encoder = PngEncoder::new_with_quality(w, CompressionType::Best, PngFilterType::Adaptive);
|
let encoder = PngEncoder::new_with_quality(w, CompressionType::Best, PngFilterType::Adaptive);
|
||||||
encoder.write_image(image_data, size, size, ExtendedColorType::Rgba8)?;
|
encoder.write_image(image_data, size, size, ExtendedColorType::Rgba8)?;
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|||||||
@@ -3,6 +3,8 @@
|
|||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
use super::{SectionItem, Status};
|
use super::{SectionItem, Status};
|
||||||
|
#[cfg(windows)]
|
||||||
|
use crate::error::Context;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
@@ -45,7 +47,11 @@ fn build_tools_version() -> crate::Result<Vec<String>> {
|
|||||||
"json",
|
"json",
|
||||||
"-utf8",
|
"-utf8",
|
||||||
])
|
])
|
||||||
.output()?;
|
.output()
|
||||||
|
.map_err(|error| crate::error::Error::CommandFailed {
|
||||||
|
command: "vswhere -prerelease -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -requires Microsoft.VisualStudio.Component.Windows10SDK.* -format json -utf8".to_string(),
|
||||||
|
error,
|
||||||
|
})?;
|
||||||
|
|
||||||
let output_sdk11 = Command::new(vswhere)
|
let output_sdk11 = Command::new(vswhere)
|
||||||
.args([
|
.args([
|
||||||
@@ -60,19 +66,25 @@ fn build_tools_version() -> crate::Result<Vec<String>> {
|
|||||||
"json",
|
"json",
|
||||||
"-utf8",
|
"-utf8",
|
||||||
])
|
])
|
||||||
.output()?;
|
.output()
|
||||||
|
.map_err(|error| crate::error::Error::CommandFailed {
|
||||||
|
command: "vswhere -prerelease -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -requires Microsoft.VisualStudio.Component.Windows11SDK.* -format json -utf8".to_string(),
|
||||||
|
error,
|
||||||
|
})?;
|
||||||
|
|
||||||
let mut instances: Vec<VsInstanceInfo> = Vec::new();
|
let mut instances: Vec<VsInstanceInfo> = Vec::new();
|
||||||
|
|
||||||
if output_sdk10.status.success() {
|
if output_sdk10.status.success() {
|
||||||
let stdout = String::from_utf8_lossy(&output_sdk10.stdout);
|
let stdout = String::from_utf8_lossy(&output_sdk10.stdout);
|
||||||
let found: Vec<VsInstanceInfo> = serde_json::from_str(&stdout)?;
|
let found: Vec<VsInstanceInfo> =
|
||||||
|
serde_json::from_str(&stdout).context("failed to parse vswhere output")?;
|
||||||
instances.extend(found);
|
instances.extend(found);
|
||||||
}
|
}
|
||||||
|
|
||||||
if output_sdk11.status.success() {
|
if output_sdk11.status.success() {
|
||||||
let stdout = String::from_utf8_lossy(&output_sdk11.stdout);
|
let stdout = String::from_utf8_lossy(&output_sdk11.stdout);
|
||||||
let found: Vec<VsInstanceInfo> = serde_json::from_str(&stdout)?;
|
let found: Vec<VsInstanceInfo> =
|
||||||
|
serde_json::from_str(&stdout).context("failed to parse vswhere output")?;
|
||||||
instances.extend(found);
|
instances.extend(found);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -97,7 +109,11 @@ fn webview2_version() -> crate::Result<Option<String>> {
|
|||||||
let output = Command::new(&powershell_path)
|
let output = Command::new(&powershell_path)
|
||||||
.args(["-NoProfile", "-Command"])
|
.args(["-NoProfile", "-Command"])
|
||||||
.arg("Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\WOW6432Node\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}")
|
.arg("Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\WOW6432Node\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}")
|
||||||
.output()?;
|
.output()
|
||||||
|
.map_err(|error| crate::error::Error::CommandFailed {
|
||||||
|
command: "Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\WOW6432Node\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}".to_string(),
|
||||||
|
error,
|
||||||
|
})?;
|
||||||
if output.status.success() {
|
if output.status.success() {
|
||||||
return Ok(Some(
|
return Ok(Some(
|
||||||
String::from_utf8_lossy(&output.stdout).replace('\n', ""),
|
String::from_utf8_lossy(&output.stdout).replace('\n', ""),
|
||||||
@@ -107,7 +123,11 @@ fn webview2_version() -> crate::Result<Option<String>> {
|
|||||||
let output = Command::new(&powershell_path)
|
let output = Command::new(&powershell_path)
|
||||||
.args(["-NoProfile", "-Command"])
|
.args(["-NoProfile", "-Command"])
|
||||||
.arg("Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}")
|
.arg("Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}")
|
||||||
.output()?;
|
.output()
|
||||||
|
.map_err(|error| crate::error::Error::CommandFailed {
|
||||||
|
command: "Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}".to_string(),
|
||||||
|
error,
|
||||||
|
})?;
|
||||||
if output.status.success() {
|
if output.status.success() {
|
||||||
return Ok(Some(
|
return Ok(Some(
|
||||||
String::from_utf8_lossy(&output.stdout).replace('\n', ""),
|
String::from_utf8_lossy(&output.stdout).replace('\n', ""),
|
||||||
@@ -117,7 +137,11 @@ fn webview2_version() -> crate::Result<Option<String>> {
|
|||||||
let output = Command::new(&powershell_path)
|
let output = Command::new(&powershell_path)
|
||||||
.args(["-NoProfile", "-Command"])
|
.args(["-NoProfile", "-Command"])
|
||||||
.arg("Get-ItemProperty -Path 'HKCU:\\SOFTWARE\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}")
|
.arg("Get-ItemProperty -Path 'HKCU:\\SOFTWARE\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}")
|
||||||
.output()?;
|
.output()
|
||||||
|
.map_err(|error| crate::error::Error::CommandFailed {
|
||||||
|
command: "Get-ItemProperty -Path 'HKCU:\\SOFTWARE\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}".to_string(),
|
||||||
|
error,
|
||||||
|
})?;
|
||||||
if output.status.success() {
|
if output.status.success() {
|
||||||
return Ok(Some(
|
return Ok(Some(
|
||||||
String::from_utf8_lossy(&output.stdout).replace('\n', ""),
|
String::from_utf8_lossy(&output.stdout).replace('\n', ""),
|
||||||
|
|||||||
@@ -3,6 +3,7 @@
|
|||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
error::Context,
|
||||||
helpers::app_paths::{resolve_frontend_dir, resolve_tauri_dir},
|
helpers::app_paths::{resolve_frontend_dir, resolve_tauri_dir},
|
||||||
Result,
|
Result,
|
||||||
};
|
};
|
||||||
@@ -37,7 +38,7 @@ pub struct VersionMetadata {
|
|||||||
|
|
||||||
fn version_metadata() -> Result<VersionMetadata> {
|
fn version_metadata() -> Result<VersionMetadata> {
|
||||||
serde_json::from_str::<VersionMetadata>(include_str!("../../metadata-v2.json"))
|
serde_json::from_str::<VersionMetadata>(include_str!("../../metadata-v2.json"))
|
||||||
.map_err(Into::into)
|
.context("failed to parse version metadata")
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default)]
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default)]
|
||||||
|
|||||||
@@ -8,7 +8,11 @@ use colored::Colorize;
|
|||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use crate::helpers::{cross_command, npm::PackageManager};
|
use crate::error::Context;
|
||||||
|
use crate::{
|
||||||
|
error::Error,
|
||||||
|
helpers::{cross_command, npm::PackageManager},
|
||||||
|
};
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
struct YarnVersionInfo {
|
struct YarnVersionInfo {
|
||||||
@@ -24,10 +28,15 @@ pub fn npm_latest_version(pm: &PackageManager, name: &str) -> crate::Result<Opti
|
|||||||
.arg("info")
|
.arg("info")
|
||||||
.arg(name)
|
.arg(name)
|
||||||
.args(["version", "--json"])
|
.args(["version", "--json"])
|
||||||
.output()?;
|
.output()
|
||||||
|
.map_err(|error| Error::CommandFailed {
|
||||||
|
command: "yarn info --json".to_string(),
|
||||||
|
error,
|
||||||
|
})?;
|
||||||
if output.status.success() {
|
if output.status.success() {
|
||||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||||
let info: YarnVersionInfo = serde_json::from_str(&stdout)?;
|
let info: YarnVersionInfo =
|
||||||
|
serde_json::from_str(&stdout).context("failed to parse yarn info")?;
|
||||||
Ok(Some(info.data.last().unwrap().to_string()))
|
Ok(Some(info.data.last().unwrap().to_string()))
|
||||||
} else {
|
} else {
|
||||||
Ok(None)
|
Ok(None)
|
||||||
@@ -41,10 +50,14 @@ pub fn npm_latest_version(pm: &PackageManager, name: &str) -> crate::Result<Opti
|
|||||||
.arg("info")
|
.arg("info")
|
||||||
.arg(name)
|
.arg(name)
|
||||||
.args(["--fields", "version", "--json"])
|
.args(["--fields", "version", "--json"])
|
||||||
.output()?;
|
.output()
|
||||||
|
.map_err(|error| Error::CommandFailed {
|
||||||
|
command: "yarn npm info --fields version --json".to_string(),
|
||||||
|
error,
|
||||||
|
})?;
|
||||||
if output.status.success() {
|
if output.status.success() {
|
||||||
let info: crate::PackageJson =
|
let info: crate::PackageJson = serde_json::from_reader(std::io::Cursor::new(output.stdout))
|
||||||
serde_json::from_reader(std::io::Cursor::new(output.stdout)).unwrap();
|
.context("failed to parse yarn npm info")?;
|
||||||
Ok(info.version)
|
Ok(info.version)
|
||||||
} else {
|
} else {
|
||||||
Ok(None)
|
Ok(None)
|
||||||
@@ -54,7 +67,15 @@ pub fn npm_latest_version(pm: &PackageManager, name: &str) -> crate::Result<Opti
|
|||||||
PackageManager::Npm | PackageManager::Deno | PackageManager::Bun => {
|
PackageManager::Npm | PackageManager::Deno | PackageManager::Bun => {
|
||||||
let mut cmd = cross_command("npm");
|
let mut cmd = cross_command("npm");
|
||||||
|
|
||||||
let output = cmd.arg("show").arg(name).arg("version").output()?;
|
let output = cmd
|
||||||
|
.arg("show")
|
||||||
|
.arg(name)
|
||||||
|
.arg("version")
|
||||||
|
.output()
|
||||||
|
.map_err(|error| Error::CommandFailed {
|
||||||
|
command: "npm show --version".to_string(),
|
||||||
|
error,
|
||||||
|
})?;
|
||||||
if output.status.success() {
|
if output.status.success() {
|
||||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||||
Ok(Some(stdout.replace('\n', "")))
|
Ok(Some(stdout.replace('\n', "")))
|
||||||
@@ -65,7 +86,15 @@ pub fn npm_latest_version(pm: &PackageManager, name: &str) -> crate::Result<Opti
|
|||||||
PackageManager::Pnpm => {
|
PackageManager::Pnpm => {
|
||||||
let mut cmd = cross_command("pnpm");
|
let mut cmd = cross_command("pnpm");
|
||||||
|
|
||||||
let output = cmd.arg("info").arg(name).arg("version").output()?;
|
let output = cmd
|
||||||
|
.arg("info")
|
||||||
|
.arg(name)
|
||||||
|
.arg("version")
|
||||||
|
.output()
|
||||||
|
.map_err(|error| Error::CommandFailed {
|
||||||
|
command: "pnpm info --version".to_string(),
|
||||||
|
error,
|
||||||
|
})?;
|
||||||
if output.status.success() {
|
if output.status.success() {
|
||||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||||
Ok(Some(stdout.replace('\n', "")))
|
Ok(Some(stdout.replace('\n', "")))
|
||||||
|
|||||||
@@ -8,14 +8,16 @@ use std::{
|
|||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::helpers::{
|
use crate::{
|
||||||
self,
|
helpers::{
|
||||||
cargo_manifest::{cargo_manifest_and_lock, crate_version},
|
self,
|
||||||
npm::PackageManager,
|
cargo_manifest::{cargo_manifest_and_lock, crate_version},
|
||||||
|
npm::PackageManager,
|
||||||
|
},
|
||||||
|
Error,
|
||||||
};
|
};
|
||||||
|
|
||||||
use super::{packages_nodejs, packages_rust, SectionItem};
|
use super::{packages_nodejs, packages_rust, SectionItem};
|
||||||
use anyhow::anyhow;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct InstalledPackage {
|
pub struct InstalledPackage {
|
||||||
@@ -161,5 +163,5 @@ pub fn check_mismatched_packages(frontend_dir: &Path, tauri_path: &Path) -> crat
|
|||||||
)
|
)
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join("\n");
|
.join("\n");
|
||||||
Err(anyhow!("Found version mismatched Tauri packages. Make sure the NPM and crate versions are on the same major/minor releases:\n{mismatched_text}"))
|
Err(Error::GenericError(format!("Found version mismatched Tauri packages. Make sure the NPM and crate versions are on the same major/minor releases:\n{mismatched_text}")))
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -17,8 +17,10 @@ use std::{
|
|||||||
path::PathBuf,
|
path::PathBuf,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::Result;
|
use crate::{
|
||||||
use anyhow::Context;
|
error::{Context, ErrorExt},
|
||||||
|
Result,
|
||||||
|
};
|
||||||
use clap::Parser;
|
use clap::Parser;
|
||||||
use handlebars::{to_json, Handlebars};
|
use handlebars::{to_json, Handlebars};
|
||||||
use include_dir::{include_dir, Dir};
|
use include_dir::{include_dir, Dir};
|
||||||
@@ -76,8 +78,10 @@ impl Options {
|
|||||||
let package_json_path = PathBuf::from(&self.directory).join("package.json");
|
let package_json_path = PathBuf::from(&self.directory).join("package.json");
|
||||||
|
|
||||||
let init_defaults = if package_json_path.exists() {
|
let init_defaults = if package_json_path.exists() {
|
||||||
let package_json_text = read_to_string(package_json_path)?;
|
let package_json_text = read_to_string(&package_json_path)
|
||||||
let package_json: crate::PackageJson = serde_json::from_str(&package_json_text)?;
|
.fs_context("failed to read", package_json_path.clone())?;
|
||||||
|
let package_json: crate::PackageJson =
|
||||||
|
serde_json::from_str(&package_json_text).context("failed to parse JSON")?;
|
||||||
let (framework, _) = infer_framework(&package_json_text);
|
let (framework, _) = infer_framework(&package_json_text);
|
||||||
InitDefaults {
|
InitDefaults {
|
||||||
app_name: package_json.product_name.or(package_json.name),
|
app_name: package_json.product_name.or(package_json.name),
|
||||||
@@ -187,7 +191,8 @@ pub fn command(mut options: Options) -> Result<()> {
|
|||||||
options = options.load()?;
|
options = options.load()?;
|
||||||
|
|
||||||
let template_target_path = PathBuf::from(&options.directory).join("src-tauri");
|
let template_target_path = PathBuf::from(&options.directory).join("src-tauri");
|
||||||
let metadata = serde_json::from_str::<VersionMetadata>(include_str!("../metadata-v2.json"))?;
|
let metadata = serde_json::from_str::<VersionMetadata>(include_str!("../metadata-v2.json"))
|
||||||
|
.context("failed to parse version metadata")?;
|
||||||
|
|
||||||
if template_target_path.exists() && !options.force {
|
if template_target_path.exists() && !options.force {
|
||||||
log::warn!(
|
log::warn!(
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
// SPDX-License-Identifier: Apache-2.0
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
// SPDX-License-Identifier: MIT
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
use anyhow::Result;
|
use crate::Result;
|
||||||
use clap::{Parser, Subcommand};
|
use clap::{Parser, Subcommand};
|
||||||
|
|
||||||
use crate::interface::{AppInterface, AppSettings, Interface};
|
use crate::interface::{AppInterface, AppSettings, Interface};
|
||||||
|
|||||||
@@ -11,8 +11,7 @@ use std::{
|
|||||||
sync::Arc,
|
sync::Arc,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::helpers::config::Config;
|
use crate::{error::Context, helpers::config::Config};
|
||||||
use anyhow::Context;
|
|
||||||
use tauri_bundler::bundle::{PackageType, Settings, SettingsBuilder};
|
use tauri_bundler::bundle::{PackageType, Settings, SettingsBuilder};
|
||||||
|
|
||||||
pub use rust::{MobileOptions, Options, Rust as AppInterface};
|
pub use rust::{MobileOptions, Options, Rust as AppInterface};
|
||||||
@@ -20,7 +19,6 @@ pub use rust::{MobileOptions, Options, Rust as AppInterface};
|
|||||||
pub trait DevProcess {
|
pub trait DevProcess {
|
||||||
fn kill(&self) -> std::io::Result<()>;
|
fn kill(&self) -> std::io::Result<()>;
|
||||||
fn try_wait(&self) -> std::io::Result<Option<ExitStatus>>;
|
fn try_wait(&self) -> std::io::Result<Option<ExitStatus>>;
|
||||||
// TODO:
|
|
||||||
#[allow(unused)]
|
#[allow(unused)]
|
||||||
fn wait(&self) -> std::io::Result<ExitStatus>;
|
fn wait(&self) -> std::io::Result<ExitStatus>;
|
||||||
#[allow(unused)]
|
#[allow(unused)]
|
||||||
@@ -56,7 +54,7 @@ pub trait AppSettings {
|
|||||||
let target: String = if let Some(target) = options.target.clone() {
|
let target: String = if let Some(target) = options.target.clone() {
|
||||||
target
|
target
|
||||||
} else {
|
} else {
|
||||||
tauri_utils::platform::target_triple()?
|
tauri_utils::platform::target_triple().context("failed to get target triple")?
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut bins = self.get_binaries()?;
|
let mut bins = self.get_binaries()?;
|
||||||
@@ -81,7 +79,10 @@ pub trait AppSettings {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
settings_builder.build().map_err(Into::into)
|
settings_builder
|
||||||
|
.build()
|
||||||
|
.map_err(Box::new)
|
||||||
|
.map_err(Into::into)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -14,7 +14,6 @@ use std::{
|
|||||||
time::Duration,
|
time::Duration,
|
||||||
};
|
};
|
||||||
|
|
||||||
use anyhow::Context;
|
|
||||||
use dunce::canonicalize;
|
use dunce::canonicalize;
|
||||||
use glob::glob;
|
use glob::glob;
|
||||||
use ignore::gitignore::{Gitignore, GitignoreBuilder};
|
use ignore::gitignore::{Gitignore, GitignoreBuilder};
|
||||||
@@ -30,6 +29,7 @@ use tauri_utils::config::{parse::is_configuration_file, DeepLinkProtocol, Runner
|
|||||||
|
|
||||||
use super::{AppSettings, DevProcess, ExitReason, Interface};
|
use super::{AppSettings, DevProcess, ExitReason, Interface};
|
||||||
use crate::{
|
use crate::{
|
||||||
|
error::{Context, Error, ErrorExt},
|
||||||
helpers::{
|
helpers::{
|
||||||
app_paths::{frontend_dir, tauri_dir},
|
app_paths::{frontend_dir, tauri_dir},
|
||||||
config::{nsis_settings, reload as reload_config, wix_settings, BundleResources, Config},
|
config::{nsis_settings, reload as reload_config, wix_settings, BundleResources, Config},
|
||||||
@@ -140,7 +140,14 @@ impl Interface for Rust {
|
|||||||
}
|
}
|
||||||
})
|
})
|
||||||
.unwrap();
|
.unwrap();
|
||||||
watcher.watch(tauri_dir().join("Cargo.toml"), RecursiveMode::NonRecursive)?;
|
watcher
|
||||||
|
.watch(tauri_dir().join("Cargo.toml"), RecursiveMode::NonRecursive)
|
||||||
|
.with_context(|| {
|
||||||
|
format!(
|
||||||
|
"failed to watch {}",
|
||||||
|
tauri_dir().join("Cargo.toml").display()
|
||||||
|
)
|
||||||
|
})?;
|
||||||
let (manifest, modified) = rewrite_manifest(config)?;
|
let (manifest, modified) = rewrite_manifest(config)?;
|
||||||
if modified {
|
if modified {
|
||||||
// Wait for the modified event so we don't trigger a re-build later on
|
// Wait for the modified event so we don't trigger a re-build later on
|
||||||
@@ -411,9 +418,9 @@ fn dev_options(
|
|||||||
// Copied from https://github.com/rust-lang/cargo/blob/69255bb10de7f74511b5cef900a9d102247b6029/src/cargo/core/workspace.rs#L665
|
// Copied from https://github.com/rust-lang/cargo/blob/69255bb10de7f74511b5cef900a9d102247b6029/src/cargo/core/workspace.rs#L665
|
||||||
fn expand_member_path(path: &Path) -> crate::Result<Vec<PathBuf>> {
|
fn expand_member_path(path: &Path) -> crate::Result<Vec<PathBuf>> {
|
||||||
let path = path.to_str().context("path is not UTF-8 compatible")?;
|
let path = path.to_str().context("path is not UTF-8 compatible")?;
|
||||||
let res = glob(path).with_context(|| format!("could not parse pattern `{path}`"))?;
|
let res = glob(path).with_context(|| format!("failed to expand glob pattern for {path}"))?;
|
||||||
let res = res
|
let res = res
|
||||||
.map(|p| p.with_context(|| format!("unable to match path to pattern `{path}`")))
|
.map(|p| p.with_context(|| format!("failed to expand glob pattern for {path}")))
|
||||||
.collect::<Result<Vec<_>, _>>()?;
|
.collect::<Result<Vec<_>, _>>()?;
|
||||||
Ok(res)
|
Ok(res)
|
||||||
}
|
}
|
||||||
@@ -574,7 +581,7 @@ impl Rust {
|
|||||||
);
|
);
|
||||||
|
|
||||||
let mut p = process.lock().unwrap();
|
let mut p = process.lock().unwrap();
|
||||||
p.kill().with_context(|| "failed to kill app process")?;
|
p.kill().context("failed to kill app process")?;
|
||||||
|
|
||||||
// wait for the process to exit
|
// wait for the process to exit
|
||||||
// note that on mobile, kill() already waits for the process to exit (duct implementation)
|
// note that on mobile, kill() already waits for the process to exit (duct implementation)
|
||||||
@@ -622,18 +629,19 @@ impl<T> MaybeWorkspace<T> {
|
|||||||
fn resolve(
|
fn resolve(
|
||||||
self,
|
self,
|
||||||
label: &str,
|
label: &str,
|
||||||
get_ws_field: impl FnOnce() -> anyhow::Result<T>,
|
get_ws_field: impl FnOnce() -> crate::Result<T>,
|
||||||
) -> anyhow::Result<T> {
|
) -> crate::Result<T> {
|
||||||
match self {
|
match self {
|
||||||
MaybeWorkspace::Defined(value) => Ok(value),
|
MaybeWorkspace::Defined(value) => Ok(value),
|
||||||
MaybeWorkspace::Workspace(TomlWorkspaceField { workspace: true }) => {
|
MaybeWorkspace::Workspace(TomlWorkspaceField { workspace: true }) => get_ws_field()
|
||||||
get_ws_field().context(format!(
|
.with_context(|| {
|
||||||
"error inheriting `{label}` from workspace root manifest's `workspace.package.{label}`"
|
format!(
|
||||||
))
|
"error inheriting `{label}` from workspace root manifest's `workspace.package.{label}`"
|
||||||
}
|
)
|
||||||
MaybeWorkspace::Workspace(TomlWorkspaceField { workspace: false }) => Err(anyhow::anyhow!(
|
}),
|
||||||
"`workspace=false` is unsupported for `package.{label}`"
|
MaybeWorkspace::Workspace(TomlWorkspaceField { workspace: false }) => Err(
|
||||||
)),
|
crate::Error::GenericError("`workspace=false` is unsupported for `package.{label}`".into()),
|
||||||
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
fn _as_defined(&self) -> Option<&T> {
|
fn _as_defined(&self) -> Option<&T> {
|
||||||
@@ -721,8 +729,11 @@ impl CargoSettings {
|
|||||||
fn load(dir: &Path) -> crate::Result<Self> {
|
fn load(dir: &Path) -> crate::Result<Self> {
|
||||||
let toml_path = dir.join("Cargo.toml");
|
let toml_path = dir.join("Cargo.toml");
|
||||||
let toml_str = std::fs::read_to_string(&toml_path)
|
let toml_str = std::fs::read_to_string(&toml_path)
|
||||||
.with_context(|| format!("Failed to read {}", toml_path.display()))?;
|
.fs_context("Failed to read Cargo manifest", toml_path.clone())?;
|
||||||
toml::from_str(&toml_str).with_context(|| format!("Failed to parse {}", toml_path.display()))
|
toml::from_str(&toml_str).context(format!(
|
||||||
|
"failed to parse Cargo manifest at {}",
|
||||||
|
toml_path.display()
|
||||||
|
))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -831,11 +842,10 @@ impl AppSettings for RustAppSettings {
           .plugins
           .0
           .get("updater")
-          .ok_or_else(|| {
-            anyhow::anyhow!("failed to get updater configuration: plugins > updater doesn't exist")
-          })?
+          .context("failed to get updater configuration: plugins > updater doesn't exist")?
           .clone(),
-      )?;
+      )
+      .context("failed to parse updater plugin configuration")?;
       Some(UpdaterSettings {
         v1_compatible,
         pubkey: updater.pubkey,
@@ -862,7 +872,8 @@ impl AppSettings for RustAppSettings {
       .get("deep-link")
       .and_then(|c| c.get("desktop").cloned())
     {
-      let protocols: DesktopDeepLinks = serde_json::from_value(plugin_config)?;
+      let protocols: DesktopDeepLinks =
+        serde_json::from_value(plugin_config).context("failed to parse desktop deep links from Tauri configuration > plugins > deep-link > desktop")?;
       settings.deep_link_protocols = Some(match protocols {
         DesktopDeepLinks::One(p) => vec![p],
         DesktopDeepLinks::List(p) => p,
@@ -1034,18 +1045,18 @@ impl AppSettings for RustAppSettings {
 impl RustAppSettings {
   pub fn new(config: &Config, manifest: Manifest, target: Option<String>) -> crate::Result<Self> {
     let tauri_dir = tauri_dir();
-    let cargo_settings = CargoSettings::load(tauri_dir).context("failed to load cargo settings")?;
+    let cargo_settings = CargoSettings::load(tauri_dir).context("failed to load Cargo settings")?;
     let cargo_package_settings = match &cargo_settings.package {
       Some(package_info) => package_info.clone(),
       None => {
-        return Err(anyhow::anyhow!(
+        return Err(crate::Error::GenericError(
           "No package info in the config file".to_owned(),
         ))
       }
     };
 
     let ws_package_settings = CargoSettings::load(&get_workspace_dir()?)
-      .context("failed to load cargo settings from workspace root")?
+      .context("failed to load Cargo settings from workspace root")?
       .workspace
       .and_then(|v| v.package);
 
@@ -1058,7 +1069,7 @@ impl RustAppSettings {
         ws_package_settings
           .as_ref()
           .and_then(|p| p.version.clone())
-          .ok_or_else(|| anyhow::anyhow!("Couldn't inherit value for `version` from workspace"))
+          .context("Couldn't inherit value for `version` from workspace")
       })
       .expect("Cargo project does not have a version")
     });
@@ -1078,9 +1089,7 @@ impl RustAppSettings {
             ws_package_settings
               .as_ref()
              .and_then(|v| v.description.clone())
-              .ok_or_else(|| {
-                anyhow::anyhow!("Couldn't inherit value for `description` from workspace")
-              })
+              .context("Couldn't inherit value for `description` from workspace")
           })
           .unwrap()
       })
@@ -1091,9 +1100,7 @@ impl RustAppSettings {
             ws_package_settings
              .as_ref()
              .and_then(|v| v.homepage.clone())
-              .ok_or_else(|| {
-                anyhow::anyhow!("Couldn't inherit value for `homepage` from workspace")
-              })
+              .context("Couldn't inherit value for `homepage` from workspace")
          })
          .unwrap()
      }),
@@ -1103,7 +1110,7 @@ impl RustAppSettings {
            ws_package_settings
              .as_ref()
              .and_then(|v| v.authors.clone())
-              .ok_or_else(|| anyhow::anyhow!("Couldn't inherit value for `authors` from workspace"))
+              .context("Couldn't inherit value for `authors` from workspace")
          })
          .unwrap()
      }),
@@ -1168,16 +1175,20 @@ pub(crate) fn get_cargo_metadata() -> crate::Result<CargoMetadata> {
   let output = Command::new("cargo")
     .args(["metadata", "--no-deps", "--format-version", "1"])
     .current_dir(tauri_dir())
-    .output()?;
+    .output()
+    .map_err(|error| Error::CommandFailed {
+      command: "cargo metadata --no-deps --format-version 1".to_string(),
+      error,
+    })?;
 
   if !output.status.success() {
-    return Err(anyhow::anyhow!(
-      "cargo metadata command exited with a non zero exit code: {}",
-      String::from_utf8_lossy(&output.stderr)
-    ));
+    return Err(Error::CommandFailed {
+      command: "cargo metadata".to_string(),
+      error: std::io::Error::other(String::from_utf8_lossy(&output.stderr)),
+    });
   }
 
-  Ok(serde_json::from_slice(&output.stdout)?)
+  serde_json::from_slice(&output.stdout).context("failed to parse cargo metadata")
 }
 
 /// Get the cargo target directory based on the provided arguments.
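`get_cargo_metadata` now maps a failed spawn and a non-zero exit into an `Error::CommandFailed { command, error }` variant instead of bubbling a bare error through anyhow. The enum definition is outside this excerpt; a plausible shape, presumably written with something like `thiserror`, would be the following (assumed, not confirmed by this diff):

  // Assumed shape only; the actual enum lives in the new (not shown) `error` module.
  use thiserror::Error;

  #[derive(Debug, Error)]
  pub enum Error {
    // An external command (cargo, rustc, rustup, adb, ...) could not be run or failed.
    #[error("failed to run command `{command}`: {error}")]
    CommandFailed {
      command: String,
      #[source]
      error: std::io::Error,
    },
    // A free-form message.
    #[error("{0}")]
    GenericError(String),
  }

Keeping the full command line in the variant means the eventual log line names the exact invocation that failed, while the wrapped `io::Error` is still available through `source()`.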
@@ -1185,10 +1196,12 @@ pub(crate) fn get_cargo_metadata() -> crate::Result<CargoMetadata> {
 /// Otherwise, use the target directory from cargo metadata.
 pub(crate) fn get_cargo_target_dir(args: &[String]) -> crate::Result<PathBuf> {
   let path = if let Some(target) = get_cargo_option(args, "--target-dir") {
-    std::env::current_dir()?.join(target)
+    std::env::current_dir()
+      .context("failed to get current directory")?
+      .join(target)
   } else {
     get_cargo_metadata()
-      .with_context(|| "failed to run 'cargo metadata' command to get target directory")?
+      .context("failed to run 'cargo metadata' command to get target directory")?
       .target_directory
   };
 
@@ -1383,8 +1396,8 @@ fn tauri_config_to_bundle_settings(
     copyright: config.copyright,
     category: match config.category {
       Some(category) => Some(AppCategory::from_str(&category).map_err(|e| match e {
-        Some(e) => anyhow::anyhow!("invalid category, did you mean `{}`?", e),
-        None => anyhow::anyhow!("invalid category"),
+        Some(e) => Error::GenericError(format!("invalid category, did you mean `{e}`?")),
+        None => Error::GenericError("invalid category".to_string()),
       })?),
       None => None,
     },
@@ -1508,9 +1521,7 @@ fn tauri_config_to_bundle_settings(
           .cargo_ws_package_settings
           .as_ref()
           .and_then(|v| v.license.clone())
-          .ok_or_else(|| {
-            anyhow::anyhow!("Couldn't inherit value for `license` from workspace")
-          })
+          .context("Couldn't inherit value for `license` from workspace")
       })
       .unwrap()
     })
@@ -2,7 +2,6 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT
 
-use anyhow::{Context, Result};
 use serde::Deserialize;
 use std::{
   fs,
@@ -11,6 +10,11 @@ use std::{
 
 use tauri_utils::display_path;
 
+use crate::{
+  error::{Context, ErrorExt},
+  Result,
+};
+
 struct PathAncestors<'a> {
   current: Option<&'a Path>,
 }
@@ -57,18 +61,12 @@ impl Config {
     let mut config = Self::default();
 
     let get_config = |path: PathBuf| -> Result<ConfigSchema> {
-      let contents = fs::read_to_string(&path).with_context(|| {
-        format!(
-          "failed to read configuration file `{}`",
-          display_path(&path)
-        )
-      })?;
-      toml::from_str(&contents).with_context(|| {
-        format!(
-          "could not parse TOML configuration in `{}`",
-          display_path(&path)
-        )
-      })
+      let contents =
+        fs::read_to_string(&path).fs_context("failed to read configuration file", path.clone())?;
+      toml::from_str(&contents).context(format!(
+        "could not parse TOML configuration in `{}`",
+        display_path(&path)
+      ))
     };
 
     for current in PathAncestors::new(path) {
@@ -3,9 +3,11 @@
 // SPDX-License-Identifier: MIT
 
 use super::{AppSettings, DevProcess, ExitReason, Options, RustAppSettings, RustupTarget};
-use crate::CommandExt;
+use crate::{
+  error::{Context, ErrorExt},
+  CommandExt, Error,
+};
 
-use anyhow::Context;
 use shared_child::SharedChild;
 use std::{
   fs,
@@ -72,8 +74,7 @@ pub fn run_dev<F: Fn(Option<i32>, ExitReason) + Send + Sync + 'static>(
   dev_cmd.arg("--color");
   dev_cmd.arg("always");
 
-  // TODO: double check this
-  dev_cmd.stdout(os_pipe::dup_stdout()?);
+  dev_cmd.stdout(os_pipe::dup_stdout().unwrap());
   dev_cmd.stderr(Stdio::piped());
 
   dev_cmd.arg("--");
@@ -86,16 +87,18 @@ pub fn run_dev<F: Fn(Option<i32>, ExitReason) + Send + Sync + 'static>(
 
   let dev_child = match SharedChild::spawn(&mut dev_cmd) {
     Ok(c) => Ok(c),
-    Err(e) if e.kind() == ErrorKind::NotFound => Err(anyhow::anyhow!(
-      "`{}` command not found.{}",
-      runner,
+    Err(e) if e.kind() == ErrorKind::NotFound => crate::error::bail!(
+      "`{runner}` command not found.{}",
       if runner == "cargo" {
         " Please follow the Tauri setup guide: https://v2.tauri.app/start/prerequisites/"
       } else {
         ""
       }
-    )),
-    Err(e) => Err(e.into()),
+    ),
+    Err(e) => Err(Error::CommandFailed {
+      command: runner,
+      error: e,
+    }),
   }?;
   let dev_child = Arc::new(dev_child);
   let dev_child_stderr = dev_child.take_stderr().unwrap();
@@ -164,7 +167,8 @@ pub fn build(
   }
 
   if options.target == Some("universal-apple-darwin".into()) {
-    std::fs::create_dir_all(&out_dir).with_context(|| "failed to create project out directory")?;
+    std::fs::create_dir_all(&out_dir)
+      .fs_context("failed to create project out directory", out_dir.clone())?;
 
     let bin_name = bin_path.file_stem().unwrap();
 
@@ -189,9 +193,9 @@ pub fn build(
 
     let lipo_status = lipo_cmd.output_ok()?.status;
     if !lipo_status.success() {
-      return Err(anyhow::anyhow!(
+      crate::error::bail!(
         "Result of `lipo` command was unsuccessful: {lipo_status}. (Is `lipo` installed?)"
-      ));
+      );
     }
   } else {
     build_production_app(options, available_targets, config_features)
@@ -210,8 +214,8 @@ fn build_production_app(
   let runner = build_cmd.get_program().to_string_lossy().into_owned();
   match build_cmd.piped() {
     Ok(status) if status.success() => Ok(()),
-    Ok(_) => Err(anyhow::anyhow!("failed to build app")),
-    Err(e) if e.kind() == ErrorKind::NotFound => Err(anyhow::anyhow!(
+    Ok(_) => crate::error::bail!("failed to build app"),
+    Err(e) if e.kind() == ErrorKind::NotFound => crate::error::bail!(
       "`{}` command not found.{}",
       runner,
       if runner == "cargo" {
@@ -219,8 +223,11 @@ fn build_production_app(
     } else {
       ""
     }
-    )),
-    Err(e) => Err(e.into()),
+    ),
+    Err(e) => Err(Error::CommandFailed {
+      command: runner,
+      error: e,
+    }),
   }
 }
 
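Several hunks in this file replace `anyhow::bail!` / `anyhow::anyhow!` with a crate-local `crate::error::bail!`. The macro itself is not part of this excerpt; a minimal sketch that would satisfy the call sites above (format arguments, early return with an `Err`) might look like this, purely as an illustration:

  // Illustrative sketch; the real macro is defined in the new `error` module.
  macro_rules! bail {
    ($($arg:tt)*) => {
      return Err($crate::Error::GenericError(format!($($arg)*)))
    };
  }
  pub(crate) use bail;

Because the expansion ends in `return`, the macro can sit in a match arm (as in `Ok(_) => crate::error::bail!("failed to build app"),` above) and still type-check, since the arm diverges.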
@@ -302,7 +309,7 @@ fn validate_target(
   if let Some(available_targets) = available_targets {
     if let Some(target) = available_targets.iter().find(|t| t.name == target) {
       if !target.installed {
-        anyhow::bail!(
+        crate::error::bail!(
           "Target {target} is not installed (installed targets: {installed}). Please run `rustup target add {target}`.",
           target = target.name,
           installed = available_targets.iter().filter(|t| t.installed).map(|t| t.name.as_str()).collect::<Vec<&str>>().join(", ")
@@ -310,7 +317,7 @@ fn validate_target(
       }
     }
     if !available_targets.iter().any(|t| t.name == target) {
-      anyhow::bail!("Target {target} does not exist. Please run `rustup target list` to see the available targets.", target = target);
+      crate::error::bail!("Target {target} does not exist. Please run `rustup target list` to see the available targets.", target = target);
     }
   }
   Ok(())
@@ -328,13 +335,7 @@ fn rename_app(
       ""
     };
     let new_path = bin_path.with_file_name(format!("{main_binary_name}{extension}"));
-    fs::rename(&bin_path, &new_path).with_context(|| {
-      format!(
-        "failed to rename `{}` to `{}`",
-        tauri_utils::display_path(bin_path),
-        tauri_utils::display_path(&new_path),
-      )
-    })?;
+    fs::rename(&bin_path, &new_path).fs_context("failed to rename app binary", bin_path.clone())?;
     Ok(new_path)
   } else {
     Ok(bin_path)
@@ -2,18 +2,31 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT
 
-use crate::Result;
+use crate::{
+  error::{Error, ErrorExt},
+  Result,
+};
 
 use std::{fs::read_dir, path::PathBuf, process::Command};
 
 pub fn installed_targets() -> Result<Vec<String>> {
   let output = Command::new("rustc")
     .args(["--print", "sysroot"])
-    .output()?;
+    .output()
+    .map_err(|error| Error::CommandFailed {
+      command: "rustc --print sysroot".to_string(),
+      error,
+    })?;
   let sysroot_path = PathBuf::from(String::from_utf8_lossy(&output.stdout).trim().to_string());
 
   let mut targets = Vec::new();
-  for entry in read_dir(sysroot_path.join("lib").join("rustlib"))?.flatten() {
+  for entry in read_dir(sysroot_path.join("lib").join("rustlib"))
+    .fs_context(
+      "failed to read Rust sysroot",
+      sysroot_path.join("lib").join("rustlib"),
+    )?
+    .flatten()
+  {
     if entry.file_type().map(|t| t.is_dir()).unwrap_or_default() {
       let name = entry.file_name();
       if name != "etc" && name != "src" {
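The `rustc --print sysroot` call above repeats a pattern that also shows up for cargo, rustup and adb in this diff: run a `Command`, and on spawn failure wrap the `io::Error` in `Error::CommandFailed` together with a human-readable command line. If the pattern keeps spreading, it could be factored into a tiny helper; this is only a suggestion using the `CommandFailed` variant sketched earlier, not something the diff itself does:

  use std::process::{Command, Output};

  // Hypothetical convenience wrapper around the repeated map_err(CommandFailed) pattern.
  fn run_command(program: &str, args: &[&str]) -> Result<Output> {
    Command::new(program)
      .args(args)
      .output()
      .map_err(|error| Error::CommandFailed {
        command: format!("{program} {}", args.join(" ")),
        error,
      })
  }

  // usage: let output = run_command("rustc", &["--print", "sysroot"])?;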
@@ -2,19 +2,19 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT
 
-use crate::helpers::{
-  app_paths::tauri_dir,
-  config::{Config, PatternKind},
+use crate::{
+  error::{Context, ErrorExt},
+  helpers::{
+    app_paths::tauri_dir,
+    config::{Config, PatternKind},
+  },
 };
 
-use anyhow::Context;
 use itertools::Itertools;
 use toml_edit::{Array, DocumentMut, InlineTable, Item, TableLike, Value};
 
 use std::{
   collections::{HashMap, HashSet},
-  fs::File,
-  io::Write,
   path::Path,
 };
 
@@ -84,11 +84,11 @@ fn get_enabled_features(list: &HashMap<String, Vec<String>>, feature: &str) -> V
 
 pub fn read_manifest(manifest_path: &Path) -> crate::Result<(DocumentMut, String)> {
   let manifest_str = std::fs::read_to_string(manifest_path)
-    .with_context(|| format!("Failed to read `{manifest_path:?}` file"))?;
+    .fs_context("failed to read Cargo.toml", manifest_path.to_path_buf())?;
 
   let manifest: DocumentMut = manifest_str
     .parse::<DocumentMut>()
-    .with_context(|| "Failed to parse Cargo.toml")?;
+    .context("failed to parse Cargo.toml")?;
 
   Ok((manifest, manifest_str))
 }
@@ -172,10 +172,10 @@ fn write_features<F: Fn(&str) -> bool>(
         *dep = Value::InlineTable(def);
       }
       _ => {
-        return Err(anyhow::anyhow!(
+        crate::error::bail!(
           "Unsupported {} dependency format on Cargo.toml",
           dependency_name
-        ))
+        );
       }
     }
     Ok(true)
@@ -313,10 +313,8 @@ pub fn rewrite_manifest(config: &Config) -> crate::Result<(Manifest, bool)> {
   let new_manifest_str = serialize_manifest(&manifest);
 
   if persist && original_manifest_str != new_manifest_str {
-    let mut manifest_file =
-      File::create(&manifest_path).with_context(|| "failed to open Cargo.toml for rewrite")?;
-    manifest_file.write_all(new_manifest_str.as_bytes())?;
-    manifest_file.flush()?;
+    std::fs::write(&manifest_path, new_manifest_str)
+      .fs_context("failed to rewrite Cargo manifest", manifest_path.clone())?;
     Ok((
       Manifest {
         inner: manifest,
@@ -10,15 +10,13 @@
 )]
 #![cfg(any(target_os = "macos", target_os = "linux", windows))]
 
-use anyhow::Context;
-pub use anyhow::Result;
-
 mod acl;
 mod add;
 mod build;
 mod bundle;
 mod completions;
 mod dev;
+mod error;
 mod helpers;
 mod icon;
 mod info;
@@ -34,6 +32,7 @@ mod signer;
 use clap::{ArgAction, CommandFactory, FromArgMatches, Parser, Subcommand, ValueEnum};
 use env_logger::fmt::style::{AnsiColor, Style};
 use env_logger::Builder;
+pub use error::{Error, ErrorExt, Result};
 use log::Level;
 use serde::{Deserialize, Serialize};
 use std::io::{BufReader, Write};
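`lib.rs` drops the `pub use anyhow::Result;` re-export and instead exposes the new module as `pub use error::{Error, ErrorExt, Result};`, so existing `crate::Result<T>` signatures keep compiling unchanged. A hypothetical caller after the refactor would read like the sketch below (the function, file name and values are made up for illustration; only the `context`/`fs_context` API comes from this diff):

  use crate::{error::Context, ErrorExt};

  // Hypothetical example: signatures stay `crate::Result<T>`, errors gain path and parse context.
  pub fn read_package_json(dir: &std::path::Path) -> crate::Result<serde_json::Value> {
    let path = dir.join("package.json");
    let raw = std::fs::read_to_string(&path).fs_context("failed to read package.json", path)?;
    serde_json::from_str(&raw).context("failed to parse package.json")
  }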
@@ -48,39 +47,46 @@ use std::{
   sync::{Arc, Mutex},
 };
 
+use crate::error::Context;
+
 /// Tauri configuration argument option.
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct ConfigValue(pub(crate) serde_json::Value);
 
 impl FromStr for ConfigValue {
-  type Err = anyhow::Error;
+  type Err = Error;
 
   fn from_str(config: &str) -> std::result::Result<Self, Self::Err> {
     if config.starts_with('{') {
-      Ok(Self(
-        serde_json::from_str(config).context("invalid configuration JSON")?,
-      ))
+      Ok(Self(serde_json::from_str(config).with_context(|| {
+        format!("failed to parse config `{config}` as JSON")
+      })?))
     } else {
       let path = PathBuf::from(config);
-      if path.exists() {
-        let raw = &read_to_string(&path)
-          .with_context(|| format!("invalid configuration at file {config}"))?;
-        match path.extension() {
-          Some(ext) if ext == "toml" => Ok(Self(::toml::from_str(raw)?)),
-          Some(ext) if ext == "json5" => Ok(Self(::json5::from_str(raw)?)),
-          // treat all other extensions as json
-          _ => Ok(Self(
-            // from tauri-utils/src/config/parse.rs:
-            // we also want to support **valid** json5 in the .json extension
-            // if the json5 is not valid the serde_json error for regular json will be returned.
-            match ::json5::from_str(raw) {
-              Ok(json5) => json5,
-              Err(_) => serde_json::from_str(raw)?,
-            },
-          )),
-        }
-      } else {
-        anyhow::bail!("provided configuration path does not exist")
+      let raw =
+        read_to_string(&path).fs_context("failed to read configuration file", path.clone())?;
+      match path.extension() {
+        Some(ext) if ext == "toml" => {
+          Ok(Self(::toml::from_str(&raw).with_context(|| {
+            format!("failed to parse config at {} as TOML", path.display())
+          })?))
+        }
+        Some(ext) if ext == "json5" => {
+          Ok(Self(::json5::from_str(&raw).with_context(|| {
+            format!("failed to parse config at {} as JSON5", path.display())
+          })?))
+        }
+        // treat all other extensions as json
+        _ => Ok(Self(
+          // from tauri-utils/src/config/parse.rs:
+          // we also want to support **valid** json5 in the .json extension
+          // if the json5 is not valid the serde_json error for regular json will be returned.
+          match ::json5::from_str(&raw) {
+            Ok(json5) => json5,
+            Err(_) => serde_json::from_str(&raw)
+              .with_context(|| format!("failed to parse config at {} as JSON", path.display()))?,
+          },
+        )),
       }
     }
   }
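With `FromStr` now returning the crate error type, a bad `--config` argument reports which format was attempted and for which file. A hypothetical invocation, just to show both branches (the config values and file name are made up for illustration):

  use std::str::FromStr;

  fn parse_cli_config() -> crate::Result<(ConfigValue, ConfigValue)> {
    // Inline JSON is parsed directly; a failure reads
    // "failed to parse config `...` as JSON".
    let inline = ConfigValue::from_str(r#"{ "build": { "devUrl": "http://localhost:1420" } }"#)?;
    // Anything else is treated as a path; the file is read with `fs_context`
    // and then parsed as TOML, JSON5 or JSON depending on the extension.
    let from_file = ConfigValue::from_str("tauri.linux.conf.json")?;
    Ok((inline, from_file))
  }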
@@ -190,19 +196,7 @@ where
   A: Into<OsString> + Clone,
 {
   if let Err(e) = try_run(args, bin_name) {
-    let mut message = e.to_string();
-    if e.chain().count() > 1 {
-      message.push(':');
-    }
-    e.chain().skip(1).for_each(|cause| {
-      let m = cause.to_string();
-      if !message.contains(&m) {
-        message.push('\n');
-        message.push_str(" - ");
-        message.push_str(&m);
-      }
-    });
-    log::error!("{message}");
+    log::error!("{e}");
     exit(1);
   }
 }
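The manual anyhow `chain()` walk is gone: the CLI now logs the error once and exits, which implies the new `Error`'s `Display` output already carries its context chain. One way that could be written by hand, if the derive above were not used for `Display`, is sketched here (an assumption, the real rendering is not in this excerpt):

  use std::fmt;

  // Sketch: render a `Context` variant together with the causes beneath it.
  impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
      match self {
        Error::Context(msg, source) => {
          write!(f, "{msg}")?;
          // walk the std::error::Error source chain
          let mut cause: Option<&dyn std::error::Error> = Some(source.as_ref());
          while let Some(err) = cause {
            write!(f, "\n  - {err}")?;
            cause = err.source();
          }
          Ok(())
        }
        Error::GenericError(msg) => write!(f, "{msg}"),
      }
    }
  }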
@@ -346,12 +340,19 @@ impl CommandExt for Command {
 
   fn output_ok(&mut self) -> crate::Result<Output> {
     let program = self.get_program().to_string_lossy().into_owned();
-    log::debug!(action = "Running"; "Command `{} {}`", program, self.get_args().map(|arg| arg.to_string_lossy()).fold(String::new(), |acc, arg| format!("{acc} {arg}")));
+    let args = self
+      .get_args()
+      .map(|arg| arg.to_string_lossy())
+      .fold(String::new(), |acc, arg| format!("{acc} {arg}"));
+    let cmdline = format!("{program} {args}");
+    log::debug!(action = "Running"; "Command `{cmdline}`");
 
     self.stdout(Stdio::piped());
     self.stderr(Stdio::piped());
 
-    let mut child = self.spawn()?;
+    let mut child = self
+      .spawn()
+      .with_context(|| format!("failed to run command `{cmdline}`"))?;
 
     let mut stdout = child.stdout.take().map(BufReader::new).unwrap();
     let stdout_lines = Arc::new(Mutex::new(Vec::new()));
@@ -391,7 +392,9 @@ impl CommandExt for Command {
       }
     });
 
-    let status = child.wait()?;
+    let status = child
+      .wait()
+      .with_context(|| format!("failed to run command `{cmdline}`"))?;
 
     let output = Output {
       status,
@@ -402,7 +405,10 @@ impl CommandExt for Command {
     if output.status.success() {
       Ok(output)
     } else {
-      Err(anyhow::anyhow!("failed to run {}", program))
+      crate::error::bail!(
        "failed to run command `{cmdline}`: command exited with status code {}",
        output.status.code().unwrap_or(-1)
      );
     }
   }
 }
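`output_ok` now reports the full command line both when the process cannot be spawned and when it exits non-zero, instead of just the program name. A hypothetical caller, to illustrate what surfaces on failure (the function itself is made up; `output_ok` is the trait method changed above):

  use std::process::Command;

  fn list_rust_targets() -> crate::Result<Vec<String>> {
    // On failure this now surfaces something like
    // "failed to run command `rustup target list`: command exited with status code 1".
    let output = Command::new("rustup").args(["target", "list"]).output_ok()?;
    Ok(
      String::from_utf8_lossy(&output.stdout)
        .lines()
        .map(ToString::to_string)
        .collect(),
    )
  }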
@@ -2,7 +2,7 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT
 
-use crate::Result;
+use crate::{error::Context, ErrorExt, Result};
 
 use serde_json::{Map, Value};
 use tauri_utils::acl::{
@@ -22,9 +22,17 @@ pub fn migrate(tauri_dir: &Path) -> Result<MigratedConfig> {
   {
     let migrated = migrate_config(&mut config)?;
     if config_path.extension().is_some_and(|ext| ext == "toml") {
-      fs::write(&config_path, toml::to_string_pretty(&config)?)?;
+      fs::write(
+        &config_path,
+        toml::to_string_pretty(&config).context("failed to serialize config")?,
+      )
+      .fs_context("failed to write config", config_path.clone())?;
     } else {
-      fs::write(&config_path, serde_json::to_string_pretty(&config)?)?;
+      fs::write(
+        &config_path,
+        serde_json::to_string_pretty(&config).context("failed to serialize config")?,
+      )
+      .fs_context("failed to write config", config_path.clone())?;
     }
 
     let mut permissions: Vec<PermissionEntry> = vec!["core:default"]
@@ -34,7 +42,10 @@ pub fn migrate(tauri_dir: &Path) -> Result<MigratedConfig> {
     permissions.extend(migrated.permissions.clone());
 
     let capabilities_path = config_path.parent().unwrap().join("capabilities");
-    fs::create_dir_all(&capabilities_path)?;
+    fs::create_dir_all(&capabilities_path).fs_context(
+      "failed to create capabilities directory",
+      capabilities_path.clone(),
+    )?;
     fs::write(
       capabilities_path.join("migrated.json"),
       serde_json::to_string_pretty(&Capability {
@@ -46,7 +57,12 @@ pub fn migrate(tauri_dir: &Path) -> Result<MigratedConfig> {
         webviews: vec![],
         permissions,
         platforms: None,
-      })?,
+      })
+      .context("failed to serialize capabilities")?,
+    )
+    .fs_context(
+      "failed to write capabilities",
+      capabilities_path.join("migrated.json"),
     )?;
 
     return Ok(migrated);
@@ -375,7 +391,8 @@ fn process_security(security: &mut Map<String, Value>) -> Result<()> {
   let csp = if csp_value.is_null() {
     csp_value
   } else {
-    let mut csp: tauri_utils::config_v1::Csp = serde_json::from_value(csp_value)?;
+    let mut csp: tauri_utils::config_v1::Csp =
+      serde_json::from_value(csp_value).context("failed to deserialize CSP")?;
     match &mut csp {
       tauri_utils::config_v1::Csp::Policy(csp) => {
         if csp.contains("connect-src") {
@@ -399,7 +416,7 @@ fn process_security(security: &mut Map<String, Value>) -> Result<()> {
         }
       }
     }
-    serde_json::to_value(csp)?
+    serde_json::to_value(csp).context("failed to serialize CSP")?
   };
 
   security.insert("csp".into(), csp);
@@ -423,7 +440,8 @@ fn process_allowlist(
   tauri_config: &mut Map<String, Value>,
   allowlist: Value,
 ) -> Result<tauri_utils::config_v1::AllowlistConfig> {
-  let allowlist: tauri_utils::config_v1::AllowlistConfig = serde_json::from_value(allowlist)?;
+  let allowlist: tauri_utils::config_v1::AllowlistConfig =
+    serde_json::from_value(allowlist).context("failed to deserialize allowlist")?;
 
   if allowlist.protocol.asset_scope != Default::default() {
     let security = tauri_config
@@ -435,7 +453,8 @@ fn process_allowlist(
     let mut asset_protocol = Map::new();
     asset_protocol.insert(
       "scope".into(),
-      serde_json::to_value(allowlist.protocol.asset_scope.clone())?,
+      serde_json::to_value(allowlist.protocol.asset_scope.clone())
+        .context("failed to serialize asset scope")?,
     );
     if allowlist.protocol.asset {
       asset_protocol.insert("enable".into(), true.into());
@@ -639,7 +658,10 @@ fn allowlist_to_permissions(
 
 fn process_cli(plugins: &mut Map<String, Value>, cli: Value) -> Result<()> {
   if let Some(cli) = cli.as_object() {
-    plugins.insert("cli".into(), serde_json::to_value(cli)?);
+    plugins.insert(
+      "cli".into(),
+      serde_json::to_value(cli).context("failed to serialize CLI")?,
+    );
   }
   Ok(())
 }
@@ -663,7 +685,10 @@ fn process_updater(
       .unwrap_or_default()
       || updater.get("pubkey").is_some()
     {
-      plugins.insert("updater".into(), serde_json::to_value(updater)?);
+      plugins.insert(
+        "updater".into(),
+        serde_json::to_value(updater).context("failed to serialize updater")?,
+      );
       migrated.plugins.insert("updater".to_string());
     }
   }
@@ -3,10 +3,10 @@
 // SPDX-License-Identifier: MIT
 
 use crate::{
+  error::Context,
   helpers::{app_paths::walk_builder, npm::PackageManager},
-  Result,
+  Error, ErrorExt, Result,
 };
-use anyhow::Context;
 use itertools::Itertools;
 use magic_string::MagicString;
 use oxc_allocator::Allocator;
@@ -101,7 +101,8 @@ pub fn migrate(frontend_dir: &Path) -> Result<Vec<String>> {
       let path = entry.path();
       let ext = path.extension().unwrap_or_default();
       if JS_EXTENSIONS.iter().any(|e| e == &ext) {
-        let js_contents = std::fs::read_to_string(path)?;
+        let js_contents =
+          std::fs::read_to_string(path).fs_context("failed to read JS file", path.to_path_buf())?;
         let new_contents = migrate_imports(
           path,
           &js_contents,
@@ -110,7 +111,7 @@ pub fn migrate(frontend_dir: &Path) -> Result<Vec<String>> {
         )?;
         if new_contents != js_contents {
           fs::write(path, new_contents)
-            .with_context(|| format!("Error writing {}", path.display()))?;
+            .fs_context("failed to write JS file", path.to_path_buf())?;
         }
       }
     }
@@ -166,7 +167,7 @@ fn migrate_imports<'a>(
   let allocator = Allocator::default();
   let ret = Parser::new(&allocator, js_source, source_type).parse();
   if !ret.errors.is_empty() {
-    anyhow::bail!(
+    crate::error::bail!(
       "failed to parse {} as valid Javascript/Typescript file",
       path.display()
     )
@@ -193,8 +194,12 @@ fn migrate_imports<'a>(
             new_module,
             Default::default(),
           )
-          .map_err(|e| anyhow::anyhow!("{e}"))
-          .context("failed to replace import source")?;
+          .map_err(|e| {
+            Error::Context(
+              "failed to replace import source".to_string(),
+              e.to_string().into(),
+            )
+          })?;
 
           // if module was pluginified, add to packages
           if let Some(plugin_name) = new_module.strip_prefix("@tauri-apps/plugin-") {
@@ -279,8 +284,12 @@ fn migrate_imports<'a>(
                 new_identifier,
                 Default::default(),
               )
-              .map_err(|e| anyhow::anyhow!("{e}"))
-              .context("failed to rename identifier")?;
+              .map_err(|e| {
+                Error::Context(
+                  "failed to rename identifier".to_string(),
+                  e.to_string().into(),
+                )
+              })?;
             } else {
               // if None, we need to remove this specifier,
              // it will also be replaced with an import from its new plugin below
@@ -297,8 +306,12 @@ fn migrate_imports<'a>(
 
              magic_js_source
                .remove(script_start + start as i64, script_start + end as i64)
-                .map_err(|e| anyhow::anyhow!("{e}"))
-                .context("failed to remove identifier")?;
+                .map_err(|e| {
+                  Error::Context(
+                    "failed to remove identifier".to_string(),
+                    e.to_string().into(),
+                  )
+                })?;
            }
          }
        }
@@ -322,8 +335,7 @@ fn migrate_imports<'a>(
     for import in imports_to_add {
       magic_js_source
         .append_right(script_start as u32 + start, &import)
-        .map_err(|e| anyhow::anyhow!("{e}"))
-        .context("failed to add import")?;
+        .map_err(|e| Error::Context("failed to add import".to_string(), e.to_string().into()))?;
     }
   }
 
@@ -331,8 +343,9 @@ fn migrate_imports<'a>(
     for stmt in stmts_to_add {
       magic_js_source
         .append_right(script_start as u32 + start, stmt)
-        .map_err(|e| anyhow::anyhow!("{e}"))
-        .context("failed to add statement")?;
+        .map_err(|e| {
+          Error::Context("failed to add statement".to_string(), e.to_string().into())
+        })?;
     }
   }
 }
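All of the `magic_string` call sites above use the same shape: `map_err(|e| Error::Context(message, e.to_string().into()))`, stringifying the library error before boxing it. If the second field of `Error::Context` is indeed something like `Box<dyn std::error::Error + Send + Sync>` (an assumption; a `String` converts into that via `.into()`), the repetition could be folded into a small helper. This is only a suggestion, not part of the diff:

  use std::fmt::Display;

  // Hypothetical helper: build the closure used by the map_err calls above.
  fn display_context<E: Display>(message: &'static str) -> impl FnOnce(E) -> Error {
    move |e| Error::Context(message.to_string(), e.to_string().into())
  }

  // usage: .map_err(display_context("failed to add import"))?;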
@@ -3,11 +3,11 @@
 // SPDX-License-Identifier: MIT
 
 use crate::{
+  error::ErrorExt,
   interface::rust::manifest::{read_manifest, serialize_manifest},
   Result,
 };
 
-use anyhow::Context;
 use tauri_utils::config_v1::Allowlist;
 use toml_edit::{DocumentMut, Entry, Item, TableLike, Value};
 
@@ -21,7 +21,7 @@ pub fn migrate(tauri_dir: &Path) -> Result<()> {
   migrate_manifest(&mut manifest)?;
 
   std::fs::write(&manifest_path, serialize_manifest(&manifest))
-    .context("failed to rewrite Cargo manifest")?;
+    .fs_context("failed to rewrite Cargo manifest", manifest_path.clone())?;
 
   Ok(())
 }
@@ -3,12 +3,11 @@
 // SPDX-License-Identifier: MIT
 
 use crate::{
+  error::Context,
   helpers::app_paths::{frontend_dir, tauri_dir},
   Result,
 };
 
-use anyhow::Context;
-
 mod config;
 mod frontend;
 mod manifest;
@@ -3,6 +3,7 @@
 // SPDX-License-Identifier: MIT
 
 use crate::{
+  error::{Context, ErrorExt},
   helpers::{
     app_paths::{frontend_dir, tauri_dir},
     npm::PackageManager,
@@ -13,7 +14,6 @@ use crate::{
 
 use std::{fs::read_to_string, path::Path};
 
-use anyhow::Context;
 use toml_edit::{DocumentMut, Item, Table, TableLike, Value};
 
 pub fn run() -> Result<()> {
@@ -28,8 +28,10 @@ pub fn run() -> Result<()> {
 
   migrate_npm_dependencies(frontend_dir)?;
 
-  std::fs::write(&manifest_path, serialize_manifest(&manifest))
-    .context("failed to rewrite Cargo manifest")?;
+  std::fs::write(&manifest_path, serialize_manifest(&manifest)).fs_context(
+    "failed to rewrite Cargo manifest",
+    manifest_path.to_path_buf(),
+  )?;
 
   Ok(())
 }
@@ -97,14 +99,19 @@ fn migrate_permissions(tauri_dir: &Path) -> Result<()> {
   ];
 
   for entry in walkdir::WalkDir::new(tauri_dir.join("capabilities")) {
-    let entry = entry?;
+    let entry = entry.map_err(std::io::Error::other).fs_context(
+      "failed to walk capabilities directory",
+      tauri_dir.join("capabilities"),
+    )?;
     let path = entry.path();
     if path.extension().is_some_and(|ext| ext == "json") {
-      let mut capability = read_to_string(path).context("failed to read capability")?;
+      let mut capability =
+        read_to_string(path).fs_context("failed to read capability", path.to_path_buf())?;
       for plugin in core_plugins {
        capability = capability.replace(&format!("\"{plugin}:"), &format!("\"core:{plugin}:"));
      }
-      std::fs::write(path, capability).context("failed to rewrite capability")?;
+      std::fs::write(path, capability)
+        .fs_context("failed to rewrite capability", path.to_path_buf())?;
     }
   }
   Ok(())
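`walkdir::Error` is not an `io::Error`, so the capabilities walk above first converts it with `std::io::Error::other` before `fs_context` can attach the directory path. The same adapter works for any error type whenever a path-aware message is wanted; a small standalone illustration, assuming the `ErrorExt` sketched earlier (the function itself is hypothetical):

  use std::{io, path::Path};

  // Convert an arbitrary error into io::Error so fs_context can be reused.
  fn directory_size(dir: &Path) -> crate::Result<u64> {
    let mut total = 0;
    for entry in walkdir::WalkDir::new(dir) {
      let entry = entry
        .map_err(io::Error::other)
        .fs_context("failed to walk directory", dir.to_path_buf())?;
      total += entry.metadata().map(|m| m.len()).unwrap_or(0);
    }
    Ok(total)
  }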
@@ -3,6 +3,7 @@
 // SPDX-License-Identifier: MIT
 
 use crate::{
+  error::{bail, Context, ErrorExt},
   helpers::{
     app_paths::tauri_dir,
     cargo_manifest::{crate_version, CargoLock, CargoManifest},
@@ -13,8 +14,6 @@ use crate::{
 
 use std::{fs::read_to_string, str::FromStr};
 
-use anyhow::{bail, Context};
-
 mod migrations;
 
 pub fn command() -> Result<()> {
@@ -22,17 +21,24 @@ pub fn command() -> Result<()> {
 
   let tauri_dir = tauri_dir();
 
-  let manifest_contents =
-    read_to_string(tauri_dir.join("Cargo.toml")).context("failed to read Cargo manifest")?;
-  let manifest = toml::from_str::<CargoManifest>(&manifest_contents)
-    .context("failed to parse Cargo manifest")?;
+  let manifest_contents = read_to_string(tauri_dir.join("Cargo.toml")).fs_context(
+    "failed to read Cargo manifest",
+    tauri_dir.join("Cargo.toml"),
+  )?;
+  let manifest = toml::from_str::<CargoManifest>(&manifest_contents).with_context(|| {
+    format!(
+      "failed to parse Cargo manifest {}",
+      tauri_dir.join("Cargo.toml").display()
+    )
+  })?;
 
   let workspace_dir = get_workspace_dir()?;
   let lock_path = workspace_dir.join("Cargo.lock");
   let lock = if lock_path.exists() {
-    let lockfile_contents = read_to_string(lock_path).context("failed to read Cargo lockfile")?;
-    let lock =
-      toml::from_str::<CargoLock>(&lockfile_contents).context("failed to parse Cargo lockfile")?;
+    let lockfile_contents =
+      read_to_string(&lock_path).fs_context("failed to read Cargo lockfile", &lock_path)?;
+    let lock = toml::from_str::<CargoLock>(&lockfile_contents)
+      .with_context(|| format!("failed to parse Cargo lockfile {}", lock_path.display()))?;
     Some(lock)
   } else {
     None
@@ -41,7 +47,8 @@ pub fn command() -> Result<()> {
   let tauri_version = crate_version(tauri_dir, Some(&manifest), lock.as_ref(), "tauri")
     .version
     .context("failed to get tauri version")?;
-  let tauri_version = semver::Version::from_str(&tauri_version)?;
+  let tauri_version = semver::Version::from_str(&tauri_version)
+    .with_context(|| format!("failed to parse tauri version {tauri_version}"))?;
 
   if tauri_version.major == 1 {
     migrations::v1::run().context("failed to migrate from v1")?;
@@ -4,14 +4,14 @@
 
 use super::{detect_target_ok, ensure_init, env, get_app, get_config, read_options, MobileTarget};
 use crate::{
+  error::{Context, ErrorExt},
   helpers::config::{get as get_tauri_config, reload as reload_tauri_config},
   interface::{AppInterface, Interface},
   mobile::CliOptions,
-  Result,
+  Error, Result,
 };
 use clap::{ArgAction, Parser};
 
-use anyhow::Context;
 use cargo_mobile2::{
   android::{adb, target::Target},
   opts::Profile,
@@ -144,17 +144,23 @@ pub fn command(options: Options) -> Result<()> {
         log::info!("Installing target {}", target.triple());
         target
           .install()
-          .context("failed to install target with rustup")?;
+          .map_err(|error| Error::CommandFailed {
+            command: "rustup target add".to_string(),
+            error,
+          })
+          .context("failed to install target")?;
       }
 
-      target.build(
-        &config,
-        &metadata,
-        &env,
-        cli_options.noise_level,
-        true,
-        profile,
-      )?;
+      target
+        .build(
+          &config,
+          &metadata,
+          &env,
+          cli_options.noise_level,
+          true,
+          profile,
+        )
+        .context("failed to build Android app")?;
 
       if !validated_lib {
         validated_lib = true;
@@ -164,17 +170,17 @@ pub fn command(options: Options) -> Result<()> {
           .target_dir(target.triple, profile)
          .join(config.so_name());
 
-        validate_lib(&lib_path)?;
+        validate_lib(&lib_path).context("failed to validate library")?;
       }
 
       Ok(())
     },
   )
-  .map_err(|e| anyhow::anyhow!(e.to_string()))?
+  .map_err(|e| Error::GenericError(e.to_string()))?
 }
 
 fn validate_lib(path: &Path) -> Result<()> {
-  let so_bytes = std::fs::read(path)?;
+  let so_bytes = std::fs::read(path).fs_context("failed to read library", path.to_path_buf())?;
   let elf = elf::ElfBytes::<elf::endian::AnyEndian>::minimal_parse(&so_bytes)
     .context("failed to parse ELF")?;
   let (symbol_table, string_table) = elf
@@ -190,7 +196,7 @@ fn validate_lib(path: &Path) -> Result<()> {
   }
 
   if !symbols.contains(&"Java_app_tauri_plugin_PluginManager_handlePluginResponse") {
-    anyhow::bail!(
+    crate::error::bail!(
       "Library from {} does not include required runtime symbols. This means you are likely missing the tauri::mobile_entry_point macro usage, see the documentation for more information: https://v2.tauri.app/start/migrate/from-tauri-1",
       path.display()
     );
@@ -237,7 +243,7 @@ fn adb_forward_port(
     let device = devices.first().unwrap();
     Some((device.serial_no().to_string(), device.name().to_string()))
   } else if devices.len() > 1 {
-    anyhow::bail!("Multiple Android devices are connected ({}), please disconnect devices you do not intend to use so Tauri can determine which to use",
+    crate::error::bail!("Multiple Android devices are connected ({}), please disconnect devices you do not intend to use so Tauri can determine which to use",
       devices.iter().map(|d| d.name()).collect::<Vec<_>>().join(", "));
   } else {
     // when building the app without running to a device, we might have an empty devices list
@@ -249,7 +255,11 @@ fn adb_forward_port(
 
   // clear port forwarding for all devices
   for device in &devices {
-    let reverse_list_output = adb_reverse_list(env, device.serial_no())?;
+    let reverse_list_output =
+      adb_reverse_list(env, device.serial_no()).map_err(|error| Error::CommandFailed {
+        command: "adb reverse --list".to_string(),
+        error,
+      })?;
 
     // check if the device has the port forwarded
     if String::from_utf8_lossy(&reverse_list_output.stdout).contains(&forward) {
@@ -271,11 +281,20 @@ fn adb_forward_port(
       log::info!("{forward} already forwarded to {target_device_name}");
     } else {
       loop {
-        run_adb_reverse(env, &target_device_serial_no, &forward, &forward).with_context(|| {
-          format!("failed to forward port with adb, is the {target_device_name} device connected?",)
+        run_adb_reverse(env, &target_device_serial_no, &forward, &forward).map_err(|error| {
+          Error::CommandFailed {
+            command: format!("adb reverse {forward} {forward}"),
+            error,
+          }
         })?;
 
-        let reverse_list_output = adb_reverse_list(env, &target_device_serial_no)?;
+        let reverse_list_output =
+          adb_reverse_list(env, &target_device_serial_no).map_err(|error| {
+            Error::CommandFailed {
+              command: "adb reverse --list".to_string(),
+              error,
+            }
+          })?;
         // wait and retry until the port has actually been forwarded
         if String::from_utf8_lossy(&reverse_list_output.stdout).contains(&forward) {
           break;
@@ -8,6 +8,7 @@ use super::{
 };
 use crate::{
   build::Options as BuildOptions,
+  error::Context,
   helpers::{
     app_paths::tauri_dir,
     config::{get as get_tauri_config, ConfigHandle},
@@ -15,11 +16,10 @@ use crate::{
   },
   interface::{AppInterface, Interface, Options as InterfaceOptions},
   mobile::{write_options, CliOptions},
-  ConfigValue, Result,
+  ConfigValue, Error, Result,
 };
 use clap::{ArgAction, Parser};
 
-use anyhow::Context;
 use cargo_mobile2::{
   android::{aab, apk, config::Config as AndroidConfig, env::Env, target::Target},
   opts::{NoiseLevel, Profile},
@@ -154,7 +154,7 @@ pub fn command(options: Options, noise_level: NoiseLevel) -> Result<()> {
   };
 
   let tauri_path = tauri_dir();
-  set_current_dir(tauri_path).with_context(|| "failed to change current working directory")?;
+  set_current_dir(tauri_path).context("failed to set current directory to Tauri directory")?;
 
   ensure_init(
     &tauri_config,
@@ -175,10 +175,16 @@ pub fn command(options: Options, noise_level: NoiseLevel) -> Result<()> {
     log::info!("Installing target {}", first_target.triple());
     first_target
       .install()
-      .context("failed to install target with rustup")?;
+      .map_err(|error| Error::CommandFailed {
+        command: "rustup target add".to_string(),
+        error,
+      })
+      .context("failed to install target")?;
   }
   // run an initial build to initialize plugins
-  first_target.build(&config, &metadata, &env, noise_level, true, profile)?;
+  first_target
+    .build(&config, &metadata, &env, noise_level, true, profile)
+    .context("failed to build Android app")?;
 
   let open = options.open;
   let _handle = run_build(
@@ -248,7 +254,8 @@ fn run_build(
       profile,
       get_targets_or_all(options.targets.clone().unwrap_or_default())?,
       options.split_per_abi,
-    )?
+    )
+    .context("failed to build APK")?
   } else {
     Vec::new()
   };
@@ -261,7 +268,8 @@ fn run_build(
       profile,
      get_targets_or_all(options.targets.unwrap_or_default())?,
      options.split_per_abi,
-    )?
+    )
+    .context("failed to build AAB")?
  } else {
     Vec::new()
   };
@@ -285,12 +293,8 @@ fn get_targets_or_all<'a>(targets: Vec<String>) -> Result<Vec<&'a Target<'a>>> {
     .join(",");
 
   for t in targets {
-    let target = Target::for_name(&t).ok_or_else(|| {
-      anyhow::anyhow!(
-        "Target {} is invalid; the possible targets are {}",
-        t,
-        possible_targets
-      )
-    })?;
+    let target = Target::for_name(&t).with_context(|| {
+      format!("Target {t} is invalid; the possible targets are {possible_targets}",)
+    })?;
     outs.push(target);
   }
@@ -8,6 +8,7 @@ use super::{
 };
 use crate::{
 dev::Options as DevOptions,
+error::{Context, ErrorExt},
 helpers::{
 app_paths::tauri_dir,
 config::{get as get_tauri_config, ConfigHandle},
@@ -18,11 +19,10 @@ use crate::{
 use_network_address_for_dev_url, write_options, CliOptions, DevChild, DevHost, DevProcess,
 TargetDevice,
 },
-ConfigValue, Result,
+ConfigValue, Error, Result,
 };
 use clap::{ArgAction, Parser};

-use anyhow::Context;
 use cargo_mobile2::{
 android::{
 config::{Config as AndroidConfig, Metadata as AndroidMetadata},
@@ -145,7 +145,10 @@ fn run_command(options: Options, noise_level: NoiseLevel) -> Result<()> {
 if let Some(root_certificate_path) = &options.root_certificate_path {
 std::env::set_var(
 "TAURI_DEV_ROOT_CERTIFICATE",
-std::fs::read_to_string(root_certificate_path).context("failed to read certificate file")?,
+std::fs::read_to_string(root_certificate_path).fs_context(
+"failed to read certificate file",
+root_certificate_path.clone(),
+)?,
 );
 }

@@ -195,7 +198,7 @@ fn run_command(options: Options, noise_level: NoiseLevel) -> Result<()> {
 };

 let tauri_path = tauri_dir();
-set_current_dir(tauri_path).with_context(|| "failed to change current working directory")?;
+set_current_dir(tauri_path).context("failed to set current directory to Tauri directory")?;

 ensure_init(
 &tauri_config,
@@ -263,23 +266,26 @@ fn run_dev(
 .unwrap_or_else(|| Target::all().values().next().unwrap());
 if !installed_targets.contains(&target.triple().into()) {
 log::info!("Installing target {}", target.triple());
-target
-.install()
-.context("failed to install target with rustup")?;
+target.install().map_err(|error| Error::CommandFailed {
+command: "rustup target add".to_string(),
+error,
+})?;
 }

-target.build(
-config,
-metadata,
-&env,
-noise_level,
-true,
-if options.release_mode {
-Profile::Release
-} else {
-Profile::Debug
-},
-)?;
+target
+.build(
+config,
+metadata,
+&env,
+noise_level,
+true,
+if options.release_mode {
+Profile::Release
+} else {
+Profile::Debug
+},
+)
+.context("failed to build Android app")?;

 let open = options.open;
 interface.mobile_dev(
@@ -358,5 +364,5 @@ fn run(
 ".MainActivity".into(),
 )
 .map(DevChild::new)
-.map_err(Into::into)
+.context("failed to run Android app")
 }
@@ -35,8 +35,9 @@ use super::{
 OptionsHandle, Target as MobileTarget, MIN_DEVICE_MATCH_SCORE,
 };
 use crate::{
+error::Context,
 helpers::config::{BundleResources, Config as TauriConfig},
-ConfigValue, Result,
+ConfigValue, Error, ErrorExt, Result,
 };

 mod android_studio_script;
@@ -192,28 +193,33 @@ pub fn get_config(
 }

 pub fn env(non_interactive: bool) -> Result<Env> {
-let env = super::env()?;
-ensure_env(non_interactive)?;
-cargo_mobile2::android::env::Env::from_env(env).map_err(Into::into)
+let env = super::env().context("failed to setup Android environment")?;
+ensure_env(non_interactive).context("failed to ensure Android environment")?;
+cargo_mobile2::android::env::Env::from_env(env).context("failed to load Android environment")
 }

 fn download_cmdline_tools(extract_path: &Path) -> Result<()> {
 log::info!("Downloading Android command line tools...");

-let mut response = crate::helpers::http::get(CMDLINE_TOOLS_URL)?;
+let mut response = crate::helpers::http::get(CMDLINE_TOOLS_URL)
+.context("failed to download Android command line tools")?;
 let body = response
 .body_mut()
 .with_config()
 .limit(200 * 1024 * 1024 /* 200MB */)
-.read_to_vec()?;
+.read_to_vec()
+.context("failed to read Android command line tools download response")?;

-let mut zip = zip::ZipArchive::new(Cursor::new(body))?;
+let mut zip = zip::ZipArchive::new(Cursor::new(body))
+.context("failed to create zip archive from Android command line tools download response")?;

 log::info!(
 "Extracting Android command line tools to {}",
 extract_path.display()
 );
-zip.extract(extract_path)?;
+zip
+.extract(extract_path)
+.context("failed to extract Android command line tools")?;

 Ok(())
 }
@@ -238,7 +244,7 @@ fn ensure_java() -> Result<()> {
 log::info!("Using Android Studio's default Java installation: {default_java_home}");
 std::env::set_var("JAVA_HOME", default_java_home);
 } else if which::which("java").is_err() {
-anyhow::bail!("Java not found in PATH, default Android Studio Java installation not found at {default_java_home} and JAVA_HOME environment variable not set. Please install Java before proceeding");
+crate::error::bail!("Java not found in PATH, default Android Studio Java installation not found at {default_java_home} and JAVA_HOME environment variable not set. Please install Java before proceeding");
 }
 }

@@ -272,7 +278,7 @@ fn ensure_sdk(non_interactive: bool) -> Result<()> {
 default_android_home.display()
 );
 } else if non_interactive {
-anyhow::bail!("Android SDK not found. Make sure the SDK and NDK are installed and the ANDROID_HOME and NDK_HOME environment variables are set.");
+crate::error::bail!("Android SDK not found. Make sure the SDK and NDK are installed and the ANDROID_HOME and NDK_HOME environment variables are set.");
 } else {
 log::error!(
 "Android SDK not found at {}",
@@ -282,7 +288,7 @@ fn ensure_sdk(non_interactive: bool) -> Result<()> {
 let extract_path = if create_dir_all(&default_android_home).is_ok() {
 default_android_home.clone()
 } else {
-std::env::current_dir()?
+std::env::current_dir().context("failed to get current directory")?
 };

 let sdk_manager_path = extract_path
@@ -299,7 +305,7 @@ fn ensure_sdk(non_interactive: bool) -> Result<()> {
 .unwrap_or_default();

 if !granted_permission_to_install {
-anyhow::bail!("Skipping Android Studio command line tools installation. Please go through the manual setup process described in the documentation: https://tauri.app/start/prerequisites/#android");
+crate::error::bail!("Skipping Android Studio command line tools installation. Please go through the manual setup process described in the documentation: https://tauri.app/start/prerequisites/#android");
 }

 download_cmdline_tools(&extract_path)?;
@@ -313,7 +319,7 @@ fn ensure_sdk(non_interactive: bool) -> Result<()> {
 .unwrap_or_default();

 if !granted_permission_to_install {
-anyhow::bail!("Skipping Android Studio SDK installation. Please go through the manual setup process described in the documentation: https://tauri.app/start/prerequisites/#android");
+crate::error::bail!("Skipping Android Studio SDK installation. Please go through the manual setup process described in the documentation: https://tauri.app/start/prerequisites/#android");
 }
 }

@@ -324,10 +330,14 @@ fn ensure_sdk(non_interactive: bool) -> Result<()> {
 .arg("platform-tools")
 .arg(format!("platforms;android-{SDK_VERSION}"))
 .arg(format!("ndk;{NDK_VERSION}"))
-.status()?;
+.status()
+.map_err(|error| Error::CommandFailed {
+command: format!("{} --sdk_root={} --install platform-tools platforms;android-{SDK_VERSION} ndk;{NDK_VERSION}", sdk_manager_path.display(), default_android_home.display()),
+error,
+})?;

 if !status.success() {
-anyhow::bail!("Failed to install Android SDK");
+crate::error::bail!("Failed to install Android SDK");
 }
 }

@@ -342,7 +352,7 @@ fn ensure_ndk(non_interactive: bool) -> Result<()> {
 let android_home = std::env::var_os("ANDROID_HOME")
 .map(PathBuf::from)
 .or_else(|| std::env::var_os("ANDROID_SDK_ROOT").map(PathBuf::from))
-.ok_or_else(|| anyhow::anyhow!("Failed to locate Android SDK"))?;
+.context("Failed to locate Android SDK")?;
 let mut installed_ndks = read_dir(android_home.join("ndk"))
 .map(|dir| {
 dir
@@ -357,7 +367,7 @@ fn ensure_ndk(non_interactive: bool) -> Result<()> {
 log::info!("Using installed NDK: {}", ndk.display());
 std::env::set_var("NDK_HOME", ndk);
 } else if non_interactive {
-anyhow::bail!("Android NDK not found. Make sure the NDK is installed and the NDK_HOME environment variable is set.");
+crate::error::bail!("Android NDK not found. Make sure the NDK is installed and the NDK_HOME environment variable is set.");
 } else {
 let sdk_manager_path = android_home
 .join("cmdline-tools/bin/sdkmanager")
@@ -373,7 +383,7 @@ fn ensure_ndk(non_interactive: bool) -> Result<()> {
 .unwrap_or_default();

 if !granted_permission_to_install {
-anyhow::bail!("Skipping Android Studio command line tools installation. Please go through the manual setup process described in the documentation: https://tauri.app/start/prerequisites/#android");
+crate::error::bail!("Skipping Android Studio command line tools installation. Please go through the manual setup process described in the documentation: https://tauri.app/start/prerequisites/#android");
 }

 download_cmdline_tools(&android_home)?;
@@ -387,7 +397,7 @@ fn ensure_ndk(non_interactive: bool) -> Result<()> {
 .unwrap_or_default();

 if !granted_permission_to_install {
-anyhow::bail!("Skipping Android Studio NDK installation. Please go through the manual setup process described in the documentation: https://tauri.app/start/prerequisites/#android");
+crate::error::bail!("Skipping Android Studio NDK installation. Please go through the manual setup process described in the documentation: https://tauri.app/start/prerequisites/#android");
 }
 }

@@ -399,10 +409,18 @@ fn ensure_ndk(non_interactive: bool) -> Result<()> {
 .arg(format!("--sdk_root={}", android_home.display()))
 .arg("--install")
 .arg(format!("ndk;{NDK_VERSION}"))
-.status()?;
+.status()
+.map_err(|error| Error::CommandFailed {
+command: format!(
+"{} --sdk_root={} --install ndk;{NDK_VERSION}",
+sdk_manager_path.display(),
+android_home.display()
+),
+error,
+})?;

 if !status.success() {
-anyhow::bail!("Failed to install Android NDK");
+crate::error::bail!("Failed to install Android NDK");
 }

 let ndk_path = android_home.join("ndk").join(NDK_VERSION);
@@ -422,8 +440,7 @@ fn delete_codegen_vars() {
 }

 fn adb_device_prompt<'a>(env: &'_ Env, target: Option<&str>) -> Result<Device<'a>> {
-let device_list = adb::device_list(env)
-.map_err(|cause| anyhow::anyhow!("Failed to detect connected Android devices: {cause}"))?;
+let device_list = adb::device_list(env).context("failed to detect connected Android devices")?;
 if !device_list.is_empty() {
 let device = if let Some(t) = target {
 let (device, score) = device_list
@@ -439,7 +456,7 @@ fn adb_device_prompt<'a>(env: &'_ Env, target: Option<&str>) -> Result<Device<'a
 if score > MIN_DEVICE_MATCH_SCORE {
 device
 } else {
-anyhow::bail!("Could not find an Android device matching {t}")
+crate::error::bail!("Could not find an Android device matching {t}")
 }
 } else if device_list.len() > 1 {
 let index = prompt::list(
@@ -449,7 +466,7 @@ fn adb_device_prompt<'a>(env: &'_ Env, target: Option<&str>) -> Result<Device<'a
 None,
 "Device",
 )
-.map_err(|cause| anyhow::anyhow!("Failed to prompt for Android device: {cause}"))?;
+.context("failed to prompt for device")?;
 device_list.into_iter().nth(index).unwrap()
 } else {
 device_list.into_iter().next().unwrap()
@@ -462,7 +479,9 @@ fn adb_device_prompt<'a>(env: &'_ Env, target: Option<&str>) -> Result<Device<'a
 );
 Ok(device)
 } else {
-Err(anyhow::anyhow!("No connected Android devices detected"))
+Err(Error::GenericError(
+"No connected Android devices detected".to_string(),
+))
 }
 }

@@ -483,7 +502,7 @@ fn emulator_prompt(env: &'_ Env, target: Option<&str>) -> Result<emulator::Emula
 if score > MIN_DEVICE_MATCH_SCORE {
 device
 } else {
-anyhow::bail!("Could not find an Android Emulator matching {t}")
+crate::error::bail!("Could not find an Android Emulator matching {t}")
 }
 } else if emulator_list.len() > 1 {
 let index = prompt::list(
@@ -493,7 +512,7 @@ fn emulator_prompt(env: &'_ Env, target: Option<&str>) -> Result<emulator::Emula
 None,
 "Emulator",
 )
-.map_err(|cause| anyhow::anyhow!("Failed to prompt for Android Emulator device: {cause}"))?;
+.context("failed to prompt for emulator")?;
 emulator_list.into_iter().nth(index).unwrap()
 } else {
 emulator_list.into_iter().next().unwrap()
@@ -501,7 +520,9 @@ fn emulator_prompt(env: &'_ Env, target: Option<&str>) -> Result<emulator::Emula

 Ok(emulator)
 } else {
-Err(anyhow::anyhow!("No available Android Emulator detected"))
+Err(Error::GenericError(
+"No available Android Emulator detected".to_string(),
+))
 }
 }

@@ -511,7 +532,9 @@ fn device_prompt<'a>(env: &'_ Env, target: Option<&str>) -> Result<Device<'a>> {
 } else {
 let emulator = emulator_prompt(env, target)?;
 log::info!("Starting emulator {}", emulator.name());
-emulator.start_detached(env)?;
+emulator
+.start_detached(env)
+.context("failed to start emulator")?;
 let mut tries = 0;
 loop {
 sleep(Duration::from_secs(2));
@@ -547,11 +570,15 @@ fn inject_resources(config: &AndroidConfig, tauri_config: &TauriConfig) -> Resul
 .project_dir()
 .join("app/src/main")
 .join(DEFAULT_ASSET_DIR);
-create_dir_all(&asset_dir)?;
+create_dir_all(&asset_dir).fs_context("failed to create asset directory", asset_dir.clone())?;

 write(
 asset_dir.join("tauri.conf.json"),
-serde_json::to_string(&tauri_config)?,
+serde_json::to_string(&tauri_config).with_context(|| "failed to serialize tauri config")?,
+)
+.fs_context(
+"failed to write tauri config",
+asset_dir.join("tauri.conf.json"),
 )?;

 let resources = match &tauri_config.bundle.resources {
@@ -561,9 +588,9 @@ fn inject_resources(config: &AndroidConfig, tauri_config: &TauriConfig) -> Resul
 };
 if let Some(resources) = resources {
 for resource in resources.iter() {
-let resource = resource?;
+let resource = resource.context("failed to get resource")?;
 let dest = asset_dir.join(resource.target());
-crate::helpers::fs::copy_file(resource.path(), dest)?;
+crate::helpers::fs::copy_file(resource.path(), dest).context("failed to copy resource")?;
 }
 }

@@ -572,7 +599,9 @@ fn inject_resources(config: &AndroidConfig, tauri_config: &TauriConfig) -> Resul

 fn configure_cargo(env: &mut Env, config: &AndroidConfig) -> Result<()> {
 for target in Target::all().values() {
-let config = target.generate_cargo_config(config, env)?;
+let config = target
+.generate_cargo_config(config, env)
+.context("failed to find Android tool")?;
 let target_var_name = target.triple.replace('-', "_").to_uppercase();
 if let Some(linker) = config.linker {
 env.base.insert_env_var(
@@ -2,8 +2,11 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT

-use crate::{helpers::template, Result};
-use anyhow::Context;
+use crate::{
+error::{Context, ErrorExt},
+helpers::template,
+Error, Result,
+};
 use cargo_mobile2::{
 android::{
 config::{Config, Metadata},
@@ -48,9 +51,10 @@ pub fn gen(
 log::info!("Installing Android Rust targets...");
 for target in missing_targets {
 log::info!("Installing target {}", target.triple());
-target
-.install()
-.context("failed to install target with rustup")?;
+target.install().map_err(|error| Error::CommandFailed {
+command: "rustup target add".to_string(),
+error,
+})?;
 }
 }
 }
@@ -137,34 +141,17 @@ pub fn gen(
 let source_src = config.app().root_dir().join(source);
 let source_file = source_src
 .file_name()
-.ok_or_else(|| anyhow::anyhow!("asset source {} is invalid", source_src.display()))?;
-fs::copy(&source_src, source_dest.join(source_file)).map_err(|cause| {
-anyhow::anyhow!(
-"failed to copy {} to {}: {}",
-source_src.display(),
-source_dest.display(),
-cause
-)
-})?;
+.with_context(|| format!("asset source {} is invalid", source_src.display()))?;
+fs::copy(&source_src, source_dest.join(source_file))
+.fs_context("failed to copy asset", source_src)?;
 }

 let dest = prefix_path(dest, "app/src/main/");
-fs::create_dir_all(&dest).map_err(|cause| {
-anyhow::anyhow!(
-"failed to create directory at {}: {}",
-dest.display(),
-cause
-)
-})?;
+fs::create_dir_all(&dest).fs_context("failed to create directory", dest.clone())?;

 let asset_dir = dest.join(DEFAULT_ASSET_DIR);
 if !asset_dir.is_dir() {
-fs::create_dir_all(&asset_dir).map_err(|cause| {
-anyhow::anyhow!(
-"failed to create asset dir {path}: {cause}",
-path = asset_dir.display()
-)
-})?;
+fs::create_dir_all(&asset_dir).fs_context("failed to create asset dir", asset_dir)?;
 }

 Ok(())
@@ -38,8 +38,7 @@ pub fn command(
 reinstall_deps,
 skip_targets_install,
 config,
-)
-.map_err(|e| anyhow::anyhow!("{:#}", e))?;
+)?;

 Ok(())
 }
@@ -311,7 +310,7 @@ fn escape_kotlin_keyword(
 out.write(&escaped_result).map_err(Into::into)
 }

-fn app_root(ctx: &Context) -> Result<&str, RenderError> {
+fn app_root(ctx: &Context) -> std::result::Result<&str, RenderError> {
 let app_root = ctx
 .data()
 .get("app")
@@ -9,6 +9,7 @@ use super::{
 };
 use crate::{
 build::Options as BuildOptions,
+error::{Context, ErrorExt},
 helpers::{
 app_paths::tauri_dir,
 config::{get as get_tauri_config, ConfigHandle},
@@ -16,11 +17,10 @@ use crate::{
 },
 interface::{AppInterface, Interface, Options as InterfaceOptions},
 mobile::{ios::ensure_ios_runtime_installed, write_options, CliOptions},
-ConfigValue, Result,
+ConfigValue, Error, Result,
 };
 use clap::{ArgAction, Parser, ValueEnum};

-use anyhow::Context;
 use cargo_mobile2::{
 apple::{
 config::Config as AppleConfig,
@@ -126,7 +126,7 @@ impl std::fmt::Display for ExportMethod {
 impl std::str::FromStr for ExportMethod {
 type Err = &'static str;

-fn from_str(s: &str) -> Result<Self, Self::Err> {
+fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
 match s {
 "app-store-connect" => Ok(Self::AppStoreConnect),
 "release-testing" => Ok(Self::ReleaseTesting),
@@ -195,7 +195,7 @@ pub fn command(options: Options, noise_level: NoiseLevel) -> Result<()> {
 };

 let tauri_path = tauri_dir();
-set_current_dir(tauri_path).with_context(|| "failed to change current working directory")?;
+set_current_dir(tauri_path).context("failed to set current directory")?;

 ensure_init(
 &tauri_config,
@@ -221,9 +221,12 @@ pub fn command(options: Options, noise_level: NoiseLevel) -> Result<()> {
 tauri_path.join("Info.ios.plist").into(),
 plist::Value::Dictionary(plist).into(),
 ])?;
-merged_info_plist.to_file_xml(&info_plist_path)?;
+merged_info_plist
+.to_file_xml(&info_plist_path)
+.map_err(std::io::Error::other)
+.fs_context("failed to save merged Info.plist file", info_plist_path)?;

-let mut env = env()?;
+let mut env = env().context("failed to load iOS environment")?;

 if !options.open {
 ensure_ios_runtime_installed()?;
@@ -240,10 +243,10 @@ pub fn command(options: Options, noise_level: NoiseLevel) -> Result<()> {
 let minor = iter.next().context(format!(
 "failed to parse Xcode version `{xcode_version}` as semver"
 ))?;
-let major = major.parse::<u64>().context(format!(
+let major = major.parse::<u64>().ok().context(format!(
 "failed to parse Xcode version `{xcode_version}` as semver: major is not a number"
 ))?;
-let minor = minor.parse::<u64>().context(format!(
+let minor = minor.parse::<u64>().ok().context(format!(
 "failed to parse Xcode version `{xcode_version}` as semver: minor is not a number"
 ))?;
@@ -268,20 +271,29 @@ pub fn command(options: Options, noise_level: NoiseLevel) -> Result<()> {
 options.debug,
 )?;
 if pbxproj.has_changes() {
-pbxproj.save()?;
+pbxproj
+.save()
+.fs_context("failed to save pbxproj file", pbxproj.path)?;
 }

 // merge export options and write to temp file
 let _export_options_tmp = if !export_options_plist.is_empty() {
 let export_options_plist_path = config.project_dir().join("ExportOptions.plist");
-let export_options = tempfile::NamedTempFile::new()?;
+let export_options =
+tempfile::NamedTempFile::new().context("failed to create temporary file")?;

 let merged_plist = merge_plist(vec![
 export_options.path().to_owned().into(),
 export_options_plist_path.clone().into(),
 plist::Value::from(export_options_plist).into(),
 ])?;
-merged_plist.to_file_xml(export_options.path())?;
+merged_plist
+.to_file_xml(export_options.path())
+.map_err(std::io::Error::other)
+.fs_context(
+"failed to save export options plist file",
+export_options.path().to_path_buf(),
+)?;

 config.set_export_options_plist_path(export_options.path());

@@ -373,26 +385,31 @@ fn run_build(
 .skip_codesign();
 }

-target.build(None, config, env, noise_level, profile, build_config)?;
+target
+.build(None, config, env, noise_level, profile, build_config)
+.context("failed to build iOS app")?;

 let mut archive_config = ArchiveConfig::new();
 if skip_signing {
 archive_config = archive_config.skip_codesign();
 }

-target.archive(
-config,
-env,
-noise_level,
-profile,
-Some(app_version),
-archive_config,
-)?;
+target
+.archive(
+config,
+env,
+noise_level,
+profile,
+Some(app_version),
+archive_config,
+)
+.context("failed to archive iOS app")?;

 let out_dir = config.export_dir().join(target.arch);

 if target.sdk == "iphonesimulator" {
-fs::create_dir_all(&out_dir)?;
+fs::create_dir_all(&out_dir)
+.fs_context("failed to create Xcode output directory", out_dir.clone())?;

 let app_path = config
 .archive_dir()
@@ -403,7 +420,7 @@ fn run_build(
 .with_extension("app");

 let path = out_dir.join(app_path.file_name().unwrap());
-fs::rename(&app_path, &path)?;
+fs::rename(&app_path, &path).fs_context("failed to rename app", app_path)?;
 out_files.push(path);
 } else {
 // if we skipped code signing, we do not have the entitlements applied to our exported IPA
@@ -421,12 +438,15 @@ fn run_build(
 validity_days: 365,
 password: password.clone(),
 },
-)?;
-let tmp_dir = tempfile::tempdir()?;
+)
+.map_err(Box::new)?;
+let tmp_dir = tempfile::tempdir().context("failed to create temporary directory")?;
 let cert_path = tmp_dir.path().join("cert.p12");
-std::fs::write(&cert_path, certificate)?;
+std::fs::write(&cert_path, certificate)
+.fs_context("failed to write certificate", cert_path.clone())?;
 let self_signed_cert_keychain =
-tauri_macos_sign::Keychain::with_certificate_file(&cert_path, &password.into())?;
+tauri_macos_sign::Keychain::with_certificate_file(&cert_path, &password.into())
+.map_err(Box::new)?;

 let app_dir = config
 .export_dir()
@@ -434,16 +454,18 @@ fn run_build(
 .join("Products/Applications")
 .join(format!("{}.app", config.app().stylized_name()));

-self_signed_cert_keychain.sign(
-&app_dir.join(config.app().stylized_name()),
-Some(
-&config
-.project_dir()
-.join(config.scheme())
-.join(format!("{}.entitlements", config.scheme())),
-),
-false,
-)?;
+self_signed_cert_keychain
+.sign(
+&app_dir.join(config.app().stylized_name()),
+Some(
+&config
+.project_dir()
+.join(config.scheme())
+.join(format!("{}.entitlements", config.scheme())),
+),
+false,
+)
+.map_err(Box::new)?;
 }

 let mut export_config = ExportConfig::new().allow_provisioning_updates();
@@ -451,12 +473,15 @@ fn run_build(
 export_config = export_config.authentication_credentials(credentials.clone());
 }

-target.export(config, env, noise_level, export_config)?;
+target
+.export(config, env, noise_level, export_config)
+.context("failed to export iOS app")?;

 if let Ok(ipa_path) = config.ipa_path() {
-fs::create_dir_all(&out_dir)?;
+fs::create_dir_all(&out_dir)
+.fs_context("failed to create Xcode output directory", out_dir.clone())?;
 let path = out_dir.join(ipa_path.file_name().unwrap());
-fs::rename(&ipa_path, &path)?;
+fs::rename(&ipa_path, &path).fs_context("failed to rename IPA", ipa_path)?;
 out_files.push(path);
 }
 }
@@ -464,7 +489,7 @@ fn run_build(
 Ok(())
 },
 )
-.map_err(|e: TargetInvalid| anyhow::anyhow!(e.to_string()))??;
+.map_err(|e: TargetInvalid| Error::GenericError(e.to_string()))??;

 log_finished(out_files, "iOS Bundle");

@@ -485,7 +510,7 @@ fn auth_credentials_from_env() -> Result<Option<cargo_mobile2::apple::AuthCreden
 }))
 }
 (Err(_), Err(_), None) => Ok(None),
-_ => anyhow::bail!(
+_ => crate::error::bail!(
 "APPLE_API_KEY, APPLE_API_ISSUER and APPLE_API_KEY_PATH must be provided for code signing"
 ),
 }
@@ -8,6 +8,7 @@ use super::{
 };
 use crate::{
 dev::Options as DevOptions,
+error::{Context, ErrorExt},
 helpers::{
 app_paths::tauri_dir,
 config::{get as get_tauri_config, ConfigHandle},
@@ -22,7 +23,6 @@ use crate::{
 };
 use clap::{ArgAction, Parser};

-use anyhow::Context;
 use cargo_mobile2::{
 apple::{
 config::Config as AppleConfig,
@@ -150,11 +150,14 @@ fn run_command(options: Options, noise_level: NoiseLevel) -> Result<()> {
 if let Some(root_certificate_path) = &options.root_certificate_path {
 std::env::set_var(
 "TAURI_DEV_ROOT_CERTIFICATE",
-std::fs::read_to_string(root_certificate_path).context("failed to read certificate file")?,
+std::fs::read_to_string(root_certificate_path).fs_context(
+"failed to read root certificate file",
+root_certificate_path.clone(),
+)?,
 );
 }

-let env = env()?;
+let env = env().context("failed to load iOS environment")?;
 let device = if options.open {
 None
 } else {
@@ -200,7 +203,7 @@ fn run_command(options: Options, noise_level: NoiseLevel) -> Result<()> {
 };

 let tauri_path = tauri_dir();
-set_current_dir(tauri_path).with_context(|| "failed to change current working directory")?;
+set_current_dir(tauri_path).context("failed to set current directory to Tauri directory")?;

 ensure_init(
 &tauri_config,
@@ -219,7 +222,10 @@ fn run_command(options: Options, noise_level: NoiseLevel) -> Result<()> {
 tauri_path.join("Info.plist").into(),
 tauri_path.join("Info.ios.plist").into(),
 ])?;
-merged_info_plist.to_file_xml(&info_plist_path)?;
+merged_info_plist
+.to_file_xml(&info_plist_path)
+.map_err(std::io::Error::other)
+.fs_context("failed to save merged Info.plist file", info_plist_path)?;

 let mut pbxproj = load_pbxproj(&config)?;

@@ -237,7 +243,9 @@ fn run_command(options: Options, noise_level: NoiseLevel) -> Result<()> {
 !options.release_mode,
 )?;
 if pbxproj.has_changes() {
-pbxproj.save()?;
+pbxproj
+.save()
+.fs_context("failed to save pbxproj file", pbxproj.path)?;
 }

 run_dev(
@@ -325,7 +333,7 @@ fn run_dev(
 }
 Err(e) => {
 crate::dev::kill_before_dev_process();
-Err(e.into())
+crate::error::bail!("failed to run iOS app: {}", e)
 }
 }
 } else {
@@ -28,12 +28,13 @@ use super::{
 OptionsHandle, Target as MobileTarget, MIN_DEVICE_MATCH_SCORE,
 };
 use crate::{
+error::{Context, ErrorExt},
 helpers::{
 app_paths::tauri_dir,
 config::{BundleResources, Config as TauriConfig, ConfigHandle},
 pbxproj, strip_semver_prerelease_tag,
 },
-ConfigValue, Result,
+ConfigValue, Error, Result,
 };

 use std::{
@@ -223,7 +224,7 @@ pub fn get_config(
 }
 1 => None,
 _ => {
 log::warn!("You must set the code signing certificate development team ID on the `bundle > iOS > developmentTeam` config value or the `{APPLE_DEVELOPMENT_TEAM_ENV_VAR_NAME}` environment variable. Available certificates: {}", teams.iter().map(|t| format!("{} (ID: {})", t.name, t.id)).collect::<Vec<String>>().join(", "));
 None
 }
 }
@@ -234,7 +235,8 @@ pub fn get_config(
 ios_version: Some(tauri_config.bundle.ios.minimum_system_version.clone()),
 ..Default::default()
 };
-let config = AppleConfig::from_raw(app.clone(), Some(raw))?;
+let config = AppleConfig::from_raw(app.clone(), Some(raw))
+.context("failed to create Apple configuration")?;

 let tauri_dir = tauri_dir();

@@ -287,8 +289,9 @@ pub fn get_config(
 }

 fn connected_device_prompt<'a>(env: &'_ Env, target: Option<&str>) -> Result<Device<'a>> {
-let device_list = device::list_devices(env)
-.map_err(|cause| anyhow::anyhow!("Failed to detect connected iOS devices: {cause}"))?;
+let device_list = device::list_devices(env).map_err(|cause| {
+Error::GenericError(format!("Failed to detect connected iOS devices: {cause}"))
+})?;
 if !device_list.is_empty() {
 let device = if let Some(t) = target {
 let (device, score) = device_list
@@ -304,7 +307,7 @@ fn connected_device_prompt<'a>(env: &'_ Env, target: Option<&str>) -> Result<Dev
 if score > MIN_DEVICE_MATCH_SCORE {
 device
 } else {
-anyhow::bail!("Could not find an iOS device matching {t}")
+crate::error::bail!("Could not find an iOS device matching {t}")
 }
 } else {
 let index = if device_list.len() > 1 {
@@ -315,7 +318,7 @@ fn connected_device_prompt<'a>(env: &'_ Env, target: Option<&str>) -> Result<Dev
 None,
 "Device",
 )
-.map_err(|cause| anyhow::anyhow!("Failed to prompt for iOS device: {cause}"))?
+.context("failed to prompt for device")?
 } else {
 0
 };
@@ -329,7 +332,7 @@ fn connected_device_prompt<'a>(env: &'_ Env, target: Option<&str>) -> Result<Dev

 Ok(device)
 } else {
-Err(anyhow::anyhow!("No connected iOS devices detected"))
+crate::error::bail!("No connected iOS devices detected")
 }
 }

@@ -345,7 +348,9 @@ struct InstalledRuntime {

 fn simulator_prompt(env: &'_ Env, target: Option<&str>) -> Result<device::Simulator> {
 let simulator_list = device::list_simulators(env).map_err(|cause| {
-anyhow::anyhow!("Failed to detect connected iOS Simulator devices: {cause}")
+Error::GenericError(format!(
+"Failed to detect connected iOS Simulator devices: {cause}"
+))
 })?;
 if !simulator_list.is_empty() {
 let device = if let Some(t) = target {
@@ -362,7 +367,7 @@ fn simulator_prompt(env: &'_ Env, target: Option<&str>) -> Result<device::Simula
 if score > MIN_DEVICE_MATCH_SCORE {
 device
 } else {
-anyhow::bail!("Could not find an iOS Simulator matching {t}")
+crate::error::bail!("Could not find an iOS Simulator matching {t}")
 }
 } else if simulator_list.len() > 1 {
 let index = prompt::list(
@@ -372,7 +377,7 @@ fn simulator_prompt(env: &'_ Env, target: Option<&str>) -> Result<device::Simula
 None,
 "Simulator",
 )
-.map_err(|cause| anyhow::anyhow!("Failed to prompt for iOS Simulator device: {cause}"))?;
+.context("failed to prompt for simulator")?;
 simulator_list.into_iter().nth(index).unwrap()
 } else {
 simulator_list.into_iter().next().unwrap()
@@ -389,10 +394,14 @@ fn simulator_prompt(env: &'_ Env, target: Option<&str>) -> Result<device::Simula
 duct::cmd("xcodebuild", ["-downloadPlatform", "iOS"])
 .stdout_file(os_pipe::dup_stdout().unwrap())
 .stderr_file(os_pipe::dup_stderr().unwrap())
-.run()?;
+.run()
+.map_err(|e| Error::CommandFailed {
+command: "xcodebuild -downloadPlatform iOS".to_string(),
+error: e,
+})?;
 return simulator_prompt(env, target);
 }
-Err(anyhow::anyhow!("No available iOS Simulator detected"))
+crate::error::bail!("No available iOS Simulator detected")
 }
 }

@@ -402,14 +411,20 @@ fn device_prompt<'a>(env: &'_ Env, target: Option<&str>) -> Result<Device<'a>> {
 } else {
 let simulator = simulator_prompt(env, target)?;
 log::info!("Starting simulator {}", simulator.name());
-simulator.start_detached(env)?;
+simulator
+.start_detached(env)
+.context("failed to start simulator")?;
 Ok(simulator.into())
 }
 }

 fn ensure_ios_runtime_installed() -> Result<()> {
-let installed_platforms_json =
-duct::cmd("xcrun", ["simctl", "list", "runtimes", "--json"]).read()?;
+let installed_platforms_json = duct::cmd("xcrun", ["simctl", "list", "runtimes", "--json"])
+.read()
+.map_err(|e| Error::CommandFailed {
+command: "xcrun simctl list runtimes --json".to_string(),
+error: e,
+})?;
 let installed_platforms: InstalledRuntimesList =
 serde_json::from_str(&installed_platforms_json).unwrap_or_default();
 if !installed_platforms
@@ -427,9 +442,13 @@ fn ensure_ios_runtime_installed() -> Result<()> {
 duct::cmd("xcodebuild", ["-downloadPlatform", "iOS"])
 .stdout_file(os_pipe::dup_stdout().unwrap())
 .stderr_file(os_pipe::dup_stderr().unwrap())
-.run()?;
+.run()
+.map_err(|e| Error::CommandFailed {
+command: "xcodebuild -downloadPlatform iOS".to_string(),
+error: e,
+})?;
 } else {
-anyhow::bail!("iOS platform not installed");
+crate::error::bail!("iOS platform not installed");
 }
 }
 Ok(())
@@ -451,7 +470,7 @@ fn open_and_wait(config: &AppleConfig, env: &Env) -> ! {

 fn inject_resources(config: &AppleConfig, tauri_config: &TauriConfig) -> Result<()> {
 let asset_dir = config.project_dir().join(DEFAULT_ASSET_DIR);
-create_dir_all(&asset_dir)?;
+create_dir_all(&asset_dir).fs_context("failed to create asset directory", asset_dir.clone())?;

 let resources = match &tauri_config.bundle.resources {
 Some(BundleResources::List(paths)) => Some(ResourcePaths::new(paths.as_slice(), true)),
@@ -460,7 +479,7 @@ fn inject_resources(config: &AppleConfig, tauri_config: &TauriConfig) -> Result<
 };
 if let Some(resources) = resources {
 for resource in resources.iter() {
-let resource = resource?;
+let resource = resource.context("failed to get resource")?;
 let dest = asset_dir.join(resource.target());
 crate::helpers::fs::copy_file(resource.path(), dest)?;
 }
@@ -490,7 +509,7 @@ fn merge_plist(src: Vec<PlistKind>) -> Result<plist::Value> {

 for plist_kind in src {
 let plist = match plist_kind {
-PlistKind::Path(p) => plist::Value::from_file(p),
+PlistKind::Path(p) => plist::Value::from_file(p).context("failed to read plist file"),
 PlistKind::Plist(v) => Ok(v),
 };
 if let Ok(src_plist) = plist {
@@ -515,7 +534,9 @@ pub fn signing_from_env() -> Result<(
 ) {
 (Some(certificate), Some(certificate_password)) => {
 log::info!("Reading iOS certificates from ");
-tauri_macos_sign::Keychain::with_certificate(&certificate, &certificate_password).map(Some)?
+tauri_macos_sign::Keychain::with_certificate(&certificate, &certificate_password)
+.map(Some)
+.map_err(Box::new)?
 }
 (Some(_), None) => {
 log::warn!("The IOS_CERTIFICATE environment variable is set but not IOS_CERTIFICATE_PASSWORD. Ignoring the certificate...");
@@ -525,7 +546,9 @@ pub fn signing_from_env() -> Result<(
 };

 let provisioning_profile = if let Some(provisioning_profile) = var_os("IOS_MOBILE_PROVISION") {
-tauri_macos_sign::ProvisioningProfile::from_base64(&provisioning_profile).map(Some)?
+tauri_macos_sign::ProvisioningProfile::from_base64(&provisioning_profile)
+.map(Some)
+.map_err(Box::new)?
 } else {
 if keychain.is_some() {
 log::warn!("You have provided an iOS certificate via environment variables but the IOS_MOBILE_PROVISION environment variable is not set. This will fail when signing unless the profile is set in your Xcode project.");
@@ -3,15 +3,15 @@
 // SPDX-License-Identifier: MIT

 use crate::{
+  error::Context,
   helpers::{config::Config as TauriConfig, template},
   mobile::ios::LIB_OUTPUT_FILE_NAME,
-  Result,
+  Error, ErrorExt, Result,
 };
-use anyhow::Context;
 use cargo_mobile2::{
   apple::{
     config::{Config, Metadata},
-    deps, rust_version_check,
+    deps,
     target::Target,
   },
   config::app::DEFAULT_ASSET_DIR,
@@ -53,17 +53,20 @@ pub fn gen(
       log::info!("Installing iOS Rust targets...");
       for target in missing_targets {
         log::info!("Installing target {}", target.triple());
-        target
-          .install()
-          .context("failed to install target with rustup")?;
+        target.install().map_err(|error| Error::CommandFailed {
+          command: "rustup target add".to_string(),
+          error,
+        })?;
       }
     }
   }

-  rust_version_check(wrapper)?;
-
-  deps::install_all(wrapper, non_interactive, true, reinstall_deps)
-    .with_context(|| "failed to install Apple dependencies")?;
+  deps::install_all(wrapper, non_interactive, true, reinstall_deps).map_err(|error| {
+    Error::CommandFailed {
+      command: "pod install".to_string(),
+      error: std::io::Error::other(error),
+    }
+  })?;

   let dest = config.project_dir();
   let rel_prefix = util::relativize_path(config.app().root_dir(), &dest);
@@ -174,9 +177,14 @@ pub fn gen(
     .with_context(|| "failed to process template")?;

   if let Some(template_path) = tauri_config.bundle.ios.template.as_ref() {
-    let template = std::fs::read_to_string(template_path)
-      .context("failed to read custom Xcode project template")?;
-    let mut output_file = std::fs::File::create(dest.join("project.yml"))?;
+    let template = std::fs::read_to_string(template_path).fs_context(
+      "failed to read custom Xcode project template",
+      template_path.to_path_buf(),
+    )?;
+    let mut output_file = std::fs::File::create(dest.join("project.yml")).fs_context(
+      "failed to create project.yml file",
+      dest.join("project.yml"),
+    )?;
     handlebars
       .render_template_to_write(&template, map.inner(), &mut output_file)
       .expect("Failed to render template");
@@ -189,12 +197,7 @@ pub fn gen(

   // Create all required project directories if they don't already exist
   for dir in &dirs_to_create {
-    std::fs::create_dir_all(dir).map_err(|cause| {
-      anyhow::anyhow!(
-        "failed to create directory at {path}: {cause}",
-        path = dir.display()
-      )
-    })?;
+    std::fs::create_dir_all(dir).fs_context("failed to create directory", dir.to_path_buf())?;
   }

   // Note that Xcode doesn't always reload the project nicely; reopening is
@@ -211,7 +214,10 @@ pub fn gen(
     .stdout_file(os_pipe::dup_stdout().unwrap())
     .stderr_file(os_pipe::dup_stderr().unwrap())
     .run()
-    .with_context(|| "failed to run `xcodegen`")?;
+    .map_err(|error| Error::CommandFailed {
+      command: "xcodegen".to_string(),
+      error,
+    })?;

   if !ios_pods.is_empty() || !macos_pods.is_empty() {
     duct::cmd(
@@ -224,7 +230,10 @@ pub fn gen(
     .stdout_file(os_pipe::dup_stdout().unwrap())
     .stderr_file(os_pipe::dup_stderr().unwrap())
     .run()
-    .with_context(|| "failed to run `pod install`")?;
+    .map_err(|error| Error::CommandFailed {
+      command: "pod install".to_string(),
+      error,
+    })?;
   }
   Ok(())
 }
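Several of the hunks above route std::io errors through an `fs_context` helper that attaches a static message and the offending path. A minimal sketch of what such an extension can look like, assuming an `Error::Fs` variant shaped like the one added to tauri-macos-sign later in this diff (the CLI's actual `ErrorExt` implementation may differ):

use std::path::PathBuf;

#[derive(Debug, thiserror::Error)]
pub enum Error {
  #[error("{context} {}: {error}", .path.display())]
  Fs {
    context: &'static str,
    path: PathBuf,
    error: std::io::Error,
  },
}

pub trait ErrorExt<T> {
  fn fs_context(self, context: &'static str, path: PathBuf) -> Result<T, Error>;
}

impl<T> ErrorExt<T> for std::io::Result<T> {
  fn fs_context(self, context: &'static str, path: PathBuf) -> Result<T, Error> {
    // Wrap the io::Error together with the message and the path so the final
    // report reads e.g. "failed to read gradlew ./android/gradlew: ...".
    self.map_err(|error| Error::Fs { context, path, error })
  }
}

// Usage mirrors the diff:
// std::fs::create_dir_all(dir).fs_context("failed to create directory", dir.to_path_buf())?;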
@@ -4,13 +4,13 @@

 use super::{ensure_init, env, get_app, get_config, read_options, MobileTarget};
 use crate::{
+  error::{Context, ErrorExt},
   helpers::config::{get as get_tauri_config, reload as reload_tauri_config},
   interface::{AppInterface, Interface, Options as InterfaceOptions},
   mobile::ios::LIB_OUTPUT_FILE_NAME,
-  Result,
+  Error, Result,
 };

-use anyhow::Context;
 use cargo_mobile2::{apple::target::Target, opts::Profile, target::TargetTrait};
 use clap::{ArgAction, Parser};
 use object::{Object, ObjectSymbol};
@@ -78,7 +78,15 @@ pub fn command(options: Options) -> Result<()> {
     || var("npm_config_user_agent")
       .is_ok_and(|agent| agent.starts_with("bun/1.0") || agent.starts_with("bun/1.1"))
   {
-    set_current_dir(current_dir()?.parent().unwrap().parent().unwrap()).unwrap();
+    set_current_dir(
+      current_dir()
+        .context("failed to resolve current directory")?
+        .parent()
+        .unwrap()
+        .parent()
+        .unwrap(),
+    )
+    .unwrap();
   }

   crate::helpers::app_paths::resolve();
@@ -142,20 +150,22 @@ pub fn command(options: Options) -> Result<()> {
     )?;
   }

-  let env = env()?.explicit_env_vars(cli_options.vars);
+  let env = env()
+    .context("failed to load iOS environment")?
+    .explicit_env_vars(cli_options.vars);

   if !options.sdk_root.is_dir() {
-    return Err(anyhow::anyhow!(
+    crate::error::bail!(
       "SDK root provided by Xcode was invalid. {} doesn't exist or isn't a directory",
       options.sdk_root.display(),
-    ));
+    );
   }
   let include_dir = options.sdk_root.join("usr/include");
   if !include_dir.is_dir() {
-    return Err(anyhow::anyhow!(
+    crate::error::bail!(
       "Include dir was invalid. {} doesn't exist or isn't a directory",
       include_dir.display()
-    ));
+    );
   }

   // Host flags that are used by build scripts
@@ -164,10 +174,7 @@ pub fn command(options: Options) -> Result<()> {
       .sdk_root
       .join("../../../../MacOSX.platform/Developer/SDKs/MacOSX.sdk");
     if !macos_sdk_root.is_dir() {
-      return Err(anyhow::anyhow!(
-        "Invalid SDK root {}",
-        macos_sdk_root.display()
-      ));
+      crate::error::bail!("Invalid SDK root {}", macos_sdk_root.display());
     }
     format!("-isysroot {}", macos_sdk_root.display())
   };
@@ -224,10 +231,7 @@ pub fn command(options: Options) -> Result<()> {
     "arm64" if simulator => ("aarch64_apple_ios_sim", "aarch64-apple-ios-sim"),
     "x86_64" => ("x86_64_apple_ios", "x86_64-apple-ios"),
     _ => {
-      return Err(anyhow::anyhow!(
-        "Arch specified by Xcode was invalid. {} isn't a known arch",
-        arch
-      ))
+      crate::error::bail!("Arch specified by Xcode was invalid. {arch} isn't a known arch")
     }
   };

@@ -252,30 +256,28 @@ pub fn command(options: Options) -> Result<()> {
     } else {
       &arch
     })
-    .ok_or_else(|| {
-      anyhow::anyhow!(
-        "Arch specified by Xcode was invalid. {} isn't a known arch",
-        arch
-      )
-    })?
+    .with_context(|| format!("Arch specified by Xcode was invalid. {arch} isn't a known arch"))?
   };

   if !installed_targets.contains(&rust_triple.into()) {
     log::info!("Installing target {}", target.triple());
-    target
-      .install()
-      .context("failed to install target with rustup")?;
+    target.install().map_err(|error| Error::CommandFailed {
+      command: "rustup target add".to_string(),
+      error,
+    })?;
   }

-  target.compile_lib(
-    &config,
-    &metadata,
-    cli_options.noise_level,
-    true,
-    profile,
-    &env,
-    target_env,
-  )?;
+  target
+    .compile_lib(
+      &config,
+      &metadata,
+      cli_options.noise_level,
+      true,
+      profile,
+      &env,
+      target_env,
+    )
+    .context("failed to compile iOS app")?;

   let out_dir = interface.app_settings().out_dir(&InterfaceOptions {
     debug: matches!(profile, Profile::Debug),
@@ -285,23 +287,25 @@ pub fn command(options: Options) -> Result<()> {

   let lib_path = out_dir.join(format!("lib{}.a", config.app().lib_name()));
   if !lib_path.exists() {
-    return Err(anyhow::anyhow!("Library not found at {}. Make sure your Cargo.toml file has a [lib] block with `crate-type = [\"staticlib\", \"cdylib\", \"lib\"]`", lib_path.display()));
+    crate::error::bail!("Library not found at {}. Make sure your Cargo.toml file has a [lib] block with `crate-type = [\"staticlib\", \"cdylib\", \"lib\"]`", lib_path.display());
   }

   validate_lib(&lib_path)?;

   let project_dir = config.project_dir();
   let externals_lib_dir = project_dir.join(format!("Externals/{arch}/{}", profile.as_str()));
-  std::fs::create_dir_all(&externals_lib_dir)?;
+  std::fs::create_dir_all(&externals_lib_dir).fs_context(
+    "failed to create externals lib directory",
+    externals_lib_dir.clone(),
+  )?;

   // backwards compatible lib output file name
   let uses_new_lib_output_file_name = {
-    let pbxproj_contents = read_to_string(
-      project_dir
-        .join(format!("{}.xcodeproj", config.app().name()))
-        .join("project.pbxproj"),
-    )
-    .context("missing project.pbxproj file in the Xcode project")?;
+    let pbxproj_path = project_dir
+      .join(format!("{}.xcodeproj", config.app().name()))
+      .join("project.pbxproj");
+    let pbxproj_contents = read_to_string(&pbxproj_path)
+      .fs_context("failed to read project.pbxproj file", pbxproj_path)?;

     pbxproj_contents.contains(LIB_OUTPUT_FILE_NAME)
   };
@@ -312,22 +316,31 @@ pub fn command(options: Options) -> Result<()> {
       format!("lib{}.a", config.app().lib_name())
     };

-    std::fs::copy(lib_path, externals_lib_dir.join(lib_output_file_name))?;
+    std::fs::copy(&lib_path, externals_lib_dir.join(lib_output_file_name)).fs_context(
+      "failed to copy mobile lib file to Externals directory",
+      lib_path.to_path_buf(),
+    )?;
   }
   Ok(())
 }

 fn validate_lib(path: &Path) -> Result<()> {
-  let mut archive = ar::Archive::new(std::fs::File::open(path)?);
+  let mut archive = ar::Archive::new(
+    std::fs::File::open(path).fs_context("failed to open mobile lib file", path.to_path_buf())?,
+  );
   // Iterate over all entries in the archive:
   while let Some(entry) = archive.next_entry() {
     let Ok(mut entry) = entry else {
       continue;
     };
     let mut obj_bytes = Vec::new();
-    entry.read_to_end(&mut obj_bytes)?;
+    entry
+      .read_to_end(&mut obj_bytes)
+      .fs_context("failed to read mobile lib entry", path.to_path_buf())?;

-    let file = object::File::parse(&*obj_bytes)?;
+    let file = object::File::parse(&*obj_bytes)
+      .map_err(std::io::Error::other)
+      .fs_context("failed to parse mobile lib entry", path.to_path_buf())?;
     for symbol in file.symbols() {
       let Ok(name) = symbol.name() else {
         continue;
@@ -338,7 +351,7 @@ fn validate_lib(path: &Path) -> Result<()> {
     }
   }

-  anyhow::bail!(
+  crate::error::bail!(
     "Library from {} does not include required runtime symbols. This means you are likely missing the tauri::mobile_entry_point macro usage, see the documentation for more information: https://v2.tauri.app/start/migrate/from-tauri-1",
     path.display()
   )
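The repeated `map_err(|error| Error::CommandFailed { .. })` pattern above replaces anyhow's free-form context with a structured variant that records which external command failed. A self-contained sketch of the pattern, assuming a CLI-side variant with the same fields as the one added to tauri-macos-sign later in this diff:

#[derive(Debug, thiserror::Error)]
pub enum Error {
  #[error("failed to run command {command}: {error}")]
  CommandFailed {
    command: String,
    error: std::io::Error,
  },
}

fn run_xcodegen() -> Result<(), Error> {
  // Spawning can fail before the command even runs (e.g. binary not found);
  // that io::Error is attached to the command name for the final report.
  std::process::Command::new("xcodegen")
    .status()
    .map_err(|error| Error::CommandFailed {
      command: "xcodegen".to_string(),
      error,
    })?;
  Ok(())
}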
@@ -3,15 +3,14 @@
 // SPDX-License-Identifier: MIT

 use crate::{
+  error::{Context, ErrorExt},
   helpers::{
     app_paths::tauri_dir,
     config::{reload as reload_config, Config as TauriConfig, ConfigHandle, ConfigMetadata},
   },
   interface::{AppInterface, AppSettings, DevProcess, Interface, Options as InterfaceOptions},
-  ConfigValue,
+  ConfigValue, Error, Result,
 };
-use anyhow::Context;
-use anyhow::{bail, Result};
 use heck::ToSnekCase;
 use jsonrpsee::core::client::{Client, ClientBuilder, ClientT};
 use jsonrpsee::server::{RpcModule, ServerBuilder, ServerHandle};
@@ -284,12 +283,14 @@ fn use_network_address_for_dev_url(
       "If your frontend is not listening on that address, try configuring your development server to use the `TAURI_DEV_HOST` environment variable or 0.0.0.0 as host"
     );

-    *url = url::Url::parse(&format!(
+    let url_str = format!(
       "{}://{}{}",
       url.scheme(),
       SocketAddr::new(ip, url.port_or_known_default().unwrap()),
       url.path()
-    ))?;
+    );
+    *url =
+      url::Url::parse(&url_str).with_context(|| format!("failed to parse URL: {url_str}"))?;

     dev_options
       .config
@@ -357,7 +358,7 @@ fn env_vars() -> HashMap<String, OsString> {
   vars
 }

-fn env() -> Result<Env, EnvError> {
+fn env() -> std::result::Result<Env, EnvError> {
   let env = Env::new()?.explicit_env_vars(env_vars());
   Ok(env)
 }
@@ -372,12 +373,17 @@ pub fn write_options(
   options.vars.extend(env_vars());

   let runtime = Runtime::new().unwrap();
-  let r: anyhow::Result<(ServerHandle, SocketAddr)> = runtime.block_on(async move {
-    let server = ServerBuilder::default().build("127.0.0.1:0").await?;
-    let addr = server.local_addr()?;
+  let r: crate::Result<(ServerHandle, SocketAddr)> = runtime.block_on(async move {
+    let server = ServerBuilder::default()
+      .build("127.0.0.1:0")
+      .await
+      .context("failed to build WebSocket server")?;
+    let addr = server.local_addr().context("failed to get local address")?;

     let mut module = RpcModule::new(());
-    module.register_method("options", move |_, _, _| Some(options.clone()))?;
+    module
+      .register_method("options", move |_, _, _| Some(options.clone()))
+      .context("failed to register options method")?;

     let handle = server.start(module);

@@ -385,15 +391,15 @@ pub fn write_options(
   });
   let (handle, addr) = r?;

-  write(
-    temp_dir().join(format!(
-      "{}-server-addr",
-      config
-        .original_identifier()
-        .context("app configuration is missing an identifier")?
-    )),
-    addr.to_string(),
-  )?;
+  let server_addr_path = temp_dir().join(format!(
+    "{}-server-addr",
+    config
+      .original_identifier()
+      .context("app configuration is missing an identifier")?
+  ));
+  write(&server_addr_path, addr.to_string())
+    .fs_context("failed to write server address file", server_addr_path)?;

   Ok(OptionsHandle(runtime, handle))
 }
@@ -420,10 +426,14 @@ fn read_options(config: &ConfigMetadata) -> CliOptions {
         .parse()
         .unwrap(),
     )
-    .await?;
+    .await
+    .context("failed to build WebSocket client")?;
     let client: Client = ClientBuilder::default().build_with_tokio(tx, rx);
-    let options: CliOptions = client.request("options", rpc_params![]).await?;
-    Ok::<CliOptions, anyhow::Error>(options)
+    let options: CliOptions = client
+      .request("options", rpc_params![])
+      .await
+      .context("failed to request options")?;
+    Ok::<CliOptions, Error>(options)
   })
   .expect("failed to read CLI options");

@@ -485,7 +495,7 @@ fn ensure_init(
   target: Target,
 ) -> Result<()> {
   if !project_dir.exists() {
-    bail!(
+    crate::error::bail!(
       "{} project directory {} doesn't exist. Please run `tauri {} init` and try again.",
       target.ide_name(),
       project_dir.display(),
@@ -518,7 +528,12 @@ fn ensure_init(
       .join(format!("{}.xcodeproj", app.name()))
       .join("project.pbxproj"),
   )
-  .context("missing project.yml file in the Xcode project directory")?;
+  .fs_context(
+    "missing project.pbxproj file in the Xcode project directory",
+    project_dir
+      .join(format!("{}.xcodeproj", app.name()))
+      .join("project.pbxproj"),
+  )?;

   if !(pbxproj_contents.contains(ios::LIB_OUTPUT_FILE_NAME)
     || pbxproj_contents.contains(&format!("lib{}.a", app.lib_name())))
@@ -531,7 +546,7 @@ fn ensure_init(

   if !project_outdated_reasons.is_empty() {
     let reason = project_outdated_reasons.join(" and ");
-    bail!(
+    crate::error::bail!(
       "{} project directory is outdated because {reason}. Please run `tauri {} init` and try again.",
       target.ide_name(),
       target.command_name(),
@@ -552,15 +567,15 @@ fn ensure_gradlew(project_dir: &std::path::Path) -> Result<()> {
     if !is_executable {
       permissions.set_mode(permissions.mode() | 0o111);
       std::fs::set_permissions(&gradlew_path, permissions)
-        .context("failed to mark gradlew as executable")?;
+        .fs_context("failed to mark gradlew as executable", gradlew_path.clone())?;
     }
     std::fs::write(
       &gradlew_path,
       std::fs::read_to_string(&gradlew_path)
-        .context("failed to read gradlew")?
+        .fs_context("failed to read gradlew", gradlew_path.clone())?
        .replace("\r\n", "\n"),
     )
-    .context("failed to replace gradlew CRLF with LF")?;
+    .fs_context("failed to replace gradlew CRLF with LF", gradlew_path)?;
   }

   Ok(())
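Alongside `ErrorExt`, the refactor imports a crate-local `Context` trait that mirrors anyhow's `context`/`with_context` but produces the CLI's own error type. A rough, purely illustrative sketch of such a trait, under the simplifying assumption that the error carries a message plus the stringified cause (the actual definition in the CLI's `error` module is not shown in this diff and presumably preserves the source error rather than flattening it):

use std::fmt::Display;

#[derive(Debug, thiserror::Error)]
#[error("{message}: {cause}")]
pub struct Error {
  message: String,
  cause: String,
}

pub trait Context<T> {
  fn context<C: Display>(self, context: C) -> Result<T, Error>;
  fn with_context<C: Display, F: FnOnce() -> C>(self, f: F) -> Result<T, Error>;
}

impl<T, E: Display> Context<T> for std::result::Result<T, E> {
  fn context<C: Display>(self, context: C) -> Result<T, Error> {
    // Eagerly attach a message describing what was being attempted.
    self.map_err(|error| Error {
      message: context.to_string(),
      cause: error.to_string(),
    })
  }

  fn with_context<C: Display, F: FnOnce() -> C>(self, f: F) -> Result<T, Error> {
    // Lazily build the message only on the error path.
    self.map_err(|error| Error {
      message: f().to_string(),
      cause: error.to_string(),
    })
  }
}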
@@ -3,6 +3,7 @@
 // SPDX-License-Identifier: MIT

 use crate::{
+  error::Context,
   helpers::{prompts, template},
   Result,
 };
@@ -50,13 +51,15 @@ pub fn command(cli: Cli) -> Result<()> {
   match cli.command {
     Commands::Init(options) => {
       let plugin_name = match options.plugin_name {
-        None => super::infer_plugin_name(std::env::current_dir()?)?,
+        None => super::infer_plugin_name(
+          std::env::current_dir().context("failed to get current directory")?,
+        )?,
         Some(name) => name,
       };

       let out_dir = PathBuf::from(options.out_dir);
       if out_dir.join("android").exists() {
-        return Err(anyhow::anyhow!("android folder already exists"));
+        crate::error::bail!("Android folder already exists");
       }

       let plugin_id = prompts::input(
@@ -3,13 +3,12 @@
 // SPDX-License-Identifier: MIT

 use super::PluginIosFramework;
-use crate::helpers::prompts;
 use crate::Result;
 use crate::{
-  helpers::{resolve_tauri_path, template},
+  error::{Context, ErrorExt},
+  helpers::{prompts, resolve_tauri_path, template},
   VersionMetadata,
 };
-use anyhow::Context;
 use clap::Parser;
 use handlebars::{to_json, Handlebars};
 use heck::{ToKebabCase, ToPascalCase, ToSnakeCase};
@@ -90,7 +89,14 @@ pub fn command(mut options: Options) -> Result<()> {

   let template_target_path = PathBuf::from(options.directory);
   let metadata = crates_metadata()?;
-  if std::fs::read_dir(&template_target_path)?.count() > 0 {
+  if std::fs::read_dir(&template_target_path)
+    .fs_context(
+      "failed to read target directory",
+      template_target_path.clone(),
+    )?
+    .count()
+    > 0
+  {
     log::warn!("Plugin dir ({:?}) not empty.", template_target_path);
   } else {
     let (tauri_dep, tauri_example_dep, tauri_build_dep, tauri_plugin_dep) =
@@ -247,15 +253,19 @@ pub fn command(mut options: Options) -> Result<()> {
   }

   let permissions_dir = template_target_path.join("permissions");
-  std::fs::create_dir(&permissions_dir)
-    .with_context(|| "failed to create `permissions` directory")?;
+  std::fs::create_dir(&permissions_dir).fs_context(
+    "failed to create `permissions` directory",
+    permissions_dir.clone(),
+  )?;

   let default_permissions = r#"[default]
 description = "Default permissions for the plugin"
 permissions = ["allow-ping"]
 "#;
-  std::fs::write(permissions_dir.join("default.toml"), default_permissions)
-    .with_context(|| "failed to write `permissions/default.toml`")?;
+  std::fs::write(permissions_dir.join("default.toml"), default_permissions).fs_context(
+    "failed to write default permissions file",
+    permissions_dir.join("default.toml"),
+  )?;

   Ok(())
 }
@@ -275,7 +285,7 @@ pub fn plugin_name_data(data: &mut BTreeMap<&'static str, serde_json::Value>, pl

 pub fn crates_metadata() -> Result<VersionMetadata> {
   serde_json::from_str::<VersionMetadata>(include_str!("../../metadata-v2.json"))
-    .map_err(Into::into)
+    .context("failed to parse Tauri version metadata")
 }

 pub fn generate_android_out_file(
@@ -3,7 +3,7 @@
 // SPDX-License-Identifier: MIT

 use super::PluginIosFramework;
-use crate::{helpers::template, Result};
+use crate::{error::Context, helpers::template, Result};
 use clap::{Parser, Subcommand};
 use handlebars::Handlebars;

@@ -53,13 +53,15 @@ pub fn command(cli: Cli) -> Result<()> {
   match cli.command {
     Commands::Init(options) => {
       let plugin_name = match options.plugin_name {
-        None => super::infer_plugin_name(std::env::current_dir()?)?,
+        None => super::infer_plugin_name(
+          std::env::current_dir().context("failed to get current directory")?,
+        )?,
         Some(name) => name,
       };

       let out_dir = PathBuf::from(options.out_dir);
       if out_dir.join("ios").exists() {
-        return Err(anyhow::anyhow!("ios folder already exists"));
+        crate::error::bail!("iOS folder already exists");
       }

       let handlebars = Handlebars::new();
@@ -6,7 +6,10 @@ use std::{fmt::Display, path::Path};

 use clap::{Parser, Subcommand, ValueEnum};

-use crate::Result;
+use crate::{
+  error::{Context, ErrorExt},
+  Result,
+};

 mod android;
 mod init;
@@ -67,8 +70,10 @@ fn infer_plugin_name<P: AsRef<Path>>(directory: P) -> Result<String> {
   let dir = directory.as_ref();
   let cargo_toml_path = dir.join("Cargo.toml");
   let name = if cargo_toml_path.exists() {
-    let contents = std::fs::read_to_string(cargo_toml_path)?;
-    let cargo_toml: toml::Value = toml::from_str(&contents)?;
+    let contents = std::fs::read_to_string(&cargo_toml_path)
+      .fs_context("failed to read Cargo manifest", cargo_toml_path)?;
+    let cargo_toml: toml::Value =
+      toml::from_str(&contents).context("failed to parse Cargo.toml")?;
     cargo_toml
       .get("package")
       .and_then(|v| v.get("name"))
@@ -3,7 +3,10 @@
 // SPDX-License-Identifier: MIT

 use super::PluginIosFramework;
-use crate::Result;
+use crate::{
+  error::{Context, ErrorExt},
+  Result,
+};
 use clap::Parser;
 use std::path::PathBuf;

@@ -70,12 +73,14 @@ impl From<Options> for super::init::Options {
 }

 pub fn command(mut options: Options) -> Result<()> {
-  let cwd = std::env::current_dir()?;
+  let cwd = std::env::current_dir().context("failed to get current directory")?;
   if let Some(dir) = &options.directory {
-    std::fs::create_dir_all(cwd.join(dir))?;
+    std::fs::create_dir_all(cwd.join(dir))
+      .fs_context("failed to create crate directory", cwd.join(dir))?;
   } else {
     let target = cwd.join(format!("tauri-plugin-{}", options.plugin_name));
-    std::fs::create_dir_all(&target)?;
+    std::fs::create_dir_all(&target)
+      .fs_context("failed to create crate directory", target.clone())?;
     options.directory.replace(target.display().to_string());
   }

@@ -5,10 +5,10 @@
 use std::path::{Path, PathBuf};

 use crate::{
+  error::Context,
   helpers::updater_signature::{secret_key, sign_file},
   Result,
 };
-use anyhow::Context;
 use base64::Engine;
 use clap::Parser;
 use tauri_utils::display_path;
@@ -48,9 +48,7 @@ pub fn command(mut options: Options) -> Result<()> {
   let private_key = if let Some(pk) = options.private_key {
     pk
   } else {
-    return Err(anyhow::anyhow!(
-      "Key generation aborted: Unable to find the private key".to_string(),
-    ));
+    crate::error::bail!("Key generation aborted: Unable to find the private key");
   };

   if options.password.is_none() {
@@ -10,7 +10,7 @@ edition = "2021"
|
|||||||
rust-version = "1.77.2"
|
rust-version = "1.77.2"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
anyhow = "1"
|
thiserror = "2"
|
||||||
serde = { version = "1", features = ["derive"] }
|
serde = { version = "1", features = ["derive"] }
|
||||||
serde_json = "1"
|
serde_json = "1"
|
||||||
tempfile = "3"
|
tempfile = "3"
|
||||||
|
|||||||
@@ -2,12 +2,13 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT

-use anyhow::{Context, Result};
 use apple_codesign::create_self_signed_code_signing_certificate;
 use x509_certificate::{EcdsaCurve, KeyAlgorithm};

 pub use apple_codesign::CertificateProfile;

+use crate::{Error, Result};
+
 /// Self signed certificate options.
 pub struct SelfSignedCertificateRequest {
   /// Which key type to use
@@ -49,16 +50,21 @@ pub fn generate_self_signed(request: SelfSignedCertificateRequest) -> Result<Vec
     &request.person_name,
     &request.country_name,
     validity_duration,
-  )?;
+  )
+  .map_err(|error| Error::FailedToCreateSelfSignedCertificate {
+    error: Box::new(error),
+  })?;

   let pfx = p12::PFX::new(
-    &cert.encode_der()?,
+    &cert
+      .encode_der()
+      .map_err(|error| Error::FailedToEncodeDER { error })?,
     &key_pair.to_pkcs8_one_asymmetric_key_der(),
     None,
     &request.password,
     "code-signing",
   )
-  .context("failed to create PFX structure")?;
+  .ok_or(Error::FailedToCreatePFX)?;
   let der = pfx.to_der();

   Ok(der)
@@ -8,8 +8,7 @@ use std::{
   process::Command,
 };

-use crate::{assert_command, CommandExt};
-use anyhow::Result;
+use crate::{assert_command, CommandExt, Error, Result};
 use rand::distr::{Alphanumeric, SampleString};

 mod identity;
@@ -57,14 +56,14 @@ impl Keychain {
     certificate_encoded: &OsString,
     certificate_password: &OsString,
   ) -> Result<Self> {
-    let tmp_dir = tempfile::tempdir()?;
+    let tmp_dir = tempfile::tempdir().map_err(Error::TempDir)?;
     let cert_path = tmp_dir.path().join("cert.p12");
     super::decode_base64(certificate_encoded, &cert_path)?;
     Self::with_certificate_file(&cert_path, certificate_password)
   }

   pub fn with_certificate_file(cert_path: &Path, certificate_password: &OsString) -> Result<Self> {
-    let home_dir = dirs::home_dir().ok_or_else(|| anyhow::anyhow!("failed to resolve home dir"))?;
+    let home_dir = dirs::home_dir().ok_or(Error::ResolveHomeDir)?;
     let keychain_path = home_dir.join("Library").join("Keychains").join(format!(
       "{}.keychain-db",
       Alphanumeric.sample_string(&mut rand::rng(), 16)
@@ -73,7 +72,11 @@ impl Keychain {

     let keychain_list_output = Command::new("security")
       .args(["list-keychain", "-d", "user"])
-      .output()?;
+      .output()
+      .map_err(|e| Error::CommandFailed {
+        command: "security list-keychain -d user".to_string(),
+        error: e,
+      })?;

     assert_command(
       Command::new("security")
@@ -81,7 +84,11 @@ impl Keychain {
         .arg(&keychain_path)
         .piped(),
       "failed to create keychain",
-    )?;
+    )
+    .map_err(|error| Error::CommandFailed {
+      command: "security create-Keychain".to_string(),
+      error,
+    })?;

     assert_command(
       Command::new("security")
@@ -89,7 +96,11 @@ impl Keychain {
         .arg(&keychain_path)
         .piped(),
       "failed to set unlock keychain",
-    )?;
+    )
+    .map_err(|error| Error::CommandFailed {
+      command: "security unlock-keychain".to_string(),
+      error,
+    })?;

     assert_command(
       Command::new("security")
@@ -109,7 +120,11 @@ impl Keychain {
         .arg(&keychain_path)
         .piped(),
       "failed to import keychain certificate",
-    )?;
+    )
+    .map_err(|error| Error::CommandFailed {
+      command: "security import".to_string(),
+      error,
+    })?;

     assert_command(
       Command::new("security")
@@ -117,7 +132,11 @@ impl Keychain {
         .arg(&keychain_path)
         .piped(),
       "failed to set keychain settings",
-    )?;
+    )
+    .map_err(|error| Error::CommandFailed {
+      command: "security set-keychain-settings".to_string(),
+      error,
+    })?;

     assert_command(
       Command::new("security")
@@ -132,7 +151,11 @@ impl Keychain {
         .arg(&keychain_path)
         .piped(),
       "failed to set keychain settings",
-    )?;
+    )
+    .map_err(|error| Error::CommandFailed {
+      command: "security set-key-partition-list".to_string(),
+      error,
+    })?;

     let current_keychains = String::from_utf8_lossy(&keychain_list_output.stdout)
       .split('\n')
@@ -151,11 +174,15 @@ impl Keychain {
         .arg(&keychain_path)
         .piped(),
       "failed to list keychain",
-    )?;
+    )
+    .map_err(|error| Error::CommandFailed {
+      command: "security list-keychain".to_string(),
+      error,
+    })?;

     let signing_identity = identity::list(&keychain_path)
       .map(|l| l.first().cloned())?
-      .ok_or_else(|| anyhow::anyhow!("failed to resolve signing identity"))?;
+      .ok_or(Error::ResolveSigningIdentity)?;

     Ok(Self {
       path: Some(keychain_path),
@@ -211,7 +238,12 @@ impl Keychain {

     codesign.arg(path);

-    assert_command(codesign.piped(), "failed to sign app")?;
+    assert_command(codesign.piped(), "failed to sign app").map_err(|error| {
+      Error::CommandFailed {
+        command: "codesign".to_string(),
+        error,
+      }
+    })?;

     Ok(())
   }
@@ -2,14 +2,13 @@
 // SPDX-License-Identifier: Apache-2.0
 // SPDX-License-Identifier: MIT

-use anyhow::Context;
 use once_cell_regex::regex;
 use std::{collections::BTreeSet, path::Path, process::Command};
 use x509_certificate::certificate::X509Certificate;

-use crate::Result;
+use crate::{Error, Result};

-fn get_pem_list(keychain_path: &Path, name_substr: &str) -> std::io::Result<std::process::Output> {
+fn get_pem_list(keychain_path: &Path, name_substr: &str) -> Result<std::process::Output> {
   Command::new("security")
     .arg("find-certificate")
     .args(["-p", "-a"])
@@ -19,6 +18,10 @@ fn get_pem_list(keychain_path: &Path, name_substr: &str) -> std:
     .stdin(os_pipe::dup_stdin().unwrap())
     .stderr(os_pipe::dup_stderr().unwrap())
     .output()
+    .map_err(|error| Error::CommandFailed {
+      command: "security find-certificate".to_string(),
+      error,
+    })
 }

 #[derive(Debug, Clone, Eq, Ord, PartialEq, PartialOrd)]
@@ -33,7 +36,7 @@ impl Team {
   fn from_x509(cert_prefix: &'static str, cert: X509Certificate) -> Result<Self> {
     let common_name = cert
       .subject_common_name()
-      .ok_or_else(|| anyhow::anyhow!("skipping cert, missing common name"))?;
+      .ok_or(Error::CertificateMissingCommonName)?;

     let organization = cert
       .subject_name()
@@ -62,7 +65,9 @@ impl Team {
       .iter_organizational_unit()
       .next()
       .and_then(|v| v.to_string().ok())
-      .ok_or_else(|| anyhow::anyhow!("skipping cert {common_name}: missing Organization Unit"))?;
+      .ok_or_else(|| Error::CertificateMissingOrganizationUnit {
+        common_name: common_name.clone(),
+      })?;

     Ok(Self {
       name,
@@ -89,10 +94,9 @@ pub fn list(keychain_path: &Path) -> Result<Vec<Team>> {
     "iOS App Development:",
     "Mac Development:",
   ] {
-    let pem_list_out =
-      get_pem_list(keychain_path, cert_prefix).context("Failed to call `security` command")?;
+    let pem_list_out = get_pem_list(keychain_path, cert_prefix)?;
     let cert_list = X509Certificate::from_pem_multiple(pem_list_out.stdout)
-      .context("Failed to parse X509 cert")?;
+      .map_err(|error| Error::X509Certificate { error })?;
     certs.extend(cert_list.into_iter().map(|cert| (cert_prefix, cert)));
   }
   certs
@@ -102,7 +106,7 @@ pub fn list(keychain_path: &Path) -> Result<Vec<Team>> {
     .into_iter()
     .flat_map(|(cert_prefix, cert)| {
       Team::from_x509(cert_prefix, cert).map_err(|err| {
-        eprintln!("{err}");
+        log::error!("{err}");
         err
       })
     })
@@ -8,7 +8,6 @@ use std::{
   process::{Command, ExitStatus},
 };

-use anyhow::{Context, Result};
 use serde::Deserialize;

 pub mod certificate;
@@ -18,6 +17,61 @@ mod provisioning_profile;
 pub use keychain::{Keychain, Team};
 pub use provisioning_profile::ProvisioningProfile;

+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+  #[error("failed to create temp directory: {0}")]
+  TempDir(std::io::Error),
+  #[error("failed to resolve home dir")]
+  ResolveHomeDir,
+  #[error("failed to resolve signing identity")]
+  ResolveSigningIdentity,
+  #[error("failed to decode provisioning profile")]
+  FailedToDecodeProvisioningProfile,
+  #[error("could not find provisioning profile UUID")]
+  FailedToFindProvisioningProfileUuid,
+  #[error("{context} {path}: {error}")]
+  Plist {
+    context: &'static str,
+    path: PathBuf,
+    error: plist::Error,
+  },
+  #[error("failed to upload app to Apple's notarization servers: {error}")]
+  FailedToUploadApp { error: std::io::Error },
+  #[error("failed to notarize app: {0}")]
+  Notarize(String),
+  #[error("failed to parse notarytool output as JSON: {output}")]
+  ParseNotarytoolOutput { output: String },
+  #[error("failed to run command {command}: {error}")]
+  CommandFailed {
+    command: String,
+    error: std::io::Error,
+  },
+  #[error("{context} {path}: {error}")]
+  Fs {
+    context: &'static str,
+    path: PathBuf,
+    error: std::io::Error,
+  },
+  #[error("failed to parse X509 certificate: {error}")]
+  X509Certificate {
+    error: x509_certificate::X509CertificateError,
+  },
+  #[error("failed to create PFX from self signed certificate")]
+  FailedToCreatePFX,
+  #[error("failed to create self signed certificate: {error}")]
+  FailedToCreateSelfSignedCertificate {
+    error: Box<apple_codesign::AppleCodesignError>,
+  },
+  #[error("failed to encode DER: {error}")]
+  FailedToEncodeDER { error: std::io::Error },
+  #[error("certificate missing common name")]
+  CertificateMissingCommonName,
+  #[error("certificate missing organization unit for common name {common_name}")]
+  CertificateMissingOrganizationUnit { common_name: String },
+}
+
+pub type Result<T> = std::result::Result<T, Error>;
+
 trait CommandExt {
   // The `pipe` function sets the stdout and stderr to properly
   // show the command output in the Node.js wrapper.
@@ -88,7 +142,7 @@ fn notarize_inner(
     .file_stem()
     .expect("failed to get bundle filename");

-  let tmp_dir = tempfile::tempdir()?;
+  let tmp_dir = tempfile::tempdir().map_err(Error::TempDir)?;
   let zip_path = tmp_dir
     .path()
     .join(format!("{}.zip", bundle_stem.to_string_lossy()));
@@ -110,7 +164,11 @@ fn notarize_inner(
   assert_command(
     Command::new("ditto").args(zip_args).piped(),
     "failed to zip app with ditto",
-  )?;
+  )
+  .map_err(|error| Error::CommandFailed {
+    command: "ditto".to_string(),
+    error,
+  })?;

   // sign the zip file
   keychain.sign(&zip_path, None, false)?;
@@ -135,13 +193,12 @@ fn notarize_inner(
     .args(notarize_args)
     .notarytool_args(auth, tmp_dir.path())?
     .output()
-    .context("failed to upload app to Apple's notarization servers.")?;
+    .map_err(|error| Error::FailedToUploadApp { error })?;

   if !output.status.success() {
-    return Err(
-      anyhow::anyhow!("failed to notarize app")
-        .context(String::from_utf8_lossy(&output.stderr).into_owned()),
-    );
+    return Err(Error::Notarize(
+      String::from_utf8_lossy(&output.stderr).into_owned(),
+    ));
   }

   let output_str = String::from_utf8_lossy(&output.stdout);
@@ -176,17 +233,17 @@ fn notarize_inner(
       .notarytool_args(auth, tmp_dir.path())?
       .output()
     {
-      Err(anyhow::anyhow!(
+      Err(Error::Notarize(format!(
         "{log_message}\nLog:\n{}",
         String::from_utf8_lossy(&output.stdout)
-      ))
+      )))
     } else {
-      Err(anyhow::anyhow!("{log_message}"))
+      Err(Error::Notarize(log_message.to_string()))
     }
   } else {
-    Err(anyhow::anyhow!(
-      "failed to parse notarytool output as JSON: `{output_str}`"
-    ))
+    Err(Error::ParseNotarytoolOutput {
+      output: output_str.into_owned(),
+    })
   }
 }

@@ -204,7 +261,10 @@ fn staple_app(mut app_bundle_path: PathBuf) -> Result<()> {
     .args(vec!["stapler", "staple", "-v", filename])
     .current_dir(app_bundle_path)
     .output()
-    .context("failed to staple app.")?;
+    .map_err(|error| Error::CommandFailed {
+      command: "xcrun stapler staple".to_string(),
+      error,
+    })?;

   Ok(())
 }
@@ -245,7 +305,11 @@ impl NotarytoolCmdExt for Command {
       let key_path = match key {
         ApiKey::Raw(k) => {
           let key_path = temp_dir.join("AuthKey.p8");
-          std::fs::write(&key_path, k)?;
+          std::fs::write(&key_path, k).map_err(|error| Error::Fs {
+            context: "failed to write notarization API key to temp file",
+            path: key_path.clone(),
+            error,
+          })?;
           key_path
         }
         ApiKey::Path(p) => p.to_owned(),
@@ -266,7 +330,7 @@ impl NotarytoolCmdExt for Command {
 }

 fn decode_base64(base64: &OsStr, out_path: &Path) -> Result<()> {
-  let tmp_dir = tempfile::tempdir()?;
+  let tmp_dir = tempfile::tempdir().map_err(Error::TempDir)?;

   let src_path = tmp_dir.path().join("src");
   let base64 = base64
@@ -277,7 +341,11 @@ fn decode_base64(base64: &OsStr, out_path: &Path) -> Result<()> {
   // as base64 contain whitespace decoding may be broken
   // https://github.com/marshallpierce/rust-base64/issues/105
   // we'll use builtin base64 command from the OS
-  std::fs::write(&src_path, base64)?;
+  std::fs::write(&src_path, base64).map_err(|error| Error::Fs {
+    context: "failed to write base64 to temp file",
+    path: src_path.clone(),
+    error,
+  })?;

   assert_command(
     std::process::Command::new("base64")
@@ -288,13 +356,17 @@ fn decode_base64(base64: &OsStr, out_path: &Path) -> Result<()> {
       .arg(out_path)
       .piped(),
     "failed to decode certificate",
-  )?;
+  )
+  .map_err(|error| Error::CommandFailed {
+    command: "base64 --decode".to_string(),
+    error,
+  })?;

   Ok(())
 }

 fn assert_command(
-  response: Result<std::process::ExitStatus, std::io::Error>,
+  response: std::result::Result<std::process::ExitStatus, std::io::Error>,
   error_message: &str,
 ) -> std::io::Result<()> {
   let status =
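With the anyhow dependency gone, tauri-macos-sign now exposes the typed `Error` enum and `Result` alias above as its public error surface. A hedged illustration of what that enables for a downstream caller such as the CLI (the match arms are examples, not code from this change):

fn report(result: tauri_macos_sign::Result<()>) {
  if let Err(error) = result {
    match error {
      tauri_macos_sign::Error::CommandFailed { command, error } => {
        // The failed external command is now machine-readable instead of
        // being buried in a formatted anyhow message.
        eprintln!("`{command}` failed: {error}");
      }
      tauri_macos_sign::Error::ResolveHomeDir => {
        eprintln!("could not locate the home directory");
      }
      other => eprintln!("{other}"),
    }
  }
}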
@@ -4,7 +4,7 @@

 use std::{ffi::OsStr, path::PathBuf, process::Command};

-use anyhow::{Context, Result};
+use crate::{Error, Result};
 use rand::distr::{Alphanumeric, SampleString};

 pub struct ProvisioningProfile {
@@ -13,12 +13,16 @@ pub struct ProvisioningProfile {

 impl ProvisioningProfile {
   pub fn from_base64(base64: &OsStr) -> Result<Self> {
-    let home_dir = dirs::home_dir().unwrap();
+    let home_dir = dirs::home_dir().ok_or(Error::ResolveHomeDir)?;
     let provisioning_profiles_folder = home_dir
       .join("Library")
       .join("MobileDevice")
       .join("Provisioning Profiles");
-    std::fs::create_dir_all(&provisioning_profiles_folder).unwrap();
+    std::fs::create_dir_all(&provisioning_profiles_folder).map_err(|error| Error::Fs {
+      context: "failed to create provisioning profiles folder",
+      path: provisioning_profiles_folder.clone(),
+      error,
+    })?;

     let provisioning_profile_path = provisioning_profiles_folder.join(format!(
       "{}.mobileprovision",
@@ -35,18 +39,26 @@ impl ProvisioningProfile {
     let output = Command::new("security")
       .args(["cms", "-D", "-i"])
       .arg(&self.path)
-      .output()?;
+      .output()
+      .map_err(|error| Error::CommandFailed {
+        command: "security cms -D -i".to_string(),
+        error,
+      })?;

     if !output.status.success() {
-      return Err(anyhow::anyhow!("failed to decode provisioning profile"));
+      return Err(Error::FailedToDecodeProvisioningProfile);
     }

-    let plist = plist::from_bytes::<plist::Dictionary>(&output.stdout)
-      .context("failed to decode provisioning profile as plist")?;
+    let plist =
+      plist::from_bytes::<plist::Dictionary>(&output.stdout).map_err(|error| Error::Plist {
+        context: "failed to parse provisioning profile as plist",
+        path: self.path.clone(),
+        error,
+      })?;

     plist
       .get("UUID")
       .and_then(|v| v.as_string().map(ToString::to_string))
-      .ok_or_else(|| anyhow::anyhow!("could not find provisioning profile UUID"))
+      .ok_or(Error::FailedToFindProvisioningProfileUuid)
   }
 }