fix: working hermes commands (#41349)

David Newell
2025-11-13 13:54:19 +00:00
committed by GitHub
parent 1eb2a57152
commit 52f8bc5c7b
9 changed files with 86 additions and 93 deletions

View File

@@ -1,5 +1,10 @@
# posthog-cli
# 0.5.11
- Do not read bundle files as part of hermes sourcemap commands
- Change hermes clone command to take two file paths (for the minified and composed maps respectively)
# 0.5.10
- Add terminal checks for login and query command

View File

@@ -5,7 +5,9 @@ and bump the package version number at the same time.
```bash
git checkout -b "cli/release-v0.1.0-pre1"
# Bump version number in Cargo.toml and build to update Cargo.lock
# Bump version number in Cargo.toml
# Build to update Cargo.lock (cargo build)
# Update the CHANGELOG.md
git add .
git commit -m "Bump version number"
git tag "posthog-cli-v0.1.0-prerelease.1"

cli/Cargo.lock (generated)
View File

@@ -1521,7 +1521,7 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "posthog-cli"
version = "0.5.10"
version = "0.5.11"
dependencies = [
"anyhow",
"chrono",

View File

@@ -1,6 +1,6 @@
[package]
name = "posthog-cli"
version = "0.5.10"
version = "0.5.11"
authors = [
"David <david@posthog.com>",
"Olly <oliver@posthog.com>",

View File

@@ -16,7 +16,7 @@ use crate::{
pub struct SourceMapContent {
#[serde(skip_serializing_if = "Option::is_none")]
pub release_id: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(skip_serializing_if = "Option::is_none", alias = "debugId")]
pub chunk_id: Option<String>,
#[serde(flatten)]
pub fields: BTreeMap<String, Value>,
@@ -49,6 +49,10 @@ impl SourceMapFile {
self.inner.content.release_id.clone()
}
pub fn has_release_id(&self) -> bool {
self.get_release_id().is_some()
}
pub fn apply_adjustment(&mut self, adjustment: SourceMap) -> Result<()> {
let new_content = {
let content = serde_json::to_string(&self.inner.content)?.into_bytes();
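
The `alias = "debugId"` added to `chunk_id` above is what lets these commands read maps whose identifier is stored under a different key, presumably because composed Hermes/Metro maps carry it as `debugId`. A minimal sketch of how serde's field alias behaves, using a hypothetical `ChunkInfo` struct rather than the real `SourceMapContent` (assumes `serde` with the `derive` feature and `serde_json` as dependencies):

```rust
use serde::Deserialize;

// Hypothetical, stripped-down struct -- only the `alias` attribute from the
// diff above is illustrated; the real SourceMapContent has more fields.
#[derive(Deserialize, Debug)]
struct ChunkInfo {
    #[serde(alias = "debugId")]
    chunk_id: Option<String>,
}

fn main() -> serde_json::Result<()> {
    // Both spellings of the key deserialize into the same field.
    let a: ChunkInfo = serde_json::from_str(r#"{"debugId":"0191-abc"}"#)?;
    let b: ChunkInfo = serde_json::from_str(r#"{"chunk_id":"0191-abc"}"#)?;
    assert_eq!(a.chunk_id.as_deref(), Some("0191-abc"));
    assert_eq!(b.chunk_id.as_deref(), Some("0191-abc"));
    Ok(())
}
```

Serialization is unaffected: `alias` only applies when reading, so the field is still written out as `chunk_id`.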

View File

@@ -1,27 +1,22 @@
use std::path::PathBuf;
use anyhow::{anyhow, bail, Result};
use tracing::{info, warn};
use anyhow::{anyhow, Result};
use tracing::info;
use crate::{
invocation_context::context,
sourcemaps::{
content::SourceMapFile,
hermes::{get_composed_map, inject::is_metro_bundle},
inject::get_release_for_pairs,
source_pairs::read_pairs,
},
sourcemaps::{content::SourceMapFile, inject::get_release_for_maps},
};
#[derive(clap::Args)]
pub struct CloneArgs {
/// The directory containing the bundled chunks
/// The path of the minified source map
#[arg(short, long)]
pub directory: PathBuf,
pub minified_map_path: PathBuf,
/// One or more directory glob patterns to ignore
/// The path of the composed source map
#[arg(short, long)]
pub ignore: Vec<String>,
pub composed_map_path: PathBuf,
/// The project name associated with the uploaded chunks. Required to have the uploaded chunks associated with
/// a specific release. We will try to auto-derive this from git information if not provided. Strongly recommended
@@ -39,31 +34,30 @@ pub fn clone(args: &CloneArgs) -> Result<()> {
context().capture_command_invoked("hermes_clone");
let CloneArgs {
directory,
ignore,
minified_map_path,
composed_map_path,
project,
version,
} = args;
let directory = directory.canonicalize().map_err(|e| {
let mut minified_map = SourceMapFile::load(minified_map_path).map_err(|e| {
anyhow!(
"Directory '{}' not found or inaccessible: {}",
directory.display(),
"Failed to load minified map at '{}': {}",
minified_map_path.display(),
e
)
})?;
info!("Processing directory: {}", directory.display());
let pairs = read_pairs(&directory, ignore, is_metro_bundle, &None)?;
let mut composed_map = SourceMapFile::load(composed_map_path).map_err(|e| {
anyhow!(
"Failed to load composed map at '{}': {}",
composed_map_path.display(),
e
)
})?;
if pairs.is_empty() {
bail!("No source files found");
}
info!("Found {} pairs", pairs.len());
let release_id =
get_release_for_pairs(&directory, project, version, &pairs)?.map(|r| r.id.to_string());
let release_id = get_release_for_maps(minified_map_path, project, version, [&minified_map])?
.map(|r| r.id.to_string());
// The flow here differs from plain sourcemap injection a bit - here, we don't ever
// overwrite the chunk ID, because at this point in the build process, we no longer
@@ -73,46 +67,25 @@ pub fn clone(args: &CloneArgs) -> Result<()> {
// tries to run `clone` twice, changing release but not posthog env, we'll error out. The
// correct way to upload the same set of artefacts to the same posthog env as part of
// two different releases is, 1, not to, but failing that, 2, to re-run the bundling process
let mut pairs = pairs;
for pair in &mut pairs {
if !pair.has_release_id() || pair.get_release_id() != release_id {
pair.set_release_id(release_id.clone());
pair.save()?;
if !minified_map.has_release_id() || minified_map.get_release_id() != release_id {
minified_map.set_release_id(release_id.clone());
minified_map.save()?;
}
}
let pairs = pairs;
let maps: Result<Vec<(&SourceMapFile, Option<SourceMapFile>)>> = pairs
.iter()
.map(|p| get_composed_map(p).map(|c| (&p.sourcemap, c)))
.collect();
let maps = maps?;
for (minified, composed) in maps {
let Some(mut composed) = composed else {
warn!(
"Could not find composed map for minified sourcemap {}",
minified.inner.path.display()
);
continue;
};
// Copy metadata from source map to composed map
if let Some(chunk_id) = minified.get_chunk_id() {
composed.set_chunk_id(Some(chunk_id));
if let Some(chunk_id) = minified_map.get_chunk_id() {
composed_map.set_chunk_id(Some(chunk_id));
}
if let Some(release_id) = minified.get_release_id() {
composed.set_release_id(Some(release_id));
if let Some(release_id) = minified_map.get_release_id() {
composed_map.set_release_id(Some(release_id));
}
composed.save()?;
composed_map.save()?;
info!(
"Successfully cloned metadata to {}",
composed.inner.path.display()
composed_map.inner.path.display()
);
}
info!("Finished cloning metadata");
Ok(())
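
Condensed, the clone flow above touches map metadata in exactly two ways. The sketch below restates those rules with a hypothetical `MapMeta` struct and `clone_metadata` function (neither exists in the codebase); the real command additionally persists both maps with `save()`:

```rust
// Hypothetical stand-in for the metadata carried by SourceMapFile.
#[derive(Debug)]
struct MapMeta {
    chunk_id: Option<String>,
    release_id: Option<String>,
}

// Sketch of the rules described in the comment above:
// 1. the minified map's release ID is rewritten only when missing or different;
// 2. the chunk ID is never overwritten -- it is only copied onto the composed map.
fn clone_metadata(minified: &mut MapMeta, composed: &mut MapMeta, resolved_release: Option<String>) {
    if minified.release_id.is_none() || minified.release_id != resolved_release {
        minified.release_id = resolved_release;
    }
    if let Some(chunk_id) = minified.chunk_id.clone() {
        composed.chunk_id = Some(chunk_id);
    }
    if let Some(release_id) = minified.release_id.clone() {
        composed.release_id = Some(release_id);
    }
}

fn main() {
    let mut minified = MapMeta { chunk_id: Some("chunk-1".into()), release_id: None };
    let mut composed = MapMeta { chunk_id: None, release_id: None };
    clone_metadata(&mut minified, &mut composed, Some("release-42".into()));
    assert_eq!(composed.chunk_id.as_deref(), Some("chunk-1"));
    assert_eq!(composed.release_id.as_deref(), Some("release-42"));
}
```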

View File

@@ -2,28 +2,23 @@ use std::path::PathBuf;
use anyhow::{anyhow, Ok, Result};
use tracing::{info, warn};
use walkdir::WalkDir;
use crate::api::symbol_sets::{self, SymbolSetUpload};
use crate::invocation_context::context;
use crate::sourcemaps::hermes::get_composed_map;
use crate::sourcemaps::hermes::inject::is_metro_bundle;
use crate::sourcemaps::source_pairs::read_pairs;
use crate::sourcemaps::content::SourceMapFile;
#[derive(clap::Args, Clone)]
pub struct Args {
/// The directory containing the bundled chunks
#[arg(short, long)]
pub directory: PathBuf,
/// One or more directory glob patterns to ignore
#[arg(short, long)]
pub ignore: Vec<String>,
}
pub fn upload(args: &Args) -> Result<()> {
context().capture_command_invoked("hermes_upload");
let Args { directory, ignore } = args;
let Args { directory } = args;
let directory = directory.canonicalize().map_err(|e| {
anyhow!(
@@ -34,17 +29,10 @@ pub fn upload(args: &Args) -> Result<()> {
})?;
info!("Processing directory: {}", directory.display());
let pairs = read_pairs(&directory, ignore, is_metro_bundle, &None)?;
let maps: Result<Vec<_>> = pairs.iter().map(get_composed_map).collect();
let maps = maps?;
let maps = read_maps(&directory);
let mut uploads: Vec<SymbolSetUpload> = Vec::new();
for map in maps.into_iter() {
let Some(map) = map else {
continue;
};
if map.get_chunk_id().is_none() {
warn!("Skipping map {}, no chunk ID", map.inner.path.display());
continue;
@@ -53,9 +41,22 @@ pub fn upload(args: &Args) -> Result<()> {
uploads.push(map.try_into()?);
}
info!("Found {} bundles to upload", uploads.len());
info!("Found {} maps to upload", uploads.len());
symbol_sets::upload(&uploads, 100)?;
Ok(())
}
fn read_maps(directory: &PathBuf) -> Vec<SourceMapFile> {
WalkDir::new(directory)
.into_iter()
.filter_map(Result::ok)
.filter(|e| e.file_type().is_file())
.map(|e| {
let path = e.path().canonicalize()?;
SourceMapFile::load(&path)
})
.filter_map(Result::ok)
.collect()
}
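
One behavioural note on the new helper: where the old path went through `read_pairs` and Metro bundle detection, `read_maps` above offers every file in the directory to `SourceMapFile::load` and silently drops whatever fails to canonicalize or parse (the `filter_map(Result::ok)` calls), so a stray non-map file no longer aborts the upload. A minimal sketch of that error-tolerant pattern, with a generic `load` closure standing in for `SourceMapFile::load` and made-up paths:

```rust
use std::path::Path;

// Each candidate is tried independently; a failure just means "skip this
// file" rather than failing the whole command.
fn try_load_all<T, E>(paths: &[&Path], load: impl Fn(&Path) -> Result<T, E>) -> Vec<T> {
    paths
        .iter()
        .copied()
        .map(|path| load(path))
        .filter_map(Result::ok)
        .collect()
}

fn main() {
    // Hypothetical candidates: only those the loader accepts survive.
    let paths = [Path::new("index.bundle.map"), Path::new("index.bundle")];
    let loaded = try_load_all(&paths, |p: &Path| {
        std::fs::read_to_string(p).map_err(|e| e.to_string())
    });
    println!("loaded {} of {} candidates", loaded.len(), paths.len());
}
```

The chunk-ID check earlier in `upload` then skips any surviving map that has no chunk ID.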

View File

@@ -5,7 +5,10 @@ use walkdir::DirEntry;
use crate::{
api::releases::{Release, ReleaseBuilder},
sourcemaps::source_pairs::{read_pairs, SourcePair},
sourcemaps::{
content::SourceMapFile,
source_pairs::{read_pairs, SourcePair},
},
utils::git::get_git_info,
};
@@ -61,7 +64,12 @@ pub fn inject_impl(args: &InjectArgs, matcher: impl Fn(&DirEntry) -> bool) -> Re
bail!("no source files found");
}
let created_release_id = get_release_for_pairs(&directory, project, version, &pairs)?
let created_release_id = get_release_for_maps(
&directory,
project,
version,
pairs.iter().map(|p| &p.sourcemap),
)?
.as_ref()
.map(|r| r.id.to_string());
@@ -97,16 +105,16 @@ pub fn inject_pairs(
Ok(pairs)
}
pub fn get_release_for_pairs<'a>(
pub fn get_release_for_maps<'a>(
directory: &Path,
project: &Option<String>,
version: &Option<String>,
pairs: impl IntoIterator<Item = &'a SourcePair>,
maps: impl IntoIterator<Item = &'a SourceMapFile>,
) -> Result<Option<Release>> {
// We need to fetch or create a release if: the user specified one, any pair is missing one, or the user
// forced release overriding
let needs_release =
project.is_some() || version.is_some() || pairs.into_iter().any(|p| !p.has_release_id());
project.is_some() || version.is_some() || maps.into_iter().any(|p| !p.has_release_id());
let mut created_release = None;
if needs_release {

View File

@@ -28,7 +28,7 @@ impl SourcePair {
}
pub fn has_release_id(&self) -> bool {
self.get_release_id().is_some()
self.sourcemap.has_release_id()
}
pub fn get_release_id(&self) -> Option<String> {