fix(cli): inject command updates chunks (#40113)

.github/workflows/ci-cli.yml (vendored, 60 changed lines)

@@ -1,8 +1,11 @@
 name: CI CLI
+permissions:
+  contents: read
 on:
   pull_request:
     paths:
-      - cli/Cargo.toml
+      - 'cli/**'
+      - '.github/workflows/ci-cli.yml'
 
 jobs:
   check-lockfile:
@@ -17,5 +20,56 @@ jobs:
       - name: Install rust
         uses: dtolnay/rust-toolchain@6691ebadcb18182cc1391d07c9f295f657c593cd # 1.88
 
-      - name: Install sccache
-        run: cargo generate-lockfile --locked
+      - name: Check Cargo.lock is up to date
+        run: |
+          cargo update --workspace --locked
+          git diff --exit-code Cargo.lock
+
+  tests:
+    name: Run tests
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        working-directory: cli
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install rust
+        uses: dtolnay/rust-toolchain@6691ebadcb18182cc1391d07c9f295f657c593cd # 1.88
+
+      - name: Run tests
+        run: cargo test --all-features
+
+  clippy:
+    name: Clippy
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        working-directory: cli
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install rust
+        uses: dtolnay/rust-toolchain@6691ebadcb18182cc1391d07c9f295f657c593cd # 1.88
+        with:
+          components: clippy
+
+      - name: Run clippy
+        run: cargo clippy --all-targets --all-features -- -D warnings
+
+  format:
+    name: Format
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        working-directory: cli
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install rust
+        uses: dtolnay/rust-toolchain@6691ebadcb18182cc1391d07c9f295f657c593cd # 1.88
+        with:
+          components: rustfmt
+
+      - name: Check formatting
+        run: cargo fmt --all -- --check
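
In summary, the workflow previously only verified that Cargo.lock was current. This change broadens the trigger paths from cli/Cargo.toml to all of cli/ plus the workflow file itself, and adds three jobs that run cargo test --all-features, cargo clippy --all-targets --all-features -- -D warnings, and cargo fmt --all -- --check from the cli/ working directory, all on the same pinned 1.88 toolchain.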
@@ -1,5 +1,9 @@
 # posthog-cli
 
+# 0.5.5
+
+- When running the inject command multiple times, we only update chunk IDs when the release is different
+
 # 0.5.4
 
 - Added no fail flag to disable non-zero exit codes on errors.
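
The 0.5.5 entry describes the guard that makes inject idempotent. As a minimal sketch of that rule (stand-in function and names, not the CLI's actual API; the real logic lands in inject_pairs further down):

    // Sketch of the re-inject rule: a pair is rewritten only when the release
    // changed or no chunk ID has been injected yet. Stand-in signature.
    fn needs_reinjection(
        current_release_id: Option<&str>,
        created_release_id: Option<&str>,
        chunk_id: Option<&str>,
    ) -> bool {
        current_release_id != created_release_id || chunk_id.is_none()
    }

    fn main() {
        // Same release and a chunk ID already present: re-running inject is a no-op.
        assert!(!needs_reinjection(Some("rel-1"), Some("rel-1"), Some("chunk-1")));
        // The release changed: the chunk ID is regenerated.
        assert!(needs_reinjection(Some("rel-1"), Some("rel-2"), Some("chunk-1")));
        // Never injected: always inject.
        assert!(needs_reinjection(None, None, None));
    }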

cli/Cargo.lock (generated, 6 changed lines)

@@ -762,9 +762,9 @@ checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7"
 
 [[package]]
 name = "globset"
-version = "0.4.18"
+version = "0.4.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52dfc19153a48bde0cbd630453615c8151bce3a5adfac7a0aebfbf0a1e1f57e3"
+checksum = "eab69130804d941f8075cfd713bf8848a2c3b3f201a9457a11e6f87e1ab62305"
 dependencies = [
  "aho-corasick",
  "bstr",
@@ -1520,7 +1520,7 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
 
 [[package]]
 name = "posthog-cli"
-version = "0.5.4"
+version = "0.5.5"
 dependencies = [
  "anyhow",
  "chrono",
cli/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "posthog-cli"
-version = "0.5.4"
+version = "0.5.5"
 authors = [
     "David <david@posthog.com>",
     "Olly <oliver@posthog.com>",
@@ -65,10 +65,7 @@ pub fn upload(input_sets: &[SymbolSetUpload], batch_size: usize) -> Result<()> {
         info!("Starting upload of batch {i}, {} symbol sets", batch.len());
         let start_response = start_upload(batch)?;
 
-        let id_map: HashMap<_, _> = batch
-            .iter()
-            .map(|u| (u.chunk_id.as_str(), u))
-            .collect();
+        let id_map: HashMap<_, _> = batch.iter().map(|u| (u.chunk_id.as_str(), u)).collect();
 
         info!(
             "Server returned {} upload keys ({} skipped as already present)",
@@ -99,7 +96,7 @@ pub fn upload(input_sets: &[SymbolSetUpload], batch_size: usize) -> Result<()> {
     Ok(())
 }
 
-fn start_upload<'a>(symbol_sets: &[&SymbolSetUpload]) -> Result<BulkUploadStartResponse> {
+fn start_upload(symbol_sets: &[&SymbolSetUpload]) -> Result<BulkUploadStartResponse> {
     let base_url = format!(
         "{}/api/environments/{}/error_tracking/symbol_sets",
         context().token.get_host(),
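
Both hunks above are behavior-neutral cleanups: the id_map construction is collapsed onto a single line, and the unused 'a lifetime parameter is dropped from start_upload's signature.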
cli/src/sourcemaps/inject.rs

@@ -4,8 +4,10 @@ use tracing::info;
 use uuid;
 
 use crate::{
-    api::releases::ReleaseBuilder, invocation_context::context,
-    sourcemaps::source_pair::read_pairs, utils::git::get_git_info,
+    api::releases::ReleaseBuilder,
+    invocation_context::context,
+    sourcemaps::source_pair::{read_pairs, SourcePair},
+    utils::git::get_git_info,
 };
 
 #[derive(clap::Args)]
@@ -86,33 +88,36 @@ pub fn inject(args: &InjectArgs) -> Result<()> {
         }
     }
 
-    let mut skipped_pairs = 0;
-    for pair in &mut pairs {
-        if pair.has_chunk_id() {
-            skipped_pairs += 1;
-            continue;
-        }
-        let chunk_id = uuid::Uuid::now_v7().to_string();
-        pair.set_chunk_id(chunk_id)?;
+    let created_release_id = created_release.as_ref().map(|r| r.id.to_string());
 
-        // If we've got a release, and the user asked us to, or a set is missing one,
-        // put the release ID on the pair
-        if created_release.is_some() && !pair.has_release_id() {
-            pair.set_release_id(created_release.as_ref().unwrap().id.to_string());
-        }
-    }
-    if skipped_pairs > 0 {
-        info!(
-            "Skipped {} pairs because chunk IDs already exist",
-            skipped_pairs
-        );
-    }
+    pairs = inject_pairs(pairs, created_release_id)?;
 
     // Write the source and sourcemaps back to disk
     for pair in &pairs {
         pair.save()?;
     }
     info!("Finished processing directory");
 
     Ok(())
 }
+
+pub fn inject_pairs(
+    mut pairs: Vec<SourcePair>,
+    created_release_id: Option<String>,
+) -> Result<Vec<SourcePair>> {
+    for pair in &mut pairs {
+        let current_release_id = pair.get_release_id();
+        // We only update release ids and chunk ids when the release id changed or is not present
+        if current_release_id != created_release_id || pair.get_chunk_id().is_none() {
+            pair.set_release_id(created_release_id.clone());
+
+            let chunk_id = uuid::Uuid::now_v7().to_string();
+            if let Some(previous_chunk_id) = pair.get_chunk_id() {
+                pair.update_chunk_id(previous_chunk_id, chunk_id)?;
+            } else {
+                pair.add_chunk_id(chunk_id)?;
+            }
+        }
+    }
+
+    Ok(pairs)
+}
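
To see what the new control flow buys, here is a minimal, self-contained model of inject_pairs' idempotency. Pair and the deterministic fake chunk IDs are stand-ins for illustration; the real code operates on SourcePair and draws chunk IDs from uuid::Uuid::now_v7().

    // Stand-in model of the rule in inject_pairs above.
    #[derive(Debug, Clone, PartialEq)]
    struct Pair {
        release_id: Option<String>,
        chunk_id: Option<String>,
    }

    fn inject_pairs(mut pairs: Vec<Pair>, created_release_id: Option<String>) -> Vec<Pair> {
        for (i, pair) in pairs.iter_mut().enumerate() {
            // Only touch a pair when the release changed or it has no chunk ID yet.
            if pair.release_id != created_release_id || pair.chunk_id.is_none() {
                pair.release_id = created_release_id.clone();
                let release = created_release_id.as_deref().unwrap_or("none");
                pair.chunk_id = Some(format!("{release}-chunk-{i}")); // stands in for a fresh UUID
            }
        }
        pairs
    }

    fn main() {
        let pairs = vec![Pair { release_id: None, chunk_id: None }];
        let first = inject_pairs(pairs, Some("rel-1".into()));
        // Re-running with the same release changes nothing...
        let second = inject_pairs(first.clone(), Some("rel-1".into()));
        assert_eq!(first, second);
        // ...but a new release forces fresh chunk IDs.
        let third = inject_pairs(second, Some("rel-2".into()));
        assert_ne!(first[0].chunk_id, third[0].chunk_id);
    }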
cli/src/sourcemaps/source_pair.rs

@@ -43,7 +43,7 @@ impl SourcePair {
     pub fn has_chunk_id(&self) -> bool {
         // Minified chunks are the source of truth for their IDs, not sourcemaps,
         // because sometimes sourcemaps are shared across multiple chunks.
-        self.source.get_chunk_id().is_some()
+        self.get_chunk_id().is_some()
     }
 
     pub fn get_chunk_id(&self) -> Option<String> {
@@ -51,10 +51,30 @@ impl SourcePair {
     }
 
     pub fn has_release_id(&self) -> bool {
-        self.sourcemap.get_release_id().is_some()
+        self.get_release_id().is_some()
     }
 
-    pub fn set_chunk_id(&mut self, chunk_id: String) -> Result<()> {
+    pub fn remove_chunk_id(&mut self, chunk_id: String) -> Result<()> {
+        if self.get_chunk_id().as_ref() != Some(&chunk_id) {
+            return Err(anyhow!("Chunk ID mismatch"));
+        }
+        let adjustment = self.source.remove_chunk_id(chunk_id)?;
+        self.sourcemap.apply_adjustment(adjustment)?;
+        self.sourcemap.set_chunk_id(None);
+        Ok(())
+    }
+
+    pub fn update_chunk_id(
+        &mut self,
+        previous_chunk_id: String,
+        new_chunk_id: String,
+    ) -> Result<()> {
+        self.remove_chunk_id(previous_chunk_id)?;
+        self.add_chunk_id(new_chunk_id)?;
+        Ok(())
+    }
+
+    pub fn add_chunk_id(&mut self, chunk_id: String) -> Result<()> {
         if self.has_chunk_id() {
             return Err(anyhow!("Chunk ID already set"));
         }
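
The chunk-ID methods in this hunk form a small state machine: add_chunk_id requires that no ID is set, remove_chunk_id requires the exact current ID, and update_chunk_id is remove followed by add. A self-contained sketch of those invariants (plain String errors stand in for anyhow::Error, and the sourcemap adjustment is not modeled):

    // Stand-in model of SourcePair's chunk-ID state machine.
    #[derive(Default)]
    struct Pair {
        chunk_id: Option<String>,
    }

    impl Pair {
        fn add_chunk_id(&mut self, chunk_id: String) -> Result<(), String> {
            if self.chunk_id.is_some() {
                return Err("Chunk ID already set".into());
            }
            self.chunk_id = Some(chunk_id);
            Ok(())
        }

        fn remove_chunk_id(&mut self, chunk_id: String) -> Result<(), String> {
            if self.chunk_id.as_ref() != Some(&chunk_id) {
                return Err("Chunk ID mismatch".into());
            }
            self.chunk_id = None;
            Ok(())
        }

        // update = remove(previous) then add(new), mirroring the diff above.
        fn update_chunk_id(&mut self, previous: String, new: String) -> Result<(), String> {
            self.remove_chunk_id(previous)?;
            self.add_chunk_id(new)
        }
    }

    fn main() {
        let mut pair = Pair::default();
        pair.add_chunk_id("a".into()).unwrap();
        assert!(pair.add_chunk_id("b".into()).is_err()); // already set
        pair.update_chunk_id("a".into(), "b".into()).unwrap();
        assert_eq!(pair.chunk_id.as_deref(), Some("b"));
        assert!(pair.remove_chunk_id("a".into()).is_err()); // "a" is stale now
    }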
@@ -65,12 +85,12 @@ impl SourcePair {
         // have a chunk ID set (since otherwise, it's already been adjusted)
         if self.sourcemap.get_chunk_id().is_none() {
             self.sourcemap.apply_adjustment(adjustment)?;
-            self.sourcemap.set_chunk_id(chunk_id);
+            self.sourcemap.set_chunk_id(Some(chunk_id));
         }
         Ok(())
     }
 
-    pub fn set_release_id(&mut self, release_id: String) {
+    pub fn set_release_id(&mut self, release_id: Option<String>) {
         self.sourcemap.set_release_id(release_id);
     }
 
@@ -79,6 +99,10 @@ impl SourcePair {
         self.sourcemap.save()?;
         Ok(())
     }
+
+    pub(crate) fn get_release_id(&self) -> Option<String> {
+        self.sourcemap.get_release_id()
+    }
 }
 
 pub fn read_pairs(
@@ -209,12 +233,12 @@ impl SourceMapFile {
         Ok(())
     }
 
-    pub fn set_chunk_id(&mut self, chunk_id: String) {
-        self.inner.content.chunk_id = Some(chunk_id);
+    pub fn set_chunk_id(&mut self, chunk_id: Option<String>) {
+        self.inner.content.chunk_id = chunk_id;
     }
 
-    pub fn set_release_id(&mut self, release_id: String) {
-        self.inner.content.release_id = Some(release_id);
+    pub fn set_release_id(&mut self, release_id: Option<String>) {
+        self.inner.content.release_id = release_id;
     }
 }
@@ -284,7 +308,7 @@ impl MinifiedSourceFile {
         self.inner
             .path
             .parent()
-            .map(|p| p.join(&filename))
+            .map(|p| p.join(filename))
             .unwrap_or_else(|| PathBuf::from(&filename)),
         );
     }
@@ -294,7 +318,7 @@ impl MinifiedSourceFile {
         self.inner
             .path
             .parent()
-            .map(|p| p.join(&filename))
+            .map(|p| p.join(filename))
             .unwrap_or_else(|| PathBuf::from(&filename)),
         );
     }
@@ -343,4 +367,48 @@ impl MinifiedSourceFile {
         }
         None
     }
+
+    fn remove_chunk_id(&mut self, chunk_id: String) -> Result<SourceMap> {
+        let (new_source_content, source_adjustment) = {
+            // Remove the injected chunk ID comment and code snippet from the source content
+            let source_content = &self.inner.content;
+            let mut magic_source = MagicString::new(source_content);
+
+            let chunk_comment = CHUNKID_COMMENT_PREFIX.replace(CHUNKID_PLACEHOLDER, &chunk_id);
+            if let Some(chunk_comment_start) = source_content.find(&chunk_comment) {
+                let chunk_comment_end = chunk_comment_start as i64 + chunk_comment.len() as i64;
+                magic_source
+                    .remove(chunk_comment_start as i64, chunk_comment_end)
+                    .map_err(|err| anyhow!("Failed to remove chunk comment: {err}"))?;
+            }
+
+            let code_snippet = CODE_SNIPPET_TEMPLATE.replace(CHUNKID_PLACEHOLDER, &chunk_id);
+            if let Some(code_snippet_start) = source_content.find(&code_snippet) {
+                let code_snippet_end = code_snippet_start as i64 + code_snippet.len() as i64;
+                magic_source
+                    .remove(code_snippet_start as i64, code_snippet_end)
+                    .map_err(|err| anyhow!("Failed to remove code snippet: {err}"))?;
+            }
+
+            let adjustment = magic_source
+                .generate_map(GenerateDecodedMapOptions {
+                    include_content: true,
+                    ..Default::default()
+                })
+                .map_err(|err| anyhow!("Failed to generate source map: {err}"))?;
+
+            let adjustment_sourcemap = SourceMap::from_slice(
+                adjustment
+                    .to_string()
+                    .map_err(|err| anyhow!("Failed to serialize source map: {err}"))?
+                    .as_bytes(),
+            )
+            .map_err(|err| anyhow!("Failed to parse adjustment sourcemap: {err}"))?;
+
+            (magic_source.to_string(), adjustment_sourcemap)
+        };
+
+        self.inner.content = new_source_content;
+        Ok(source_adjustment)
+    }
 }
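
remove_chunk_id locates the injected comment and code snippet by exact string search and deletes both spans through MagicString, which lets it emit a compensating adjustment sourcemap. Below is a std-only sketch of just the search-and-remove step; the template constants here are hypothetical placeholders, and no sourcemap adjustment is produced:

    // Hypothetical templates; the real CHUNKID_COMMENT_PREFIX and
    // CODE_SNIPPET_TEMPLATE constants live in the CLI and differ from these.
    const CHUNKID_PLACEHOLDER: &str = "__CHUNK_ID__";
    const CHUNKID_COMMENT_PREFIX: &str = "//# chunkId=__CHUNK_ID__";
    const CODE_SNIPPET_TEMPLATE: &str = "/* posthog chunk __CHUNK_ID__ */";

    // Delete the first occurrence of `needle` from `content`, if present.
    fn remove_span(content: &mut String, needle: &str) {
        if let Some(start) = content.find(needle) {
            content.replace_range(start..start + needle.len(), "");
        }
    }

    fn remove_chunk_id(content: &mut String, chunk_id: &str) {
        let comment = CHUNKID_COMMENT_PREFIX.replace(CHUNKID_PLACEHOLDER, chunk_id);
        let snippet = CODE_SNIPPET_TEMPLATE.replace(CHUNKID_PLACEHOLDER, chunk_id);
        remove_span(content, &comment);
        remove_span(content, &snippet);
    }

    fn main() {
        let mut source =
            String::from("/* posthog chunk 123 */console.log('hi');\n//# chunkId=123");
        remove_chunk_id(&mut source, "123");
        assert_eq!(source, "console.log('hi');\n");
    }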
cli/tests/_cases/inject/chunk.js

@@ -1,4 +1,4 @@
-!function(){try{var e="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof globalThis?globalThis:"undefined"!=typeof self?self:{},n=(new e.Error).stack;n&&(e._posthogChunkIds=e._posthogChunkIds||{},e._posthogChunkIds[n]="00000-00000-00000")}catch(e){}}();\nimport{j as r,R as o}from"./index-D_JuKIur.js";function e(){const t=new Error("Exception created");throw o.captureException(t),t}function c(){return r.jsx("button",{onClick:()=>e(),children:"Create exception"})}export{c as default};
+!function(){try{var e="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof globalThis?globalThis:"undefined"!=typeof self?self:{},n=(new e.Error).stack;n&&(e._posthogChunkIds=e._posthogChunkIds||{},e._posthogChunkIds[n]="00000-00000-00000")}catch(e){}}();import{j as r,R as o}from"./index-D_JuKIur.js";function e(){const t=new Error("Exception created");throw o.captureException(t),t}function c(){return r.jsx("button",{onClick:()=>e(),children:"Create exception"})}export{c as default};
 //# sourceMappingURL=chunk.js.map
 
 //# chunkId=00000-00000-00000

cli/tests/_cases/inject/chunk.js.map

@@ -1 +1 @@
-{"chunk_id":"00000-00000-00000","file":"error-button-BPm7RXMz.js","mappings":"mVAEA,SAASA,GAAiB,CAChB,MAAAC,EAAQ,IAAI,MAAM,mBAAmB,EAC3CC,MAAAA,EAAQ,iBAAiBD,CAAK,EACxBA,CACV,CAEA,SAAwBE,GAAc,CAClC,aAAQ,SAAO,CAAA,QAAS,IAAMH,IAAkB,SAAgB,mBAAA,CACpE","names":["throwException","error","posthog","ErrorButton"],"sources":["../../src/error-button.tsx"],"sourcesContent":["import { posthog } from 'posthog-js'\n\nfunction throwException() {\n    const error = new Error('Exception created')\n    posthog.captureException(error)\n    throw error\n}\n\nexport default function ErrorButton() {\n    return <button onClick={() => throwException()}>Create exception</button>\n}\n"],"version":3}
+{"chunk_id":"00000-00000-00000","file":"error-button-BPm7RXMz.js","mappings":"iVAEA,SAASA,GAAiB,CAChB,MAAAC,EAAQ,IAAI,MAAM,mBAAmB,EAC3CC,MAAAA,EAAQ,iBAAiBD,CAAK,EACxBA,CACV,CAEA,SAAwBE,GAAc,CAClC,aAAQ,SAAO,CAAA,QAAS,IAAMH,IAAkB,SAAgB,mBAAA,CACpE","names":["throwException","error","posthog","ErrorButton"],"sources":["../../src/error-button.tsx"],"sourcesContent":["import { posthog } from 'posthog-js'\n\nfunction throwException() {\n    const error = new Error('Exception created')\n    posthog.captureException(error)\n    throw error\n}\n\nexport default function ErrorButton() {\n    return <button onClick={() => throwException()}>Create exception</button>\n}\n"],"version":3}
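
The updated fixture drops the literal \n that previously sat between the injected snippet and the module code, and the expected sourcemap's mappings string shifts to match (mVAEA... becomes iVAEA...).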

cli/tests/_cases/reinject/index.js (new file, 2 lines)

@@ -0,0 +1,2 @@
+
+//# chunkId=0

cli/tests/_cases/reinject/index.js.map (new file, 4 lines)

@@ -0,0 +1,4 @@
+{
+    "release_id": "1234",
+    "chunk_id": "0"
+}
@@ -4,10 +4,7 @@ use std::path::PathBuf;
 use uuid::Uuid;
 
 fn make_git_dir_with_config(config_content: &str) -> PathBuf {
-    let temp_root = std::env::temp_dir().join(format!(
-        "posthog_cli_git_test_{}",
-        Uuid::now_v7()
-    ));
+    let temp_root = std::env::temp_dir().join(format!("posthog_cli_git_test_{}", Uuid::now_v7()));
     let git_dir = temp_root.join(".git");
     fs::create_dir_all(&git_dir).expect("failed to create .git directory");
     let config_path = git_dir.join("config");
@@ -86,5 +83,3 @@ fn test_get_repo_infos_ssh_without_dot_git() {
     assert_eq!(get_repo_name(&git_dir).as_deref(), Some("posthog"));
     let _ = fs::remove_dir_all(git_dir.parent().unwrap());
 }
-
-
@@ -1,4 +1,7 @@
-use posthog_cli::sourcemaps::source_pair::{read_pairs, SourceMapContent};
+use posthog_cli::sourcemaps::{
+    inject::inject_pairs,
+    source_pair::{read_pairs, SourceMapContent},
+};
 
 use std::{
     fs,
@@ -19,9 +22,9 @@ fn get_case_path(relative_path: &str) -> PathBuf {
         .expect("Failed to canonicalize path")
 }
 
-fn assert_file_eq(base_path: &Path, path: &str, actual: &str) {
+fn assert_file_eq(base_path: &Path, path: &str, actual: impl Into<String>) {
     let expected = fs::read_to_string(base_path.join(path)).expect("Failed to read expected file");
-    assert_eq!(expected, actual);
+    assert_eq!(expected, actual.into());
 }
 
 #[test]
@@ -45,11 +48,11 @@ fn test_search_with_prefix() {
 #[test]
 fn test_ignore() {
     let pairs = read_pairs(&get_case_path(""), &Vec::new(), &None).expect("Failed to read pairs");
-    assert_eq!(pairs.len(), 5);
+    assert_eq!(pairs.len(), 6);
 
     let pairs = read_pairs(&get_case_path(""), &["**/search/**".to_string()], &None)
        .expect("Failed to read pairs");
-    assert_eq!(pairs.len(), 2);
+    assert_eq!(pairs.len(), 3);
 }
 
 #[test]
@@ -60,7 +63,7 @@ fn test_pair_inject() {
     let current_pair = pairs.first_mut().expect("Failed to get first pair");
     let chunk_id = "00000-00000-00000";
     current_pair
-        .set_chunk_id(chunk_id.to_string())
+        .add_chunk_id(chunk_id.to_string())
         .expect("Failed to set chunk ID");
 
     assert_file_eq(
@@ -82,7 +85,7 @@ fn test_index_inject() {
     let current_pair = pairs.first_mut().expect("Failed to get first pair");
     let chunk_id = "00000-00000-00000";
     current_pair
-        .set_chunk_id(chunk_id.to_string())
+        .add_chunk_id(chunk_id.to_string())
         .expect("Failed to set chunk ID");
 
     let bytes = serde_json::to_string(&current_pair.sourcemap.inner.content).unwrap();
@@ -90,3 +93,61 @@ fn test_index_inject() {
     let _ = sourcemap::SourceMap::from_slice(bytes.as_bytes())
         .expect("Failed to parse as a flattened sourcemap");
 }
+
+#[test]
+fn test_pair_remove() {
+    let case_path = get_case_path("inject");
+    let mut pairs = read_pairs(&case_path, &Vec::new(), &None).expect("Failed to read pairs");
+    assert_eq!(pairs.len(), 1);
+    let current_pair = pairs.first_mut().expect("Failed to get first pair");
+    let chunk_id = "00000-00000-00000";
+    current_pair
+        .add_chunk_id(chunk_id.to_string())
+        .expect("Failed to set chunk ID");
+
+    current_pair
+        .remove_chunk_id(chunk_id.to_string())
+        .expect("Failed to remove chunk ID");
+
+    assert_file_eq(&case_path, "chunk.js", &current_pair.source.inner.content);
+
+    let expected_val: SourceMapContent =
+        serde_json::from_str(include_str!(case!("inject/chunk.js.map"))).unwrap();
+
+    assert_eq!(expected_val, current_pair.sourcemap.inner.content);
+}
+
+#[test]
+fn test_reinject_without_new_release() {
+    let case_path = get_case_path("reinject");
+    let pairs = read_pairs(&case_path, &Vec::new(), &None).expect("Failed to read pairs");
+    assert_eq!(pairs.len(), 1);
+    let injected_pairs = inject_pairs(pairs, None).expect("Failed to inject pairs");
+    let first_pair = injected_pairs.first().expect("Failed to get first pair");
+    assert_ne!(&first_pair.source.get_chunk_id().unwrap(), "0");
+    assert_eq!(
+        &first_pair.sourcemap.get_chunk_id().unwrap(),
+        &first_pair.source.get_chunk_id().unwrap()
+    );
+    assert!(&first_pair.sourcemap.get_release_id().is_none());
+}
+
+#[test]
+fn test_reinject_with_new_release() {
+    let case_path = get_case_path("reinject");
+    let pairs = read_pairs(&case_path, &Vec::new(), &None).expect("Failed to read pairs");
+    assert_eq!(pairs.len(), 1);
+    let release_id = uuid::Uuid::now_v7().to_string();
+    let injected_pairs =
+        inject_pairs(pairs, Some(release_id.clone())).expect("Failed to inject pairs");
+    let first_pair = injected_pairs.first().expect("Failed to get first pair");
+    assert_ne!(&first_pair.source.get_chunk_id().unwrap(), "0");
+    assert_eq!(
+        &first_pair.sourcemap.get_chunk_id().unwrap(),
+        &first_pair.source.get_chunk_id().unwrap()
+    );
+    assert_eq!(
+        first_pair.sourcemap.get_release_id().unwrap(),
+        release_id.clone()
+    );
+}