Bug 1611770 - Update Glean to v37.0.0. r=chutten

Differential Revision: https://phabricator.services.mozilla.com/D113982
Jan-Erik Rediger 2021-05-03 08:37:15 +00:00
parent 56031a1d67
commit 59d7f84bcd
23 changed files with 441 additions and 204 deletions

Cargo.lock generated
View File

@ -2086,9 +2086,9 @@ dependencies = [
[[package]]
name = "glean"
version = "36.0.1"
version = "37.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "189c370f4d17144dc0af5b9640de2ac1016b603e583d3eb1050a142dba3df9dd"
checksum = "4cacd10ebcc8d4ff7b60dc864e7d1a214df4d10ca4d3d9df1d983d7262a93914"
dependencies = [
"chrono",
"crossbeam-channel 0.5.0",
@ -2106,9 +2106,9 @@ dependencies = [
[[package]]
name = "glean-core"
version = "36.0.1"
version = "37.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66d4e6fd10f9824bb8f46587536bcc20d858087513d7f913d4d9db2c18c2fe2f"
checksum = "6210924f86da7bb55dcf61baa6d1a66c3e37506b65443f05d9f386c5d716ae17"
dependencies = [
"bincode",
"chrono",
@ -2119,6 +2119,7 @@ dependencies = [
"rkv",
"serde",
"serde_json",
"time",
"uuid",
"zeitstempel",
]

File diff suppressed because one or more lines are too long

View File

@ -31,11 +31,10 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "bincode"
version = "1.3.2"
version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d175dfa69e619905c4c3cdb7c3c203fa3bdd5d51184e3afdb2742c0280493772"
checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
dependencies = [
"byteorder",
"serde",
]
@ -47,9 +46,9 @@ checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
[[package]]
name = "byteorder"
version = "1.3.4"
version = "1.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
[[package]]
name = "cc"
@ -153,7 +152,7 @@ dependencies = [
[[package]]
name = "glean-core"
version = "36.0.1"
version = "37.0.0"
dependencies = [
"bincode",
"chrono",
@ -168,6 +167,7 @@ dependencies = [
"serde",
"serde_json",
"tempfile",
"time",
"uuid",
"zeitstempel",
]
@ -198,9 +198,9 @@ checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005"
[[package]]
name = "idna"
version = "0.2.2"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89829a5d69c23d348314a7ac337fe39173b61149a9864deabd260983aed48c21"
checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8"
dependencies = [
"matches",
"unicode-bidi",
@ -230,9 +230,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.93"
version = "0.2.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9385f66bf6105b241aa65a61cb923ef20efc665cb9f9bb50ac2f0c4b7f378d41"
checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e"
[[package]]
name = "lmdb-rkv"
@ -441,9 +441,9 @@ dependencies = [
[[package]]
name = "redox_syscall"
version = "0.2.5"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9"
checksum = "85dd92e586f7355c633911e11f77f3d12f04b1b1bd76a198bd34ae3af8341ef2"
dependencies = [
"bitflags",
]
@ -519,9 +519,9 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.69"
version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48fe99c6bd8b1cc636890bcc071842de909d902c81ac7dab53ba33c421ab8ffb"
checksum = "ad184cc9470f9117b2ac6817bfe297307418819ba40552f9b3846f05c33d5373"
dependencies = [
"proc-macro2",
"quote",
@ -616,9 +616,9 @@ dependencies = [
[[package]]
name = "unicode-xid"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
[[package]]
name = "url"

View File

@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "glean-core"
version = "36.0.1"
version = "37.0.0"
authors = ["Jan-Erik Rediger <jrediger@mozilla.com>", "The Glean Team <glean-team@mozilla.com>"]
include = ["/README.md", "/LICENSE", "/src", "/examples", "/tests", "/Cargo.toml"]
description = "A modern Telemetry library"
@ -22,7 +22,7 @@ keywords = ["telemetry"]
license = "MPL-2.0"
repository = "https://github.com/mozilla/glean"
[package.metadata.glean]
glean-parser = "2.5.0"
glean-parser = "3.2.0"
[dependencies.bincode]
version = "1.2.1"
@ -75,6 +75,8 @@ version = "3.1.0"
[features]
rkv-safe-mode = []
[target."cfg(target_os = \"windows\")".dependencies.time]
version = "0.1.40"
[badges.circle-ci]
branch = "main"
repository = "mozilla/glean"

View File

@ -1,4 +1,5 @@
use std::env;
use std::path::PathBuf;
use glean_core::metrics::*;
use glean_core::ping::PingMaker;
@ -11,10 +12,10 @@ fn main() {
let mut args = env::args().skip(1);
let data_path = if let Some(path) = args.next() {
path
PathBuf::from(path)
} else {
let root = Builder::new().prefix("simple-db").tempdir().unwrap();
root.path().display().to_string()
root.path().into()
};
let cfg = glean_core::Configuration {

View File

@ -234,8 +234,23 @@ impl Database {
///
/// It also loads any Lifetime::Ping data that might be
/// persisted, in case `delay_ping_lifetime_io` is set.
pub fn new(data_path: &str, delay_ping_lifetime_io: bool) -> Result<Self> {
let path = Path::new(data_path).join("db");
pub fn new(data_path: &Path, delay_ping_lifetime_io: bool) -> Result<Self> {
#[cfg(all(windows, not(feature = "rkv-safe-mode")))]
{
// The underlying lmdb wrapper implementation
// cannot actually handle non-UTF8 paths on Windows.
// It will unconditionally panic if passed one.
// See
// https://github.com/mozilla/lmdb-rs/blob/df1c2f56e3088f097c719c57b9925ab51e26f3f4/src/environment.rs#L43-L53
//
// To avoid this, in case we're using LMDB on Windows (which is only used for testing now),
// we simply error out earlier.
if data_path.to_str().is_none() {
return Err(crate::Error::utf8_error());
}
}
let path = data_path.join("db");
log::debug!("Database path: {:?}", path.display());
let file_size = database_size(&path);
@ -783,19 +798,103 @@ mod test {
use crate::tests::new_glean;
use crate::CommonMetricData;
use std::collections::HashMap;
use std::path::Path;
use tempfile::tempdir;
#[test]
fn test_panicks_if_fails_dir_creation() {
assert!(Database::new("/!#\"'@#°ç", false).is_err());
let path = Path::new("/!#\"'@#°ç");
assert!(Database::new(path, false).is_err());
}
#[test]
#[cfg(windows)]
fn windows_invalid_utf16_panicfree() {
use std::ffi::OsString;
use std::os::windows::prelude::*;
// Here the values 0x0066 and 0x006f correspond to 'f' and 'o'
// respectively. The value 0xD800 is a lone surrogate half, invalid
// in a UTF-16 sequence.
let source = [0x0066, 0x006f, 0xD800, 0x006f];
let os_string = OsString::from_wide(&source[..]);
let os_str = os_string.as_os_str();
let dir = tempdir().unwrap();
let path = dir.path().join(os_str);
let res = Database::new(&path, false);
#[cfg(feature = "rkv-safe-mode")]
{
assert!(
res.is_ok(),
"Database should succeed at {}: {:?}",
path.display(),
res
);
}
#[cfg(not(feature = "rkv-safe-mode"))]
{
assert!(
res.is_err(),
"Database should fail at {}: {:?}",
path.display(),
res
);
}
}
#[test]
#[cfg(target_os = "linux")]
fn linux_invalid_utf8_panicfree() {
use std::ffi::OsStr;
use std::os::unix::ffi::OsStrExt;
// Here, the values 0x66 and 0x6f correspond to 'f' and 'o'
// respectively. The value 0x80 is a lone continuation byte, invalid
// in a UTF-8 sequence.
let source = [0x66, 0x6f, 0x80, 0x6f];
let os_str = OsStr::from_bytes(&source[..]);
let dir = tempdir().unwrap();
let path = dir.path().join(os_str);
let res = Database::new(&path, false);
assert!(
res.is_ok(),
"Database should not fail at {}: {:?}",
path.display(),
res
);
}
#[test]
#[cfg(target_os = "macos")]
fn macos_invalid_utf8_panicfree() {
use std::ffi::OsStr;
use std::os::unix::ffi::OsStrExt;
// Here, the values 0x66 and 0x6f correspond to 'f' and 'o'
// respectively. The value 0x80 is a lone continuation byte, invalid
// in a UTF-8 sequence.
let source = [0x66, 0x6f, 0x80, 0x6f];
let os_str = OsStr::from_bytes(&source[..]);
let dir = tempdir().unwrap();
let path = dir.path().join(os_str);
let res = Database::new(&path, false);
assert!(
res.is_err(),
"Database should not fail at {}: {:?}",
path.display(),
res
);
}
#[test]
fn test_data_dir_rkv_inits() {
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
Database::new(&str_dir, false).unwrap();
Database::new(dir.path(), false).unwrap();
assert!(dir.path().exists());
}
@ -804,8 +903,7 @@ mod test {
fn test_ping_lifetime_metric_recorded() {
// Init the database in a temporary directory.
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
let db = Database::new(&str_dir, false).unwrap();
let db = Database::new(dir.path(), false).unwrap();
assert!(db.ping_lifetime_data.is_none());
@ -841,8 +939,7 @@ mod test {
fn test_application_lifetime_metric_recorded() {
// Init the database in a temporary directory.
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
let db = Database::new(&str_dir, false).unwrap();
let db = Database::new(dir.path(), false).unwrap();
// Attempt to record a known value.
let test_value = "test-value";
@ -879,8 +976,7 @@ mod test {
fn test_user_lifetime_metric_recorded() {
// Init the database in a temporary directory.
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
let db = Database::new(&str_dir, false).unwrap();
let db = Database::new(dir.path(), false).unwrap();
// Attempt to record a known value.
let test_value = "test-value";
@ -914,8 +1010,7 @@ mod test {
fn test_clear_ping_storage() {
// Init the database in a temporary directory.
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
let db = Database::new(&str_dir, false).unwrap();
let db = Database::new(dir.path(), false).unwrap();
// Attempt to record a known value for every single lifetime.
let test_storage = "test-storage";
@ -990,8 +1085,7 @@ mod test {
fn test_remove_single_metric() {
// Init the database in a temporary directory.
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
let db = Database::new(&str_dir, false).unwrap();
let db = Database::new(dir.path(), false).unwrap();
let test_storage = "test-storage-single-lifetime";
let metric_id_pattern = "telemetry_test.single_metric";
@ -1047,8 +1141,7 @@ mod test {
fn test_delayed_ping_lifetime_persistence() {
// Init the database in a temporary directory.
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
let db = Database::new(&str_dir, true).unwrap();
let db = Database::new(dir.path(), true).unwrap();
let test_storage = "test-storage";
assert!(db.ping_lifetime_data.is_some());
@ -1158,14 +1251,13 @@ mod test {
fn test_load_ping_lifetime_data_from_memory() {
// Init the database in a temporary directory.
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
let test_storage = "test-storage";
let test_value = "test-value";
let test_metric_id = "telemetry_test.test_name";
{
let db = Database::new(&str_dir, true).unwrap();
let db = Database::new(dir.path(), true).unwrap();
// Attempt to record a known value.
db.record_per_lifetime(
@ -1204,7 +1296,7 @@ mod test {
// Now create a new instance of the db and check if data was
// correctly loaded from rkv to memory.
{
let db = Database::new(&str_dir, true).unwrap();
let db = Database::new(dir.path(), true).unwrap();
// Verify that test_value is in memory.
let data = match &db.ping_lifetime_data {
@ -1234,7 +1326,6 @@ mod test {
let (mut glean, dir) = new_glean(None);
// Init the database in a temporary directory.
let str_dir = dir.path().display().to_string();
let test_storage = "test-storage";
let test_data = CommonMetricData::new("category", "name", test_storage);
@ -1242,7 +1333,7 @@ mod test {
// Attempt to record metric with the record and record_with functions,
// this should work since upload is enabled.
let db = Database::new(&str_dir, true).unwrap();
let db = Database::new(dir.path(), true).unwrap();
db.record(&glean, &test_data, &Metric::String("record".to_owned()));
db.iter_store_from(
Lifetime::Ping,
@ -1325,7 +1416,6 @@ mod test {
#[test]
fn empty_data_file() {
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
// Create database directory structure.
let database_dir = dir.path().join("db");
@ -1336,7 +1426,7 @@ mod test {
let f = fs::File::create(datamdb).expect("create database file");
drop(f);
Database::new(&str_dir, false).unwrap();
Database::new(dir.path(), false).unwrap();
assert!(dir.path().exists());
}
@ -1351,7 +1441,6 @@ mod test {
#[test]
fn empty_data_file() {
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
// Create database directory structure.
let database_dir = dir.path().join("db");
@ -1362,7 +1451,7 @@ mod test {
let f = File::create(safebin).expect("create database file");
drop(f);
Database::new(&str_dir, false).unwrap();
Database::new(dir.path(), false).unwrap();
assert!(dir.path().exists());
}
@ -1370,7 +1459,6 @@ mod test {
#[test]
fn corrupted_data_file() {
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
// Create database directory structure.
let database_dir = dir.path().join("db");
@ -1380,7 +1468,7 @@ mod test {
let safebin = database_dir.join("data.safe.bin");
fs::write(safebin, "<broken>").expect("write to database file");
Database::new(&str_dir, false).unwrap();
Database::new(dir.path(), false).unwrap();
assert!(dir.path().exists());
}
@ -1388,7 +1476,6 @@ mod test {
#[test]
fn migration_works_on_startup() {
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
let database_dir = dir.path().join("db");
let datamdb = database_dir.join("data.mdb");
@ -1426,7 +1513,7 @@ mod test {
// First open should migrate the data.
{
let db = Database::new(&str_dir, false).unwrap();
let db = Database::new(dir.path(), false).unwrap();
let safebin = database_dir.join("data.safe.bin");
assert!(safebin.exists(), "safe-mode file should exist");
assert!(!datamdb.exists(), "LMDB data should be deleted");
@ -1446,7 +1533,7 @@ mod test {
// Next open should not re-create the LMDB files.
{
let db = Database::new(&str_dir, false).unwrap();
let db = Database::new(dir.path(), false).unwrap();
let safebin = database_dir.join("data.safe.bin");
assert!(safebin.exists(), "safe-mode file exists");
assert!(!datamdb.exists(), "LMDB data should not be recreated");
@ -1468,7 +1555,6 @@ mod test {
#[test]
fn migration_doesnt_overwrite() {
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
let database_dir = dir.path().join("db");
let datamdb = database_dir.join("data.mdb");
@ -1526,7 +1612,7 @@ mod test {
// First open should try migration and ignore it, because destination is not empty.
// It also deletes the leftover LMDB database.
{
let db = Database::new(&str_dir, false).unwrap();
let db = Database::new(dir.path(), false).unwrap();
let safebin = database_dir.join("data.safe.bin");
assert!(safebin.exists(), "safe-mode file should exist");
assert!(!datamdb.exists(), "LMDB data should be deleted");
@ -1548,7 +1634,6 @@ mod test {
#[test]
fn migration_ignores_broken_database() {
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
let database_dir = dir.path().join("db");
let datamdb = database_dir.join("data.mdb");
@ -1592,7 +1677,7 @@ mod test {
// First open should try migration and ignore it, because destination is not empty.
// It also deletes the leftover LMDB database.
{
let db = Database::new(&str_dir, false).unwrap();
let db = Database::new(dir.path(), false).unwrap();
let safebin = database_dir.join("data.safe.bin");
assert!(safebin.exists(), "safe-mode file should exist");
assert!(!datamdb.exists(), "LMDB data should be deleted");
@ -1614,7 +1699,6 @@ mod test {
#[test]
fn migration_ignores_empty_database() {
let dir = tempdir().unwrap();
let str_dir = dir.path().display().to_string();
let database_dir = dir.path().join("db");
let datamdb = database_dir.join("data.mdb");
@ -1638,7 +1722,7 @@ mod test {
// safe-mode does not write an empty database to disk.
// It also deletes the leftover LMDB database.
{
let _db = Database::new(&str_dir, false).unwrap();
let _db = Database::new(dir.path(), false).unwrap();
let safebin = database_dir.join("data.safe.bin");
assert!(!safebin.exists(), "safe-mode file should exist");
assert!(!datamdb.exists(), "LMDB data should be deleted");

View File

@ -86,8 +86,8 @@ impl EventDatabase {
///
* `data_path` - The directory to store events in. A new directory
  `events` will be created inside of this directory.
pub fn new(data_path: &str) -> Result<Self> {
let path = Path::new(data_path).join("events");
pub fn new(data_path: &Path) -> Result<Self> {
let path = data_path.join("events");
create_dir_all(&path)?;
Ok(Self {
@ -227,7 +227,7 @@ impl EventDatabase {
for store_name in stores_to_submit {
if let Err(err) = glean.submit_ping_by_name(store_name, Some("max_capacity")) {
log::warn!(
"Got more than {} events, but could not send {} ping: {}",
"Got more than {} events, but could not persist {} ping: {}",
glean.get_max_events(),
store_name,
err
@ -380,7 +380,7 @@ mod test {
let t = tempfile::tempdir().unwrap();
{
let db = EventDatabase::new(&t.path().display().to_string()).unwrap();
let db = EventDatabase::new(&t.path()).unwrap();
db.write_event_to_disk("events", "{\"timestamp\": 500");
db.write_event_to_disk("events", "{\"timestamp\"");
db.write_event_to_disk(
@ -390,7 +390,7 @@ mod test {
}
{
let db = EventDatabase::new(&t.path().display().to_string()).unwrap();
let db = EventDatabase::new(&t.path()).unwrap();
db.load_events_from_disk().unwrap();
let events = &db.event_stores.read().unwrap()["events"];
assert_eq!(1, events.len());
@ -472,7 +472,7 @@ mod test {
#[test]
fn doesnt_record_when_upload_is_disabled() {
let (mut glean, dir) = new_glean(None);
let db = EventDatabase::new(dir.path().to_str().unwrap()).unwrap();
let db = EventDatabase::new(dir.path()).unwrap();
let test_storage = "test-storage";
let test_category = "category";

View File

@ -10,14 +10,24 @@ pub struct CoreMetrics {
pub first_run_date: DatetimeMetric,
pub first_run_hour: DatetimeMetric,
pub os: StringMetric,
}
#[derive(Debug)]
pub struct AdditionalMetrics {
/// The number of times we encountered an IO error
/// when writing a pending ping to disk.
///
/// **Note**: Not a _core_ metric, but an error metric,
placed here for lack of a more suitable place in the Glean struct.
pub io_errors: CounterMetric,
/// A count of the pings submitted, by ping type.
pub pings_submitted: LabeledMetric<CounterMetric>,
/// The number of times we encountered an invalid timezone offset
/// (outside of [-24, +24] hours).
///
/// **Note**: This metric has an expiration date set.
However, because it's statically defined here, we can't specify that.
/// Needs to be removed after 2021-06-30.
pub invalid_timezone_offset: CounterMetric,
}
impl CoreMetrics {
@ -64,7 +74,13 @@ impl CoreMetrics {
disabled: false,
dynamic_label: None,
}),
}
}
}
impl AdditionalMetrics {
pub fn new() -> AdditionalMetrics {
AdditionalMetrics {
io_errors: CounterMetric::new(CommonMetricData {
name: "io".into(),
category: "glean.error".into(),
@ -85,6 +101,15 @@ impl CoreMetrics {
}),
None,
),
invalid_timezone_offset: CounterMetric::new(CommonMetricData {
name: "invalid_timezone_offset".into(),
category: "glean.time".into(),
send_in_pings: vec!["metrics".into()],
lifetime: Lifetime::Ping,
disabled: false,
dynamic_label: None,
}),
}
}
}

View File

@ -53,7 +53,12 @@ impl InternalPings {
"max_capacity".to_string(),
],
),
deletion_request: PingType::new("deletion-request", true, true, vec![]),
deletion_request: PingType::new(
"deletion-request",
true,
true,
vec!["at_init".to_string(), "set_upload_enabled".to_string()],
),
}
}
}

View File

@ -51,7 +51,7 @@ pub use crate::error::{Error, ErrorKind, Result};
pub use crate::error_recording::{test_get_num_recorded_errors, ErrorType};
use crate::event_database::EventDatabase;
pub use crate::histogram::HistogramType;
use crate::internal_metrics::{CoreMetrics, DatabaseMetrics};
use crate::internal_metrics::{AdditionalMetrics, CoreMetrics, DatabaseMetrics};
use crate::internal_pings::InternalPings;
use crate::metrics::{Metric, MetricType, PingType};
use crate::ping::PingMaker;
@ -120,7 +120,7 @@ pub struct Configuration {
/// Whether upload should be enabled.
pub upload_enabled: bool,
/// Path to a directory to store all data in.
pub data_path: String,
pub data_path: PathBuf,
/// The application ID (will be sanitized during initialization).
pub application_id: String,
/// The name of the programming language used by the binding creating this instance of Glean.
@ -174,6 +174,7 @@ pub struct Glean {
data_store: Option<Database>,
event_data_store: EventDatabase,
core_metrics: CoreMetrics,
additional_metrics: AdditionalMetrics,
database_metrics: DatabaseMetrics,
internal_pings: InternalPings,
data_path: PathBuf,
@ -213,24 +214,35 @@ impl Glean {
let _scanning_thread = upload_manager.scan_pending_pings_directories();
}
Ok(Self {
let (start_time, start_time_is_corrected) = local_now_with_offset();
let this = Self {
upload_enabled: cfg.upload_enabled,
// In the subprocess, we want to avoid accessing the database entirely.
// The easiest way to ensure that is to just not initialize it.
data_store: None,
event_data_store,
core_metrics: CoreMetrics::new(),
additional_metrics: AdditionalMetrics::new(),
database_metrics: DatabaseMetrics::new(),
internal_pings: InternalPings::new(),
upload_manager,
data_path: PathBuf::from(&cfg.data_path),
application_id,
ping_registry: HashMap::new(),
start_time: local_now_with_offset(),
start_time,
max_events: cfg.max_events.unwrap_or(DEFAULT_MAX_EVENTS),
is_first_run: false,
debug: DebugOptions::new(),
})
};
// Can't use `local_now_with_offset_and_record` above, because we need a valid `Glean` first.
if start_time_is_corrected {
this.additional_metrics
.invalid_timezone_offset
.add(&this, 1);
}
Ok(this)
}
/// Creates and initializes a new Glean object.
@ -270,7 +282,7 @@ impl Glean {
// Temporarily enable uploading so we can submit a
// deletion request ping.
glean.upload_enabled = true;
glean.on_upload_disabled();
glean.on_upload_disabled(true);
}
}
}
@ -400,7 +412,7 @@ impl Glean {
if flag {
self.on_upload_enabled();
} else {
self.on_upload_disabled();
self.on_upload_disabled(false);
}
true
} else {
@ -434,10 +446,15 @@ impl Glean {
/// A deletion_request ping is sent, all pending metrics, events and queued
/// pings are cleared, and the client_id is set to KNOWN_CLIENT_ID.
/// Afterward, the upload_enabled flag is set to false.
fn on_upload_disabled(&mut self) {
fn on_upload_disabled(&mut self, during_init: bool) {
// The upload_enabled flag should be true here, or the deletion ping
// won't be submitted.
if let Err(err) = self.internal_pings.deletion_request.submit(self, None) {
let reason = if during_init {
Some("at_init")
} else {
Some("set_upload_enabled")
};
if let Err(err) = self.internal_pings.deletion_request.submit(self, reason) {
log::error!("Failed to submit deletion-request ping on optout: {}", err);
}
self.clear_metrics();
@ -609,10 +626,6 @@ impl Glean {
/// # Returns
///
/// Whether the ping was successfully assembled and queued.
///
/// # Errors
///
/// If collecting or writing the ping to disk failed.
pub fn submit_ping(&self, ping: &PingType, reason: Option<&str>) -> Result<bool> {
if !self.is_upload_enabled() {
log::info!("Glean disabled: not submitting any pings.");
@ -622,7 +635,7 @@ impl Glean {
let ping_maker = PingMaker::new();
let doc_id = Uuid::new_v4().to_string();
let url_path = self.make_path(&ping.name, &doc_id);
match ping_maker.collect(self, &ping, reason) {
match ping_maker.collect(self, &ping, reason, &doc_id, &url_path) {
None => {
log::info!(
"No content for ping '{}', therefore no ping queued.",
@ -630,27 +643,29 @@ impl Glean {
);
Ok(false)
}
Some(content) => {
Some(ping) => {
// This metric is recorded *after* the ping is collected (since
// that is the only way to know *if* it will be submitted). The
// implication of this is that the count for a metrics ping will
// be included in the *next* metrics ping.
self.core_metrics
self.additional_metrics
.pings_submitted
.get(&ping.name)
.add(&self, 1);
if let Err(e) = ping_maker.store_ping(
self,
&doc_id,
&ping.name,
&self.get_data_path(),
&url_path,
&content,
) {
log::warn!("IO error while writing ping to file: {}", e);
self.core_metrics.io_errors.add(self, 1);
return Err(e.into());
if let Err(e) = ping_maker.store_ping(&self.get_data_path(), &ping) {
log::warn!("IO error while writing ping to file: {}. Enqueuing upload of what we have in memory.", e);
self.additional_metrics.io_errors.add(self, 1);
let content = ::serde_json::to_string(&ping.content)?;
self.upload_manager.enqueue_ping(
self,
ping.doc_id,
ping.url_path,
&content,
Some(ping.headers),
);
// Not actually 100% 'Ok'. bug 1704606
return Ok(true);
}
self.upload_manager.enqueue_ping_from_file(self, &doc_id);

View File

@ -401,7 +401,7 @@ fn correct_order() {
Counter(0),
CustomDistributionExponential(Histogram::exponential(1, 500, 10)),
CustomDistributionLinear(Histogram::linear(1, 500, 10)),
Datetime(local_now_with_offset(), TimeUnit::Second),
Datetime(local_now_with_offset().0, TimeUnit::Second),
Experiment(RecordedExperimentData { branch: "branch".into(), extra: None, }),
Quantity(0),
String("glean".into()),
@ -893,9 +893,11 @@ fn records_io_errors() {
// Writing the ping file should fail.
let submitted = glean.internal_pings.metrics.submit(&glean, None);
assert!(submitted.is_err());
// But the return value is still Ok(true) because we enqueue the ping anyway.
assert!(submitted.is_ok());
assert!(submitted.unwrap());
let metric = &glean.core_metrics.io_errors;
let metric = &glean.additional_metrics.io_errors;
assert_eq!(
1,
metric.test_get_value(&glean, "metrics").unwrap(),
@ -929,3 +931,20 @@ fn test_activity_api() {
// Check that we set everything we needed for the 'inactive' status.
assert!(!glean.is_dirty_flag_set());
}
/// We explicitly test that NO invalid timezone offset was recorded.
/// If it _does_ happen and fails on a developer machine or CI, we'd better know about it.
#[test]
fn handles_local_now_gracefully() {
let _ = env_logger::builder().is_test(true).try_init();
let dir = tempfile::tempdir().unwrap();
let (glean, _) = new_glean(Some(dir));
let metric = &glean.additional_metrics.invalid_timezone_offset;
assert_eq!(
None,
metric.test_get_value(&glean, "metrics"),
"Timezones should be valid"
);
}

View File

@ -9,7 +9,7 @@ use crate::metrics::time_unit::TimeUnit;
use crate::metrics::Metric;
use crate::metrics::MetricType;
use crate::storage::StorageManager;
use crate::util::{get_iso_time_string, local_now_with_offset};
use crate::util::{get_iso_time_string, local_now_with_offset_and_record};
use crate::CommonMetricData;
use crate::Glean;
@ -117,7 +117,7 @@ impl DatetimeMetric {
return;
}
let value = value.unwrap_or_else(local_now_with_offset);
let value = value.unwrap_or_else(|| local_now_with_offset_and_record(&glean));
let value = Metric::Datetime(value, self.time_unit);
glean.storage().record(glean, &self.meta, &value)
}

View File

@ -14,11 +14,26 @@ use serde_json::{json, Value as JsonValue};
use crate::common_metric_data::{CommonMetricData, Lifetime};
use crate::metrics::{CounterMetric, DatetimeMetric, Metric, MetricType, PingType, TimeUnit};
use crate::storage::StorageManager;
use crate::util::{get_iso_time_string, local_now_with_offset};
use crate::upload::HeaderMap;
use crate::util::{get_iso_time_string, local_now_with_offset_and_record};
use crate::{
Glean, Result, DELETION_REQUEST_PINGS_DIRECTORY, INTERNAL_STORAGE, PENDING_PINGS_DIRECTORY,
};
/// Holds everything you need to store or send a ping.
pub struct Ping<'a> {
/// The unique document id.
pub doc_id: &'a str,
/// The ping's name.
pub name: &'a str,
/// The path on the server to use when uploading this ping.
pub url_path: &'a str,
/// The payload, including `*_info` fields.
pub content: JsonValue,
/// The headers to upload with the payload.
pub headers: HeaderMap,
}
/// Collect a ping's data, assemble it into its full payload and store it on disk.
pub struct PingMaker;
@ -97,7 +112,7 @@ impl PingMaker {
let start_time_data = start_time
.get_value(glean, INTERNAL_STORAGE)
.unwrap_or_else(|| glean.start_time());
let end_time_data = local_now_with_offset();
let end_time_data = local_now_with_offset_and_record(&glean);
// Update the start time with the current time.
start_time.set(glean, Some(end_time_data));
@ -160,58 +175,31 @@ impl PingMaker {
json!(map)
}
/// Build the metadata JSON to be persisted with a ping.
/// Build the headers to be persisted and sent with a ping.
///
/// Currently the only type of metadata we need to persist is the value of the `X-Debug-ID` header.
/// Currently the only headers we persist are `X-Debug-ID` and `X-Source-Tags`.
///
/// # Arguments
///
/// * `glean` - the [`Glean`] instance to collect metadata from.
/// * `glean` - the [`Glean`] instance to collect headers from.
///
/// # Returns
///
/// A JSON object representing the metadata that needs to be persisted with this ping.
///
/// The structure of the metadata json is:
///
/// ```json
/// {
/// "headers": {
/// "X-Debug-ID": "test-tag"
/// }
/// }
/// A map of header names to header values.
/// Might be empty if there are no extra headers to send.
/// ```
fn get_metadata(&self, glean: &Glean) -> Option<JsonValue> {
let mut headers_map = json!({});
fn get_headers(&self, glean: &Glean) -> HeaderMap {
let mut headers_map = HeaderMap::new();
if let Some(debug_view_tag) = glean.debug_view_tag() {
headers_map
.as_object_mut()
.unwrap() // safe unwrap, we created the object above
.insert(
"X-Debug-ID".to_string(),
JsonValue::String(debug_view_tag.to_string()),
);
headers_map.insert("X-Debug-ID".to_string(), debug_view_tag.to_string());
}
if let Some(source_tags) = glean.source_tags() {
headers_map
.as_object_mut()
.unwrap() // safe unwrap, we created the object above
.insert(
"X-Source-Tags".to_string(),
JsonValue::String(source_tags.join(",")),
);
headers_map.insert("X-Source-Tags".to_string(), source_tags.join(","));
}
// safe unwrap, we created the object above
if !headers_map.as_object().unwrap().is_empty() {
Some(json!({
"headers": headers_map,
}))
} else {
None
}
headers_map
}
/// Collects a snapshot for the given ping from storage and attaches required meta information.
@ -221,17 +209,21 @@ impl PingMaker {
/// * `glean` - the [`Glean`] instance to collect data from.
/// * `ping` - the ping to collect for.
/// * `reason` - an optional reason code to include in the ping.
/// * `doc_id` - the ping's unique document identifier.
/// * `url_path` - the path on the server to upload this ping to.
///
/// # Returns
///
/// A fully assembled JSON representation of the ping payload.
/// A fully assembled representation of the ping payload and associated metadata.
/// If there is no data stored for the ping, `None` is returned.
pub fn collect(
pub fn collect<'a>(
&self,
glean: &Glean,
ping: &PingType,
ping: &'a PingType,
reason: Option<&str>,
) -> Option<JsonValue> {
doc_id: &'a str,
url_path: &'a str,
) -> Option<Ping<'a>> {
info!("Collecting {}", ping.name);
let metrics_data = StorageManager.snapshot_as_json(glean.storage(), &ping.name, true);
@ -260,7 +252,13 @@ impl PingMaker {
json_obj.insert("events".to_string(), events_data);
}
Some(json)
Some(Ping {
content: json,
name: &ping.name,
doc_id,
url_path,
headers: self.get_headers(glean),
})
}
/// Collects a snapshot for the given ping from storage and attaches required meta information.
@ -281,8 +279,8 @@ impl PingMaker {
ping: &PingType,
reason: Option<&str>,
) -> Option<String> {
self.collect(glean, ping, reason)
.map(|ping| ::serde_json::to_string_pretty(&ping).unwrap())
self.collect(glean, ping, reason, "", "")
.map(|ping| ::serde_json::to_string_pretty(&ping.content).unwrap())
}
/// Gets the path to a directory for ping storage.
@ -313,33 +311,30 @@ impl PingMaker {
}
/// Stores a ping to disk in the pings directory.
pub fn store_ping(
&self,
glean: &Glean,
doc_id: &str,
ping_name: &str,
data_path: &Path,
url_path: &str,
ping_content: &JsonValue,
) -> std::io::Result<()> {
let pings_dir = self.get_pings_dir(data_path, Some(ping_name))?;
pub fn store_ping(&self, data_path: &Path, ping: &Ping) -> std::io::Result<()> {
let pings_dir = self.get_pings_dir(data_path, Some(ping.name))?;
let temp_dir = self.get_tmp_dir(data_path)?;
// Write to a temporary location and then move when done,
// for transactional writes.
let temp_ping_path = temp_dir.join(doc_id);
let ping_path = pings_dir.join(doc_id);
let temp_ping_path = temp_dir.join(ping.doc_id);
let ping_path = pings_dir.join(ping.doc_id);
log::debug!("Storing ping '{}' at '{}'", doc_id, ping_path.display());
log::debug!(
"Storing ping '{}' at '{}'",
ping.doc_id,
ping_path.display()
);
{
let mut file = File::create(&temp_ping_path)?;
file.write_all(url_path.as_bytes())?;
file.write_all(ping.url_path.as_bytes())?;
file.write_all(b"\n")?;
file.write_all(::serde_json::to_string(ping_content)?.as_bytes())?;
if let Some(metadata) = self.get_metadata(glean) {
file.write_all(b"\n")?;
file.write_all(::serde_json::to_string(&metadata)?.as_bytes())?;
file.write_all(::serde_json::to_string(&ping.content)?.as_bytes())?;
if !ping.headers.is_empty() {
file.write_all(b"\n{\"headers\":")?;
file.write_all(::serde_json::to_string(&ping.headers)?.as_bytes())?;
file.write_all(b"}")?;
}
}

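As written above, `store_ping` now lays a pending ping file out as: the URL path on the first line, the payload JSON on the second, and, only when extra headers such as `X-Debug-ID` or `X-Source-Tags` are set, a third line of the form {"headers": {...}}. A minimal sketch of reading that layout back, assuming `serde_json` is available (not part of this patch):

use std::fs;
use std::path::Path;
use serde_json::Value;

// Sketch only: parse a pending ping file in the layout written by `store_ping`.
// Line 1: URL path, line 2: payload JSON, optional line 3: {"headers": {...}}.
fn read_ping_file(path: &Path) -> std::io::Result<(String, Value, Option<Value>)> {
    let raw = fs::read_to_string(path)?;
    let mut lines = raw.lines();
    let url_path = lines.next().unwrap_or_default().to_string();
    let content: Value = serde_json::from_str(lines.next().unwrap_or("{}"))?;
    // The headers line is only written when there are extra headers to send.
    let headers = match lines.next() {
        Some(line) => Some(serde_json::from_str(line)?),
        None => None,
    };
    Ok((url_path, content, headers))
}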
View File

@ -321,7 +321,8 @@ impl PingUploadManager {
}
}
fn enqueue_ping(
/// Enqueue a ping for upload.
pub fn enqueue_ping(
&self,
glean: &Glean,
document_id: &str,

View File

@ -47,9 +47,66 @@ pub fn get_iso_time_string(datetime: DateTime<FixedOffset>, truncate_to: TimeUni
/// Get the current date & time with a fixed-offset timezone.
///
/// This converts from the `Local` timezone into its fixed-offset equivalent.
pub(crate) fn local_now_with_offset() -> DateTime<FixedOffset> {
/// If a timezone offset outside of [-24h, +24h] is detected, it is corrected to UTC (+0).
/// The return value signals whether the timezone offset was corrected.
pub(crate) fn local_now_with_offset() -> (DateTime<FixedOffset>, bool) {
#[cfg(target_os = "windows")]
{
// `Local::now` takes the user's timezone offset
// and panics if it's not within a range of [-24, +24] hours.
// This causes crashes in a small number of clients on Windows.
//
// We can't determine the faulty clients
// or the circumstances under which this happens,
// so the best we can do is have a workaround:
//
// We try getting the time and timezone first,
// then manually check that it is a valid timezone offset.
// If it is, we proceed and use that time and offset.
// If it isn't, we fall back to UTC.
//
// This has the small downside that it will use 2 calls to get the time,
// but only on Windows.
//
// See https://bugzilla.mozilla.org/show_bug.cgi?id=1611770.
use chrono::Utc;
// Get timespec, including the user's timezone.
let tm = time::now();
// Same as chrono:
// https://docs.rs/chrono/0.4.10/src/chrono/offset/local.rs.html#37
let offset = tm.tm_utcoff;
if let None = FixedOffset::east_opt(offset) {
log::warn!(
"Detected invalid timezone offset: {}. Using UTC fallback.",
offset
);
let now: DateTime<Utc> = Utc::now();
let utc_offset = FixedOffset::east(0);
return (now.with_timezone(&utc_offset), true);
}
}
let now: DateTime<Local> = Local::now();
now.with_timezone(now.offset())
(now.with_timezone(now.offset()), false)
}
/// Get the current date & time with a fixed-offset timezone.
///
/// This converts from the `Local` timezone into its fixed-offset equivalent.
/// If a timezone offset outside of [-24h, +24h] is detected, it is corrected to UTC (+0).
/// The corresponding error counter is incremented in this case.
pub(crate) fn local_now_with_offset_and_record(glean: &Glean) -> DateTime<FixedOffset> {
let (now, is_corrected) = local_now_with_offset();
if is_corrected {
glean
.additional_metrics
.invalid_timezone_offset
.add(&glean, 1);
}
now
}
/// Truncates a string, ensuring that it doesn't end in the middle of a codepoint.
@ -240,7 +297,11 @@ mod test {
#[test]
fn local_now_gets_the_time() {
let now = Local::now();
let fixed_now = local_now_with_offset();
let (fixed_now, is_corrected) = local_now_with_offset();
// We explicitly test that NO invalid timezone offset was recorded.
// If it _does_ happen and fails on a developer machine or CI, we'd better know about it.
assert!(!is_corrected, "Timezone offset should be valid.");
// We can't compare across differing timezones, so we just compare the UTC timestamps.
// The second timestamp should be just a few nanoseconds later.

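The workaround above hinges on chrono refusing to build an offset of 24 hours or more; a minimal sketch of that check and the UTC fallback, assuming chrono 0.4 (not part of this patch):

use chrono::{DateTime, FixedOffset, Utc};

fn main() {
    // Offsets within +/- 24 hours are fine ...
    assert!(FixedOffset::east_opt(3600).is_some());
    // ... anything at or beyond 24 hours is rejected; this is the case that
    // would make `Local::now()` panic on the affected Windows clients.
    assert!(FixedOffset::east_opt(25 * 3600).is_none());

    // The fallback keeps the current instant but expresses it with a +0 offset.
    let fallback: DateTime<FixedOffset> = Utc::now().with_timezone(&FixedOffset::east(0));
    println!("fallback time: {}", fallback);
}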
View File

@ -48,10 +48,9 @@ pub fn new_glean(tempdir: Option<tempfile::TempDir>) -> (Glean, tempfile::TempDi
Some(tempdir) => tempdir,
None => tempfile::tempdir().unwrap(),
};
let tmpname = dir.path().display().to_string();
let cfg = glean_core::Configuration {
data_path: tmpname,
data_path: dir.path().into(),
application_id: GLOBAL_APPLICATION_ID.into(),
language_binding_name: "Rust".into(),
upload_enabled: true,

View File

@ -55,7 +55,15 @@ fn disabling_upload_clears_pending_pings() {
glean.set_upload_enabled(false);
assert_eq!(0, get_queued_pings(glean.get_data_path()).unwrap().len());
// Disabling upload generates a deletion ping
assert_eq!(1, get_deletion_pings(glean.get_data_path()).unwrap().len());
let dpings = get_deletion_pings(glean.get_data_path()).unwrap();
assert_eq!(1, dpings.len());
let payload = &dpings[0].1;
assert_eq!(
"set_upload_enabled",
payload["ping_info"].as_object().unwrap()["reason"]
.as_str()
.unwrap()
);
glean.set_upload_enabled(true);
assert_eq!(0, get_queued_pings(glean.get_data_path()).unwrap().len());
@ -71,7 +79,15 @@ fn deletion_request_only_when_toggled_from_on_to_off() {
// Disabling upload generates a deletion ping
glean.set_upload_enabled(false);
assert_eq!(1, get_deletion_pings(glean.get_data_path()).unwrap().len());
let dpings = get_deletion_pings(glean.get_data_path()).unwrap();
assert_eq!(1, dpings.len());
let payload = &dpings[0].1;
assert_eq!(
"set_upload_enabled",
payload["ping_info"].as_object().unwrap()["reason"]
.as_str()
.unwrap()
);
// Re-setting it to `false` should not generate an additional ping.
// As we didn't clear the pending ping, that's the only one that sticks around.

View File

@ -34,8 +34,10 @@ fn set_up_basic_ping() -> (Glean, PingMaker, PingType, tempfile::TempDir) {
fn ping_info_must_contain_a_nonempty_start_and_end_time() {
let (glean, ping_maker, ping_type, _t) = set_up_basic_ping();
let content = ping_maker.collect(&glean, &ping_type, None).unwrap();
let ping_info = content["ping_info"].as_object().unwrap();
let ping = ping_maker
.collect(&glean, &ping_type, None, "", "")
.unwrap();
let ping_info = ping.content["ping_info"].as_object().unwrap();
let start_time_str = ping_info["start_time"].as_str().unwrap();
let start_time_date = iso8601_to_chrono(&iso8601::datetime(start_time_str).unwrap());
@ -50,8 +52,10 @@ fn ping_info_must_contain_a_nonempty_start_and_end_time() {
fn get_ping_info_must_report_all_the_required_fields() {
let (glean, ping_maker, ping_type, _t) = set_up_basic_ping();
let content = ping_maker.collect(&glean, &ping_type, None).unwrap();
let ping_info = content["ping_info"].as_object().unwrap();
let ping = ping_maker
.collect(&glean, &ping_type, None, "", "")
.unwrap();
let ping_info = ping.content["ping_info"].as_object().unwrap();
assert!(ping_info.get("start_time").is_some());
assert!(ping_info.get("end_time").is_some());
@ -62,15 +66,14 @@ fn get_ping_info_must_report_all_the_required_fields() {
fn get_client_info_must_report_all_the_available_data() {
let (glean, ping_maker, ping_type, _t) = set_up_basic_ping();
let content = ping_maker.collect(&glean, &ping_type, None).unwrap();
let client_info = content["client_info"].as_object().unwrap();
let ping = ping_maker
.collect(&glean, &ping_type, None, "", "")
.unwrap();
let client_info = ping.content["client_info"].as_object().unwrap();
client_info["telemetry_sdk_build"].as_str().unwrap();
}
// SKIPPED from glean-ac: collect() must report a valid ping with the data from the engines
// This test doesn't really make sense with rkv
#[test]
fn collect_must_report_none_when_no_data_is_stored() {
// NOTE: This is a behavior change from glean-ac which returned an empty
@ -83,7 +86,7 @@ fn collect_must_report_none_when_no_data_is_stored() {
glean.register_ping_type(&ping_type);
assert!(ping_maker
.collect(&glean, &unknown_ping_type, None)
.collect(&glean, &unknown_ping_type, None, "", "")
.is_none());
}
@ -104,8 +107,10 @@ fn seq_number_must_be_sequential() {
for i in 0..=1 {
for ping_name in ["store1", "store2"].iter() {
let ping_type = PingType::new(*ping_name, true, false, vec![]);
let content = ping_maker.collect(&glean, &ping_type, None).unwrap();
let seq_num = content["ping_info"]["seq"].as_i64().unwrap();
let ping = ping_maker
.collect(&glean, &ping_type, None, "", "")
.unwrap();
let seq_num = ping.content["ping_info"]["seq"].as_i64().unwrap();
// Ensure sequence numbers in different stores are independent of
// each other
assert_eq!(i, seq_num);
@ -117,13 +122,17 @@ fn seq_number_must_be_sequential() {
let ping_type = PingType::new("store1", true, false, vec![]);
// 3rd ping of store1
let content = ping_maker.collect(&glean, &ping_type, None).unwrap();
let seq_num = content["ping_info"]["seq"].as_i64().unwrap();
let ping = ping_maker
.collect(&glean, &ping_type, None, "", "")
.unwrap();
let seq_num = ping.content["ping_info"]["seq"].as_i64().unwrap();
assert_eq!(2, seq_num);
// 4th ping of store1
let content = ping_maker.collect(&glean, &ping_type, None).unwrap();
let seq_num = content["ping_info"]["seq"].as_i64().unwrap();
let ping = ping_maker
.collect(&glean, &ping_type, None, "", "")
.unwrap();
let seq_num = ping.content["ping_info"]["seq"].as_i64().unwrap();
assert_eq!(3, seq_num);
}
@ -131,8 +140,10 @@ fn seq_number_must_be_sequential() {
let ping_type = PingType::new("store2", true, false, vec![]);
// 3rd ping of store2
let content = ping_maker.collect(&glean, &ping_type, None).unwrap();
let seq_num = content["ping_info"]["seq"].as_i64().unwrap();
let ping = ping_maker
.collect(&glean, &ping_type, None, "", "")
.unwrap();
let seq_num = ping.content["ping_info"]["seq"].as_i64().unwrap();
assert_eq!(2, seq_num);
}
@ -140,8 +151,10 @@ fn seq_number_must_be_sequential() {
let ping_type = PingType::new("store1", true, false, vec![]);
// 5th ping of store1
let content = ping_maker.collect(&glean, &ping_type, None).unwrap();
let seq_num = content["ping_info"]["seq"].as_i64().unwrap();
let ping = ping_maker
.collect(&glean, &ping_type, None, "", "")
.unwrap();
let seq_num = ping.content["ping_info"]["seq"].as_i64().unwrap();
assert_eq!(4, seq_num);
}
}

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"5823bee026cf23227db4ab4d0dcdf93826c423720885e73d3ae7dc2ab0d03d6a","LICENSE":"1f256ecad192880510e84ad60474eab7589218784b9a50bc7ceee34c2b91f1d5","README.md":"fd9e0ca6907917ea6bec5de05e15dd21d20fae1cb7f3250467bb20231a8e1065","src/common_test.rs":"585eb8b0ecc6628090da6b0e7f8488f13c428286d5533af1c96af19649ee29c8","src/configuration.rs":"a0b6fe024ee7fd1164780fc4a63ad57c2f6f381c64c2417fc51ef6a1f8898889","src/core_metrics.rs":"0ecf9ec7de9032f45e6c0feaebf17c614f9be88c1a28043e397eaf7d3b18ba37","src/dispatcher/global.rs":"427d23168ea2457f0332e6f4d6684810fe35a5d5df1e8336d070159d82d936e5","src/dispatcher/mod.rs":"9bf42571a23491db1c6643064caee463301dccac13430883ac0bdaefacf4ace3","src/glean_metrics.rs":"151b6e5acc12436c33c75e2e0a18f9769179d3c7fdc60a22fa02afb76feaf00f","src/lib.rs":"bd35db406d6e974bab5546cfd6c3646fc1d85fee151d2e42ac5d413791b3f771","src/net/http_uploader.rs":"9e8c1837ca0d3f6ea165ec936ab054173c4fe95a958710176c33b4d4d1d98beb","src/net/mod.rs":"59db2f4dcfd0a2d77feb63f40cae2252da59fa8a87e10877fcb305eb91aa0645","src/pings.rs":"2dfccd84848e1933aa4f6a7a707c58ec794c8f73ef2d93ea4d4df71d4e6abc31","src/private/boolean.rs":"eeadc0529e2c69a930479f208746799b064b27facab8306c1c10c650e83fb63c","src/private/counter.rs":"75ad96cd5f53d024230357223244c0a8f58f8b32a9a4d0dbc7cc6ecd74db13b5","src/private/custom_distribution.rs":"0de9cd030a555d93352a7fd251febf4de3a2ca4eeb7666abe5baa884d59168b8","src/private/datetime.rs":"f7e68491b267452fc7d0bb50a2e2f73337d2580435b0518e4cb375a5a30d3017","src/private/denominator.rs":"746c5dfd81fe4027061d799421e35c2cf47b14b98e18e15f2e0d21379604f3f0","src/private/event.rs":"7839ded635f979c7e3a97227530c5843783b8cb2e4487aedbdce7ae16c611e76","src/private/labeled.rs":"e7e68d6a8fd84594ff76209d59733c66e523777a98e0492ed4ddd6c18475148c","src/private/memory_distribution.rs":"201ce833900fca33f2e4bdd65d9055927627c5e97c9df001351ca40e8e11efae","src/private/mod.rs":"413a41942a48de3d39e9346c2a0803a3ce184978173f8a79b13a116be4abaffe","src/private/numerator.rs":"4133f4a1f2a20931176ecaa7e85a96a4d639ba1b3737441a5713c18909892a42","src/private/ping.rs":"915fc42994e0929656daee5511946ac1f56fe0d4d704e97e13795771d9890180","src/private/quantity.rs":"0fa3c6fb00a4c4d659284a87a4cfbfc5153a73e65ed802f27d74c1bd7fc06273","src/private/rate.rs":"bb7f1a1c9aa2413eb4c606f04aa58199a18d9d12a97fb6548d410f939b01ed09","src/private/recorded_experiment_data.rs":"66b2601902a2dc2b7a283717c21ce754de94fcca30d12e0398195c8ad49c90af","src/private/string.rs":"cab1b0a3a5368a1650dc253bcb5a4622f0d016913bf323c7d74c4130ab22f886","src/private/string_list.rs":"2f4df2aefdf9130a9913cd06dbf91747953ac79648af1c1b924053af18944bac","src/private/timespan.rs":"4094a6020269324e84d52a921c580ce156ea45e5af70a46540a64d23dfb2002e","src/private/timing_distribution.rs":"e6f45e4d57b1dde12b047f7f402787d69f0702fede88af1f0691663aeddaa161","src/private/uuid.rs":"7b76b815f08ac70522c65785f765c59d397f54ee257d47f8290029b456dce0ed","src/system.rs":"98aae0e0c9bf53f92fce4ca3d6040439f540023b63aab022c8c26381f04a4185","src/test.rs":"8e8c1620b94ed753ea849e76083a698c9ab47dba31cf330af68d2a89a6e361f9","tests/common/mod.rs":"4837df2e771929cc077e6fb9a9239645e8e0f7bc6c9f409b71c4d147edf334fc","tests/init_fails.rs":"302aae0c58b06a3bd679d3f5c49cc55f97a3af824b960985a9055a1ad497e5ef","tests/never_init.rs":"1f33b8ce7ca3514b57b48cc16d98408974c85cf8aa7d13257ffc2ad878ebb295","tests/no_time_to_init.rs":"3ae2c1abda55da68519bda1afdf6555f3190e92eedda41f2d43b914c41f97cb9","tests/overflowing_preinit.rs":"0cdb186d07658ffe528f361429dcd20b79c8b313615e4fcd4f793bab3decc566","tests
/schema.rs":"4b52dcacc4edf0313d970e65eec87de89341afb1fe6ec9fbc351d88859411871","tests/simple.rs":"d7afe9e7e2645a575eb9ba98bebd54278ff97cb5ab3aac76f52d6ab1802de60c"},"package":"189c370f4d17144dc0af5b9640de2ac1016b603e583d3eb1050a142dba3df9dd"}
{"files":{"Cargo.toml":"1716625f6696d8210de0b2eea8cd0d0c98926a35c396871f8ab3111524f7ea76","LICENSE":"1f256ecad192880510e84ad60474eab7589218784b9a50bc7ceee34c2b91f1d5","README.md":"fd9e0ca6907917ea6bec5de05e15dd21d20fae1cb7f3250467bb20231a8e1065","src/common_test.rs":"585eb8b0ecc6628090da6b0e7f8488f13c428286d5533af1c96af19649ee29c8","src/configuration.rs":"a0b6fe024ee7fd1164780fc4a63ad57c2f6f381c64c2417fc51ef6a1f8898889","src/core_metrics.rs":"0ecf9ec7de9032f45e6c0feaebf17c614f9be88c1a28043e397eaf7d3b18ba37","src/dispatcher/global.rs":"427d23168ea2457f0332e6f4d6684810fe35a5d5df1e8336d070159d82d936e5","src/dispatcher/mod.rs":"9bf42571a23491db1c6643064caee463301dccac13430883ac0bdaefacf4ace3","src/glean_metrics.rs":"151b6e5acc12436c33c75e2e0a18f9769179d3c7fdc60a22fa02afb76feaf00f","src/lib.rs":"d94f967209dcaaac4d0e939452ac86b4e955f14869d40dbb887a707d85c9e872","src/net/http_uploader.rs":"9e8c1837ca0d3f6ea165ec936ab054173c4fe95a958710176c33b4d4d1d98beb","src/net/mod.rs":"59db2f4dcfd0a2d77feb63f40cae2252da59fa8a87e10877fcb305eb91aa0645","src/pings.rs":"2dfccd84848e1933aa4f6a7a707c58ec794c8f73ef2d93ea4d4df71d4e6abc31","src/private/boolean.rs":"eeadc0529e2c69a930479f208746799b064b27facab8306c1c10c650e83fb63c","src/private/counter.rs":"75ad96cd5f53d024230357223244c0a8f58f8b32a9a4d0dbc7cc6ecd74db13b5","src/private/custom_distribution.rs":"0de9cd030a555d93352a7fd251febf4de3a2ca4eeb7666abe5baa884d59168b8","src/private/datetime.rs":"f7e68491b267452fc7d0bb50a2e2f73337d2580435b0518e4cb375a5a30d3017","src/private/denominator.rs":"746c5dfd81fe4027061d799421e35c2cf47b14b98e18e15f2e0d21379604f3f0","src/private/event.rs":"7839ded635f979c7e3a97227530c5843783b8cb2e4487aedbdce7ae16c611e76","src/private/labeled.rs":"e7e68d6a8fd84594ff76209d59733c66e523777a98e0492ed4ddd6c18475148c","src/private/memory_distribution.rs":"201ce833900fca33f2e4bdd65d9055927627c5e97c9df001351ca40e8e11efae","src/private/mod.rs":"413a41942a48de3d39e9346c2a0803a3ce184978173f8a79b13a116be4abaffe","src/private/numerator.rs":"4133f4a1f2a20931176ecaa7e85a96a4d639ba1b3737441a5713c18909892a42","src/private/ping.rs":"915fc42994e0929656daee5511946ac1f56fe0d4d704e97e13795771d9890180","src/private/quantity.rs":"0fa3c6fb00a4c4d659284a87a4cfbfc5153a73e65ed802f27d74c1bd7fc06273","src/private/rate.rs":"bb7f1a1c9aa2413eb4c606f04aa58199a18d9d12a97fb6548d410f939b01ed09","src/private/recorded_experiment_data.rs":"66b2601902a2dc2b7a283717c21ce754de94fcca30d12e0398195c8ad49c90af","src/private/string.rs":"cab1b0a3a5368a1650dc253bcb5a4622f0d016913bf323c7d74c4130ab22f886","src/private/string_list.rs":"2f4df2aefdf9130a9913cd06dbf91747953ac79648af1c1b924053af18944bac","src/private/timespan.rs":"4094a6020269324e84d52a921c580ce156ea45e5af70a46540a64d23dfb2002e","src/private/timing_distribution.rs":"e6f45e4d57b1dde12b047f7f402787d69f0702fede88af1f0691663aeddaa161","src/private/uuid.rs":"7b76b815f08ac70522c65785f765c59d397f54ee257d47f8290029b456dce0ed","src/system.rs":"98aae0e0c9bf53f92fce4ca3d6040439f540023b63aab022c8c26381f04a4185","src/test.rs":"8e8c1620b94ed753ea849e76083a698c9ab47dba31cf330af68d2a89a6e361f9","tests/common/mod.rs":"4837df2e771929cc077e6fb9a9239645e8e0f7bc6c9f409b71c4d147edf334fc","tests/init_fails.rs":"302aae0c58b06a3bd679d3f5c49cc55f97a3af824b960985a9055a1ad497e5ef","tests/never_init.rs":"1f33b8ce7ca3514b57b48cc16d98408974c85cf8aa7d13257ffc2ad878ebb295","tests/no_time_to_init.rs":"3ae2c1abda55da68519bda1afdf6555f3190e92eedda41f2d43b914c41f97cb9","tests/overflowing_preinit.rs":"0cdb186d07658ffe528f361429dcd20b79c8b313615e4fcd4f793bab3decc566","tests
/schema.rs":"4b52dcacc4edf0313d970e65eec87de89341afb1fe6ec9fbc351d88859411871","tests/simple.rs":"d7afe9e7e2645a575eb9ba98bebd54278ff97cb5ab3aac76f52d6ab1802de60c"},"package":"4cacd10ebcc8d4ff7b60dc864e7d1a214df4d10ca4d3d9df1d983d7262a93914"}

View File

@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "glean"
version = "36.0.1"
version = "37.0.0"
authors = ["Jan-Erik Rediger <jrediger@mozilla.com>", "The Glean Team <glean-team@mozilla.com>"]
include = ["/README.md", "/LICENSE", "/src", "/tests", "/Cargo.toml"]
description = "Glean SDK Rust language bindings"
@ -29,7 +29,7 @@ features = ["serde"]
version = "0.5"
[dependencies.glean-core]
version = "36.0.1"
version = "37.0.0"
[dependencies.inherent]
version = "0.1.4"

View File

@ -182,7 +182,7 @@ pub fn initialize(cfg: Configuration, client_info: ClientInfoMetrics) {
.spawn(move || {
let core_cfg = glean_core::Configuration {
upload_enabled: cfg.upload_enabled,
data_path: cfg.data_path.into_os_string().into_string().unwrap(),
data_path: cfg.data_path,
application_id: cfg.application_id.clone(),
language_binding_name: LANGUAGE_BINDING_NAME.into(),
max_events: cfg.max_events,

View File

@ -6,8 +6,8 @@ edition = "2018"
license = "MPL-2.0"
[dependencies]
glean = "36.0.1"
glean-core = { version = "36.0.0", features = ["rkv-safe-mode"] }
glean = "37.0.0"
glean-core = { version = "37.0.0", features = ["rkv-safe-mode"] }
log = "0.4"
nserror = { path = "../../../xpcom/rust/nserror" }
nsstring = { path = "../../../xpcom/rust/nsstring" }

View File

@ -8,7 +8,7 @@ publish = false
[dependencies]
bincode = "1.0"
chrono = "0.4.10"
glean = "36.0.1"
glean = "37.0.0"
inherent = "0.1.4"
log = "0.4"
nsstring = { path = "../../../../xpcom/rust/nsstring", optional = true }