Merge pull request #133 from horizonx-tech/metric
add metrics
hide-yoshi authored Nov 28, 2024
2 parents b85b465 + 8815c56 commit ac7b4a8
Showing 9 changed files with 301 additions and 19 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/check-examples.yml
@@ -5,7 +5,7 @@ on:
branches:
- main
env:
RUST_VERSION: 1.76.0
RUST_VERSION: 1.79.0
jobs:
check-compilable:
strategy:
30 changes: 13 additions & 17 deletions .github/workflows/qa.yml
@@ -5,7 +5,7 @@ on:
branches:
- main
env:
RUST_VERSION: 1.76.0
RUST_VERSION: 1.79.0
jobs:
format:
runs-on: ubuntu-latest
@@ -68,19 +68,15 @@ jobs:
with:
toolchain: nightly
components: rustfmt
- uses: actions-rs/cargo@v1
with:
toolchain: nightly
command: test
args: --all-features --no-fail-fast
env:
CARGO_INCREMENTAL: "0"
RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Coverflow-checks=off"
RUSTDOCFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Coverflow-checks=off"
- uses: actions-rs/grcov@v0.1
id: coverage
- name: Coveralls upload
uses: coverallsapp/github-action@master
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
path-to-lcov: ${{ steps.coverage.outputs.report }}
- id: test
name: Run tests
run: cargo +nightly test --all --all-features
# - id: coverage
# name: Generate Coverage Report
# uses: alekitto/grcov@v0.2
#
# - name: Coveralls upload
# uses: coverallsapp/github-action@master
# with:
# github-token: ${{ secrets.GITHUB_TOKEN }}
# path-to-lcov: ${{ steps.coverage.outputs.report }}
16 changes: 16 additions & 0 deletions chainsight-cdk-macros/src/canisters/attributes.rs
@@ -31,3 +31,19 @@ pub fn only_proxy(_attr: TokenStream, item: TokenStream) -> TokenStream {
}
.into()
}

pub fn metric(_attr: TokenStream, item: TokenStream) -> TokenStream {
let item_fn = parse_macro_input!(item as ItemFn);
let sig = item_fn.sig;
let block = item_fn.block.stmts;
quote! {
#sig {
let start = chainsight_cdk::time::TimeStamper::now_nanosec();
#(#block);*
let end = chainsight_cdk::time::TimeStamper::now_nanosec();
chainsight_cdk::metric::types::metric(stringify!(#sig).to_string(), chainsight_cdk::metric::types::TaskDuration::new(start, end));
}
}
.into()
}
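
For context, a minimal sketch of how the new attribute could be applied to a canister task (assuming the usual crate name chainsight_cdk_macros for the proc-macro crate registered in chainsight-cdk-macros/src/lib.rs below; the function name index_events is hypothetical). The macro wraps the original body between two TimeStamper::now_nanosec() calls and records the elapsed time under the stringified signature:

    use chainsight_cdk_macros::metric;

    // Hypothetical canister task; any function body can be wrapped this way.
    #[metric]
    fn index_events() {
        // ... original task body ...
    }

    // Conceptually, the attribute expands to roughly:
    //
    // fn index_events() {
    //     let start = chainsight_cdk::time::TimeStamper::now_nanosec();
    //     /* original body */
    //     let end = chainsight_cdk::time::TimeStamper::now_nanosec();
    //     chainsight_cdk::metric::types::metric(
    //         stringify!(fn index_events()).to_string(),
    //         chainsight_cdk::metric::types::TaskDuration::new(start, end),
    //     );
    // }
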
5 changes: 5 additions & 0 deletions chainsight-cdk-macros/src/lib.rs
@@ -214,3 +214,8 @@ pub fn only_controller(_attr: proc_macro::TokenStream, item: TokenStream) -> Tok
pub fn only_proxy(_attr: TokenStream, item: TokenStream) -> TokenStream {
canisters::attributes::only_proxy(_attr, item)
}

#[proc_macro_attribute]
pub fn metric(_attr: TokenStream, item: TokenStream) -> TokenStream {
canisters::attributes::metric(_attr, item)
}
2 changes: 2 additions & 0 deletions chainsight-cdk/src/lib.rs
@@ -5,7 +5,9 @@ pub mod core;
pub mod indexer;
pub mod initializer;
pub mod lens;
pub mod metric;
pub mod rpc;
pub mod storage;
pub mod time;
pub mod web2;
pub mod web3;
1 change: 1 addition & 0 deletions chainsight-cdk/src/metric/mod.rs
@@ -0,0 +1 @@
pub mod types;
232 changes: 232 additions & 0 deletions chainsight-cdk/src/metric/types.rs
@@ -0,0 +1,232 @@
use std::{cell::RefCell, collections::HashMap};

use candid::CandidType;

use crate::time::TimeStamper;
// 7 days
const DATA_POINT_RETENTION_SECONDS: u64 = 7 * 24 * 60 * 60;
// 1 hour
const ONE_DATA_POINT_SECONDS: u64 = 60 * 60;

#[derive(Clone, Debug, CandidType)]
pub struct Metric {
pub metric_type: MetricType,
pub value: f64,
}

#[derive(Clone, Debug, CandidType)]
pub enum MetricType {
TimeMax,
TimeMin,
Count,
}

#[derive(Clone, Debug)]
pub struct TaskDuration {
from_nanosec: u64,
to_nanosec: u64,
}

impl TaskDuration {
pub fn new(from_nanosec: u64, to_nanosec: u64) -> Self {
Self {
from_nanosec,
to_nanosec,
}
}
}

#[derive(Clone, Debug, CandidType)]
pub struct DataPoint {
pub metrics: Vec<Metric>,
pub from_timestamp: u64,
}

impl Default for DataPoint {
fn default() -> Self {
Self {
metrics: vec![
Metric {
metric_type: MetricType::TimeMax,
value: 0.0,
},
Metric {
metric_type: MetricType::TimeMin,
value: f64::MAX,
},
Metric {
metric_type: MetricType::Count,
value: 0.0,
},
],
from_timestamp: TimeStamper::now_sec(),
}
}
}
struct MetricCollector {
data: HashMap<MetricId, Vec<DataPoint>>,
}
type MetricId = String;

impl MetricCollector {
fn new() -> Self {
Self {
data: HashMap::new(),
}
}
}

thread_local! {
static METRIC_COLLECTOR: RefCell<MetricCollector> = RefCell::new(MetricCollector::new());
}

#[ic_cdk::query]
fn metric_ids() -> Vec<String> {
_metric_ids()
}

#[ic_cdk::query]
fn metrics(id: String, count: u8) -> Vec<DataPoint> {
METRIC_COLLECTOR.with(|collector| {
if let Some(data) = collector.borrow().data.get(&id) {
return data.iter().rev().take(count as usize).cloned().collect();
}
vec![]
})
}

#[ic_cdk::query]
fn metrics_between(id: String, from: u64, to: u64) -> Vec<DataPoint> {
METRIC_COLLECTOR.with(|collector| {
if let Some(data) = collector.borrow().data.get(&id) {
return data
.iter()
.filter(|d| d.from_timestamp >= from && d.from_timestamp <= to)
.cloned()
.collect();
}
vec![]
})
}

fn _metric_ids() -> Vec<MetricId> {
METRIC_COLLECTOR.with(|collector| collector.borrow().data.keys().cloned().collect())
}

fn _enqueue(id: MetricId) {
let now = TimeStamper::now_sec();
METRIC_COLLECTOR.with(|collector| {
let last = _last(id.clone());
if last.is_none() {
collector.borrow_mut().data.insert(id, vec![]);
return;
}
let last = last.unwrap();
if last.from_timestamp + ONE_DATA_POINT_SECONDS < now {
collector.borrow_mut().data.insert(id, vec![]);
}
});
}

fn _insert(id: MetricId, data_point: DataPoint) {
METRIC_COLLECTOR.with(|collector| {
let mut collector = collector.borrow_mut();
if let Some(data) = collector.data.get_mut(&id) {
data.push(data_point);
} else {
collector.data.insert(id, vec![data_point]);
}
});
}

fn _dequeue(id: MetricId) {
METRIC_COLLECTOR.with(|collector| {
let mut collector = collector.borrow_mut();
if let Some(data) = collector.data.get_mut(&id) {
data.remove(0);
}
});
}

fn _clean() {
let ids = _metric_ids();
let now = TimeStamper::now_sec();

for id in ids {
let first = _first(id.clone());
if first.is_none() {
continue;
}
let first = first.unwrap();
if first.from_timestamp + DATA_POINT_RETENTION_SECONDS < now {
_dequeue(id);
}
}
}

fn _first(id: MetricId) -> Option<DataPoint> {
METRIC_COLLECTOR.with(|collector| {
if let Some(data) = collector.borrow().data.get(&id) {
return data.first().cloned();
}
None
})
}

fn _last(id: MetricId) -> Option<DataPoint> {
METRIC_COLLECTOR.with(|collector| {
if let Some(data) = collector.borrow().data.get(&id) {
return data.last().cloned();
}
None
})
}
pub fn metric(id: MetricId, duration: TaskDuration) {
_clean();
_enqueue(id.clone());
let mut last = _last(id.clone()).unwrap_or_default();
let task_dur = (duration.to_nanosec - duration.from_nanosec) as f64;
let new_metrics = last
.metrics
.iter()
.map(|m| {
let mut m = m.clone();
match m.metric_type {
MetricType::TimeMax => {
m.value = m.value.max(task_dur);
}
MetricType::TimeMin => {
m.value = m.value.min(task_dur);
}
MetricType::Count => {
m.value += 1.0;
}
}
m
})
.collect();
last.metrics = new_metrics;
_insert(id, last);
}

#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_metric() {
let id = "test".to_string();
let duration = TaskDuration::new(0, 100);
metric(id.clone(), duration);
let data = _last(id.clone()).unwrap();
assert_eq!(data.metrics.len(), 3);
assert_eq!(data.metrics[0].value, 100.0);
assert_eq!(data.metrics[1].value, 100.0);
assert_eq!(data.metrics[2].value, 1.0);
metric(id.clone(), TaskDuration::new(0, 200));
let data = _last(id.clone()).unwrap();
assert_eq!(data.metrics.len(), 3);
assert_eq!(data.metrics[0].value, 200.0);
assert_eq!(data.metrics[1].value, 100.0);
assert_eq!(data.metrics[2].value, 2.0);
}
}
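
As a rough illustration of the aggregation model (a sketch only; the metric id "sample_task", the durations, and the helper duration_millis are made up): each call to metric folds one task duration into the current hourly DataPoint, keeping the running maximum, minimum, and count, while _clean drops data points once they age past the 7-day retention window.

    use chainsight_cdk::metric::types::{metric, Metric, TaskDuration};

    fn record_sample_runs() {
        // Two hypothetical task runs; TaskDuration takes nanosecond timestamps.
        metric("sample_task".to_string(), TaskDuration::new(0, 1_500_000)); // 1.5 ms
        metric("sample_task".to_string(), TaskDuration::new(0, 3_000_000)); // 3.0 ms

        // A consumer of the metrics / metrics_between queries would now see a
        // DataPoint whose metrics read TimeMax = 3_000_000.0, TimeMin = 1_500_000.0
        // and Count = 2.0, all stored as f64 nanoseconds (see the unit test above).
    }

    // Helper a dashboard might use to render the duration metrics in milliseconds.
    fn duration_millis(m: &Metric) -> f64 {
        m.value / 1_000_000.0
    }
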
2 changes: 1 addition & 1 deletion chainsight-cdk/src/storage/storage.rs
@@ -154,7 +154,7 @@ thread_local! {
MANAGER.with(|m|m.borrow().get(MemoryId::new(10))),
)
);
static LAST_KEY_STORE: RefCell<String> = RefCell::new(String::new());
static LAST_KEY_STORE: RefCell<String> = const { RefCell::new(String::new()) }

}

30 changes: 30 additions & 0 deletions chainsight-cdk/src/time/mod.rs
@@ -0,0 +1,30 @@
pub struct TimeStamper;

impl TimeStamper {
/// Returns the current time in nanoseconds.
#[cfg(not(target_arch = "wasm32"))]
fn _now() -> u64 {
use std::time::{SystemTime, UNIX_EPOCH};
SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("Time went backwards")
.as_nanos() as u64
}
#[cfg(target_arch = "wasm32")]
fn _now() -> u64 {
ic_cdk::api::time()
}

pub fn now_nanosec() -> u64 {
Self::_now()
}
pub fn now_millisec() -> u64 {
Self::_now() / 1_000_000
}
pub fn now_microsec() -> u64 {
Self::_now() / 1_000
}
pub fn now_sec() -> u64 {
Self::_now() / 1_000_000_000
}
}
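
A short usage sketch for the timer (illustrative only; timed_work and its workload are placeholders). All four accessors read the same clock, ic_cdk::api::time() on wasm32 and SystemTime natively, and differ only in the divisor:

    use chainsight_cdk::time::TimeStamper;

    fn timed_work() -> u64 {
        let start = TimeStamper::now_nanosec();
        // ... placeholder workload ...
        let elapsed_ns = TimeStamper::now_nanosec() - start;

        // The coarser helpers are plain divisions of the nanosecond clock:
        //   now_microsec() == now_nanosec() / 1_000
        //   now_millisec() == now_nanosec() / 1_000_000
        //   now_sec()      == now_nanosec() / 1_000_000_000
        elapsed_ns
    }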
