6 changes: 2 additions & 4 deletions src/backend/impl/src/controllers/log_controller.rs
@@ -50,9 +50,7 @@ impl<A: AccessControlService, L: LogService> LogController<A, L> {
self.access_control_service
.assert_principal_is_admin(&calling_principal)?;

let logs = self.log_service.list_logs(request);

Ok(logs)
self.log_service.list_logs(request)
}
}

@@ -128,7 +126,7 @@ mod tests {
.expect_list_logs()
.once()
.with(eq(request.clone()))
.return_const(logs.clone());
.return_const(Ok(logs.clone()));

let controller = LogController::new(access_control_service_mock, service_mock);

24 changes: 0 additions & 24 deletions src/backend/impl/src/fixtures/log.rs
@@ -34,30 +34,6 @@ pub fn log_entry_error() -> LogEntry {
}
}

#[fixture]
pub fn log_entries() -> Vec<LogEntry> {
vec![
LogEntry {
date_time: date_time_a(),
level: LogLevel::Info,
context: Some("function_a".to_string()),
message: "foo".to_string(),
},
LogEntry {
date_time: date_time_b(),
level: LogLevel::Warn,
context: Some("function_b".to_string()),
message: "bar".to_string(),
},
LogEntry {
date_time: date_time_b(),
level: LogLevel::Error,
context: Some("function_c".to_string()),
message: "baz".to_string(),
},
]
}

pub mod filters {
use crate::repositories::LogsFilter;

70 changes: 53 additions & 17 deletions src/backend/impl/src/repositories/log_repository.rs
@@ -1,10 +1,17 @@
use super::{init_logs, LogEntry, LogId, LogMemory};
use super::{
init_log_timestamp_index, init_logs, DateTime, LogEntry, LogId, LogMemory,
LogTimestampIndexMemory, LogTimestampKey, LogTimestampRange,
};
use backend_api::ApiError;
use std::cell::RefCell;

#[cfg_attr(test, mockall::automock)]
pub trait LogRepository {
fn get_logs(&self) -> Vec<LogEntry>;
fn get_logs(
&self,
after: Option<DateTime>,
before: Option<DateTime>,
) -> Result<Vec<LogEntry>, ApiError>;

fn append_log(&self, log_entry: LogEntry) -> Result<LogId, ApiError>;
}
@@ -18,14 +25,34 @@ impl Default for LogRepositoryImpl {
}

impl LogRepository for LogRepositoryImpl {
fn get_logs(&self) -> Vec<LogEntry> {
STATE.with_borrow(|s| s.logs.iter().collect::<Vec<_>>())
fn get_logs(
&self,
after: Option<DateTime>,
before: Option<DateTime>,
) -> Result<Vec<LogEntry>, ApiError> {
let range = LogTimestampRange::new(after, before)?;
let logs = STATE.with_borrow(|s| {
s.logs_timestamp_index
.range(range)
.filter_map(|(_, log_id)| {
// the None case should never happen
s.logs.get(log_id)
})
.collect()
});
Ok(logs)
}

fn append_log(&self, log_entry: LogEntry) -> Result<LogId, ApiError> {
STATE
.with_borrow_mut(|s| s.logs.append(&log_entry))
.map_err(|e| ApiError::internal(&format!("Cannot write log: {:?}", e)))
STATE.with_borrow_mut(|s| {
let log_id = s
.logs
.append(&log_entry)
.map_err(|e| ApiError::internal(&format!("Cannot write log: {:?}", e)))?;
let log_key = LogTimestampKey::new(log_entry.date_time, log_id)?;
s.logs_timestamp_index.insert(log_key, log_id);
Ok(log_id)
})
}
}

@@ -37,11 +64,15 @@ impl LogRepositoryImpl {

struct LogState {
logs: LogMemory,
logs_timestamp_index: LogTimestampIndexMemory,
}

impl Default for LogState {
fn default() -> Self {
Self { logs: init_logs() }
Self {
logs: init_logs(),
logs_timestamp_index: init_log_timestamp_index(),
}
}
}

@@ -52,23 +83,28 @@ thread_local! {
#[cfg(test)]
mod tests {
use super::*;
use crate::fixtures;
use crate::{fixtures, repositories::LogsFilter};
use rstest::*;

#[rstest]
async fn get_logs() {
#[case::before_filter_matching(fixtures::filters::before_filter_matching())]
#[case::before_filter_not_matching(fixtures::filters::before_filter_not_matching())]
#[case::after_filter_matching(fixtures::filters::after_filter_matching())]
#[case::after_filter_not_matching(fixtures::filters::after_filter_not_matching())]
#[case::time_range_filter_matching(fixtures::filters::time_range_filter_matching())]
#[case::time_range_filter_not_matching(fixtures::filters::time_range_filter_not_matching())]
async fn get_logs(#[case] fixture: (LogEntry, LogsFilter, bool)) {
let (log_entry, filter, expected) = fixture;

STATE.set(LogState::default());

let log_entries = fixtures::log_entries();
let repository = LogRepositoryImpl::default();
repository.append_log(log_entry.clone()).unwrap();

for log_entry in log_entries.iter() {
repository.append_log(log_entry.clone()).unwrap();
}

let result = repository.get_logs();
// ranges are tested in the service and controller above
let result = repository.get_logs(filter.after, filter.before).unwrap();

assert_eq!(result, log_entries);
assert_eq!(result, if expected { vec![log_entry] } else { vec![] });
}

#[rstest]
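Note (sketch, not part of the diff): after this change the repository resolves the time window through the timestamp index, while the remaining `LogsFilter` fields (level, context, message) are still matched in memory via `LogsFilter::matches`. A minimal sketch of how a caller such as the log service might combine the two; the actual `LogService` is not shown in this diff, and the import paths and the assumption that `DateTime` implements `Clone` are illustrative only.

```rust
use crate::repositories::{LogEntry, LogRepository, LogsFilter};
use backend_api::ApiError;

// Sketch only: narrow the scan with the index-backed range query, then apply
// the non-time filter fields in memory.
fn list_logs_sketch<R: LogRepository>(
    repo: &R,
    filter: &LogsFilter,
) -> Result<Vec<LogEntry>, ApiError> {
    let logs = repo.get_logs(filter.after.clone(), filter.before.clone())?;
    Ok(logs
        .into_iter()
        .filter(|entry| filter.matches(entry))
        .collect())
}
```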
18 changes: 15 additions & 3 deletions src/backend/impl/src/repositories/memories/log_memory.rs
@@ -1,18 +1,30 @@
use super::{Memory, LOGS_INDEX_MEMORY_ID, LOGS_MEMORY_ID, MEMORY_MANAGER};
use crate::repositories::LogEntry;
use ic_stable_structures::Log;
use super::{
Memory, LOGS_INDEX_MEMORY_ID, LOGS_MEMORY_ID, LOGS_TIMESTAMP_LEVEL_INDEX_MEMORY_ID,
MEMORY_MANAGER,
};
use crate::repositories::{LogEntry, LogId, LogTimestampKey};
use ic_stable_structures::{BTreeMap, Log};

pub type LogMemory = Log<LogEntry, Memory, Memory>;
pub type LogTimestampIndexMemory = BTreeMap<LogTimestampKey, LogId, Memory>;

pub fn init_logs() -> LogMemory {
// TODO: handle the error
LogMemory::init(get_logs_index_memory(), get_logs_memory()).unwrap()
}

pub fn init_log_timestamp_index() -> LogTimestampIndexMemory {
LogTimestampIndexMemory::init(get_logs_timestamp_level_index_memory())
}

fn get_logs_index_memory() -> Memory {
MEMORY_MANAGER.with(|m| m.borrow().get(LOGS_INDEX_MEMORY_ID))
}

fn get_logs_memory() -> Memory {
MEMORY_MANAGER.with(|m| m.borrow().get(LOGS_MEMORY_ID))
}

fn get_logs_timestamp_level_index_memory() -> Memory {
MEMORY_MANAGER.with(|m| m.borrow().get(LOGS_TIMESTAMP_LEVEL_INDEX_MEMORY_ID))
}
@@ -28,3 +28,4 @@ pub(super) const PROPOSAL_REVIEW_COMMIT_PROPOSAL_REVIEW_ID_USER_ID_MEMORY_ID: Me
pub(super) const IMAGES_MEMORY_ID: MemoryId = MemoryId::new(13);
pub(super) const PROPOSAL_NERVOUS_SYSTEM_ID_INDEX_MEMORY_ID: MemoryId = MemoryId::new(14);
pub(super) const PROPOSAL_TIMESTAMP_INDEX_MEMORY_ID: MemoryId = MemoryId::new(15);
pub(super) const LOGS_TIMESTAMP_LEVEL_INDEX_MEMORY_ID: MemoryId = MemoryId::new(16);
111 changes: 87 additions & 24 deletions src/backend/impl/src/repositories/types/log.rs
@@ -1,7 +1,11 @@
use super::DateTime;
use backend_api::ApiError;
use candid::{CandidType, Decode, Deserialize, Encode};
use ic_stable_structures::{storable::Bound, Storable};
use std::borrow::Cow;
use ic_stable_structures::{
storable::{Blob, Bound},
Storable,
};
use std::{borrow::Cow, ops::RangeBounds};

pub type LogId = u64;

@@ -16,18 +20,6 @@ pub struct LogsFilter {

impl LogsFilter {
pub fn matches(&self, log_entry: &LogEntry) -> bool {
if let Some(before) = &self.before {
if log_entry.date_time > *before {
return false;
}
}

if let Some(after) = &self.after {
if log_entry.date_time < *after {
return false;
}
}

if let Some(level) = &self.level {
if log_entry.level != *level {
return false;
@@ -82,10 +74,77 @@ impl Storable for LogEntry {
const BOUND: Bound = Bound::Unbounded;
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct LogTimestampKey(Blob<{ Self::MAX_SIZE as usize }>);

impl LogTimestampKey {
const MAX_SIZE: u32 = <(DateTime, LogId)>::BOUND.max_size();

pub fn new(date_time: DateTime, log_id: LogId) -> Result<Self, ApiError> {
Ok(Self(
Blob::try_from((date_time, log_id).to_bytes().as_ref()).map_err(|_| {
ApiError::internal(&format!(
"Failed to convert date time {:?} and log id {} to bytes.",
date_time, log_id
))
})?,
))
}
}

impl Storable for LogTimestampKey {
fn to_bytes(&self) -> Cow<[u8]> {
self.0.to_bytes()
}

fn from_bytes(bytes: Cow<[u8]>) -> Self {
Self(Blob::from_bytes(bytes))
}

const BOUND: Bound = Bound::Bounded {
max_size: Self::MAX_SIZE,
is_fixed_size: true,
};
}

pub struct LogTimestampRange {
start_bound: LogTimestampKey,
end_bound: LogTimestampKey,
}

impl LogTimestampRange {
pub fn new(
min_date_time: Option<DateTime>,
max_date_time: Option<DateTime>,
) -> Result<Self, ApiError> {
let max_date_time = match max_date_time {
Some(max_date_time) => max_date_time,
None => DateTime::max()?,
};
Ok(Self {
start_bound: LogTimestampKey::new(
min_date_time.unwrap_or_else(DateTime::min),
LogId::MIN,
)?,
end_bound: LogTimestampKey::new(max_date_time, LogId::MAX)?,
})
}
}

impl RangeBounds<LogTimestampKey> for LogTimestampRange {
fn start_bound(&self) -> std::ops::Bound<&LogTimestampKey> {
std::ops::Bound::Included(&self.start_bound)
}

fn end_bound(&self) -> std::ops::Bound<&LogTimestampKey> {
std::ops::Bound::Included(&self.end_bound)
}
}

#[cfg(test)]
mod tests {
use super::*;
use crate::fixtures;
use crate::{fixtures, system_api::get_date_time};
use rstest::*;

#[rstest]
@@ -99,23 +158,27 @@ mod tests {

#[rstest]
#[case::empty_filter(fixtures::filters::empty_filter())]
#[case::before_filter_matching(fixtures::filters::before_filter_matching())]
#[case::before_filter_not_matching(fixtures::filters::before_filter_not_matching())]
#[case::after_filter_matching(fixtures::filters::after_filter_matching())]
#[case::after_filter_not_matching(fixtures::filters::after_filter_not_matching())]
#[case::time_range_filter_matching(fixtures::filters::time_range_filter_matching())]
#[case::time_range_filter_not_matching(fixtures::filters::time_range_filter_not_matching())]
#[case::level_filter_matching(fixtures::filters::level_filter_matching())]
#[case::level_filter_not_matching(fixtures::filters::level_filter_not_matching())]
#[case::context_filter_matching(fixtures::filters::context_filter_matching())]
#[case::context_filter_not_matching(fixtures::filters::context_filter_not_matching())]
#[case::message_filter_matching(fixtures::filters::message_filter_matching())]
#[case::message_filter_not_matchingd(fixtures::filters::message_filter_not_matching())]
#[case::all_matching(fixtures::filters::all_matching())]
#[case::all_not_matching(fixtures::filters::all_not_matching())]
#[case::message_filter_not_matching(fixtures::filters::message_filter_not_matching())]
fn filter_matches(#[case] fixture: (LogEntry, LogsFilter, bool)) {
let (log_entry, filter, expected) = fixture;

assert_eq!(filter.matches(&log_entry), expected);
}

#[rstest]
fn log_timestamp_key_storable_impl() {
let date_time = get_date_time().unwrap();
let log_id: LogId = 1234;

let key = LogTimestampKey::new(DateTime::new(date_time).unwrap(), log_id).unwrap();
let serialized_key = key.to_bytes();
let deserialized_key = LogTimestampKey::from_bytes(serialized_key);

assert_eq!(key, deserialized_key);
}
}
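Note (sketch, not part of the diff): `LogTimestampKey` relies on the byte encoding of `(DateTime, LogId)` comparing lexicographically, timestamp first, so an inclusive range from `(after, LogId::MIN)` to `(before, LogId::MAX)` covers every entry in the window. A self-contained illustration of that ordering property, using plain big-endian `u64` timestamps as a stand-in for the crate's actual `DateTime` encoding (an assumption for illustration only):

```rust
// Keys encode the timestamp before the log id, big-endian, so plain byte-wise
// comparison orders entries by time first and by insertion id second.
fn key_bytes(timestamp: u64, log_id: u64) -> Vec<u8> {
    let mut bytes = Vec::with_capacity(16);
    bytes.extend_from_slice(&timestamp.to_be_bytes());
    bytes.extend_from_slice(&log_id.to_be_bytes());
    bytes
}

fn main() {
    // Two entries share a timestamp; a third is later.
    let a = key_bytes(1_000, 7);
    let b = key_bytes(1_000, 8);
    let c = key_bytes(2_000, 0);

    // Lexicographic byte order matches (timestamp, log_id) order.
    assert!(a < b && b < c);

    // An inclusive range keyed on (ts, u64::MIN)..=(ts, u64::MAX) captures
    // every log id recorded at that timestamp, mirroring what
    // LogTimestampRange builds for a one-sided bound.
    let lo = key_bytes(1_000, u64::MIN);
    let hi = key_bytes(1_000, u64::MAX);
    assert!((lo.clone()..=hi.clone()).contains(&a));
    assert!((lo.clone()..=hi.clone()).contains(&b));
    assert!(!(lo..=hi).contains(&c));
}
```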