Update DuckDB, use DuckDB in more places, and fix the display of stats
@@ -241,43 +241,83 @@ class AnalyticsController < ApplicationController
  end

  def prepare_chart_data_with_split_cache(cache_key_base, cache_ttl)
    # Split timeline into historical (completed hours) and current (incomplete hour)
    # Historical hours are cached for full TTL, current hour cached briefly for freshness
    # Generate timeline based on selected time period
    case @time_period
    when :hour
      # Show last 60 minutes for hour view
      timeline_data = Rails.cache.fetch("#{cache_key_base}/chart_hourly", expires_in: 1.minute) do
        # For hour view, show minute-by-minute data for the last hour
        (0..59).map do |minutes_ago|
          time_point = minutes_ago.minutes.ago
          count = Event.where("timestamp >= ? AND timestamp < ?", time_point, time_point + 1.minute).count
          {
            time_iso: time_point.iso8601,
            total: count
          }
        end.reverse
      end

    # Cache historical hours (1-23 hours ago) - these are complete and won't change
    # No expiration - will stick around until evicted by cache store (uses DuckDB if available)
    historical_timeline = Rails.cache.fetch("#{cache_key_base}/chart_historical") do
      historical_start = 23.hours.ago.beginning_of_hour
      current_hour_start = Time.current.beginning_of_hour
    when :day
      # Show last 24 hours (existing logic)
      # Split timeline into historical (completed hours) and current (incomplete hour)
      # Historical hours are cached for full TTL, current hour cached briefly for freshness

      events_by_hour = with_duckdb_fallback { EventDdb.hourly_timeline(historical_start, current_hour_start) } ||
                       Event.where("timestamp >= ? AND timestamp < ?", historical_start, current_hour_start)
                            .group("DATE_TRUNC('hour', timestamp)")
                            .count
      # Cache historical hours (1-23 hours ago) - these are complete and won't change
      # Use DuckDB directly for performance, no PostgreSQL fallback
      historical_timeline = Rails.cache.fetch("#{cache_key_base}/chart_historical", expires_in: 1.hour) do
        historical_start = 23.hours.ago.beginning_of_hour
        current_hour_start = Time.current.beginning_of_hour

        (1..23).map do |hour_ago|
          hour_time = hour_ago.hours.ago.beginning_of_hour
          hour_key = hour_time.utc
        # Use DuckDB directly - if it fails, we'll show empty data rather than slow PostgreSQL
        events_by_hour = BaffleDl.hourly_timeline(historical_start, current_hour_start) || {}

        (1..23).map do |hour_ago|
          hour_time = hour_ago.hours.ago.beginning_of_hour
          hour_key = hour_time.utc
          {
            time_iso: hour_time.iso8601,
            total: events_by_hour[hour_key] || 0
          }
        end.reverse
      end

      # Current hour (0 hours ago) - cache very briefly since it's actively accumulating
      # ALWAYS use PostgreSQL for current hour to get real-time data (DuckDB syncs every minute)
      current_hour_data = Rails.cache.fetch("#{cache_key_base}/chart_current_hour", expires_in: 1.minute) do
        hour_time = Time.current.beginning_of_hour
        count = Event.where("timestamp >= ?", hour_time).count
        {
          time_iso: hour_time.iso8601,
          total: events_by_hour[hour_key] || 0
          total: count
        }
      end
      end

      # Current hour (0 hours ago) - cache very briefly since it's actively accumulating
      # ALWAYS use PostgreSQL for current hour to get real-time data (DuckDB syncs every minute)
      current_hour_data = Rails.cache.fetch("#{cache_key_base}/chart_current_hour", expires_in: 1.minute) do
        hour_time = Time.current.beginning_of_hour
        count = Event.where("timestamp >= ?", hour_time).count
        {
          time_iso: hour_time.iso8601,
          total: count
        }
      end
      # Combine current + historical for full 24-hour timeline
      timeline_data = [current_hour_data] + historical_timeline

      # Combine current + historical for full 24-hour timeline
      timeline_data = [current_hour_data] + historical_timeline
    when :week, :month
      # Show daily data for week/month views
      days_to_show = @time_period == :week ? 7 : 30
      timeline_data = Rails.cache.fetch("#{cache_key_base}/chart_daily_#{days_to_show}", expires_in: cache_ttl) do
        historical_start = days_to_show.days.ago.beginning_of_day
        current_day_end = Time.current.end_of_day

        # Use DuckDB for all data including current day (max 1 minute delay)
        daily_events = BaffleDl.daily_timeline(historical_start, current_day_end) || {}

        (0..days_to_show-1).map do |days_ago|
          day_time = days_ago.days.ago.beginning_of_day
          {
            time_iso: day_time.iso8601,
            total: daily_events[day_time] || 0
          }
        end
      end

    else
      # Default to 24 hours
      timeline_data = []
    end

    # Action distribution and other chart data (cached with main cache)
    other_chart_data = Rails.cache.fetch("#{cache_key_base}/chart_metadata", expires_in: cache_ttl) do
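The :day and :week/:month branches above call BaffleDl.hourly_timeline and BaffleDl.daily_timeline, but the BaffleDl class itself is not part of this diff. As a rough illustration only, here is a minimal sketch of the kind of hourly aggregation it presumably runs, assuming the ruby-duckdb gem, an events table with a timestamp column, and a hypothetical database path; none of these details are confirmed by the commit.

require "duckdb"

module BaffleDlSketch
  # Hypothetical path; the real location of the DuckDB/DuckLake file is not shown in this commit.
  DB_PATH = ENV.fetch("DUCKDB_PATH", "storage/analytics.duckdb")

  # Returns a hash of truncated hour => event count, mirroring how the controller
  # indexes events_by_hour by hour above.
  def self.hourly_timeline(from_time, to_time)
    db  = DuckDB::Database.open(DB_PATH)
    con = db.connect
    sql = <<~SQL
      SELECT date_trunc('hour', timestamp) AS hour, count(*) AS total
      FROM events
      WHERE timestamp >= CAST(? AS TIMESTAMP) AND timestamp < CAST(? AS TIMESTAMP)
      GROUP BY 1
    SQL
    rows = con.query(sql, from_time.utc.strftime("%Y-%m-%d %H:%M:%S"),
                          to_time.utc.strftime("%Y-%m-%d %H:%M:%S"))
    rows.each_with_object({}) { |(hour, total), acc| acc[hour] = total }
  end
end

The controller would then look each hour up by its truncated timestamp, which is what the events_by_hour[hour_key] lookups above do.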
@@ -323,7 +363,7 @@ class AnalyticsController < ApplicationController
          time_iso: hour_time.iso8601,
          total: events_by_hour[hour_key] || 0
        }
      end
      end.reverse

      # Action distribution for pie chart
      action_distribution = @event_breakdown.map do |action, count|
@@ -348,8 +388,8 @@ class AnalyticsController < ApplicationController
  end

  def calculate_network_type_stats(start_time)
    # Try DuckDB first, fallback to PostgreSQL
    duckdb_stats = with_duckdb_fallback { EventDdb.network_type_stats(start_time) }
    # Try DuckLake first, fallback to PostgreSQL
    duckdb_stats = with_duckdb_fallback { BaffleDl.network_type_stats(start_time) }

    return duckdb_stats if duckdb_stats

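These stats helpers lean on with_duckdb_fallback, which is referenced throughout the controllers but never shown in this diff. A minimal sketch of what such a guard presumably looks like, assuming it simply rescues analytics-store errors and returns nil so callers can fall back to PostgreSQL; this is an assumption, not the repository's actual implementation.

def with_duckdb_fallback
  # Run the DuckDB/DuckLake query; on any failure, log and return nil so the
  # caller's `|| ...` or `return ... if ...` logic drops back to PostgreSQL.
  yield
rescue StandardError => e
  Rails.logger.warn("DuckDB/DuckLake query failed, falling back to PostgreSQL: #{e.message}")
  nil
end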
@@ -397,8 +437,8 @@ class AnalyticsController < ApplicationController
  end

  def calculate_suspicious_patterns(start_time)
    # Try DuckDB first, fallback to PostgreSQL
    duckdb_patterns = with_duckdb_fallback { EventDdb.suspicious_patterns(start_time) }
    # Try DuckLake first, fallback to PostgreSQL
    duckdb_patterns = with_duckdb_fallback { BaffleDl.suspicious_patterns(start_time) }

    return duckdb_patterns if duckdb_patterns

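The timeline code earlier in this diff notes that "DuckDB syncs every minute", but the sync itself is not part of this commit. A purely hypothetical sketch of such a job, assuming ActiveJob, the ruby-duckdb gem, and an events table with id, timestamp and action columns; all of these names are assumptions.

class EventSyncJobSketch < ApplicationJob
  queue_as :low_priority

  def perform
    # Hypothetical path; the real DuckDB/DuckLake location is not shown in this commit.
    db  = DuckDB::Database.open(ENV.fetch("DUCKDB_PATH", "storage/analytics.duckdb"))
    con = db.connect

    # Copy only rows PostgreSQL has that DuckDB does not yet have.
    last_id = con.query("SELECT coalesce(max(id), 0) FROM events").first&.first || 0

    Event.where("id > ?", last_id).find_each do |event|
      # A real job would batch these inserts; row-by-row keeps the sketch short.
      con.query(
        "INSERT INTO events (id, timestamp, action) VALUES (?, ?, ?)",
        event.id,
        event.timestamp.utc.strftime("%Y-%m-%d %H:%M:%S"),
        event.action
      )
    end
  end
end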
@@ -37,8 +37,8 @@ class EventsController < ApplicationController
      filters[:network_range_id] = range.id if range
    end

    # Try DuckDB first, fallback to PostgreSQL if unavailable
    result = EventDdb.search(filters, page: params[:page]&.to_i || 1, per_page: 50)
    # Try DuckLake first, fallback to PostgreSQL if unavailable
    result = BaffleDl.search(filters, page: params[:page]&.to_i || 1, per_page: 50)

    if result
      # DuckDB query succeeded

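The hunk ends just as the if result branch opens, so the PostgreSQL path is not visible here. The overall shape is presumably something like the sketch below; the result hash keys and the fallback query are assumptions, not code from the repository.

if result
  # DuckDB/DuckLake search succeeded; assume it already returns paginated data
  @events      = result[:events]
  @total_count = result[:total_count]
else
  # Assumed PostgreSQL fallback when the analytics store is unavailable
  page         = params[:page]&.to_i || 1
  scope        = Event.where(filters).order(timestamp: :desc)
  @total_count = scope.count
  @events      = scope.offset((page - 1) * 50).limit(50)
end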
@@ -262,10 +262,10 @@ class NetworkRangesController < ApplicationController
    # Include child network ranges to capture all traffic within this network block
    network_ids = [network_range.id] + network_range.child_ranges.pluck(:id)

    # Try DuckDB first for stats (much faster)
    duckdb_stats = with_duckdb_fallback { EventDdb.network_traffic_stats(network_ids) }
    duckdb_top_paths = with_duckdb_fallback { EventDdb.network_top_paths(network_ids, 10) }
    duckdb_top_agents = with_duckdb_fallback { EventDdb.network_top_user_agents(network_ids, 5) }
    # Try DuckLake first for stats (much faster)
    duckdb_stats = with_duckdb_fallback { BaffleDl.network_traffic_stats(network_ids) }
    duckdb_top_paths = with_duckdb_fallback { BaffleDl.network_top_paths(network_ids, 10) }
    duckdb_top_agents = with_duckdb_fallback { BaffleDl.network_top_user_agents(network_ids, 5) }

    if duckdb_stats
      # DuckDB success - use fast aggregated stats

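When any of these DuckLake helpers returns nil, the controller presumably falls back to ActiveRecord aggregates below the end of this hunk. A sketch of what one such fallback could look like for the top-paths case, assuming a path column on Event; the column name and query shape are assumptions, since the schema is not shown.

# Assumed PostgreSQL fallback for the top-paths aggregate above.
top_paths = duckdb_top_paths ||
            Event.where(network_range_id: network_ids)
                 .group(:path)
                 .count
                 .sort_by { |_path, count| -count }
                 .first(10)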