path-matching #1
@@ -56,11 +56,10 @@ class AnalyticsController < ApplicationController
 end
 end

-# Top countries by event count - cached (this is the expensive JOIN query)
+# Top countries by event count - cached (now uses denormalized country column)
 @top_countries = Rails.cache.fetch("#{cache_key_base}/top_countries", expires_in: cache_ttl) do
-Event.joins("JOIN network_ranges ON events.ip_address <<= network_ranges.network")
-.where("timestamp >= ? AND network_ranges.country IS NOT NULL", @start_time)
-.group("network_ranges.country")
+Event.where("timestamp >= ? AND country IS NOT NULL", @start_time)
+.group(:country)
 .count
 .sort_by { |_, count| -count }
 .first(10)
@@ -126,10 +125,10 @@ class AnalyticsController < ApplicationController
 @time_period = params[:period]&.to_sym || :day
 @start_time = calculate_start_time(@time_period)

-# Top networks by request volume
-@top_networks = NetworkRange.joins("LEFT JOIN events ON events.ip_address <<= network_ranges.network")
+# Top networks by request volume (using denormalized network_range_id)
+@top_networks = NetworkRange.joins("LEFT JOIN events ON events.network_range_id = network_ranges.id")
 .where("events.timestamp >= ? OR events.timestamp IS NULL", @start_time)
-.group("network_ranges.id", "network_ranges.network", "network_ranges.company", "network_ranges.asn", "network_ranges.country", "network_ranges.is_datacenter", "network_ranges.is_vpn", "network_ranges.is_proxy")
+.group("network_ranges.id")
 .select("network_ranges.*, COUNT(events.id) as event_count, COUNT(DISTINCT events.ip_address) as unique_ips")
 .order("event_count DESC")
 .limit(50)
@@ -137,27 +136,24 @@ class AnalyticsController < ApplicationController
 # Network type breakdown with traffic stats
 @network_breakdown = calculate_network_type_stats(@start_time)

-# Company breakdown for top traffic sources
-@top_companies = NetworkRange.joins("LEFT JOIN events ON events.ip_address <<= network_ranges.network")
-.where("events.timestamp >= ? AND network_ranges.company IS NOT NULL", @start_time)
-.group("network_ranges.company")
-.select("network_ranges.company, COUNT(events.id) as event_count, COUNT(DISTINCT events.ip_address) as unique_ips, COUNT(DISTINCT network_ranges.id) as network_count")
+# Company breakdown for top traffic sources (using denormalized company column)
+@top_companies = Event.where("timestamp >= ? AND company IS NOT NULL", @start_time)
+.group(:company)
+.select("company, COUNT(*) as event_count, COUNT(DISTINCT ip_address) as unique_ips")
 .order("event_count DESC")
 .limit(20)

-# ASN breakdown
-@top_asns = NetworkRange.joins("LEFT JOIN events ON events.ip_address <<= network_ranges.network")
-.where("events.timestamp >= ? AND network_ranges.asn IS NOT NULL", @start_time)
-.group("network_ranges.asn", "network_ranges.asn_org")
-.select("network_ranges.asn, network_ranges.asn_org, COUNT(events.id) as event_count, COUNT(DISTINCT events.ip_address) as unique_ips, COUNT(DISTINCT network_ranges.id) as network_count")
+# ASN breakdown (using denormalized asn columns)
+@top_asns = Event.where("timestamp >= ? AND asn IS NOT NULL", @start_time)
+.group(:asn, :asn_org)
+.select("asn, asn_org, COUNT(*) as event_count, COUNT(DISTINCT ip_address) as unique_ips")
 .order("event_count DESC")
 .limit(15)

-# Geographic breakdown
-@top_countries = NetworkRange.joins("LEFT JOIN events ON events.ip_address <<= network_ranges.network")
-.where("events.timestamp >= ? AND network_ranges.country IS NOT NULL", @start_time)
-.group("network_ranges.country")
-.select("network_ranges.country, COUNT(events.id) as event_count, COUNT(DISTINCT events.ip_address) as unique_ips, COUNT(DISTINCT network_ranges.id) as network_count")
+# Geographic breakdown (using denormalized country column)
+@top_countries = Event.where("timestamp >= ? AND country IS NOT NULL", @start_time)
+.group(:country)
+.select("country, COUNT(*) as event_count, COUNT(DISTINCT ip_address) as unique_ips")
 .order("event_count DESC")
 .limit(15)

@@ -297,51 +293,41 @@ class AnalyticsController < ApplicationController
 end

 def calculate_network_type_stats(start_time)
-# Get all network types with their traffic statistics
+# Get all network types with their traffic statistics using denormalized columns
 network_types = [
-{ type: 'datacenter', label: 'Datacenter' },
-{ type: 'vpn', label: 'VPN' },
-{ type: 'proxy', label: 'Proxy' }
+{ type: 'datacenter', label: 'Datacenter', column: :is_datacenter },
+{ type: 'vpn', label: 'VPN', column: :is_vpn },
+{ type: 'proxy', label: 'Proxy', column: :is_proxy }
 ]

 results = {}
 total_events = Event.where("timestamp >= ?", start_time).count

 network_types.each do |network_type|
-scope = case network_type[:type]
-when 'datacenter' then NetworkRange.datacenter
-when 'vpn' then NetworkRange.vpn
-when 'proxy' then NetworkRange.proxy
-end
-
-if scope
-network_stats = scope.joins("LEFT JOIN events ON events.ip_address <<= network_ranges.network")
-.where("events.timestamp >= ? OR events.timestamp IS NULL", start_time)
-.select("COUNT(events.id) as event_count, COUNT(DISTINCT events.ip_address) as unique_ips, COUNT(DISTINCT network_ranges.id) as network_count")
+# Query events directly using denormalized flags
+event_stats = Event.where("timestamp >= ? AND #{network_type[:column]} = ?", start_time, true)
+.select("COUNT(*) as event_count, COUNT(DISTINCT ip_address) as unique_ips, COUNT(DISTINCT network_range_id) as network_count")
 .first

 results[network_type[:type]] = {
 label: network_type[:label],
-networks: network_stats.network_count,
-events: network_stats.event_count,
-unique_ips: network_stats.unique_ips,
-percentage: total_events > 0 ? ((network_stats.event_count.to_f / total_events) * 100).round(1) : 0
+networks: event_stats.network_count || 0,
+events: event_stats.event_count || 0,
+unique_ips: event_stats.unique_ips || 0,
+percentage: total_events > 0 ? ((event_stats.event_count.to_f / total_events) * 100).round(1) : 0
 }
 end
-end

 # Calculate standard networks (everything else)
-standard_stats = NetworkRange.where(is_datacenter: false, is_vpn: false, is_proxy: false)
-.joins("LEFT JOIN events ON events.ip_address <<= network_ranges.network")
-.where("events.timestamp >= ? OR events.timestamp IS NULL", start_time)
-.select("COUNT(events.id) as event_count, COUNT(DISTINCT events.ip_address) as unique_ips, COUNT(DISTINCT network_ranges.id) as network_count")
+standard_stats = Event.where("timestamp >= ? AND is_datacenter = ? AND is_vpn = ? AND is_proxy = ?", start_time, false, false, false)
+.select("COUNT(*) as event_count, COUNT(DISTINCT ip_address) as unique_ips, COUNT(DISTINCT network_range_id) as network_count")
 .first

 results['standard'] = {
 label: 'Standard',
-networks: standard_stats.network_count,
-events: standard_stats.event_count,
-unique_ips: standard_stats.unique_ips,
+networks: standard_stats.network_count || 0,
+events: standard_stats.event_count || 0,
+unique_ips: standard_stats.unique_ips || 0,
 percentage: total_events > 0 ? ((standard_stats.event_count.to_f / total_events) * 100).round(1) : 0
 }

@@ -351,51 +337,51 @@ class AnalyticsController < ApplicationController
 def calculate_suspicious_patterns(start_time)
 patterns = {}

-# High volume networks (top 1% by request count)
-total_networks = NetworkRange.joins("LEFT JOIN events ON events.ip_address <<= network_ranges.network")
-.where("events.timestamp >= ?", start_time)
-.distinct.count
+# High volume networks (top 1% by request count) - using denormalized network_range_id
+total_networks = Event.where("timestamp >= ? AND network_range_id IS NOT NULL", start_time)
+.distinct.count(:network_range_id)

-high_volume_threshold = [total_networks * 0.01, 1].max
-high_volume_networks = NetworkRange.joins("INNER JOIN events ON events.ip_address <<= network_ranges.network")
-.where("events.timestamp >= ?", start_time)
-.group("network_ranges.id")
-.having("COUNT(events.id) > ?", Event.where("timestamp >= ?", start_time).count / total_networks)
+if total_networks > 0
+avg_events_per_network = Event.where("timestamp >= ?", start_time).count / total_networks
+high_volume_networks = Event.where("timestamp >= ? AND network_range_id IS NOT NULL", start_time)
+.group(:network_range_id)
+.having("COUNT(*) > ?", avg_events_per_network * 5)
 .count

 patterns[:high_volume] = {
 count: high_volume_networks.count,
 networks: high_volume_networks.keys
 }
+else
+patterns[:high_volume] = { count: 0, networks: [] }
+end

-# Networks with high deny rates (> 50% blocked requests)
-high_deny_networks = NetworkRange.joins("INNER JOIN events ON events.ip_address <<= network_ranges.network")
-.where("events.timestamp >= ?", start_time)
-.group("network_ranges.id")
-.select("network_ranges.id,
-COUNT(CASE WHEN events.waf_action = 1 THEN 1 END) as denied_count,
-COUNT(events.id) as total_count")
-.having("COUNT(CASE WHEN events.waf_action = 1 THEN 1 END)::float / COUNT(events.id) > 0.5")
-.having("COUNT(events.id) >= 10") # minimum threshold
+# Networks with high deny rates (> 50% blocked requests) - using denormalized network_range_id
+high_deny_networks = Event.where("timestamp >= ? AND network_range_id IS NOT NULL", start_time)
+.group(:network_range_id)
+.select("network_range_id,
+COUNT(CASE WHEN waf_action = 1 THEN 1 END) as denied_count,
+COUNT(*) as total_count")
+.having("COUNT(CASE WHEN waf_action = 1 THEN 1 END)::float / COUNT(*) > 0.5")
+.having("COUNT(*) >= 10") # minimum threshold

 patterns[:high_deny_rate] = {
 count: high_deny_networks.count,
-network_ids: high_deny_networks.map(&:id)
+network_ids: high_deny_networks.map(&:network_range_id)
 }

-# Networks appearing as multiple subnets (potential botnets)
-company_subnets = NetworkRange.where("company IS NOT NULL")
-.where("timestamp >= ? OR timestamp IS NULL", start_time)
+# Companies appearing with multiple IPs (potential botnets) - using denormalized company column
+company_subnets = Event.where("timestamp >= ? AND company IS NOT NULL", start_time)
 .group(:company)
-.select(:company, "COUNT(DISTINCT network) as subnet_count")
-.having("COUNT(DISTINCT network) > 5")
-.order("subnet_count DESC")
+.select("company, COUNT(DISTINCT ip_address) as ip_count")
+.having("COUNT(DISTINCT ip_address) > 5")
+.order("ip_count DESC")
 .limit(10)

-patterns[:distributed_companies] = company_subnets.map do |company|
+patterns[:distributed_companies] = company_subnets.map do |stat|
 {
-company: company.company,
-subnets: company.subnet_count
+company: stat.company,
+subnets: stat.ip_count
 }
 end

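Note on the schema these hunks assume: every AnalyticsController query above now reads country, company, asn, asn_org, the is_datacenter/is_vpn/is_proxy flags and network_range_id directly off events. The migration that adds those denormalized columns is not part of this diff; a minimal sketch, with column types and the Rails version assumed, could look like:

    class AddDenormalizedNetworkColumnsToEvents < ActiveRecord::Migration[7.1]
      def change
        # Denormalized copies of NetworkRange attributes, written when the event is created
        add_reference :events, :network_range, foreign_key: true, index: true
        add_column :events, :country, :string
        add_column :events, :company, :string
        add_column :events, :asn, :integer
        add_column :events, :asn_org, :string
        add_column :events, :is_datacenter, :boolean, default: false, null: false
        add_column :events, :is_vpn, :boolean, default: false, null: false
        add_column :events, :is_proxy, :boolean, default: false, null: false

        # Plain b-tree indexes help the GROUP BY country/company/asn aggregations above
        add_index :events, :country
        add_index :events, :company
        add_index :events, [:asn, :asn_org]
      end
    end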
@@ -2,7 +2,13 @@

 class EventsController < ApplicationController
 def show
-@event = Event.find(params[:id])
+@event = Event.includes(:network_range).find(params[:id])
+
+# Use denormalized network_range_id if available (much faster)
+@network_range = @event.network_range
+
+# Fallback to IP lookup if network_range_id is missing
+unless @network_range
 @network_range = NetworkRange.contains_ip(@event.ip_address.to_s).first

 # Auto-generate network range if no match found
@@ -11,19 +17,20 @@ class EventsController < ApplicationController
 Rails.logger.debug "Auto-generated network range #{@network_range&.cidr} for IP #{@event.ip_address}" if @network_range
 end
 end
+end

 def index
-@events = Event.order(timestamp: :desc)
+@events = Event.includes(:network_range, :rule).order(timestamp: :desc)
 Rails.logger.debug "Found #{@events.count} total events"
 Rails.logger.debug "Action: #{params[:waf_action]}"

 # Apply filters
 @events = @events.by_ip(params[:ip]) if params[:ip].present?
 @events = @events.by_waf_action(params[:waf_action]) if params[:waf_action].present?
-@events = @events.joins("JOIN network_ranges ON events.ip_address <<= network_ranges.network")
-.where("network_ranges.country = ?", params[:country]) if params[:country].present?
+@events = @events.by_country(params[:country]) if params[:country].present?
+@events = @events.where(rule_id: params[:rule_id]) if params[:rule_id].present?

-# Network-based filters
+# Network-based filters (now using denormalized columns)
 @events = @events.by_company(params[:company]) if params[:company].present?
 @events = @events.by_network_type(params[:network_type]) if params[:network_type].present?
 @events = @events.by_asn(params[:asn]) if params[:asn].present?
@@ -37,24 +44,10 @@ class EventsController < ApplicationController
 # Paginate
 @pagy, @events = pagy(@events, items: 50)

-# Preload network ranges for all unique IPs to avoid N+1 queries
-unique_ips = @events.pluck(:ip_address).uniq.compact
-@network_ranges_by_ip = {}
-unique_ips.each do |ip|
-ip_string = ip.to_s # IPAddr objects can be converted to string
-range = NetworkRange.contains_ip(ip_string).first
-
-# Auto-generate network range if no match found
-unless range
-range = NetworkRangeGenerator.find_or_create_for_ip(ip)
-Rails.logger.debug "Auto-generated network range #{range&.cidr} for IP #{ip_string}" if range
-end
-
-@network_ranges_by_ip[ip_string] = range if range
-end
+# Network ranges are now preloaded via includes(:network_range)
+# The denormalized network_range_id makes this much faster than IP containment lookups

 Rails.logger.debug "Events count after pagination: #{@events.count}"
 Rails.logger.debug "Pagy info: #{@pagy.count} total, #{@pagy.pages} pages"
-Rails.logger.debug "Preloaded network ranges for #{@network_ranges_by_ip.count} unique IPs"
 end
 end
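The index action above filters with by_country, by_company, by_network_type and by_asn, none of which are shown in this diff. Assuming they are simple scopes over the new denormalized columns, they might look like this sketch (only by_country is new here; the rest presumably existed already):

    class Event < ApplicationRecord
      scope :recent,     -> { order(timestamp: :desc) }
      scope :by_ip,      ->(ip) { where(ip_address: ip) }
      scope :by_country, ->(country) { where(country: country) }
      scope :by_company, ->(company) { where(company: company) }
      scope :by_asn,     ->(asn) { where(asn: asn) }

      # Maps the UI's network_type filter onto the denormalized boolean flags
      scope :by_network_type, ->(type) {
        case type
        when 'datacenter' then where(is_datacenter: true)
        when 'vpn'        then where(is_vpn: true)
        when 'proxy'      then where(is_proxy: true)
        else all
        end
      }
    end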
@@ -46,24 +46,51 @@ class NetworkRangesController < ApplicationController
 authorize @network_range

 if @network_range.persisted?
-# Real network - use existing logic
-@related_events = Event.joins("JOIN network_ranges ON events.ip_address <<= network_ranges.network")
-.where("network_ranges.id = ?", @network_range.id)
-.recent
-.limit(100)
+# Real network - use direct IP containment for consistency with stats
+events_scope = Event.where("ip_address <<= ?", @network_range.cidr).recent
 else
 # Virtual network - find events by IP range containment
-@related_events = Event.where("ip_address <<= ?::inet", @network_range.to_s)
-.recent
-.limit(100)
+events_scope = Event.where("ip_address <<= ?::inet", @network_range.to_s).recent
 end

+# Paginate events
+@events_pagy, @related_events = pagy(events_scope, items: 50)
+
 @child_ranges = @network_range.child_ranges.limit(20)
 @parent_ranges = @network_range.parent_ranges.limit(10)
 @associated_rules = @network_range.persisted? ? @network_range.rules.includes(:user).order(created_at: :desc) : []

 # Traffic analytics (if we have events)
 @traffic_stats = calculate_traffic_stats(@network_range)
+
+# Check if we have IPAPI data (or if parent has it)
+@has_ipapi_data = @network_range.has_network_data_from?(:ipapi)
+@parent_with_ipapi = nil
+
+unless @has_ipapi_data
+# Check if parent has IPAPI data
+parent = @network_range.parent_with_intelligence
+if parent&.has_network_data_from?(:ipapi)
+@parent_with_ipapi = parent
+@has_ipapi_data = true
+end
+end
+
+# If we don't have IPAPI data anywhere and no parent has it, queue fetch job
+if @network_range.persisted? && @network_range.should_fetch_ipapi_data?
+@network_range.mark_as_fetching_api_data!(:ipapi)
+FetchIpapiDataJob.perform_later(network_range_id: @network_range.id)
+@ipapi_loading = true
+end
+
+# Get IPAPI data for display
+@ipapi_data = if @parent_with_ipapi
+@parent_with_ipapi.network_data_for(:ipapi)
+elsif @network_range.has_network_data_from?(:ipapi)
+@network_range.network_data_for(:ipapi)
+else
+nil
+end
 end

 # GET /network_ranges/new
@@ -214,18 +241,27 @@ class NetworkRangesController < ApplicationController
 if network_range.persisted?
 # Real network - use cached events_count for total requests (much more performant)
 if network_range.events_count > 0
-events = Event.joins("JOIN network_ranges ON events.ip_address <<= network_ranges.network")
-.where("network_ranges.id = ?", network_range.id)
-.limit(1000) # Limit the sample for performance
+# Base query for consistent IP containment logic
+base_query = Event.where("ip_address <<= ?", network_range.cidr)
+# Use separate queries: one for grouping (without ordering), one for recent activity (with ordering)
+events_for_grouping = base_query.limit(1000)
+events_for_activity = base_query.recent.limit(20)
+
+# Calculate counts properly - use consistent base_query for all counts
+total_requests = base_query.count
+unique_ips = base_query.except(:order).distinct.count(:ip_address)
+blocked_requests = base_query.blocked.count
+allowed_requests = base_query.allowed.count

 {
-total_requests: network_range.events_count, # Use cached count
-unique_ips: events.distinct.count(:ip_address),
-blocked_requests: events.blocked.count,
-allowed_requests: events.allowed.count,
-top_paths: events.group(:request_path).count.sort_by { |_, count| -count }.first(10),
-top_user_agents: events.group(:user_agent).count.sort_by { |_, count| -count }.first(5),
-recent_activity: events.recent.limit(20)
+total_requests: total_requests,
+unique_ips: unique_ips,
+blocked_requests: blocked_requests,
+allowed_requests: allowed_requests,
+top_paths: events_for_grouping.group(:request_path).count.sort_by { |_, count| -count }.first(10),
+top_user_agents: events_for_grouping.group(:user_agent).count.sort_by { |_, count| -count }.first(5),
+recent_activity: events_for_activity
 }
 else
 # No events - return empty stats
@@ -241,20 +277,35 @@ class NetworkRangesController < ApplicationController
 end
 else
 # Virtual network - calculate stats from events within range
-events = Event.where("ip_address <<= ?::inet", network_range.to_s)
-.limit(1000) # Limit the sample for performance
+base_query = Event.where("ip_address <<= ?", network_range.cidr)
+total_events = base_query.count

-total_events = Event.where("ip_address <<= ?::inet", network_range.to_s).count
+if total_events > 0
+# Use separate queries: one for grouping (without ordering), one for recent activity (with ordering)
+events_for_grouping = base_query.limit(1000)
+events_for_activity = base_query.recent.limit(20)

 {
 total_requests: total_events,
-unique_ips: events.distinct.count(:ip_address),
-blocked_requests: events.blocked.count,
-allowed_requests: events.allowed.count,
-top_paths: events.group(:request_path).count.sort_by { |_, count| -count }.first(10),
-top_user_agents: events.group(:user_agent).count.sort_by { |_, count| -count }.first(5),
-recent_activity: events.recent.limit(20)
+unique_ips: base_query.except(:order).distinct.count(:ip_address),
+blocked_requests: base_query.blocked.count,
+allowed_requests: base_query.allowed.count,
+top_paths: events_for_grouping.group(:request_path).count.sort_by { |_, count| -count }.first(10),
+top_user_agents: events_for_grouping.group(:user_agent).count.sort_by { |_, count| -count }.first(5),
+recent_activity: events_for_activity
+}
+else
+# No events for virtual network
+{
+total_requests: 0,
+unique_ips: 0,
+blocked_requests: 0,
+allowed_requests: 0,
+top_paths: {},
+top_user_agents: {},
+recent_activity: []
 }
 end
 end
 end
+end
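The show and calculate_traffic_stats changes above rely on PostgreSQL's inet containment operator: ip_address <<= ? is true when the event's address is contained in (or equal to) the given CIDR. For per-network lookups like these, a GiST index on the inet column is the usual companion; a sketch, assuming Rails's opclass option (available since 5.2) and an assumed Rails version:

    class AddGistIndexOnEventsIpAddress < ActiveRecord::Migration[7.1]
      def change
        # <<= ("is contained within or equals") can use a GiST index with inet_ops
        add_index :events, :ip_address, using: :gist, opclass: :inet_ops
      end
    end

    # Usage, as in calculate_traffic_stats above:
    Event.where("ip_address <<= ?", "203.0.113.0/24").count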
@@ -11,8 +11,8 @@ class RulesController < ApplicationController
 # GET /rules
 def index
 @pagy, @rules = pagy(policy_scope(Rule).includes(:user, :network_range).order(created_at: :desc))
-@rule_types = Rule::RULE_TYPES
-@actions = Rule::ACTIONS
+@waf_rule_types = Rule.waf_rule_types
+@waf_actions = Rule.waf_actions
 end

 # GET /rules/new
@@ -27,11 +27,11 @@ class RulesController < ApplicationController
 end

 if params[:cidr].present?
-@rule.rule_type = 'network'
+@rule.waf_rule_type = 'network'
 end

-@rule_types = Rule::RULE_TYPES
-@actions = Rule::ACTIONS
+@waf_rule_types = Rule.waf_rule_types
+@waf_actions = Rule.waf_actions
 end

 # POST /rules
@@ -39,8 +39,8 @@ class RulesController < ApplicationController
 authorize Rule
 @rule = Rule.new(rule_params)
 @rule.user = Current.user
-@rule_types = Rule::RULE_TYPES
-@actions = Rule::ACTIONS
+@waf_rule_types = Rule.waf_rule_types
+@waf_actions = Rule.waf_actions

 # Process additional form data for quick create
 process_quick_create_parameters
@@ -79,16 +79,26 @@ class RulesController < ApplicationController
 # GET /rules/:id/edit
 def edit
 authorize @rule
-@rule_types = Rule::RULE_TYPES
-@actions = Rule::ACTIONS
+@waf_rule_types = Rule.waf_rule_types
+@waf_actions = Rule.waf_actions
 end

 # PATCH/PUT /rules/:id
 def update
 authorize @rule
+
+# Preserve original attributes in case validation fails
+original_attributes = @rule.attributes.dup
+original_network_range_id = @rule.network_range_id
+
 if @rule.update(rule_params)
 redirect_to @rule, notice: 'Rule was successfully updated.'
 else
+# Restore original attributes to preserve form state
+# This prevents network range dropdown from resetting
+@rule.attributes = original_attributes
+@rule.network_range_id = original_network_range_id
+
 render :edit, status: :unprocessable_entity
 end
 end
@@ -116,8 +126,8 @@ class RulesController < ApplicationController

 def rule_params
 permitted = [
-:rule_type,
-:action,
+:waf_rule_type,
+:waf_action,
 :metadata,
 :expires_at,
 :enabled,
@@ -126,7 +136,7 @@ class RulesController < ApplicationController
 ]

 # Only include conditions for non-network rules
-if params[:rule][:rule_type] != 'network'
+if params[:rule][:waf_rule_type] != 'network'
 permitted << :conditions
 end

@@ -136,7 +146,7 @@ end
 def calculate_rule_priority
 return unless @rule

-case @rule.rule_type
+case @rule.waf_rule_type
 when 'network'
 # For network rules, priority based on prefix specificity
 if @rule.network_range
@@ -167,20 +177,10 @@ def calculate_rule_priority
 else
 @rule.priority = 100 # Default for network rules without range
 end
-when 'protocol_violation'
-@rule.priority = 95
-when 'method_enforcement'
-@rule.priority = 90
 when 'path_pattern'
 @rule.priority = 85
-when 'header_pattern', 'query_pattern'
-@rule.priority = 80
-when 'body_signature'
-@rule.priority = 75
 when 'rate_limit'
 @rule.priority = 70
-when 'composite'
-@rule.priority = 65
 else
 @rule.priority = 50 # Default priority
 end
@@ -203,7 +203,7 @@ def process_quick_create_parameters
 end

 # Handle redirect URL
-if @rule.action == 'redirect' && params[:redirect_url].present?
+if @rule.redirect? && params[:redirect_url].present?
 @rule.metadata ||= {}
 if @rule.metadata.is_a?(String)
 begin
@@ -227,6 +227,24 @@ def process_quick_create_parameters
 end
 end

+# Handle expires_at parsing for text input
+if params.dig(:rule, :expires_at).present?
+expires_at_str = params[:rule][:expires_at].strip
+if expires_at_str.present?
+begin
+# Try to parse various datetime formats
+@rule.expires_at = DateTime.parse(expires_at_str)
+rescue ArgumentError
+# Try specific format
+begin
+@rule.expires_at = DateTime.strptime(expires_at_str, '%Y-%m-%d %H:%M')
+rescue ArgumentError
+@rule.errors.add(:expires_at, 'must be in format YYYY-MM-DD HH:MM')
+end
+end
+end
+end
+
 # Add reason to metadata if provided
 if params.dig(:rule, :metadata).present?
 if @rule.metadata.is_a?(Hash)
@@ -245,8 +263,8 @@ end

 def rule_params
 permitted = [
-:rule_type,
-:action,
+:waf_rule_type,
+:waf_action,
 :metadata,
 :expires_at,
 :enabled,
@@ -255,7 +273,7 @@ end
 ]

 # Only include conditions for non-network rules
-if params[:rule][:rule_type] != 'network'
+if params[:rule][:waf_rule_type] != 'network'
 permitted << :conditions
 end

@@ -265,7 +283,7 @@ end
 def calculate_rule_priority
 return unless @rule

-case @rule.rule_type
+case @rule.waf_rule_type
 when 'network'
 # For network rules, priority based on prefix specificity
 if @rule.network_range
@@ -296,20 +314,10 @@ end
 else
 @rule.priority = 100 # Default for network rules without range
 end
-when 'protocol_violation'
-@rule.priority = 95
-when 'method_enforcement'
-@rule.priority = 90
 when 'path_pattern'
 @rule.priority = 85
-when 'header_pattern', 'query_pattern'
-@rule.priority = 80
-when 'body_signature'
-@rule.priority = 75
 when 'rate_limit'
 @rule.priority = 70
-when 'composite'
-@rule.priority = 65
 else
 @rule.priority = 50 # Default priority
 end
@@ -332,7 +340,7 @@ end
 end

 # Handle redirect URL
-if @rule.action == 'redirect' && params[:redirect_url].present?
+if @rule.redirect? && params[:redirect_url].present?
 @rule.metadata ||= {}
 if @rule.metadata.is_a?(String)
 begin
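RulesController now reads Rule.waf_rule_types and Rule.waf_actions and calls @rule.redirect?, which is the interface Rails enums generate. The Rule model itself is not in this diff; a sketch under that assumption (Rails 7 enum syntax), where deny: 1 mirrors the "waf_action = 1" checks in the analytics SQL and the other integer values are placeholders:

    class Rule < ApplicationRecord
      # Assumed enum definitions backing the controller changes above
      enum :waf_rule_type, { network: 0, path_pattern: 1, rate_limit: 2 }
      enum :waf_action,    { allow: 0, deny: 1, redirect: 2, challenge: 3 }
    end

    Rule.waf_rule_types                        # => {"network"=>0, "path_pattern"=>1, "rate_limit"=>2}
    Rule.new(waf_action: :redirect).redirect?  # => true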
@@ -24,7 +24,7 @@ class WafPoliciesController < ApplicationController

 # Set default values from URL parameters
 @waf_policy.policy_type = params[:policy_type] if params[:policy_type].present?
-@waf_policy.action = params[:action] if params[:action].present?
+@waf_policy.policy_action = params[:policy_action] if params[:policy_action].present?
 @waf_policy.targets = params[:targets] if params[:targets].present?
 end

@@ -37,9 +37,6 @@ class WafPoliciesController < ApplicationController
 @actions = WafPolicy::ACTIONS

 if @waf_policy.save
-# Trigger policy processing for existing network ranges
-ProcessWafPoliciesJob.perform_later(waf_policy_id: @waf_policy.id)
-
 redirect_to @waf_policy, notice: 'WAF policy was successfully created.'
 else
 render :new, status: :unprocessable_entity
@@ -64,11 +61,6 @@ class WafPoliciesController < ApplicationController
 @actions = WafPolicy::ACTIONS

 if @waf_policy.update(waf_policy_params)
-# Re-process policies for existing network ranges if policy was changed
-if @waf_policy.saved_change_to_targets? || @waf_policy.saved_change_to_action?
-ProcessWafPoliciesJob.reprocess_for_policy(@waf_policy)
-end
-
 redirect_to @waf_policy, notice: 'WAF policy was successfully updated.'
 else
 render :edit, status: :unprocessable_entity
@@ -89,9 +81,6 @@ class WafPoliciesController < ApplicationController
 def activate
 @waf_policy.activate!

-# Re-process policies for existing network ranges
-ProcessWafPoliciesJob.reprocess_for_policy(@waf_policy)
-
 redirect_to @waf_policy, notice: 'WAF policy was activated.'
 end

@@ -105,7 +94,7 @@ class WafPoliciesController < ApplicationController
 # GET /waf_policies/new_country
 def new_country
 authorize WafPolicy
-@waf_policy = WafPolicy.new(policy_type: 'country', action: 'deny')
+@waf_policy = WafPolicy.new(policy_type: 'country', policy_action: 'deny')
 @policy_types = WafPolicy::POLICY_TYPES
 @actions = WafPolicy::ACTIONS
 end
@@ -115,24 +104,28 @@ class WafPoliciesController < ApplicationController
 authorize WafPolicy

 countries = params[:countries]&.reject(&:blank?) || []
-action = params[:action] || 'deny'
+policy_action = params[:policy_action] || 'deny'

 if countries.empty?
 redirect_to new_country_waf_policies_path, alert: 'Please select at least one country.'
 return
 end

-@waf_policy = WafPolicy.create_country_policy(
-countries,
-action: action,
+# Build the options hash with additional_data if present
+options = {
+policy_action: policy_action,
 user: Current.user,
 description: params[:description]
-)
+}

+# Add additional_data if provided (for redirect/challenge actions)
+if params[:additional_data].present?
+options[:additional_data] = params[:additional_data].to_unsafe_hash
+end
+
+@waf_policy = WafPolicy.create_country_policy(countries, **options)
+
 if @waf_policy.persisted?
-# Trigger policy processing for existing network ranges
-ProcessWafPoliciesJob.reprocess_for_policy(@waf_policy)
-
 redirect_to @waf_policy, notice: "Country blocking policy was successfully created for #{countries.join(', ')}."
 else
 @policy_types = WafPolicy::POLICY_TYPES
@@ -144,18 +137,30 @@ class WafPoliciesController < ApplicationController
 private

 def set_waf_policy
-@waf_policy = WafPolicy.find(params[:id])
+# First try to find by ID (standard Rails behavior)
+if params[:id] =~ /^\d+$/
+@waf_policy = WafPolicy.find_by(id: params[:id])
+end
+
+# If not found by ID, try to find by parameterized name
+unless @waf_policy
+# Try direct parameterized comparison by parameterizing existing policy names
+@waf_policy = WafPolicy.all.find { |policy| policy.to_param == params[:id] }
+end
+
+if @waf_policy
 authorize @waf_policy
-rescue ActiveRecord::RecordNotFound
+else
 redirect_to waf_policies_path, alert: 'WAF policy not found.'
 end
+end

 def waf_policy_params
 params.require(:waf_policy).permit(
 :name,
 :description,
 :policy_type,
-:action,
+:policy_action,
 :enabled,
 :expires_at,
 targets: [],
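set_waf_policy above accepts either a numeric id or something that matches policy.to_param, which implies WafPolicy overrides to_param with a parameterized name. A sketch of that assumed override follows; the linear WafPolicy.all.find scan in the controller is reasonable only while the policies table stays small.

    class WafPolicy < ApplicationRecord
      # Assumed slug behaviour: URLs may carry the parameterized name instead of the id
      def to_param
        name.present? ? name.parameterize : id.to_s
      end
    end

    WafPolicy.new(name: "Block High Risk Countries").to_param  # => "block-high-risk-countries"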
@@ -139,4 +139,15 @@ module ApplicationHelper
 raw: user_agent
 }
 end
+
+# Convert country code to flag emoji
+def country_flag(country_code)
+return "" if country_code.blank?
+
+# Convert ISO 3166-1 alpha-2 country code to flag emoji
+# Each letter is converted to its regional indicator symbol
+country_code.upcase.chars.map { |c| (c.ord + 127397).chr(Encoding::UTF_8) }.join
+rescue
+""
+end
 end
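country_flag works by shifting each ASCII letter up by 127397, which lands on the Unicode regional indicator symbols (A maps to U+1F1E6), and a pair of regional indicators renders as a flag. For example:

    country_flag("US")  # => "🇺🇸"  ('U' 85 + 127397 => U+1F1FA, 'S' 83 + 127397 => U+1F1F8)
    country_flag("de")  # => "🇩🇪"  (upcase makes it case-insensitive)
    country_flag(nil)   # => ""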
@@ -38,7 +38,7 @@ export default class extends Controller {
 this.hideOptionalFields()

 // Show relevant fields based on rule type
-if (["path_pattern", "header_pattern", "query_pattern", "body_signature"].includes(ruleType)) {
+if (["path_pattern"].includes(ruleType)) {
 if (this.hasPatternFieldsTarget) {
 this.patternFieldsTarget.classList.remove("hidden")
 this.updatePatternHelpText(ruleType)
@@ -64,18 +64,6 @@ export default class extends Controller {
 path_pattern: {
 text: "Regex pattern to match URL paths (e.g.,\\.env$|wp-admin|phpmyadmin)",
 placeholder: "Example: \\.env$|\\.git|config\\.php|wp-admin"
-},
-header_pattern: {
-text: 'JSON with header_name and pattern (e.g., {"header_name": "User-Agent", "pattern": "bot.*"})',
-placeholder: 'Example: {"header_name": "User-Agent", "pattern": ".*[Bb]ot.*"}'
-},
-query_pattern: {
-text: "Regex pattern to match query parameters (e.g., union.*select|<script>)",
-placeholder: "Example: (?:union|select|insert|update|delete).*\\s+(?:union|select)"
-},
-body_signature: {
-text: "Regex pattern to match request body content (e.g., OR 1=1|<script>)",
-placeholder: "Example: (?:OR\\s+1\\s*=\\s*1|AND\\s+1\\s*=\\s*1|UNION\\s+SELECT)"
 }
 }

@@ -2,37 +2,74 @@ class FetchIpapiDataJob < ApplicationJob
 queue_as :default

 # Fetches IPAPI enrichment data for a NetworkRange
-# @param network_range_id [Integer] ID of the NetworkRange to enrich
+# @param network_range_id [Integer] ID of the tracking NetworkRange (usually /24)
 def perform(network_range_id:)
-network_range = NetworkRange.find_by(id: network_range_id)
-return unless network_range
+tracking_network = NetworkRange.find_by(id: network_range_id)
+return unless tracking_network

-# Skip if we already have IPAPI data and it's recent (< 30 days old)
-if network_range.has_network_data_from?(:ipapi) &&
-network_range.last_api_fetch.present? &&
-network_range.last_api_fetch > 30.days.ago
-Rails.logger.info "Skipping IPAPI fetch for #{network_range.cidr} - data is recent"
-return
-end
-
 # Use the network address (first IP in range) as the representative IP
-sample_ip = network_range.network_address.split('/').first
+sample_ip = tracking_network.network_address.split('/').first

-Rails.logger.info "Fetching IPAPI data for #{network_range.cidr} using IP #{sample_ip}"
+Rails.logger.info "Fetching IPAPI data for #{tracking_network.cidr} using IP #{sample_ip}"

 ipapi_data = Ipapi.lookup(sample_ip)

 if ipapi_data.present? && !ipapi_data.key?('error')
-network_range.set_network_data(:ipapi, ipapi_data)
-network_range.last_api_fetch = Time.current
-network_range.save!
+# Check if IPAPI returned a different route than our tracking network
+ipapi_route = ipapi_data.dig('asn', 'route')
+target_network = tracking_network

-Rails.logger.info "Successfully fetched IPAPI data for #{network_range.cidr}"
+if ipapi_route.present? && ipapi_route != tracking_network.cidr
+# IPAPI returned a different CIDR - find or create that network range
+Rails.logger.info "IPAPI returned different route: #{ipapi_route} (requested: #{tracking_network.cidr})"
+
+target_network = NetworkRange.find_or_create_by(network: ipapi_route) do |nr|
+nr.source = 'api_imported'
+nr.creation_reason = "Created from IPAPI lookup for #{tracking_network.cidr}"
+end
+
+Rails.logger.info "Storing IPAPI data on correct network: #{target_network.cidr}"
+end
+
+# Store data on the target network (wherever IPAPI said it belongs)
+target_network.set_network_data(:ipapi, ipapi_data)
+target_network.last_api_fetch = Time.current
+target_network.save!
+
+# Mark the tracking network as having been queried, with the CIDR that was returned
+tracking_network.mark_ipapi_queried!(target_network.cidr)
+
+Rails.logger.info "Successfully fetched IPAPI data for #{tracking_network.cidr} (stored on #{target_network.cidr})"
+
+# Broadcast to the tracking network
+broadcast_ipapi_update(tracking_network, ipapi_data)
 else
-Rails.logger.warn "IPAPI returned error for #{network_range.cidr}: #{ipapi_data}"
+Rails.logger.warn "IPAPI returned error for #{tracking_network.cidr}: #{ipapi_data}"
+# Still mark as queried to avoid retrying immediately
+tracking_network.mark_ipapi_queried!(tracking_network.cidr)
 end
 rescue => e
 Rails.logger.error "Failed to fetch IPAPI data for network_range #{network_range_id}: #{e.message}"
 Rails.logger.error e.backtrace.join("\n")
+ensure
+# Always clear the fetching status when done
+tracking_network&.clear_fetching_status!(:ipapi)
+end
+
+private
+
+def broadcast_ipapi_update(network_range, ipapi_data)
+# Broadcast to a stream specific to this network range
+Turbo::StreamsChannel.broadcast_replace_to(
+"network_range_#{network_range.id}",
+target: "ipapi_data_section",
+partial: "network_ranges/ipapi_data",
+locals: {
+ipapi_data: ipapi_data,
+network_range: network_range,
+parent_with_ipapi: nil,
+ipapi_loading: false
+}
+)
 end
 end
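The job now calls tracking_network.mark_ipapi_queried!(cidr) and tracking_network.clear_fetching_status!(:ipapi), and ProcessWafEventJob below gates enqueueing on NetworkRange.should_fetch_ipapi_for_ip?. None of these NetworkRange methods appear in this diff, so the following is only a guess at their shape, assuming an ipapi_queried_at timestamp (and an ipapi_returned_cidr string) on network_ranges:

    class NetworkRange < ApplicationRecord
      IPAPI_REFRESH_INTERVAL = 30.days

      # Hypothetical helpers referenced by FetchIpapiDataJob / ProcessWafEventJob
      def mark_ipapi_queried!(returned_cidr)
        update!(ipapi_queried_at: Time.current, ipapi_returned_cidr: returned_cidr)
      end

      def self.should_fetch_ipapi_for_ip?(ip)
        tracking = contains_ip(ip.to_s).first
        tracking.nil? ||
          tracking.ipapi_queried_at.nil? ||
          tracking.ipapi_queried_at < IPAPI_REFRESH_INTERVAL.ago
      end
    end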
@@ -89,12 +89,13 @@ class GeoliteAsnImportJob < ApplicationJob
 temp_file.write(file.read)
 end

-temp_file.close
+# Close but keep the file on disk (false prevents auto-deletion)
+temp_file.close(false)
 temp_file.path
 rescue => e
 Rails.logger.error "Error downloading file: #{e.message}"
 Rails.logger.error e.backtrace.join("\n")
-temp_file&.close
+temp_file&.close(false)
 temp_file&.unlink
 nil
 end
@@ -89,12 +89,13 @@ class GeoliteCountryImportJob < ApplicationJob
 temp_file.write(file.read)
 end

-temp_file.close
+# Close but keep the file on disk (false prevents auto-deletion)
+temp_file.close(false)
 temp_file.path
 rescue => e
 Rails.logger.error "Error downloading file: #{e.message}"
 Rails.logger.error e.backtrace.join("\n")
-temp_file&.close
+temp_file&.close(false)
 temp_file&.unlink
 nil
 end
@@ -10,11 +10,11 @@ class ProcessWafEventJob < ApplicationJob
 if event_data.key?('events') && event_data['events'].is_a?(Array)
 # Multiple events in an array
 events_to_process = event_data['events']
-elsif event_data.key?('event_id')
-# Single event
+elsif event_data.key?('request_id') || event_data.key?('event_id') || event_data.key?('correlation_id')
+# Single event (support new and old field names)
 events_to_process = [event_data]
 else
-Rails.logger.warn "Invalid event data format: missing event_id or events array"
+Rails.logger.warn "Invalid event data format: missing request_id/event_id/correlation_id or events array"
 return
 end

@@ -23,50 +23,70 @@ class ProcessWafEventJob < ApplicationJob
 event_start = Time.current

 # Generate unique event ID if not provided
-event_id = single_event_data['event_id'] || SecureRandom.uuid
+# Support both new (request_id) and old (event_id, correlation_id) field names during cutover
+request_id = single_event_data['request_id'] ||
+single_event_data['event_id'] ||
+single_event_data['correlation_id'] ||
+SecureRandom.uuid
+
+# Skip if event already exists (duplicate in batch or retry)
+if Event.exists?(request_id: request_id)
+Rails.logger.debug "Skipping duplicate event #{request_id}"
+next
+end
+
 # Create the WAF event record
 create_start = Time.current
-event = Event.create_from_waf_payload!(event_id, single_event_data)
+event = Event.create_from_waf_payload!(request_id, single_event_data)
 Rails.logger.debug "Event creation took #{((Time.current - create_start) * 1000).round(2)}ms"

-# Ensure network range exists for this IP and evaluate policies if needed
-if event.ip_address.present?
+# Process network intelligence and policies
+# Note: Event.before_save already created the /24 tracking network
+# and stored it in event.network_range_id
+if event.network_range_id.present?
 begin
 network_start = Time.current
-# Single lookup instead of checking has_geo_data? then querying again
-existing_range = NetworkRange.contains_ip(event.ip_address.to_s).first
-network_range = existing_range || NetworkRangeGenerator.find_or_create_for_ip(event.ip_address)
-Rails.logger.debug "Network range lookup/creation took #{((Time.current - network_start) * 1000).round(2)}ms"
+# The tracking network was already created in Event.before_save
+tracking_network = event.network_range
+Rails.logger.debug "Using tracking network #{tracking_network.cidr} (created in before_save)"

-if network_range
-Rails.logger.debug "Network range #{network_range.cidr} for event IP #{event.ip_address}"
-# Queue IPAPI enrichment if we don't have it yet
-unless network_range.has_network_data_from?(:ipapi)
-Rails.logger.info "Queueing IPAPI fetch for #{network_range.cidr}"
-FetchIpapiDataJob.perform_later(network_range_id: network_range.id)
+# Queue IPAPI enrichment based on /24 tracking
+# The tracking network is the /24 that stores ipapi_queried_at
+if NetworkRange.should_fetch_ipapi_for_ip?(event.ip_address)
+# Use tracking network for fetch status to avoid race conditions
+if tracking_network.is_fetching_api_data?(:ipapi)
+Rails.logger.info "Skipping IPAPI fetch for #{tracking_network.cidr} - already being fetched"
+else
+tracking_network.mark_as_fetching_api_data!(:ipapi)
+Rails.logger.info "Queueing IPAPI fetch for IP #{event.ip_address} (tracking network: #{tracking_network.cidr})"
+FetchIpapiDataJob.perform_later(network_range_id: tracking_network.id)
+end
+else
+Rails.logger.debug "Skipping IPAPI fetch for IP #{event.ip_address} - already queried recently"
 end

 # Evaluate WAF policies inline if needed (lazy evaluation)
 # Only runs when: network never evaluated OR policies changed since last evaluation
-if network_range.needs_policy_evaluation?
+if tracking_network.needs_policy_evaluation?
 policy_start = Time.current
-result = WafPolicyMatcher.evaluate_and_mark!(network_range)
+result = WafPolicyMatcher.evaluate_and_mark!(tracking_network)
 Rails.logger.debug "Policy evaluation took #{((Time.current - policy_start) * 1000).round(2)}ms"

 if result[:generated_rules].any?
-Rails.logger.info "Generated #{result[:generated_rules].length} rules for #{network_range.cidr}"
-end
-end
-end
-rescue => e
-Rails.logger.warn "Failed to process network range for event #{event.id}: #{e.message}"
+Rails.logger.info "Generated #{result[:generated_rules].length} rules for #{tracking_network.cidr}"
 end
 end

+Rails.logger.debug "Network processing took #{((Time.current - network_start) * 1000).round(2)}ms"
+rescue => e
+Rails.logger.warn "Failed to process network range for event #{event.id}: #{e.message}"
+end
+elsif event.ip_address.present?
+Rails.logger.warn "Event #{event.id} has IP but no network_range_id (private IP?)"
+end
+
 total_time = ((Time.current - event_start) * 1000).round(2)
-Rails.logger.info "Processed WAF event #{event_id} in #{total_time}ms"
+Rails.logger.info "Processed WAF event #{request_id} in #{total_time}ms"
 rescue ActiveRecord::RecordInvalid => e
 Rails.logger.error "Failed to create WAF event: #{e.message}"
 Rails.logger.error e.record.errors.full_messages.join(", ")
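The rewritten job leans on an Event.before_save callback (referenced in the comments above but not included in this diff) that resolves the /24 tracking network and fills in network_range_id plus the denormalized columns. A rough sketch of what such a callback could look like, with the helper names assumed:

    class Event < ApplicationRecord
      belongs_to :network_range, optional: true

      before_save :assign_tracking_network, if: -> { ip_address.present? && network_range_id.nil? }

      private

      # Assumed callback: find (or create) the covering network range and copy its
      # intelligence columns onto the event so analytics can skip the containment JOIN
      def assign_tracking_network
        tracking = NetworkRange.contains_ip(ip_address.to_s).first ||
                   NetworkRangeGenerator.find_or_create_for_ip(ip_address)
        return unless tracking

        self.network_range = tracking
        self.country       = tracking.country
        self.company       = tracking.company
        self.asn           = tracking.asn
        self.asn_org       = tracking.asn_org
        self.is_datacenter = tracking.is_datacenter
        self.is_vpn        = tracking.is_vpn
        self.is_proxy      = tracking.is_proxy
      end
    end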
@@ -9,9 +9,8 @@ class ProcessWafPoliciesJob < ApplicationJob
|
|||||||
|
|
||||||
retry_on StandardError, wait: 5.seconds, attempts: 3
|
retry_on StandardError, wait: 5.seconds, attempts: 3
|
||||||
|
|
||||||
def perform(network_range_id:, event_id: nil)
|
def perform(network_range:, event: nil)
|
||||||
# Find the network range
|
# network_range and event are passed as Global IDs and automatically deserialized
|
||||||
network_range = NetworkRange.find_by(id: network_range_id)
|
|
||||||
return if network_range.nil?
|
return if network_range.nil?
|
||||||
|
|
||||||
Rails.logger.debug "Processing WAF policies for network range #{network_range.cidr}"
|
Rails.logger.debug "Processing WAF policies for network range #{network_range.cidr}"
|
||||||
@@ -55,8 +54,6 @@ class ProcessWafPoliciesJob < ApplicationJob
|
|||||||
network_range.update_column(:policies_evaluated_at, Time.current)
|
network_range.update_column(:policies_evaluated_at, Time.current)
|
||||||
|
|
||||||
# Update event record if provided
|
# Update event record if provided
|
||||||
if event_id.present?
|
|
||||||
event = Event.find_by(id: event_id)
|
|
||||||
if event.present?
|
if event.present?
|
||||||
# Add policy match information to event metadata
|
# Add policy match information to event metadata
|
||||||
# Handle potential nil payload or type issues
|
# Handle potential nil payload or type issues
|
||||||
@@ -64,7 +61,7 @@ class ProcessWafPoliciesJob < ApplicationJob
|
|||||||
|
|
||||||
# Ensure payload is a hash before merging
|
# Ensure payload is a hash before merging
|
||||||
unless current_payload.is_a?(Hash)
|
unless current_payload.is_a?(Hash)
|
||||||
Rails.logger.warn "Event #{event_id} has invalid payload type: #{current_payload.class}, resetting to hash"
|
Rails.logger.warn "Event #{event.id} has invalid payload type: #{current_payload.class}, resetting to hash"
|
||||||
current_payload = {}
|
current_payload = {}
|
||||||
end
|
end
|
||||||
|
|
||||||
@@ -75,16 +72,14 @@ class ProcessWafPoliciesJob < ApplicationJob
|
|||||||
processed_at: Time.current.iso8601
|
processed_at: Time.current.iso8601
|
||||||
}
|
}
|
||||||
}))
|
}))
|
||||||
else
|
|
||||||
Rails.logger.warn "Event #{event_id} not found for ProcessWafPoliciesJob, skipping update"
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
# Class method for batch processing multiple network ranges
|
# Class method for batch processing multiple network ranges
|
||||||
def self.process_network_ranges(network_range_ids)
|
def self.process_network_ranges(network_range_ids)
|
||||||
network_range_ids.each do |network_range_id|
|
network_range_ids.each do |network_range_id|
|
||||||
perform_later(network_range_id: network_range_id)
|
network_range = NetworkRange.find_by(id: network_range_id)
|
||||||
|
perform_later(network_range: network_range) if network_range
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
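# --- Illustrative sketch, not part of the diff: ActiveJob serializes the
# ActiveRecord keyword arguments as Global IDs and re-fetches the records at
# perform time. The CIDR, record id, and app name below are assumptions.
range = NetworkRange.find_by(network: "203.0.113.0/24")
range.to_global_id.to_s          # => "gid://baffle/NetworkRange/42"

# Only the GID string is stored in the queue payload; if the record is deleted
# before the job runs, deserialization raises ActiveJob::DeserializationError
# rather than handing #perform a nil network_range.
ProcessWafPoliciesJob.perform_later(network_range: range)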
|
|
||||||
@@ -109,7 +104,7 @@ class ProcessWafPoliciesJob < ApplicationJob
|
|||||||
Rails.logger.info "Reprocessing #{network_ranges.count} network ranges for policy #{waf_policy_id}"
|
Rails.logger.info "Reprocessing #{network_ranges.count} network ranges for policy #{waf_policy_id}"
|
||||||
|
|
||||||
network_ranges.find_each do |network_range|
|
network_ranges.find_each do |network_range|
|
||||||
perform_later(network_range_id: network_range.id)
|
perform_later(network_range: network_range)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
@@ -4,6 +4,10 @@ class Event < ApplicationRecord
|
|||||||
# Normalized association for hosts (most valuable compression)
|
# Normalized association for hosts (most valuable compression)
|
||||||
belongs_to :request_host, optional: true
|
belongs_to :request_host, optional: true
|
||||||
|
|
||||||
|
# WAF rule associations
|
||||||
|
belongs_to :rule, optional: true
|
||||||
|
has_one :waf_policy, through: :rule
|
||||||
|
|
||||||
# Enums for fixed value sets
|
# Enums for fixed value sets
|
||||||
enum :waf_action, {
|
enum :waf_action, {
|
||||||
allow: 0, # allow/pass
|
allow: 0, # allow/pass
|
||||||
@@ -29,7 +33,7 @@ class Event < ApplicationRecord
|
|||||||
# This provides direct array access and efficient indexing
|
# This provides direct array access and efficient indexing
|
||||||
attribute :tags, :json, default: -> { [] }
|
attribute :tags, :json, default: -> { [] }
|
||||||
|
|
||||||
validates :event_id, presence: true, uniqueness: true
|
validates :request_id, presence: true, uniqueness: true
|
||||||
validates :timestamp, presence: true
|
validates :timestamp, presence: true
|
||||||
|
|
||||||
scope :recent, -> { order(timestamp: :desc) }
|
scope :recent, -> { order(timestamp: :desc) }
|
||||||
@@ -55,32 +59,42 @@ class Event < ApplicationRecord
|
|||||||
where("tags @> ARRAY[?]", tag_array)
|
where("tags @> ARRAY[?]", tag_array)
|
||||||
}
|
}
|
||||||
|
|
||||||
# Network-based filtering scopes
|
# Network-based filtering scopes - now using denormalized columns
|
||||||
scope :by_company, ->(company) {
|
scope :by_company, ->(company) {
|
||||||
joins("JOIN network_ranges ON events.ip_address <<= network_ranges.network")
|
where("company ILIKE ?", "%#{company}%")
|
||||||
.where("network_ranges.company ILIKE ?", "%#{company}%")
|
}
|
||||||
|
|
||||||
|
scope :by_country, ->(country) {
|
||||||
|
where(country: country)
|
||||||
}
|
}
|
||||||
|
|
||||||
scope :by_network_type, ->(type) {
|
scope :by_network_type, ->(type) {
|
||||||
joins("JOIN network_ranges ON events.ip_address <<= network_ranges.network")
|
case type.to_s.downcase
|
||||||
.case(type)
|
when "datacenter"
|
||||||
.when("datacenter") { where("network_ranges.is_datacenter = ?", true) }
|
where(is_datacenter: true)
|
||||||
.when("vpn") { where("network_ranges.is_vpn = ?", true) }
|
when "vpn"
|
||||||
.when("proxy") { where("network_ranges.is_proxy = ?", true) }
|
where(is_vpn: true)
|
||||||
.when("standard") { where("network_ranges.is_datacenter = ? AND network_ranges.is_vpn = ? AND network_ranges.is_proxy = ?", false, false, false) }
|
when "proxy"
|
||||||
.else { none }
|
where(is_proxy: true)
|
||||||
|
when "standard"
|
||||||
|
where(is_datacenter: false, is_vpn: false, is_proxy: false)
|
||||||
|
else
|
||||||
|
none
|
||||||
|
end
|
||||||
}
|
}
|
||||||
|
|
||||||
scope :by_asn, ->(asn) {
|
scope :by_asn, ->(asn) {
|
||||||
joins("JOIN network_ranges ON events.ip_address <<= network_ranges.network")
|
where(asn: asn.to_i)
|
||||||
.where("network_ranges.asn = ?", asn.to_i)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
scope :by_network_cidr, ->(cidr) {
|
scope :by_network_cidr, ->(cidr) {
|
||||||
joins("JOIN network_ranges ON events.ip_address <<= network_ranges.network")
|
# This still requires a join since we need to match CIDR
|
||||||
.where("network_ranges.network = ?", cidr)
|
joins(:network_range).where("network_ranges.network = ?", cidr)
|
||||||
}
|
}
|
||||||
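# --- Illustrative sketch, not part of the diff: chaining the denormalized
# scopes. The values are assumptions; only by_network_cidr still joins,
# because it has to match against the inet column on network_ranges.
Event.by_country("BR").by_network_type(:datacenter).recent.limit(50)
Event.by_company("amazon").by_asn(16509).count
Event.by_network_cidr("203.0.113.0/24").count   # the one scope that joins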
|
|
||||||
|
# Add association for the optional network_range_id
|
||||||
|
belongs_to :network_range, optional: true
|
||||||
|
|
||||||
# Path prefix matching using range queries (uses B-tree index efficiently)
|
# Path prefix matching using range queries (uses B-tree index efficiently)
|
||||||
scope :with_path_prefix, ->(prefix_segment_ids) {
|
scope :with_path_prefix, ->(prefix_segment_ids) {
|
||||||
return none if prefix_segment_ids.blank?
|
return none if prefix_segment_ids.blank?
|
||||||
@@ -130,13 +144,39 @@ class Event < ApplicationRecord
|
|||||||
# Normalize event fields after extraction
|
# Normalize event fields after extraction
|
||||||
after_validation :normalize_event_fields, if: :should_normalize?
|
after_validation :normalize_event_fields, if: :should_normalize?
|
||||||
|
|
||||||
def self.create_from_waf_payload!(event_id, payload)
|
# Populate network intelligence from IP address
|
||||||
|
before_save :populate_network_intelligence, if: :should_populate_network_intelligence?
|
||||||
|
|
||||||
|
# Backfill network intelligence for all events
|
||||||
|
def self.backfill_network_intelligence!(batch_size: 10_000)
|
||||||
|
total = where(country: nil).count
|
||||||
|
return 0 if total.zero?
|
||||||
|
|
||||||
|
puts "Backfilling network intelligence for #{total} events..."
|
||||||
|
processed = 0
|
||||||
|
|
||||||
|
where(country: nil).find_in_batches(batch_size: batch_size) do |batch|
|
||||||
|
batch.each(&:save) # Triggers before_save callback
|
||||||
|
processed += batch.size
|
||||||
|
puts " Processed #{processed}/#{total} (#{(processed.to_f / total * 100).round(1)}%)"
|
||||||
|
end
|
||||||
|
|
||||||
|
processed
|
||||||
|
end
|
||||||
|
|
||||||
|
# Backfill network intelligence for a specific event
|
||||||
|
def backfill_network_intelligence!
|
||||||
|
populate_network_intelligence
|
||||||
|
save!
|
||||||
|
end
|
||||||
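# --- Illustrative sketch, not part of the diff: running the backfill from a
# console or a one-off task. The batch size and request_id are assumptions.
Event.backfill_network_intelligence!(batch_size: 1_000)
# returns the number of events whose denormalized country/company/asn columns were filled

# Or repair a single record after its network range has been enriched:
event = Event.find_by(request_id: "req-123")
event&.backfill_network_intelligence!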
|
|
||||||
|
def self.create_from_waf_payload!(request_id, payload)
|
||||||
# Normalize headers in payload during import phase
|
# Normalize headers in payload during import phase
|
||||||
normalized_payload = normalize_payload_headers(payload)
|
normalized_payload = normalize_payload_headers(payload)
|
||||||
|
|
||||||
# Create the WAF request event
|
# Create the WAF request event
|
||||||
create!(
|
create!(
|
||||||
event_id: event_id,
|
request_id: request_id,
|
||||||
timestamp: parse_timestamp(normalized_payload["timestamp"]),
|
timestamp: parse_timestamp(normalized_payload["timestamp"]),
|
||||||
payload: normalized_payload,
|
payload: normalized_payload,
|
||||||
|
|
||||||
@@ -150,7 +190,8 @@ class Event < ApplicationRecord
|
|||||||
response_status: normalized_payload.dig("response", "status_code"),
|
response_status: normalized_payload.dig("response", "status_code"),
|
||||||
response_time_ms: normalized_payload.dig("response", "duration_ms"),
|
response_time_ms: normalized_payload.dig("response", "duration_ms"),
|
||||||
waf_action: normalize_action(normalized_payload["waf_action"]), # Normalize incoming action values
|
waf_action: normalize_action(normalized_payload["waf_action"]), # Normalize incoming action values
|
||||||
rule_matched: normalized_payload["rule_matched"],
|
# Support both new (rule_id) and old (rule_matched) field names during cutover
|
||||||
|
rule_id: normalized_payload["rule_id"] || normalized_payload["rule_matched"],
|
||||||
blocked_reason: normalized_payload["blocked_reason"],
|
blocked_reason: normalized_payload["blocked_reason"],
|
||||||
|
|
||||||
# Server/Environment info
|
# Server/Environment info
|
||||||
@@ -283,7 +324,7 @@ class Event < ApplicationRecord
|
|||||||
end
|
end
|
||||||
|
|
||||||
def rule_matched?
|
def rule_matched?
|
||||||
rule_matched.present?
|
rule_id.present?
|
||||||
end
|
end
|
||||||
|
|
||||||
# New path methods for normalization
|
# New path methods for normalization
|
||||||
@@ -343,40 +384,39 @@ class Event < ApplicationRecord
|
|||||||
end
|
end
|
||||||
|
|
||||||
def most_specific_range
|
def most_specific_range
|
||||||
matching_network_ranges.first
|
# Use the cached network_range_id if available (much faster)
|
||||||
|
return NetworkRange.find_by(id: network_range_id) if network_range_id.present?
|
||||||
|
|
||||||
|
# Fallback to expensive lookup
|
||||||
|
matching_network_ranges.first&.dig(:range)
|
||||||
end
|
end
|
||||||
|
|
||||||
def broadest_range
|
def broadest_range
|
||||||
matching_network_ranges.last
|
matching_network_ranges.last&.dig(:range)
|
||||||
end
|
end
|
||||||
|
|
||||||
def network_intelligence
|
def network_intelligence
|
||||||
most_specific_range&.dig(:intelligence) || {}
|
# Use denormalized fields instead of expensive lookup
|
||||||
|
{
|
||||||
|
country: country,
|
||||||
|
company: company,
|
||||||
|
asn: asn,
|
||||||
|
asn_org: asn_org,
|
||||||
|
is_datacenter: is_datacenter,
|
||||||
|
is_vpn: is_vpn,
|
||||||
|
is_proxy: is_proxy
|
||||||
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
def company
|
# Denormalized attribute accessors - these now use the columns directly
|
||||||
network_intelligence[:company]
|
# No need to override - Rails provides these automatically:
|
||||||
end
|
# - country (column)
|
||||||
|
# - company (column)
|
||||||
def asn
|
# - asn (column)
|
||||||
network_intelligence[:asn]
|
# - asn_org (column)
|
||||||
end
|
# - is_datacenter (column)
|
||||||
|
# - is_vpn (column)
|
||||||
def asn_org
|
# - is_proxy (column)
|
||||||
network_intelligence[:asn_org]
|
|
||||||
end
|
|
||||||
|
|
||||||
def is_datacenter?
|
|
||||||
network_intelligence[:is_datacenter] || false
|
|
||||||
end
|
|
||||||
|
|
||||||
def is_proxy?
|
|
||||||
network_intelligence[:is_proxy] || false
|
|
||||||
end
|
|
||||||
|
|
||||||
def is_vpn?
|
|
||||||
network_intelligence[:is_vpn] || false
|
|
||||||
end
|
|
||||||
|
|
||||||
# IP validation
|
# IP validation
|
||||||
def valid_ipv4?
|
def valid_ipv4?
|
||||||
@@ -480,7 +520,8 @@ class Event < ApplicationRecord
|
|||||||
self.request_url = request_data["url"]
|
self.request_url = request_data["url"]
|
||||||
self.response_status = response_data["status_code"]
|
self.response_status = response_data["status_code"]
|
||||||
self.response_time_ms = response_data["duration_ms"]
|
self.response_time_ms = response_data["duration_ms"]
|
||||||
self.rule_matched = payload["rule_matched"]
|
# Support both new (rule_id) and old (rule_matched) field names during cutover
|
||||||
|
self.rule_id = payload["rule_id"] || payload["rule_matched"]
|
||||||
self.blocked_reason = payload["blocked_reason"]
|
self.blocked_reason = payload["blocked_reason"]
|
||||||
|
|
||||||
# Store original values for normalization only if they don't exist yet
|
# Store original values for normalization only if they don't exist yet
|
||||||
|
|||||||
@@ -116,7 +116,7 @@ class NetworkRange < ApplicationRecord
|
|||||||
|
|
||||||
# Parent/child relationships
|
# Parent/child relationships
|
||||||
def parent_ranges
|
def parent_ranges
|
||||||
NetworkRange.where("network << ?::inet AND masklen(network) < ?", network.to_s, prefix_length)
|
NetworkRange.where("?::inet << network AND masklen(network) < ?", network.to_s, prefix_length)
|
||||||
.order("masklen(network) DESC")
|
.order("masklen(network) DESC")
|
||||||
end
|
end
|
||||||
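# --- Illustrative sketch, not part of the diff: the corrected operand order.
# In Postgres `a << b` means "a is strictly contained within b", so the
# parents of a /24 are the broader ranges that contain it, most specific
# first. The CIDRs below are assumptions.
child = NetworkRange.find_by(network: "10.1.2.0/24")
child.parent_ranges.pluck(:network)   # => ["10.1.0.0/16", "10.0.0.0/8"]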
|
|
||||||
@@ -142,6 +142,59 @@ class NetworkRange < ApplicationRecord
|
|||||||
.first
|
.first
|
||||||
end
|
end
|
||||||
|
|
||||||
|
# Check if this network or any parent has IPAPI data
|
||||||
|
def has_ipapi_data_available?
|
||||||
|
return true if has_network_data_from?(:ipapi)
|
||||||
|
|
||||||
|
parent_ranges.any? { |parent| parent.has_network_data_from?(:ipapi) }
|
||||||
|
end
|
||||||
|
|
||||||
|
# Generic API fetching status management
|
||||||
|
def is_fetching_api_data?(source)
|
||||||
|
fetching_status = network_data&.dig('fetching_status') || {}
|
||||||
|
fetching_status[source.to_s] &&
|
||||||
|
fetching_status[source.to_s]['started_at'] &&
|
||||||
|
fetching_status[source.to_s]['started_at'] > 5.minutes.ago.to_f
|
||||||
|
end
|
||||||
|
|
||||||
|
def mark_as_fetching_api_data!(source)
|
||||||
|
self.network_data ||= {}
|
||||||
|
self.network_data['fetching_status'] ||= {}
|
||||||
|
self.network_data['fetching_status'][source.to_s] = {
|
||||||
|
'started_at' => Time.current.to_f,
|
||||||
|
'job_id' => SecureRandom.hex(8)
|
||||||
|
}
|
||||||
|
save!
|
||||||
|
end
|
||||||
|
|
||||||
|
def clear_fetching_status!(source)
|
||||||
|
if network_data&.dig('fetching_status')&.dig(source.to_s)
|
||||||
|
self.network_data['fetching_status'].delete(source.to_s)
|
||||||
|
# Clean up empty fetching_status hash
|
||||||
|
self.network_data.delete('fetching_status') if self.network_data['fetching_status'].empty?
|
||||||
|
save!
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Check if we should fetch API data (not available and not currently being fetched)
|
||||||
|
def should_fetch_api_data?(source)
|
||||||
|
return false if send("has_network_data_from?(#{source})") if respond_to?("has_network_data_from?(#{source})")
|
||||||
|
return false if is_fetching_api_data?(source)
|
||||||
|
|
||||||
|
true
|
||||||
|
end
|
||||||
|
|
||||||
|
# Check if this network or any parent has IPAPI data available and no active fetch
|
||||||
|
def should_fetch_ipapi_data?
|
||||||
|
return false if has_ipapi_data_available?
|
||||||
|
return false if is_fetching_api_data?(:ipapi)
|
||||||
|
|
||||||
|
# Also check if any parent is currently fetching IPAPI data
|
||||||
|
return false if parent_ranges.any? { |parent| parent.is_fetching_api_data?(:ipapi) }
|
||||||
|
|
||||||
|
true
|
||||||
|
end
|
||||||
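# --- Illustrative sketch, not part of the diff: the guard-then-mark pattern
# that keeps two workers from querying the same source for the same range
# inside the 5-minute staleness window. The CIDR is an assumption.
range = NetworkRange.find_or_create_by_cidr("198.51.100.0/24")

if range.should_fetch_api_data?(:ipapi)
  range.mark_as_fetching_api_data!(:ipapi)   # records started_at + a job marker
  FetchIpapiDataJob.perform_later(network_range_id: range.id)
end

# Inside the job, once the API call has succeeded (or permanently failed):
range.clear_fetching_status!(:ipapi)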
|
|
||||||
def inherited_intelligence
|
def inherited_intelligence
|
||||||
return own_intelligence if has_intelligence?
|
return own_intelligence if has_intelligence?
|
||||||
|
|
||||||
@@ -168,6 +221,12 @@ class NetworkRange < ApplicationRecord
|
|||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def agent_tally
|
||||||
|
# Rails.cache.fetch("#{to_s}:agent_tally", expires_in: 5.minutes) do
|
||||||
|
events.map(&:user_agent).tally
|
||||||
|
# end
|
||||||
|
end
|
||||||
|
|
||||||
# Geographic lookup
|
# Geographic lookup
|
||||||
def geo_lookup_country!
|
def geo_lookup_country!
|
||||||
return if country.present?
|
return if country.present?
|
||||||
@@ -189,6 +248,12 @@ class NetworkRange < ApplicationRecord
|
|||||||
where("network && ?", range_cidr)
|
where("network && ?", range_cidr)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def self.findd(cidr)
|
||||||
|
cidr = cidr.gsub("_", "/")
|
||||||
|
cidr = "#{cidr}/24" unless cidr.include?("/")
|
||||||
|
find_by(network: cidr)
|
||||||
|
end
|
||||||
|
|
||||||
def self.find_or_create_by_cidr(cidr, user: nil, source: nil, reason: nil)
|
def self.find_or_create_by_cidr(cidr, user: nil, source: nil, reason: nil)
|
||||||
find_or_create_by(network: cidr) do |range|
|
find_or_create_by(network: cidr) do |range|
|
||||||
range.user = user
|
range.user = user
|
||||||
@@ -246,6 +311,63 @@ class NetworkRange < ApplicationRecord
|
|||||||
network_data&.key?(source.to_s) && network_data[source.to_s].present?
|
network_data&.key?(source.to_s) && network_data[source.to_s].present?
|
||||||
end
|
end
|
||||||
|
|
||||||
|
# IPAPI tracking at /24 granularity
|
||||||
|
# Find or create the /24 network for a given IP address
|
||||||
|
def self.find_or_create_tracking_network_for_ip(ip_address)
|
||||||
|
ip = IPAddr.new(ip_address.to_s)
|
||||||
|
|
||||||
|
# Create /24 network for IPv4, /64 for IPv6
|
||||||
|
tracking_cidr = if ip.ipv4?
|
||||||
|
"#{ip.mask(24)}/24"
|
||||||
|
else
|
||||||
|
"#{ip.mask(64)}/64"
|
||||||
|
end
|
||||||
|
|
||||||
|
find_or_create_by(network: tracking_cidr) do |range|
|
||||||
|
range.source = 'auto_generated'
|
||||||
|
range.creation_reason = 'IPAPI tracking network'
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Check if we should fetch IPAPI data for a given IP address
|
||||||
|
# Uses /24 networks as the tracking unit
|
||||||
|
def self.should_fetch_ipapi_for_ip?(ip_address)
|
||||||
|
tracking_network = find_or_create_tracking_network_for_ip(ip_address)
|
||||||
|
|
||||||
|
# Check if /24 has been queried recently
|
||||||
|
queried_at = tracking_network.network_data&.dig('ipapi_queried_at')
|
||||||
|
return true if queried_at.nil?
|
||||||
|
|
||||||
|
# Check if IPAPI returned a CIDR that actually covers this IP
|
||||||
|
# (handles edge case where IPAPI returns /25 or more specific)
|
||||||
|
returned_cidr = tracking_network.network_data&.dig('ipapi_returned_cidr')
|
||||||
|
if returned_cidr.present?
|
||||||
|
begin
|
||||||
|
returned_range = IPAddr.new(returned_cidr)
|
||||||
|
ip = IPAddr.new(ip_address.to_s)
|
||||||
|
# If the IP is NOT covered by what IPAPI returned, fetch again
|
||||||
|
return true unless returned_range.include?(ip)
|
||||||
|
rescue IPAddr::InvalidAddressError => e
|
||||||
|
Rails.logger.warn "Invalid CIDR stored in ipapi_returned_cidr: #{returned_cidr}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Re-query after 1 year
|
||||||
|
Time.at(queried_at) < 1.year.ago
|
||||||
|
rescue => e
|
||||||
|
Rails.logger.error "Error checking IPAPI fetch status for #{ip_address}: #{e.message}"
|
||||||
|
true # Default to fetching on error
|
||||||
|
end
|
||||||
|
|
||||||
|
# Mark that we've queried IPAPI for this /24 network
|
||||||
|
# @param returned_cidr [String] The CIDR that IPAPI actually returned (may be more specific than /24)
|
||||||
|
def mark_ipapi_queried!(returned_cidr)
|
||||||
|
self.network_data ||= {}
|
||||||
|
self.network_data['ipapi_queried_at'] = Time.current.to_i
|
||||||
|
self.network_data['ipapi_returned_cidr'] = returned_cidr
|
||||||
|
save!
|
||||||
|
end
|
||||||
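# --- Illustrative sketch, not part of the diff: the /24 tracking flow end to
# end. The IP addresses and the CIDR returned by IPAPI are assumptions.
ip = "203.0.113.57"

if NetworkRange.should_fetch_ipapi_for_ip?(ip)
  tracking = NetworkRange.find_or_create_tracking_network_for_ip(ip)  # 203.0.113.0/24
  # ...call IPAPI here, then record what it actually returned:
  tracking.mark_ipapi_queried!("203.0.113.0/25")
end

# An IP in the other half of the /24 falls outside the stored /25, so the
# check returns true again and a fresh fetch is triggered for that half.
NetworkRange.should_fetch_ipapi_for_ip?("203.0.113.200")   # => true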
|
|
||||||
# String representations
|
# String representations
|
||||||
def to_s
|
def to_s
|
||||||
cidr
|
cidr
|
||||||
@@ -261,10 +383,12 @@ class NetworkRange < ApplicationRecord
|
|||||||
self[:events_count] || 0
|
self[:events_count] || 0
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def events
|
||||||
|
Event.where("ip_address <<= ?", cidr)
|
||||||
|
end
|
||||||
|
|
||||||
def recent_events(limit: 100)
|
def recent_events(limit: 100)
|
||||||
Event.where(ip_address: child_ranges.pluck(:network_address) + [network_address])
|
events.recent.limit(limit)
|
||||||
.recent
|
|
||||||
.limit(limit)
|
|
||||||
end
|
end
|
||||||
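# --- Illustrative sketch, not part of the diff: what `<<=` ("is contained
# within or equal to") buys us now that events keep a raw inet ip_address.
# The CIDR is an assumption.
range = NetworkRange.find_by(network: "198.51.100.0/24")
range.events.count              # WHERE ip_address <<= '198.51.100.0/24'
range.recent_events(limit: 10)  # newest 10 events inside the range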
|
|
||||||
def blocking_rules
|
def blocking_rules
|
||||||
|
|||||||
@@ -5,7 +5,11 @@
|
|||||||
# Rules define actions to take for matching traffic conditions.
|
# Rules define actions to take for matching traffic conditions.
|
||||||
# Network rules are associated with NetworkRange objects for rich context.
|
# Network rules are associated with NetworkRange objects for rich context.
|
||||||
class Rule < ApplicationRecord
|
class Rule < ApplicationRecord
|
||||||
# Rule types and actions
|
# Rule enums
|
||||||
|
enum :waf_action, { allow: 0, deny: 1, rate_limit: 2, redirect: 3, log: 4, challenge: 5 }, scopes: false, prefix: true
|
||||||
|
enum :waf_rule_type, { network: 0, rate_limit: 1, path_pattern: 2 }, scopes: false, prefix: true
|
||||||
|
|
||||||
|
# Legacy string constants for backward compatibility
|
||||||
RULE_TYPES = %w[network rate_limit path_pattern].freeze
|
RULE_TYPES = %w[network rate_limit path_pattern].freeze
|
||||||
ACTIONS = %w[allow deny rate_limit redirect log challenge].freeze
|
ACTIONS = %w[allow deny rate_limit redirect log challenge].freeze
|
||||||
SOURCES = %w[manual auto:scanner_detected auto:rate_limit_exceeded auto:bot_detected imported default manual:surgical_block manual:surgical_exception policy].freeze
|
SOURCES = %w[manual auto:scanner_detected auto:rate_limit_exceeded auto:bot_detected imported default manual:surgical_block manual:surgical_exception policy].freeze
|
||||||
@@ -14,14 +18,42 @@ class Rule < ApplicationRecord
|
|||||||
belongs_to :user
|
belongs_to :user
|
||||||
belongs_to :network_range, optional: true
|
belongs_to :network_range, optional: true
|
||||||
belongs_to :waf_policy, optional: true
|
belongs_to :waf_policy, optional: true
|
||||||
|
has_many :events, dependent: :nullify
|
||||||
|
|
||||||
|
# Backward compatibility accessors for transition period
|
||||||
|
def action
|
||||||
|
waf_action
|
||||||
|
end
|
||||||
|
|
||||||
|
def action=(value)
|
||||||
|
self.waf_action = value
|
||||||
|
self[:action] = value # Also set the legacy column
|
||||||
|
end
|
||||||
|
|
||||||
|
def rule_type
|
||||||
|
waf_rule_type
|
||||||
|
end
|
||||||
|
|
||||||
|
def rule_type=(value)
|
||||||
|
self.waf_rule_type = value
|
||||||
|
self[:rule_type] = value # Also set the legacy column
|
||||||
|
end
|
||||||
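# --- Illustrative sketch, not part of the diff: transition-period behaviour
# of the compatibility accessors. Attribute values are assumptions.
rule = Rule.new(waf_rule_type: "network", waf_action: "deny")

rule.rule_type                 # => "network"   (delegates to waf_rule_type)
rule.action                    # => "deny"      (delegates to waf_action)

rule.action = "challenge"      # writes the enum *and* the legacy string column
rule.waf_action                # => "challenge"
rule.read_attribute(:action)   # => "challenge"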
|
|
||||||
# Validations
|
# Validations
|
||||||
validates :rule_type, presence: true, inclusion: { in: RULE_TYPES }
|
validates :waf_rule_type, presence: true, inclusion: { in: waf_rule_types.keys }
|
||||||
validates :action, presence: true, inclusion: { in: ACTIONS }
|
validates :waf_action, presence: true, inclusion: { in: waf_actions.keys }
|
||||||
validates :conditions, presence: true, unless: :network_rule?
|
validates :conditions, presence: true, unless: :network_rule?
|
||||||
validates :enabled, inclusion: { in: [true, false] }
|
validates :enabled, inclusion: { in: [true, false] }
|
||||||
validates :source, inclusion: { in: SOURCES }
|
validates :source, inclusion: { in: SOURCES }
|
||||||
|
|
||||||
|
# Legacy enum definitions (disabled to prevent conflicts)
|
||||||
|
# enum :action, { allow: "allow", deny: "deny", rate_limit: "rate_limit", redirect: "redirect", log: "log", challenge: "challenge" }, scopes: false
|
||||||
|
# enum :rule_type, { network: "network", rate_limit: "rate_limit", path_pattern: "path_pattern" }, scopes: false
|
||||||
|
|
||||||
|
# Legacy validations for backward compatibility during transition
|
||||||
|
# validates :rule_type, presence: true, inclusion: { in: RULE_TYPES }, allow_nil: true
|
||||||
|
# validates :action, presence: true, inclusion: { in: ACTIONS }, allow_nil: true
|
||||||
|
|
||||||
# Custom validations
|
# Custom validations
|
||||||
validate :validate_conditions_by_type
|
validate :validate_conditions_by_type
|
||||||
validate :validate_metadata_by_action
|
validate :validate_metadata_by_action
|
||||||
@@ -33,16 +65,22 @@ class Rule < ApplicationRecord
|
|||||||
scope :disabled, -> { where(enabled: false) }
|
scope :disabled, -> { where(enabled: false) }
|
||||||
scope :active, -> { enabled.where("expires_at IS NULL OR expires_at > ?", Time.current) }
|
scope :active, -> { enabled.where("expires_at IS NULL OR expires_at > ?", Time.current) }
|
||||||
scope :expired, -> { where("expires_at IS NOT NULL AND expires_at <= ?", Time.current) }
|
scope :expired, -> { where("expires_at IS NOT NULL AND expires_at <= ?", Time.current) }
|
||||||
scope :by_type, ->(type) { where(rule_type: type) }
|
scope :by_type, ->(type) { where(waf_rule_type: type) }
|
||||||
scope :network_rules, -> { where(rule_type: "network") }
|
scope :network_rules, -> { network }
|
||||||
scope :rate_limit_rules, -> { where(rule_type: "rate_limit") }
|
scope :rate_limit_rules, -> { rate_limit }
|
||||||
scope :path_pattern_rules, -> { where(rule_type: "path_pattern") }
|
scope :path_pattern_rules, -> { path_pattern }
|
||||||
scope :by_source, ->(source) { where(source: source) }
|
scope :by_source, ->(source) { where(source: source) }
|
||||||
scope :surgical_blocks, -> { where(source: "manual:surgical_block") }
|
scope :surgical_blocks, -> { where(source: "manual:surgical_block") }
|
||||||
scope :surgical_exceptions, -> { where(source: "manual:surgical_exception") }
|
scope :surgical_exceptions, -> { where(source: "manual:surgical_exception") }
|
||||||
scope :policy_generated, -> { where(source: "policy") }
|
scope :policy_generated, -> { where(source: "policy") }
|
||||||
scope :from_waf_policy, ->(waf_policy) { where(waf_policy: waf_policy) }
|
scope :from_waf_policy, ->(waf_policy) { where(waf_policy: waf_policy) }
|
||||||
|
|
||||||
|
# Legacy scopes for backward compatibility
|
||||||
|
scope :by_type_legacy, ->(type) { where(rule_type: type) }
|
||||||
|
scope :network_rules_legacy, -> { where(rule_type: "network") }
|
||||||
|
scope :rate_limit_rules_legacy, -> { where(rule_type: "rate_limit") }
|
||||||
|
scope :path_pattern_rules_legacy, -> { where(rule_type: "path_pattern") }
|
||||||
|
|
||||||
# Sync queries
|
# Sync queries
|
||||||
scope :since, ->(timestamp) { where("updated_at >= ?", Time.at(timestamp)).order(:updated_at, :id) }
|
scope :since, ->(timestamp) { where("updated_at >= ?", Time.at(timestamp)).order(:updated_at, :id) }
|
||||||
scope :sync_order, -> { order(:updated_at, :id) }
|
scope :sync_order, -> { order(:updated_at, :id) }
|
||||||
@@ -51,18 +89,19 @@ class Rule < ApplicationRecord
|
|||||||
before_validation :set_defaults
|
before_validation :set_defaults
|
||||||
before_validation :parse_json_fields
|
before_validation :parse_json_fields
|
||||||
before_save :calculate_priority_for_network_rules
|
before_save :calculate_priority_for_network_rules
|
||||||
|
before_save :sync_legacy_columns
|
||||||
|
|
||||||
# Rule type checks
|
# Rule type checks
|
||||||
def network_rule?
|
def network_rule?
|
||||||
rule_type == "network"
|
waf_rule_type_network?
|
||||||
end
|
end
|
||||||
|
|
||||||
def rate_limit_rule?
|
def rate_limit_rule?
|
||||||
rule_type == "rate_limit"
|
waf_rule_type_rate_limit?
|
||||||
end
|
end
|
||||||
|
|
||||||
def path_pattern_rule?
|
def path_pattern_rule?
|
||||||
rule_type == "path_pattern"
|
waf_rule_type_path_pattern?
|
||||||
end
|
end
|
||||||
|
|
||||||
# Network-specific methods
|
# Network-specific methods
|
||||||
@@ -104,16 +143,16 @@ class Rule < ApplicationRecord
|
|||||||
|
|
||||||
# Action-specific methods
|
# Action-specific methods
|
||||||
def redirect_action?
|
def redirect_action?
|
||||||
action == "redirect"
|
waf_action_redirect?
|
||||||
end
|
end
|
||||||
|
|
||||||
def challenge_action?
|
def challenge_action?
|
||||||
action == "challenge"
|
waf_action_challenge?
|
||||||
end
|
end
|
||||||
|
|
||||||
# Redirect/challenge convenience methods
|
# Redirect/challenge convenience methods
|
||||||
def redirect_url
|
def redirect_url
|
||||||
metadata&.dig('redirect_url')
|
metadata_hash['redirect_url']
|
||||||
end
|
end
|
||||||
|
|
||||||
def redirect_status
|
def redirect_status
|
||||||
@@ -162,12 +201,13 @@ class Rule < ApplicationRecord
|
|||||||
end
|
end
|
||||||
|
|
||||||
def disable!(reason: nil)
|
def disable!(reason: nil)
|
||||||
update!(
|
new_metadata = metadata_hash.merge(
|
||||||
enabled: false,
|
|
||||||
metadata: metadata.merge(
|
|
||||||
disabled_at: Time.current.iso8601,
|
disabled_at: Time.current.iso8601,
|
||||||
disabled_reason: reason
|
disabled_reason: reason
|
||||||
)
|
)
|
||||||
|
update!(
|
||||||
|
enabled: false,
|
||||||
|
metadata: new_metadata
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
@@ -180,8 +220,8 @@ class Rule < ApplicationRecord
|
|||||||
def to_agent_format
|
def to_agent_format
|
||||||
format = {
|
format = {
|
||||||
id: id,
|
id: id,
|
||||||
rule_type: rule_type,
|
waf_rule_type: waf_rule_type,
|
||||||
waf_action: action, # Agents expect 'waf_action' field
|
waf_action: waf_action, # Use the enum field directly
|
||||||
conditions: agent_conditions,
|
conditions: agent_conditions,
|
||||||
priority: agent_priority,
|
priority: agent_priority,
|
||||||
expires_at: expires_at&.to_i, # Agents expect Unix timestamps
|
expires_at: expires_at&.to_i, # Agents expect Unix timestamps
|
||||||
@@ -224,8 +264,8 @@ class Rule < ApplicationRecord
|
|||||||
network_range = NetworkRange.find_or_create_by_cidr(cidr, user: user, source: 'user_created')
|
network_range = NetworkRange.find_or_create_by_cidr(cidr, user: user, source: 'user_created')
|
||||||
|
|
||||||
create!(
|
create!(
|
||||||
rule_type: 'network',
|
waf_rule_type: 'network',
|
||||||
action: action,
|
waf_action: action,
|
||||||
network_range: network_range,
|
network_range: network_range,
|
||||||
user: user,
|
user: user,
|
||||||
**options
|
**options
|
||||||
@@ -237,8 +277,8 @@ class Rule < ApplicationRecord
|
|||||||
network_range = NetworkRange.find_or_create_by_cidr(parent_cidr, user: user, source: 'user_created')
|
network_range = NetworkRange.find_or_create_by_cidr(parent_cidr, user: user, source: 'user_created')
|
||||||
|
|
||||||
block_rule = create!(
|
block_rule = create!(
|
||||||
rule_type: 'network',
|
waf_rule_type: 'network',
|
||||||
action: 'deny',
|
waf_action: 'deny',
|
||||||
network_range: network_range,
|
network_range: network_range,
|
||||||
source: 'manual:surgical_block',
|
source: 'manual:surgical_block',
|
||||||
user: user,
|
user: user,
|
||||||
@@ -255,8 +295,8 @@ class Rule < ApplicationRecord
|
|||||||
ip_network_range = NetworkRange.find_or_create_by_cidr("#{ip_address}/#{ip_address.include?(':') ? '128' : '32'}", user: user, source: 'user_created')
|
ip_network_range = NetworkRange.find_or_create_by_cidr("#{ip_address}/#{ip_address.include?(':') ? '128' : '32'}", user: user, source: 'user_created')
|
||||||
|
|
||||||
exception_rule = create!(
|
exception_rule = create!(
|
||||||
rule_type: 'network',
|
waf_rule_type: 'network',
|
||||||
action: 'allow',
|
waf_action: 'allow',
|
||||||
network_range: ip_network_range,
|
network_range: ip_network_range,
|
||||||
source: 'manual:surgical_exception',
|
source: 'manual:surgical_exception',
|
||||||
user: user,
|
user: user,
|
||||||
@@ -277,8 +317,8 @@ class Rule < ApplicationRecord
|
|||||||
network_range = NetworkRange.find_or_create_by_cidr(cidr, user: user, source: 'user_created')
|
network_range = NetworkRange.find_or_create_by_cidr(cidr, user: user, source: 'user_created')
|
||||||
|
|
||||||
create!(
|
create!(
|
||||||
rule_type: 'rate_limit',
|
waf_rule_type: 'rate_limit',
|
||||||
action: 'rate_limit',
|
waf_action: 'rate_limit',
|
||||||
network_range: network_range,
|
network_range: network_range,
|
||||||
conditions: { cidr: cidr, scope: 'ip' },
|
conditions: { cidr: cidr, scope: 'ip' },
|
||||||
metadata: {
|
metadata: {
|
||||||
@@ -307,7 +347,7 @@ class Rule < ApplicationRecord
|
|||||||
|
|
||||||
# This would need efficient IP range queries
|
# This would need efficient IP range queries
|
||||||
# For now, simple IP match
|
# For now, simple IP match
|
||||||
Event.where(ip_address: network_range.network_address)
|
Event.where("ip_address <<= ?", network_range.cidr)
|
||||||
.recent
|
.recent
|
||||||
.limit(limit)
|
.limit(limit)
|
||||||
end
|
end
|
||||||
@@ -324,6 +364,18 @@ class Rule < ApplicationRecord
|
|||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
# Helper method to safely access metadata as hash
|
||||||
|
def metadata_hash
|
||||||
|
case metadata
|
||||||
|
when Hash
|
||||||
|
metadata
|
||||||
|
when String
|
||||||
|
metadata.present? ? (JSON.parse(metadata) rescue {}) : {}
|
||||||
|
else
|
||||||
|
{}
|
||||||
|
end
|
||||||
|
end
|
||||||
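# --- Illustrative sketch, not part of the diff: metadata can arrive as a
# Hash, a JSON string, or nil depending on how the row was created, and
# metadata_hash shields callers like #redirect_url from caring. The URL is an
# assumption.
rule = Rule.new
rule.metadata = { "redirect_url" => "https://example.com/maintenance" }
rule.metadata_hash["redirect_url"]   # => "https://example.com/maintenance"
rule.redirect_url                    # same lookup, via the helper

rule.metadata = nil
rule.metadata_hash                   # => {} (never nil, so callers can dig safely)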
|
|
||||||
private
|
private
|
||||||
|
|
||||||
def set_defaults
|
def set_defaults
|
||||||
@@ -361,7 +413,7 @@ class Rule < ApplicationRecord
|
|||||||
end
|
end
|
||||||
|
|
||||||
def validate_conditions_by_type
|
def validate_conditions_by_type
|
||||||
case rule_type
|
case waf_rule_type
|
||||||
when "network"
|
when "network"
|
||||||
# Network rules don't need conditions in DB - stored in network_range
|
# Network rules don't need conditions in DB - stored in network_range
|
||||||
true
|
true
|
||||||
@@ -394,7 +446,7 @@ class Rule < ApplicationRecord
|
|||||||
end
|
end
|
||||||
|
|
||||||
def validate_metadata_by_action
|
def validate_metadata_by_action
|
||||||
case action
|
case waf_action
|
||||||
when "redirect"
|
when "redirect"
|
||||||
unless metadata&.dig("redirect_url").present?
|
unless metadata&.dig("redirect_url").present?
|
||||||
errors.add(:metadata, "must include 'redirect_url' for redirect action")
|
errors.add(:metadata, "must include 'redirect_url' for redirect action")
|
||||||
@@ -457,4 +509,14 @@ class Rule < ApplicationRecord
|
|||||||
self.metadata ||= {}
|
self.metadata ||= {}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def sync_legacy_columns
|
||||||
|
# Sync enum values to legacy string columns for backward compatibility
|
||||||
|
if waf_action.present?
|
||||||
|
self[:action] = waf_action
|
||||||
|
end
|
||||||
|
if waf_rule_type.present?
|
||||||
|
self[:rule_type] = waf_rule_type
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
end
|
end
|
||||||
@@ -0,0 +1,15 @@
|
|||||||
|
class RenameRuleMatchedToRuleIdInEvents < ActiveRecord::Migration[8.1]
|
||||||
|
def change
|
||||||
|
# Add new rule_id column (instant - just metadata change)
|
||||||
|
add_column :events, :rule_id, :bigint
|
||||||
|
|
||||||
|
# Drop old rule_matched string column (instant - no data to migrate)
|
||||||
|
remove_column :events, :rule_matched, :string
|
||||||
|
|
||||||
|
# Add foreign key constraint (fast - all values are NULL)
|
||||||
|
add_foreign_key :events, :rules
|
||||||
|
|
||||||
|
# Add index for analytics queries (fast - mostly NULL values)
|
||||||
|
add_index :events, :rule_id
|
||||||
|
end
|
||||||
|
end
|
||||||
@@ -0,0 +1,16 @@
|
|||||||
|
class RenameEventIdToRequestIdInEvents < ActiveRecord::Migration[8.1]
|
||||||
|
def change
|
||||||
|
# Only rename the column if it still exists as event_id
|
||||||
|
if column_exists?(:events, :event_id)
|
||||||
|
rename_column :events, :event_id, :request_id
|
||||||
|
end
|
||||||
|
|
||||||
|
# Rename the unique index if it still exists with the old name
|
||||||
|
if index_name_exists?(:events, :index_events_on_event_id)
|
||||||
|
rename_index :events, :index_events_on_event_id, :index_events_on_request_id
|
||||||
|
elsif !index_name_exists?(:events, :index_events_on_request_id)
|
||||||
|
# Create the index with the new name if neither exists
|
||||||
|
add_index :events, :request_id, unique: true, name: :index_events_on_request_id
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
db/migrate/20251113031234_add_enums_to_rules.rb (new file, 37 lines)
@@ -0,0 +1,37 @@
|
|||||||
|
class AddEnumsToRules < ActiveRecord::Migration[8.1]
|
||||||
|
def change
|
||||||
|
# Add enum columns with default values
|
||||||
|
add_column :rules, :waf_action, :integer, default: 0, null: false
|
||||||
|
add_column :rules, :waf_rule_type, :integer, default: 0, null: false
|
||||||
|
|
||||||
|
# Add indexes for enum columns
|
||||||
|
add_index :rules, :waf_action
|
||||||
|
add_index :rules, :waf_rule_type
|
||||||
|
|
||||||
|
# Migrate existing data
|
||||||
|
# Map action strings to integers (starting from 0 to match Rails enum convention)
|
||||||
|
execute <<-SQL
|
||||||
|
UPDATE rules
|
||||||
|
SET waf_action = CASE action
|
||||||
|
WHEN 'allow' THEN 0
|
||||||
|
WHEN 'deny' THEN 1
|
||||||
|
WHEN 'rate_limit' THEN 2
|
||||||
|
WHEN 'redirect' THEN 3
|
||||||
|
WHEN 'log' THEN 4
|
||||||
|
WHEN 'challenge' THEN 5
|
||||||
|
ELSE 0
|
||||||
|
END;
|
||||||
|
SQL
|
||||||
|
|
||||||
|
# Map rule_type strings to integers
|
||||||
|
execute <<-SQL
|
||||||
|
UPDATE rules
|
||||||
|
SET waf_rule_type = CASE rule_type
|
||||||
|
WHEN 'network' THEN 0
|
||||||
|
WHEN 'rate_limit' THEN 1
|
||||||
|
WHEN 'path_pattern' THEN 2
|
||||||
|
ELSE 0
|
||||||
|
END;
|
||||||
|
SQL
|
||||||
|
end
|
||||||
|
end
|
||||||
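# --- Illustrative sketch, not part of the diff: a quick console check after
# running the migration; every legacy string bucket should match its enum
# bucket exactly (a mismatch points at a bad CASE mapping).
Rule::ACTIONS.each do |a|
  legacy = Rule.where(action: a).count
  enum   = Rule.where(waf_action: a).count
  puts "#{a}: legacy=#{legacy} enum=#{enum}#{' MISMATCH' unless legacy == enum}"
end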
test/controllers/api/events_controller_test.rb (new file, 229 lines)
@@ -0,0 +1,229 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require "test_helper"
|
||||||
|
|
||||||
|
class Api::EventsControllerTest < ActionDispatch::IntegrationTest
|
||||||
|
def setup
|
||||||
|
@dsn = Dsn.create!(name: "Test DSN", key: "test-api-key-1234567890abcdef")
|
||||||
|
@disabled_dsn = Dsn.create!(name: "Disabled DSN", key: "disabled-key-1234567890abcdef", enabled: false)
|
||||||
|
|
||||||
|
@sample_event_data = {
|
||||||
|
"timestamp" => Time.current.iso8601,
|
||||||
|
"method" => "GET",
|
||||||
|
"path" => "/api/test",
|
||||||
|
"status" => 200,
|
||||||
|
"ip" => "192.168.1.100",
|
||||||
|
"user_agent" => "TestAgent/1.0"
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should create event with valid DSN via query parameter" do
|
||||||
|
post api_events_path,
|
||||||
|
params: @sample_event_data.merge(baffle_key: @dsn.key),
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
json_response = JSON.parse(response.body)
|
||||||
|
assert json_response["success"]
|
||||||
|
assert_not_nil json_response["rule_version"]
|
||||||
|
assert_not_nil response.headers['X-Rule-Version']
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should create event with valid DSN via Authorization header" do
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer #{@dsn.key}" },
|
||||||
|
params: @sample_event_data,
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
json_response = JSON.parse(response.body)
|
||||||
|
assert json_response["success"]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should create event with valid DSN via X-Baffle-Auth header" do
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "X-Baffle-Auth" => "Baffle baffle_key=#{@dsn.key}, baffle_version=1" },
|
||||||
|
params: @sample_event_data,
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
json_response = JSON.parse(response.body)
|
||||||
|
assert json_response["success"]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should create event with valid DSN via Basic auth" do
|
||||||
|
credentials = Base64.strict_encode64("#{@dsn.key}:password")
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Basic #{credentials}" },
|
||||||
|
params: @sample_event_data,
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
json_response = JSON.parse(response.body)
|
||||||
|
assert json_response["success"]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should create event with form encoded data" do
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer #{@dsn.key}" },
|
||||||
|
params: @sample_event_data,
|
||||||
|
as: :url_encoded
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should include rules in response when agent has no version" do
|
||||||
|
# Create some test rules
|
||||||
|
Rule.create!(action: "block", pattern_type: "ip", pattern: "192.168.1.0/24", reason: "Test rule")
|
||||||
|
Rule.create!(action: "allow", pattern_type: "ip", pattern: "10.0.0.0/8", reason: "Allow internal")
|
||||||
|
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer #{@dsn.key}" },
|
||||||
|
params: @sample_event_data,
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
json_response = JSON.parse(response.body)
|
||||||
|
assert json_response["success"]
|
||||||
|
assert json_response["rules_changed"]
|
||||||
|
assert_not_nil json_response["rules"]
|
||||||
|
assert_equal 2, json_response["rules"].length
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should include only new rules when agent has old version" do
|
||||||
|
# Create rules with different versions
|
||||||
|
old_rule = Rule.create!(action: "block", pattern_type: "ip", pattern: "192.168.1.0/24", reason: "Old rule", version: 1)
|
||||||
|
new_rule = Rule.create!(action: "block", pattern_type: "ip", pattern: "203.0.113.0/24", reason: "New rule", version: 2)
|
||||||
|
|
||||||
|
event_data_with_version = @sample_event_data.merge("last_rule_sync" => 1)
|
||||||
|
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer #{@dsn.key}" },
|
||||||
|
params: event_data_with_version,
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
json_response = JSON.parse(response.body)
|
||||||
|
assert json_response["success"]
|
||||||
|
assert json_response["rules_changed"]
|
||||||
|
assert_equal 1, json_response["rules"].length
|
||||||
|
assert_equal "203.0.113.0/24", json_response["rules"].first["pattern"]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should not include rules when agent has latest version" do
|
||||||
|
# Create a rule and get its version
|
||||||
|
rule = Rule.create!(action: "block", pattern_type: "ip", pattern: "192.168.1.0/24", reason: "Test rule")
|
||||||
|
latest_version = Rule.latest_version
|
||||||
|
|
||||||
|
event_data_with_latest_version = @sample_event_data.merge("last_rule_sync" => latest_version)
|
||||||
|
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer #{@dsn.key}" },
|
||||||
|
params: event_data_with_latest_version,
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
json_response = JSON.parse(response.body)
|
||||||
|
assert json_response["success"]
|
||||||
|
assert_not json_response["rules_changed"]
|
||||||
|
assert_nil json_response["rules"]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should return unauthorized with invalid DSN key" do
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer invalid-key-1234567890abcdef" },
|
||||||
|
params: @sample_event_data,
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :unauthorized
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should return unauthorized with disabled DSN" do
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer #{@disabled_dsn.key}" },
|
||||||
|
params: @sample_event_data,
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :unauthorized
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should return unauthorized with no authentication" do
|
||||||
|
post api_events_path,
|
||||||
|
params: @sample_event_data,
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :unauthorized
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should return bad request with invalid JSON" do
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer #{@dsn.key}" },
|
||||||
|
params: "invalid json {",
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :bad_request
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should handle empty request body gracefully" do
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer #{@dsn.key}" },
|
||||||
|
params: {},
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
json_response = JSON.parse(response.body)
|
||||||
|
assert json_response["success"]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should set sampling headers in response" do
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer #{@dsn.key}" },
|
||||||
|
params: @sample_event_data,
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
assert_not_nil response.headers['X-Sample-Rate']
|
||||||
|
assert_not_nil response.headers['X-Sample-Until']
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should set rule version header in response" do
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer #{@dsn.key}" },
|
||||||
|
params: @sample_event_data,
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
assert_not_nil response.headers['X-Rule-Version']
|
||||||
|
assert_match(/^\d+$/, response.headers['X-Rule-Version'])
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should handle large event payloads" do
|
||||||
|
large_payload = @sample_event_data.merge(
|
||||||
|
"large_field" => "x" * 10000, # 10KB of data
|
||||||
|
"headers" => { "user-agent" => "TestAgent", "accept" => "*/*" },
|
||||||
|
"custom_data" => Hash[*(1..100).map { |i| ["key#{i}", "value#{i}"] }.flatten]
|
||||||
|
)
|
||||||
|
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer #{@dsn.key}" },
|
||||||
|
params: large_payload,
|
||||||
|
as: :json
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should process event asynchronously" do
|
||||||
|
# Clear any existing jobs
|
||||||
|
ActiveJob::Base.queue_adapter.perform_enqueued_at_jobs = true
|
||||||
|
ActiveJob::Base.queue_adapter.perform_enqueued_jobs = false
|
||||||
|
|
||||||
|
assert_enqueued_jobs 1, only: ProcessWafEventJob do
|
||||||
|
post api_events_path,
|
||||||
|
headers: { "Authorization" => "Bearer #{@dsn.key}" },
|
||||||
|
params: @sample_event_data,
|
||||||
|
as: :json
|
||||||
|
end
|
||||||
|
|
||||||
|
assert_response :success
|
||||||
|
end
|
||||||
|
end
|
||||||
@@ -10,18 +10,21 @@ module Api
|
|||||||
key: "test-key-#{SecureRandom.hex(8)}"
|
key: "test-key-#{SecureRandom.hex(8)}"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@rule1_network_range = NetworkRange.create!(cidr: "10.0.0.0/8")
|
||||||
@rule1 = Rule.create!(
|
@rule1 = Rule.create!(
|
||||||
rule_type: "network_v4",
|
waf_rule_type: "network",
|
||||||
action: "deny",
|
waf_action: "deny",
|
||||||
conditions: { cidr: "10.0.0.0/8" },
|
network_range: @rule1_network_range,
|
||||||
source: "manual"
|
source: "manual",
|
||||||
|
user: users(:one)
|
||||||
)
|
)
|
||||||
|
|
||||||
@rule2 = Rule.create!(
|
@rule2 = Rule.create!(
|
||||||
rule_type: "rate_limit",
|
waf_rule_type: "rate_limit",
|
||||||
action: "rate_limit",
|
waf_action: "rate_limit",
|
||||||
conditions: { cidr: "0.0.0.0/0", scope: "global" },
|
conditions: { cidr: "0.0.0.0/0", scope: "global" },
|
||||||
metadata: { limit: 100, window: 60 }
|
metadata: { limit: 100, window: 60 },
|
||||||
|
user: users(:one)
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
@@ -68,8 +71,8 @@ module Api
|
|||||||
assert_equal 2, json["rules"].length
|
assert_equal 2, json["rules"].length
|
||||||
|
|
||||||
rule = json["rules"].find { |r| r["id"] == @rule1.id }
|
rule = json["rules"].find { |r| r["id"] == @rule1.id }
|
||||||
assert_equal "network_v4", rule["rule_type"]
|
assert_equal "network", rule["waf_rule_type"]
|
||||||
assert_equal "deny", rule["action"]
|
assert_equal "deny", rule["waf_action"]
|
||||||
assert_equal({ "cidr" => "10.0.0.0/8" }, rule["conditions"])
|
assert_equal({ "cidr" => "10.0.0.0/8" }, rule["conditions"])
|
||||||
assert_equal 8, rule["priority"]
|
assert_equal 8, rule["priority"]
|
||||||
end
|
end
|
||||||
@@ -159,24 +162,27 @@ module Api
|
|||||||
|
|
||||||
test "rules are ordered by updated_at for sync" do
|
test "rules are ordered by updated_at for sync" do
|
||||||
# Create rules with different timestamps
|
# Create rules with different timestamps
|
||||||
|
oldest_range = NetworkRange.create!(cidr: "192.168.1.0/24")
|
||||||
oldest = Rule.create!(
|
oldest = Rule.create!(
|
||||||
rule_type: "network_v4",
|
waf_rule_type: "network",
|
||||||
action: "deny",
|
waf_action: "deny",
|
||||||
conditions: { cidr: "192.168.1.0/24" }
|
network_range: oldest_range
|
||||||
)
|
)
|
||||||
oldest.update_column(:updated_at, 3.hours.ago)
|
oldest.update_column(:updated_at, 3.hours.ago)
|
||||||
|
|
||||||
|
middle_range = NetworkRange.create!(cidr: "192.168.2.0/24")
|
||||||
middle = Rule.create!(
|
middle = Rule.create!(
|
||||||
rule_type: "network_v4",
|
waf_rule_type: "network",
|
||||||
action: "deny",
|
waf_action: "deny",
|
||||||
conditions: { cidr: "192.168.2.0/24" }
|
network_range: middle_range
|
||||||
)
|
)
|
||||||
middle.update_column(:updated_at, 2.hours.ago)
|
middle.update_column(:updated_at, 2.hours.ago)
|
||||||
|
|
||||||
|
newest_range = NetworkRange.create!(cidr: "192.168.3.0/24")
|
||||||
newest = Rule.create!(
|
newest = Rule.create!(
|
||||||
rule_type: "network_v4",
|
waf_rule_type: "network",
|
||||||
action: "deny",
|
waf_action: "deny",
|
||||||
conditions: { cidr: "192.168.3.0/24" }
|
network_range: newest_range
|
||||||
)
|
)
|
||||||
|
|
||||||
get "/api/rules?since=#{4.hours.ago.iso8601}"
|
get "/api/rules?since=#{4.hours.ago.iso8601}"
|
||||||
|
|||||||
test/fixtures/ipv4_ranges.yml.bak (new vendored file, 2 lines)
@@ -0,0 +1,2 @@
|
|||||||
|
# Read about fixtures at https://api.rubyonrails.org/classes/ActiveRecord/FixtureSet.html
|
||||||
|
# Empty fixtures - tests create their own data
|
||||||
test/fixtures/ipv6_ranges.yml.bak (new vendored file, 2 lines)
@@ -0,0 +1,2 @@
|
|||||||
|
# Read about fixtures at https://api.rubyonrails.org/classes/ActiveRecord/FixtureSet.html
|
||||||
|
# Empty fixtures - tests create their own data
|
||||||
test/fixtures/rule_sets.yml.bak (new vendored file, 1 line)
@@ -0,0 +1 @@
|
|||||||
|
# Empty fixtures
|
||||||
test/fixtures/settings.yml (new vendored file, 9 lines)
@@ -0,0 +1,9 @@
|
|||||||
|
# Read about fixtures at https://api.rubyonrails.org/classes/ActiveRecord/FixtureSet.html
|
||||||
|
|
||||||
|
one:
|
||||||
|
key: MyString1
|
||||||
|
value: MyString1
|
||||||
|
|
||||||
|
two:
|
||||||
|
key: MyString2
|
||||||
|
value: MyString2
|
||||||
test/fixtures/waf_policies.yml (new vendored file, 23 lines)
@@ -0,0 +1,23 @@
|
|||||||
|
# Read about fixtures at https://api.rubyonrails.org/classes/ActiveRecord/FixtureSet.html
|
||||||
|
|
||||||
|
one:
|
||||||
|
name: Policy One
|
||||||
|
description: MyText
|
||||||
|
policy_type: MyString
|
||||||
|
policy_action: MyString
|
||||||
|
targets:
|
||||||
|
enabled: false
|
||||||
|
expires_at: 2025-11-10 13:30:53
|
||||||
|
user: one
|
||||||
|
additional_data:
|
||||||
|
|
||||||
|
two:
|
||||||
|
name: Policy Two
|
||||||
|
description: MyText
|
||||||
|
policy_type: MyString
|
||||||
|
policy_action: MyString
|
||||||
|
targets:
|
||||||
|
enabled: false
|
||||||
|
expires_at: 2025-11-10 13:30:53
|
||||||
|
user: two
|
||||||
|
additional_data:
|
||||||
test/integration/waf_policy_brazil_test.rb (new file, 292 lines)
@@ -0,0 +1,292 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require "test_helper"
|
||||||
|
|
||||||
|
# Custom test class that avoids fixture loading issues
|
||||||
|
class WafPolicyBrazilTest < Minitest::Test
|
||||||
|
def setup
|
||||||
|
# Clean up any existing data
|
||||||
|
Event.delete_all
|
||||||
|
Rule.delete_all
|
||||||
|
NetworkRange.delete_all
|
||||||
|
WafPolicy.delete_all
|
||||||
|
User.delete_all
|
||||||
|
|
||||||
|
@user = User.create!(email_address: "test@example.com", password: "password")
|
||||||
|
|
||||||
|
# Create a WAF policy to block Brazil
|
||||||
|
    @brazil_policy = WafPolicy.create_country_policy(
      ['BR'],
      policy_action: 'deny',
      user: @user,
      name: "Block Brazil"
    )

    # Sample event data for a Brazilian IP
    @brazil_ip = "177.104.144.0" # Known Brazilian IP
    @brazil_event_data = {
      "request_id" => "brazil-test-123",
      "timestamp" => Time.now.iso8601,
      "request" => {
        "ip" => @brazil_ip,
        "method" => "GET",
        "path" => "/api/test",
        "headers" => {
          "host" => "example.com",
          "user-agent" => "TestAgent/1.0"
        }
      },
      "response" => {
        "status_code" => 200,
        "duration_ms" => 150
      },
      "waf_action" => "allow",
      "server_name" => "test-server",
      "environment" => "test",
      "geo" => {
        "country_code" => "BR",
        "city" => "São Paulo"
      },
      "agent" => {
        "name" => "baffle-agent",
        "version" => "1.0.0"
      }
    }
  end

  def teardown
    Event.delete_all
    Rule.delete_all
    NetworkRange.delete_all
    WafPolicy.delete_all
    User.delete_all
  end

  def test_brazil_waf_policy_generates_block_rule_when_brazilian_event_is_processed
    # Process the Brazilian event
    event = Event.create_from_waf_payload!("brazil-test", @brazil_event_data)
    assert event.persisted?

    # Extract country code from payload geo data
    country_code = event.payload.dig("geo", "country_code")
    assert_equal "BR", country_code
    assert_equal @brazil_ip, event.ip_address.to_s

    # Ensure network range exists for the Brazilian IP
    network_range = NetworkRangeGenerator.find_or_create_for_ip(@brazil_ip)
    assert network_range.persisted?
    assert network_range.contains_ip?(@brazil_ip)

    # Set the country on the network range to simulate geo-lookup
    network_range.update!(country: 'BR')

    # Process WAF policies for this network range
    ProcessWafPoliciesJob.perform_now(network_range: network_range, event: event)

    # Verify that a blocking rule was generated
    generated_rules = Rule.where(
      network_range: network_range,
      policy_action: 'deny',
      waf_policy: @brazil_policy
    )

    assert_equal 1, generated_rules.count, "Should have generated exactly one blocking rule"

    rule = generated_rules.first
    assert_equal 'deny', rule.action
    assert_equal network_range, rule.network_range
    assert_equal @brazil_policy, rule.waf_policy
    assert_equal "policy", rule.source
    assert rule.enabled?, "Generated rule should be enabled"

    # Verify rule metadata contains policy information
    metadata = rule.metadata
    assert_equal @brazil_policy.id, metadata['generated_by_policy']
    assert_equal "Block Brazil", metadata['policy_name']
    assert_equal "country", metadata['policy_type']
    assert_equal "country", metadata['matched_field']
    assert_equal "BR", metadata['matched_value']
  end

  def test_non_brazilian_event_does_not_generate_block_rule_from_brazil_policy
    # Create event data for a US IP
    us_ip = "8.8.8.8" # Known US IP
    us_event_data = @brazil_event_data.dup
    us_event_data["event_id"] = "us-test-123"
    us_event_data["request"]["ip"] = us_ip
    us_event_data["geo"]["country_code"] = "US"
    us_event_data["geo"]["city"] = "Mountain View"

    # Process the US event
    event = Event.create_from_waf_payload!("us-test", us_event_data)
    assert event.persisted?

    # Extract country code from payload geo data
    country_code = event.payload.dig("geo", "country_code")
    assert_equal "US", country_code
    assert_equal us_ip, event.ip_address.to_s

    # Ensure network range exists for the US IP
    network_range = NetworkRangeGenerator.find_or_create_for_ip(us_ip)
    assert network_range.persisted?
    network_range.update!(country: 'US')

    # Process WAF policies for this network range
    ProcessWafPoliciesJob.perform_now(network_range: network_range, event: event)

    # Verify that no blocking rule was generated
    generated_rules = Rule.where(
      network_range: network_range,
      policy_action: 'deny',
      waf_policy: @brazil_policy
    )

    assert_equal 0, generated_rules.count, "Should not have generated any blocking rules for US IP"
  end

  def test_multiple_country_policies_generate_rules_for_matching_countries_only
    # Create additional policy to block China
    china_policy = WafPolicy.create_country_policy(
      ['CN'],
      policy_action: 'deny',
      user: @user,
      name: "Block China"
    )

    # Create Chinese IP event
    china_ip = "220.181.38.148" # Known Chinese IP
    china_event_data = @brazil_event_data.dup
    china_event_data["event_id"] = "china-test-123"
    china_event_data["request"]["ip"] = china_ip
    china_event_data["geo"]["country_code"] = "CN"
    china_event_data["geo"]["city"] = "Beijing"

    # Process Chinese event
    china_event = Event.create_from_waf_payload!("china-test", china_event_data)
    china_network_range = NetworkRangeGenerator.find_or_create_for_ip(china_ip)
    china_network_range.update!(country: 'CN')

    # Process Brazilian event (from setup)
    brazil_event = Event.create_from_waf_payload!("brazil-test", @brazil_event_data)
    brazil_network_range = NetworkRangeGenerator.find_or_create_for_ip(@brazil_ip)
    brazil_network_range.update!(country: 'BR')

    # Process WAF policies for both network ranges
    ProcessWafPoliciesJob.perform_now(network_range: brazil_network_range, event: brazil_event)
    ProcessWafPoliciesJob.perform_now(network_range: china_network_range, event: china_event)

    # Verify Brazil IP matched Brazil policy only
    brazil_rules = Rule.where(network_range: brazil_network_range)
    assert_equal 1, brazil_rules.count
    brazil_rule = brazil_rules.first
    assert_equal @brazil_policy, brazil_rule.waf_policy
    assert_equal "BR", brazil_rule.metadata['matched_value']

    # Verify China IP matched China policy only
    china_rules = Rule.where(network_range: china_network_range)
    assert_equal 1, china_rules.count
    china_rule = china_rules.first
    assert_equal china_policy, china_rule.waf_policy
    assert_equal "CN", china_rule.metadata['matched_value']
  end

  def test_policy_expiration_prevents_rule_generation
    # Create an expired Brazil policy
    expired_policy = WafPolicy.create_country_policy(
      ['BR'],
      policy_action: 'deny',
      user: @user,
      name: "Expired Brazil Block",
      expires_at: 1.day.ago
    )

    # Process Brazilian event
    event = Event.create_from_waf_payload!("expired-test", @brazil_event_data)
    network_range = NetworkRangeGenerator.find_or_create_for_ip(@brazil_ip)
    network_range.update!(country: 'BR')

    # Process WAF policies
    ProcessWafPoliciesJob.perform_now(network_range: network_range, event: event)

    # Verify that no rule was generated from expired policy
    generated_rules = Rule.where(
      network_range: network_range,
      policy_action: 'deny',
      waf_policy: expired_policy
    )

    assert_equal 0, generated_rules.count, "Expired policy should not generate rules"
  end

  def test_disabled_policy_prevents_rule_generation
    # Create a disabled Brazil policy
    disabled_policy = WafPolicy.create_country_policy(
      ['BR'],
      policy_action: 'deny',
      user: @user,
      name: "Disabled Brazil Block"
    )
    disabled_policy.update!(enabled: false)

    # Process Brazilian event
    event = Event.create_from_waf_payload!("disabled-test", @brazil_event_data)
    network_range = NetworkRangeGenerator.find_or_create_for_ip(@brazil_ip)
    network_range.update!(country: 'BR')

    # Process WAF policies
    ProcessWafPoliciesJob.perform_now(network_range: network_range, event: event)

    # Verify that no rule was generated from disabled policy
    generated_rules = Rule.where(
      network_range: network_range,
      policy_action: 'deny',
      waf_policy: disabled_policy
    )

    assert_equal 0, generated_rules.count, "Disabled policy should not generate rules"
  end

  def test_policy_action_types_are_correctly_applied_to_generated_rules
    # Test different policy actions
    redirect_policy = WafPolicy.create_country_policy(
      ['BR'],
      policy_action: 'redirect',
      user: @user,
      name: "Redirect Brazil",
      additional_data: {
        'redirect_url' => 'https://example.com/blocked',
        'redirect_status' => 302
      }
    )

    challenge_policy = WafPolicy.create_country_policy(
      ['BR'],
      policy_action: 'challenge',
      user: @user,
      name: "Challenge Brazil",
      additional_data: {
        'challenge_type' => 'captcha',
        'challenge_message' => 'Please verify you are human'
      }
    )

    # Process Brazilian event for redirect policy
    event = Event.create_from_waf_payload!("redirect-test", @brazil_event_data)
    network_range = NetworkRangeGenerator.find_or_create_for_ip(@brazil_ip)
    network_range.update!(country: 'BR')

    # Manually create rule for redirect policy to test metadata handling
    redirect_rule = redirect_policy.create_rule_for_network_range(network_range)
    assert redirect_rule.persisted?
    assert_equal 'redirect', redirect_rule.action
    assert_equal 'https://example.com/blocked', redirect_rule.redirect_url
    assert_equal 302, redirect_rule.redirect_status

    # Manually create rule for challenge policy to test metadata handling
    challenge_rule = challenge_policy.create_rule_for_network_range(network_range)
    assert challenge_rule.persisted?
    assert_equal 'challenge', challenge_rule.action
    assert_equal 'captcha', challenge_rule.challenge_type
    assert_equal 'Please verify you are human', challenge_rule.challenge_message
  end
end
290  test/integration/waf_policy_integration_test.rb  Normal file
@@ -0,0 +1,290 @@
# frozen_string_literal: true

require "test_helper"

class WafPolicyIntegrationTest < ActiveSupport::TestCase
  # Don't load any fixtures
  self.use_transactional_tests = true

  def setup
    # Clean up any existing data
    Event.delete_all
    Rule.delete_all
    NetworkRange.delete_all
    WafPolicy.delete_all
    User.delete_all
    Project.delete_all

    @user = User.create!(email_address: "test@example.com", password: "password")
    @project = Project.create!(name: "Test Project", slug: "test-project", public_key: "test-key-123456")

    # Create a WAF policy to block Brazil
    @brazil_policy = WafPolicy.create_country_policy(
      ['BR'],
      policy_action: 'deny',
      user: @user,
      name: "Block Brazil"
    )

    # Sample event data for a Brazilian IP
    @brazil_ip = "177.104.144.10" # Known Brazilian IP
    @brazil_event_data = {
      "request_id" => "brazil-test-123",
      "timestamp" => Time.now.iso8601,
      "request" => {
        "ip" => @brazil_ip,
        "method" => "GET",
        "path" => "/api/test",
        "headers" => {
          "host" => "example.com",
          "user-agent" => "TestAgent/1.0"
        }
      },
      "response" => {
        "status_code" => 200,
        "duration_ms" => 150
      },
      "waf_action" => "allow",
      "server_name" => "test-server",
      "environment" => "test",
      "geo" => {
        "country_code" => "BR",
        "city" => "São Paulo"
      },
      "agent" => {
        "name" => "baffle-agent",
        "version" => "1.0.0"
      }
    }
  end

  def teardown
    Event.delete_all
    Rule.delete_all
    NetworkRange.delete_all
    WafPolicy.delete_all
    User.delete_all
  end

  test "Brazil WAF policy generates block rule when Brazilian event is processed" do
    # Process the Brazilian event
    event = Event.create_from_waf_payload!("brazil-test", @brazil_event_data)
    assert event.persisted?
    assert_equal "BR", event.country_code
    assert_equal @brazil_ip, event.ip_address

    # Ensure network range exists for the Brazilian IP
    network_range = NetworkRangeGenerator.find_or_create_for_ip(@brazil_ip)
    assert network_range.persisted?
    assert network_range.contains_ip?(@brazil_ip)

    # Set the country on the network range to simulate geo-lookup
    network_range.update!(country: 'BR')

    # Process WAF policies for this network range
    ProcessWafPoliciesJob.perform_now(network_range: network_range, event: event)

    # Verify that a blocking rule was generated
    generated_rules = Rule.where(
      network_range: network_range,
      policy_action: 'deny',
      waf_policy: @brazil_policy
    )

    assert_equal 1, generated_rules.count, "Should have generated exactly one blocking rule"

    rule = generated_rules.first
    assert_equal 'deny', rule.action
    assert_equal network_range, rule.network_range
    assert_equal @brazil_policy, rule.waf_policy
    assert_equal "policy:Block Brazil", rule.source
    assert rule.enabled?, "Generated rule should be enabled"

    # Verify rule metadata contains policy information
    metadata = rule.metadata
    assert_equal @brazil_policy.id, metadata['generated_by_policy']
    assert_equal "Block Brazil", metadata['policy_name']
    assert_equal "country", metadata['policy_type']
    assert_equal "country", metadata['matched_field']
    assert_equal "BR", metadata['matched_value']
  end

  test "Non-Brazilian event does not generate block rule from Brazil policy" do
    # Create event data for a US IP
    us_ip = "8.8.8.8" # Known US IP
    us_event_data = @brazil_event_data.dup
    us_event_data["event_id"] = "us-test-123"
    us_event_data["request"]["ip"] = us_ip
    us_event_data["geo"]["country_code"] = "US"
    us_event_data["geo"]["city"] = "Mountain View"

    # Process the US event
    event = Event.create_from_waf_payload!("us-test", us_event_data)
    assert event.persisted?
    assert_equal "US", event.country_code
    assert_equal us_ip, event.ip_address

    # Ensure network range exists for the US IP
    network_range = NetworkRangeGenerator.find_or_create_for_ip(us_ip)
    assert network_range.persisted?
    network_range.update!(country: 'US')

    # Process WAF policies for this network range
    ProcessWafPoliciesJob.perform_now(network_range: network_range, event: event)

    # Verify that no blocking rule was generated
    generated_rules = Rule.where(
      network_range: network_range,
      policy_action: 'deny',
      waf_policy: @brazil_policy
    )

    assert_equal 0, generated_rules.count, "Should not have generated any blocking rules for US IP"
  end

  test "Multiple country policies generate rules for matching countries only" do
    # Create additional policy to block China
    china_policy = WafPolicy.create_country_policy(
      ['CN'],
      policy_action: 'deny',
      user: @user,
      name: "Block China"
    )

    # Create Chinese IP event
    china_ip = "220.181.38.148" # Known Chinese IP
    china_event_data = @brazil_event_data.dup
    china_event_data["event_id"] = "china-test-123"
    china_event_data["request"]["ip"] = china_ip
    china_event_data["geo"]["country_code"] = "CN"
    china_event_data["geo"]["city"] = "Beijing"

    # Process Chinese event
    china_event = Event.create_from_waf_payload!("china-test", china_event_data)
    china_network_range = NetworkRangeGenerator.find_or_create_for_ip(china_ip)
    china_network_range.update!(country: 'CN')

    # Process Brazilian event (from setup)
    brazil_event = Event.create_from_waf_payload!("brazil-test", @brazil_event_data)
    brazil_network_range = NetworkRangeGenerator.find_or_create_for_ip(@brazil_ip)
    brazil_network_range.update!(country: 'BR')

    # Process WAF policies for both network ranges
    ProcessWafPoliciesJob.perform_now(network_range: brazil_network_range, event: brazil_event)
    ProcessWafPoliciesJob.perform_now(network_range: china_network_range, event: china_event)

    # Verify Brazil IP matched Brazil policy only
    brazil_rules = Rule.where(network_range: brazil_network_range)
    assert_equal 1, brazil_rules.count
    brazil_rule = brazil_rules.first
    assert_equal @brazil_policy, brazil_rule.waf_policy
    assert_equal "BR", brazil_rule.metadata['matched_value']

    # Verify China IP matched China policy only
    china_rules = Rule.where(network_range: china_network_range)
    assert_equal 1, china_rules.count
    china_rule = china_rules.first
    assert_equal china_policy, china_rule.waf_policy
    assert_equal "CN", china_rule.metadata['matched_value']
  end

  test "Policy expiration prevents rule generation" do
    # Create an expired Brazil policy
    expired_policy = WafPolicy.create_country_policy(
      ['BR'],
      policy_action: 'deny',
      user: @user,
      name: "Expired Brazil Block",
      expires_at: 1.day.ago
    )

    # Process Brazilian event
    event = Event.create_from_waf_payload!("expired-test", @brazil_event_data)
    network_range = NetworkRangeGenerator.find_or_create_for_ip(@brazil_ip)
    network_range.update!(country: 'BR')

    # Process WAF policies
    ProcessWafPoliciesJob.perform_now(network_range: network_range, event: event)

    # Verify that no rule was generated from expired policy
    generated_rules = Rule.where(
      network_range: network_range,
      policy_action: 'deny',
      waf_policy: expired_policy
    )

    assert_equal 0, generated_rules.count, "Expired policy should not generate rules"
  end

  test "Disabled policy prevents rule generation" do
    # Create a disabled Brazil policy
    disabled_policy = WafPolicy.create_country_policy(
      ['BR'],
      policy_action: 'deny',
      user: @user,
      name: "Disabled Brazil Block"
    )
    disabled_policy.update!(enabled: false)

    # Process Brazilian event
    event = Event.create_from_waf_payload!("disabled-test", @brazil_event_data)
    network_range = NetworkRangeGenerator.find_or_create_for_ip(@brazil_ip)
    network_range.update!(country: 'BR')

    # Process WAF policies
    ProcessWafPoliciesJob.perform_now(network_range: network_range, event: event)

    # Verify that no rule was generated from disabled policy
    generated_rules = Rule.where(
      network_range: network_range,
      policy_action: 'deny',
      waf_policy: disabled_policy
    )

    assert_equal 0, generated_rules.count, "Disabled policy should not generate rules"
  end

  test "Policy action types are correctly applied to generated rules" do
    # Test different policy actions
    redirect_policy = WafPolicy.create_country_policy(
      ['BR'],
      policy_action: 'redirect',
      user: @user,
      name: "Redirect Brazil",
      additional_data: {
        'redirect_url' => 'https://example.com/blocked',
        'redirect_status' => 302
      }
    )

    challenge_policy = WafPolicy.create_country_policy(
      ['BR'],
      policy_action: 'challenge',
      user: @user,
      name: "Challenge Brazil",
      additional_data: {
        'challenge_type' => 'captcha',
        'challenge_message' => 'Please verify you are human'
      }
    )

    # Process Brazilian event for redirect policy
    event = Event.create_from_waf_payload!("redirect-test", @brazil_event_data)
    network_range = NetworkRangeGenerator.find_or_create_for_ip(@brazil_ip)
    network_range.update!(country: 'BR')

    # Manually create rule for redirect policy to test metadata handling
    redirect_rule = redirect_policy.create_rule_for_network_range(network_range)
    assert redirect_rule.persisted?
    assert_equal 'redirect', redirect_rule.action
    assert_equal 'https://example.com/blocked', redirect_rule.redirect_url
    assert_equal 302, redirect_rule.redirect_status

    # Manually create rule for challenge policy to test metadata handling
    challenge_rule = challenge_policy.create_rule_for_network_range(network_range)
    assert challenge_rule.persisted?
    assert_equal 'challenge', challenge_rule.action
    assert_equal 'captcha', challenge_rule.challenge_type
    assert_equal 'Please verify you are human', challenge_rule.challenge_message
  end
end
@@ -4,18 +4,20 @@ require "test_helper"
class ExpiredRulesCleanupJobTest < ActiveJob::TestCase
  test "disables expired rules" do
    expired_range = NetworkRange.create!(cidr: "10.0.0.0/8")
    expired_rule = Rule.create!(
      rule_type: "network_v4",
      waf_rule_type: "network",
      action: "deny",
      waf_action: "deny",
      conditions: { cidr: "10.0.0.0/8" },
      network_range: expired_range,
      expires_at: 1.hour.ago,
      enabled: true
    )

    active_range = NetworkRange.create!(cidr: "192.168.0.0/16")
    active_rule = Rule.create!(
      rule_type: "network_v4",
      waf_rule_type: "network",
      action: "deny",
      waf_action: "deny",
      conditions: { cidr: "192.168.0.0/16" },
      network_range: active_range,
      expires_at: 1.hour.from_now,
      enabled: true
    )
@@ -28,10 +30,11 @@ class ExpiredRulesCleanupJobTest < ActiveJob::TestCase
  end

  test "does not affect rules without expiration" do
    permanent_range = NetworkRange.create!(cidr: "10.0.0.0/8")
    permanent_rule = Rule.create!(
      rule_type: "network_v4",
      waf_rule_type: "network",
      action: "deny",
      waf_action: "deny",
      conditions: { cidr: "10.0.0.0/8" },
      network_range: permanent_range,
      expires_at: nil,
      enabled: true
    )
@@ -42,10 +45,11 @@ class ExpiredRulesCleanupJobTest < ActiveJob::TestCase
  end

  test "does not affect already disabled rules" do
    disabled_range = NetworkRange.create!(cidr: "10.0.0.0/8")
    disabled_expired_rule = Rule.create!(
      rule_type: "network_v4",
      waf_rule_type: "network",
      action: "deny",
      waf_action: "deny",
      conditions: { cidr: "10.0.0.0/8" },
      network_range: disabled_range,
      expires_at: 1.hour.ago,
      enabled: false
    )
@@ -57,10 +61,11 @@ class ExpiredRulesCleanupJobTest < ActiveJob::TestCase
  end

  test "updates updated_at timestamp when disabling" do
    expired_range = NetworkRange.create!(cidr: "10.0.0.0/8")
    expired_rule = Rule.create!(
      rule_type: "network_v4",
      waf_rule_type: "network",
      action: "deny",
      waf_action: "deny",
      conditions: { cidr: "10.0.0.0/8" },
      network_range: expired_range,
      expires_at: 1.hour.ago,
      enabled: true
    )
@@ -75,18 +80,20 @@ class ExpiredRulesCleanupJobTest < ActiveJob::TestCase
  end

  test "deletes old disabled rules when running at 1am" do
    old_range = NetworkRange.create!(cidr: "10.0.0.0/8")
    old_disabled_rule = Rule.create!(
      rule_type: "network_v4",
      waf_rule_type: "network",
      action: "deny",
      waf_action: "deny",
      conditions: { cidr: "10.0.0.0/8" },
      network_range: old_range,
      enabled: false
    )
    old_disabled_rule.update_column(:updated_at, 31.days.ago)

    recent_range = NetworkRange.create!(cidr: "192.168.0.0/16")
    recent_disabled_rule = Rule.create!(
      rule_type: "network_v4",
      waf_rule_type: "network",
      action: "deny",
      waf_action: "deny",
      conditions: { cidr: "192.168.0.0/16" },
      network_range: recent_range,
      enabled: false
    )

@@ -99,10 +106,11 @@ class ExpiredRulesCleanupJobTest < ActiveJob::TestCase
  end

  test "does not delete old rules when not running at 1am" do
    old_range = NetworkRange.create!(cidr: "10.0.0.0/8")
    old_disabled_rule = Rule.create!(
      rule_type: "network_v4",
      waf_rule_type: "network",
      action: "deny",
      waf_action: "deny",
      conditions: { cidr: "10.0.0.0/8" },
      network_range: old_range,
      enabled: false
    )
    old_disabled_rule.update_column(:updated_at, 31.days.ago)
@@ -116,10 +124,11 @@ class ExpiredRulesCleanupJobTest < ActiveJob::TestCase

  test "returns count of disabled rules" do
    3.times do |i|
      range = NetworkRange.create!(cidr: "10.#{i}.0.0/16")
      Rule.create!(
        rule_type: "network_v4",
        waf_rule_type: "network",
        action: "deny",
        waf_action: "deny",
        conditions: { cidr: "10.#{i}.0.0/16" },
        network_range: range,
        expires_at: 1.hour.ago,
        enabled: true
      )
387  test/jobs/fetch_ipapi_data_job_test.rb  Normal file
@@ -0,0 +1,387 @@
require "test_helper"
|
||||||
|
|
||||||
|
class FetchIpapiDataJobTest < ActiveJob::TestCase
|
||||||
|
setup do
|
||||||
|
@tracking_network = NetworkRange.create!(
|
||||||
|
network: "192.168.1.0/24",
|
||||||
|
source: "auto_generated",
|
||||||
|
creation_reason: "IPAPI tracking network"
|
||||||
|
)
|
||||||
|
@sample_ipapi_data = {
|
||||||
|
"ip" => "192.168.1.100",
|
||||||
|
"type" => "ipv4",
|
||||||
|
"continent_code" => "NA",
|
||||||
|
"continent_name" => "North America",
|
||||||
|
"country_code" => "US",
|
||||||
|
"country_name" => "United States",
|
||||||
|
"region_code" => "CA",
|
||||||
|
"region_name" => "California",
|
||||||
|
"city" => "San Francisco",
|
||||||
|
"zip" => "94102",
|
||||||
|
"latitude" => 37.7749,
|
||||||
|
"longitude" => -122.4194,
|
||||||
|
"location" => {
|
||||||
|
"geoname_id" => 5391959,
|
||||||
|
"capital" => "Washington D.C.",
|
||||||
|
"languages" => [
|
||||||
|
{
|
||||||
|
"code" => "en",
|
||||||
|
"name" => "English",
|
||||||
|
"native" => "English"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"country_flag" => "https://cdn.ipapi.com/flags/us.svg",
|
||||||
|
"country_flag_emoji" => "🇺🇸",
|
||||||
|
"country_flag_emoji_unicode" => "U+1F1FA U+1F1F8",
|
||||||
|
"calling_code" => "1",
|
||||||
|
"is_eu" => false
|
||||||
|
},
|
||||||
|
"time_zone" => {
|
||||||
|
"id" => "America/Los_Angeles",
|
||||||
|
"current_time" => "2023-12-07T12:00:00+00:00",
|
||||||
|
"gmt_offset" => -28800,
|
||||||
|
"code" => "PST",
|
||||||
|
"is_dst" => false
|
||||||
|
},
|
||||||
|
"currency" => {
|
||||||
|
"code" => "USD",
|
||||||
|
"name" => "US Dollar",
|
||||||
|
"plural" => "US dollars",
|
||||||
|
"symbol" => "$",
|
||||||
|
"symbol_native" => "$"
|
||||||
|
},
|
||||||
|
"connection" => {
|
||||||
|
"asn" => 12345,
|
||||||
|
"isp" => "Test ISP",
|
||||||
|
"domain" => "test.com",
|
||||||
|
"type" => "isp"
|
||||||
|
},
|
||||||
|
"security" => {
|
||||||
|
"is_proxy" => false,
|
||||||
|
"is_crawler" => false,
|
||||||
|
"is_tor" => false,
|
||||||
|
"threat_level" => "low",
|
||||||
|
"threat_types" => []
|
||||||
|
},
|
||||||
|
"asn" => {
|
||||||
|
"asn" => "AS12345 Test ISP",
|
||||||
|
"domain" => "test.com",
|
||||||
|
"route" => "192.168.1.0/24",
|
||||||
|
"type" => "isp"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
teardown do
|
||||||
|
# Clean up any test networks
|
||||||
|
NetworkRange.where(network: "192.168.1.0/24").delete_all
|
||||||
|
NetworkRange.where(network: "203.0.113.0/24").delete_all
|
||||||
|
end
|
||||||
|
|
||||||
|
# Successful Data Fetching
|
||||||
|
test "fetches and stores IPAPI data successfully" do
|
||||||
|
# Mock Ipapi.lookup
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(@sample_ipapi_data)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
|
||||||
|
@tracking_network.reload
|
||||||
|
assert_equal @sample_ipapi_data, @tracking_network.network_data_for(:ipapi)
|
||||||
|
assert_not_nil @tracking_network.last_api_fetch
|
||||||
|
assert @tracking_network.network_data['ipapi_queried_at'] > 5.seconds.ago.to_i
|
||||||
|
assert_equal "192.168.1.0/24", @tracking_network.network_data['ipapi_returned_cidr']
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles IPAPI returning different route than tracking network" do
|
||||||
|
# IPAPI returns a more specific network
|
||||||
|
different_route_data = @sample_ipapi_data.dup
|
||||||
|
different_route_data["asn"]["route"] = "203.0.113.0/25"
|
||||||
|
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(different_route_data)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
|
||||||
|
# Should create new network range for the correct route
|
||||||
|
target_network = NetworkRange.find_by(network: "203.0.113.0/25")
|
||||||
|
assert_not_nil target_network
|
||||||
|
assert_equal different_route_data, target_network.network_data_for(:ipapi)
|
||||||
|
assert_equal "api_imported", target_network.source
|
||||||
|
assert_match /Created from IPAPI lookup/, target_network.creation_reason
|
||||||
|
|
||||||
|
# Tracking network should be marked as queried with the returned CIDR
|
||||||
|
@tracking_network.reload
|
||||||
|
assert_equal "203.0.113.0/25", @tracking_network.network_data['ipapi_returned_cidr']
|
||||||
|
end
|
||||||
|
|
||||||
|
test "uses existing network when IPAPI returns different route" do
|
||||||
|
# Create the target network first
|
||||||
|
existing_network = NetworkRange.create!(
|
||||||
|
network: "203.0.113.0/25",
|
||||||
|
source: "manual",
|
||||||
|
creation_reason: "Pre-existing"
|
||||||
|
)
|
||||||
|
|
||||||
|
different_route_data = @sample_ipapi_data.dup
|
||||||
|
different_route_data["asn"]["route"] = "203.0.113.0/25"
|
||||||
|
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(different_route_data)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
|
||||||
|
# Should use existing network, not create new one
|
||||||
|
existing_network.reload
|
||||||
|
assert_equal different_route_data, existing_network.network_data_for(:ipapi)
|
||||||
|
assert_equal 1, NetworkRange.where(network: "203.0.113.0/25").count
|
||||||
|
end
|
||||||
|
|
||||||
|
# Error Handling
|
||||||
|
test "handles IPAPI returning error gracefully" do
|
||||||
|
error_data = {
|
||||||
|
"error" => true,
|
||||||
|
"reason" => "Invalid IP address",
|
||||||
|
"ip" => "192.168.1.0"
|
||||||
|
}
|
||||||
|
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(error_data)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
|
||||||
|
# Should mark as queried to avoid immediate retry
|
||||||
|
@tracking_network.reload
|
||||||
|
assert @tracking_network.network_data['ipapi_queried_at'] > 5.seconds.ago.to_i
|
||||||
|
assert_equal "192.168.1.0/24", @tracking_network.network_data['ipapi_returned_cidr']
|
||||||
|
|
||||||
|
# Should not store the error data
|
||||||
|
assert_empty @tracking_network.network_data_for(:ipapi)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles IPAPI returning nil gracefully" do
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(nil)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
|
||||||
|
# Should mark as queried to avoid immediate retry
|
||||||
|
@tracking_network.reload
|
||||||
|
assert @tracking_network.network_data['ipapi_queried_at'] > 5.seconds.ago.to_i
|
||||||
|
assert_equal "192.168.1.0/24", @tracking_network.network_data['ipapi_returned_cidr']
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles missing network range gracefully" do
|
||||||
|
# Use non-existent network range ID
|
||||||
|
assert_nothing_raised do
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: 99999)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles IPAPI service errors gracefully" do
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").raises(StandardError.new("Service unavailable"))
|
||||||
|
|
||||||
|
# Should not raise error but should clear fetching status
|
||||||
|
assert_nothing_raised do
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Fetching status should be cleared
|
||||||
|
assert_not @tracking_network.is_fetching_api_data?(:ipapi)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Fetching Status Management
|
||||||
|
test "clears fetching status when done" do
|
||||||
|
@tracking_network.mark_as_fetching_api_data!(:ipapi)
|
||||||
|
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(@sample_ipapi_data)
|
||||||
|
|
||||||
|
assert @tracking_network.is_fetching_api_data?(:ipapi)
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
assert_not @tracking_network.is_fetching_api_data?(:ipapi)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "clears fetching status even on error" do
|
||||||
|
@tracking_network.mark_as_fetching_api_data!(:ipapi)
|
||||||
|
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").raises(StandardError.new("Service error"))
|
||||||
|
|
||||||
|
assert @tracking_network.is_fetching_api_data?(:ipapi)
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
assert_not @tracking_network.is_fetching_api_data?(:ipapi)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "clears fetching status when network range not found" do
|
||||||
|
# Create network range and mark as fetching
|
||||||
|
temp_network = NetworkRange.create!(
|
||||||
|
network: "10.0.0.0/24",
|
||||||
|
source: "auto_generated"
|
||||||
|
)
|
||||||
|
temp_network.mark_as_fetching_api_data!(:ipapi)
|
||||||
|
|
||||||
|
# Try to fetch with non-existent ID
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: 99999)
|
||||||
|
|
||||||
|
# Original network should still have fetching status cleared (ensure block runs)
|
||||||
|
temp_network.reload
|
||||||
|
assert_not temp_network.is_fetching_api_data?(:ipapi)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Turbo Broadcast
|
||||||
|
test "broadcasts IPAPI update on success" do
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(@sample_ipapi_data)
|
||||||
|
|
||||||
|
# Expect Turbo broadcast
|
||||||
|
Turbo::StreamsChannel.expects(:broadcast_replace_to)
|
||||||
|
.with("network_range_#{@tracking_network.id}", {
|
||||||
|
target: "ipapi_data_section",
|
||||||
|
partial: "network_ranges/ipapi_data",
|
||||||
|
locals: {
|
||||||
|
ipapi_data: @sample_ipapi_data,
|
||||||
|
network_range: @tracking_network,
|
||||||
|
parent_with_ipapi: nil,
|
||||||
|
ipapi_loading: false
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "does not broadcast on error" do
|
||||||
|
error_data = { "error" => true, "reason" => "Invalid IP" }
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(error_data)
|
||||||
|
|
||||||
|
# Should not broadcast
|
||||||
|
Turbo::StreamsChannel.expects(:broadcast_replace_to).never
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Network Address Extraction
|
||||||
|
test "extracts correct sample IP from network" do
|
||||||
|
# Test with different network formats
|
||||||
|
ipv4_network = NetworkRange.create!(network: "203.0.113.0/24")
|
||||||
|
Ipapi.expects(:lookup).with("203.0.113.0").returns(@sample_ipapi_data)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: ipv4_network.id)
|
||||||
|
|
||||||
|
ipv6_network = NetworkRange.create!(network: "2001:db8::/64")
|
||||||
|
Ipapi.expects(:lookup).with("2001:db8::").returns(@sample_ipapi_data)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: ipv6_network.id)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Data Storage
|
||||||
|
test "stores complete IPAPI data in network_data" do
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(@sample_ipapi_data)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
|
||||||
|
stored_data = @tracking_network.reload.network_data_for(:ipapi)
|
||||||
|
assert_equal @sample_ipapi_data["country_code"], stored_data["country_code"]
|
||||||
|
assert_equal @sample_ipapi_data["city"], stored_data["city"]
|
||||||
|
assert_equal @sample_ipapi_data["asn"]["asn"], stored_data["asn"]["asn"]
|
||||||
|
assert_equal @sample_ipapi_data["security"]["is_proxy"], stored_data["security"]["is_proxy"]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "updates last_api_fetch timestamp" do
|
||||||
|
original_time = 1.hour.ago
|
||||||
|
@tracking_network.update!(last_api_fetch: original_time)
|
||||||
|
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(@sample_ipapi_data)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
|
||||||
|
@tracking_network.reload
|
||||||
|
assert @tracking_network.last_api_fetch > original_time
|
||||||
|
end
|
||||||
|
|
||||||
|
# IPv6 Support
|
||||||
|
test "handles IPv6 networks correctly" do
|
||||||
|
ipv6_network = NetworkRange.create!(
|
||||||
|
network: "2001:db8::/64",
|
||||||
|
source: "auto_generated",
|
||||||
|
creation_reason: "IPAPI tracking network"
|
||||||
|
)
|
||||||
|
|
||||||
|
ipv6_data = @sample_ipapi_data.dup
|
||||||
|
ipv6_data["ip"] = "2001:db8::1"
|
||||||
|
ipv6_data["type"] = "ipv6"
|
||||||
|
ipv6_data["asn"]["route"] = "2001:db8::/32"
|
||||||
|
|
||||||
|
Ipapi.expects(:lookup).with("2001:db8::").returns(ipv6_data)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: ipv6_network.id)
|
||||||
|
|
||||||
|
ipv6_network.reload
|
||||||
|
assert_equal ipv6_data, ipv6_network.network_data_for(:ipapi)
|
||||||
|
assert_equal "2001:db8::/32", ipv6_network.network_data['ipapi_returned_cidr']
|
||||||
|
end
|
||||||
|
|
||||||
|
# Logging
|
||||||
|
test "logs successful fetch" do
|
||||||
|
log_output = StringIO.new
|
||||||
|
logger = Logger.new(log_output)
|
||||||
|
original_logger = Rails.logger
|
||||||
|
Rails.logger = logger
|
||||||
|
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(@sample_ipapi_data)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
|
||||||
|
log_content = log_output.string
|
||||||
|
assert_match /Fetching IPAPI data for 192\.168\.1\.0\/24 using IP 192\.168\.1\.0/, log_content
|
||||||
|
assert_match /Successfully fetched IPAPI data/, log_content
|
||||||
|
|
||||||
|
Rails.logger = original_logger
|
||||||
|
end
|
||||||
|
|
||||||
|
test "logs errors and warnings" do
|
||||||
|
log_output = StringIO.new
|
||||||
|
logger = Logger.new(log_output)
|
||||||
|
original_logger = Rails.logger
|
||||||
|
Rails.logger = logger
|
||||||
|
|
||||||
|
error_data = { "error" => true, "reason" => "Rate limited" }
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(error_data)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
|
||||||
|
log_content = log_output.string
|
||||||
|
assert_match /IPAPI returned error for 192\.168\.1\.0\/24/, log_content
|
||||||
|
|
||||||
|
Rails.logger = original_logger
|
||||||
|
end
|
||||||
|
|
||||||
|
test "logs different route handling" do
|
||||||
|
log_output = StringIO.new
|
||||||
|
logger = Logger.new(log_output)
|
||||||
|
original_logger = Rails.logger
|
||||||
|
Rails.logger = logger
|
||||||
|
|
||||||
|
different_route_data = @sample_ipapi_data.dup
|
||||||
|
different_route_data["asn"]["route"] = "203.0.113.0/25"
|
||||||
|
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").returns(different_route_data)
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
|
||||||
|
log_content = log_output.string
|
||||||
|
assert_match /IPAPI returned different route: 203\.0\.113\.0\/25/, log_content
|
||||||
|
assert_match /Storing IPAPI data on correct network: 203\.0\.113\.0\/25/, log_content
|
||||||
|
|
||||||
|
Rails.logger = original_logger
|
||||||
|
end
|
||||||
|
|
||||||
|
test "logs service errors with backtrace" do
|
||||||
|
log_output = StringIO.new
|
||||||
|
logger = Logger.new(log_output)
|
||||||
|
original_logger = Rails.logger
|
||||||
|
Rails.logger = logger
|
||||||
|
|
||||||
|
Ipapi.expects(:lookup).with("192.168.1.0").raises(StandardError.new("Connection failed"))
|
||||||
|
|
||||||
|
FetchIpapiDataJob.perform_now(network_range_id: @tracking_network.id)
|
||||||
|
|
||||||
|
log_content = log_output.string
|
||||||
|
assert_match /Failed to fetch IPAPI data for network_range #{@tracking_network.id}/, log_content
|
||||||
|
assert_match /Connection failed/, log_content
|
||||||
|
|
||||||
|
Rails.logger = original_logger
|
||||||
|
end
|
||||||
|
end
|
||||||
@@ -18,7 +18,7 @@ class PathScannerDetectorJobTest < ActiveJob::TestCase
    ["/.env", "/.git", "/wp-admin"].each do |path|
      Event.create!(
        project: @project,
        event_id: SecureRandom.uuid,
        request_id: SecureRandom.uuid,
        timestamp: Time.current,
        ip_address: ip,
        request_path: path,
@@ -45,7 +45,7 @@ class PathScannerDetectorJobTest < ActiveJob::TestCase
    3.times do |i|
      Event.create!(
        project: @project,
        event_id: SecureRandom.uuid,
        request_id: SecureRandom.uuid,
        timestamp: Time.current,
        ip_address: ip,
        request_path: "/.env",
@@ -71,7 +71,7 @@ class PathScannerDetectorJobTest < ActiveJob::TestCase
    paths.each do |path|
      Event.create!(
        project: @project,
        event_id: SecureRandom.uuid,
        request_id: SecureRandom.uuid,
        timestamp: Time.current,
        ip_address: ip,
        request_path: path,
@@ -95,7 +95,7 @@ class PathScannerDetectorJobTest < ActiveJob::TestCase
    2.times do
      Event.create!(
        project: @project,
        event_id: SecureRandom.uuid,
        request_id: SecureRandom.uuid,
        timestamp: Time.current,
        ip_address: ip,
        request_path: "/.env",
@@ -114,7 +114,7 @@ class PathScannerDetectorJobTest < ActiveJob::TestCase
    # Old event (outside lookback window)
    old_event = Event.create!(
      project: @project,
      event_id: SecureRandom.uuid,
      request_id: SecureRandom.uuid,
      timestamp: 10.minutes.ago,
      ip_address: ip,
      request_path: "/.env",
@@ -125,7 +125,7 @@ class PathScannerDetectorJobTest < ActiveJob::TestCase
    2.times do
      Event.create!(
        project: @project,
        event_id: SecureRandom.uuid,
        request_id: SecureRandom.uuid,
        timestamp: Time.current,
        ip_address: ip,
        request_path: "/.git",
@@ -154,7 +154,7 @@ class PathScannerDetectorJobTest < ActiveJob::TestCase
    3.times do
      Event.create!(
        project: @project,
        event_id: SecureRandom.uuid,
        request_id: SecureRandom.uuid,
        timestamp: Time.current,
        ip_address: ip,
        request_path: "/.env",
@@ -173,7 +173,7 @@ class PathScannerDetectorJobTest < ActiveJob::TestCase
    3.times do
      Event.create!(
        project: @project,
        event_id: SecureRandom.uuid,
        request_id: SecureRandom.uuid,
        timestamp: Time.current,
        ip_address: ip,
        request_path: "/.env",
@@ -198,7 +198,7 @@ class PathScannerDetectorJobTest < ActiveJob::TestCase
    3.times do
      Event.create!(
        project: @project,
        event_id: SecureRandom.uuid,
        request_id: SecureRandom.uuid,
        timestamp: Time.current,
        ip_address: ip,
        request_path: "/.env",
@@ -216,7 +216,7 @@ class PathScannerDetectorJobTest < ActiveJob::TestCase
    # Create event with invalid IP
    Event.create!(
      project: @project,
      event_id: SecureRandom.uuid,
      request_id: SecureRandom.uuid,
      timestamp: Time.current,
      ip_address: "invalid-ip",
      request_path: "/.env",
@@ -235,7 +235,7 @@ class PathScannerDetectorJobTest < ActiveJob::TestCase
    3.times do
      Event.create!(
        project: @project,
        event_id: SecureRandom.uuid,
        request_id: SecureRandom.uuid,
        timestamp: Time.current,
        ip_address: ip,
        request_path: "/.env",
363  test/jobs/process_waf_event_job_test.rb  Normal file
@@ -0,0 +1,363 @@
require "test_helper"
|
||||||
|
|
||||||
|
class ProcessWafEventJobTest < ActiveJob::TestCase
|
||||||
|
setup do
|
||||||
|
@sample_event_data = {
|
||||||
|
"request_id" => "test-event-123",
|
||||||
|
"timestamp" => Time.current.iso8601,
|
||||||
|
"request" => {
|
||||||
|
"ip" => "192.168.1.100",
|
||||||
|
"method" => "GET",
|
||||||
|
"path" => "/api/test",
|
||||||
|
"headers" => {
|
||||||
|
"host" => "example.com",
|
||||||
|
"user-agent" => "TestAgent/1.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"response" => {
|
||||||
|
"status_code" => 200,
|
||||||
|
"duration_ms" => 150
|
||||||
|
},
|
||||||
|
"waf_action" => "allow",
|
||||||
|
"server_name" => "test-server",
|
||||||
|
"environment" => "test"
|
||||||
|
}
|
||||||
|
|
||||||
|
@headers = { "Content-Type" => "application/json" }
|
||||||
|
end
|
||||||
|
|
||||||
|
# Single Event Processing
|
||||||
|
test "processes single event with request_id" do
|
||||||
|
assert_difference 'Event.count', 1 do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
|
||||||
|
event = Event.last
|
||||||
|
assert_equal "test-event-123", event.request_id
|
||||||
|
assert_equal "192.168.1.100", event.ip_address
|
||||||
|
assert_equal "/api/test", event.request_path
|
||||||
|
assert_equal "get", event.request_method
|
||||||
|
assert_equal "allow", event.waf_action
|
||||||
|
end
|
||||||
|
|
||||||
|
test "processes single event with legacy event_id" do
|
||||||
|
event_data = @sample_event_data.dup
|
||||||
|
event_data.delete("request_id")
|
||||||
|
event_data["event_id"] = "legacy-event-456"
|
||||||
|
|
||||||
|
assert_difference 'Event.count', 1 do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
|
||||||
|
event = Event.last
|
||||||
|
assert_equal "legacy-event-456", event.request_id
|
||||||
|
end
|
||||||
|
|
||||||
|
test "processes single event with correlation_id" do
|
||||||
|
event_data = @sample_event_data.dup
|
||||||
|
event_data.delete("request_id")
|
||||||
|
event_data["correlation_id"] = "correlation-789"
|
||||||
|
|
||||||
|
assert_difference 'Event.count', 1 do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
|
||||||
|
event = Event.last
|
||||||
|
assert_equal "correlation-789", event.request_id
|
||||||
|
end
|
||||||
|
|
||||||
|
test "generates UUID for events without ID" do
|
||||||
|
event_data = @sample_event_data.dup
|
||||||
|
event_data.delete("request_id")
|
||||||
|
|
||||||
|
assert_difference 'Event.count', 1 do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
|
||||||
|
event = Event.last
|
||||||
|
assert_not_nil event.request_id
|
||||||
|
assert_match /\A[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\z/, event.request_id
|
||||||
|
end
|
||||||
|
|
||||||
|
# Multiple Events Processing
|
||||||
|
test "processes multiple events in events array" do
|
||||||
|
event1 = @sample_event_data.dup
|
||||||
|
event1["request_id"] = "event-1"
|
||||||
|
event1["request"]["ip"] = "192.168.1.1"
|
||||||
|
|
||||||
|
event2 = @sample_event_data.dup
|
||||||
|
event2["request_id"] = "event-2"
|
||||||
|
event2["request"]["ip"] = "192.168.1.2"
|
||||||
|
|
||||||
|
batch_data = {
|
||||||
|
"events" => [event1, event2]
|
||||||
|
}
|
||||||
|
|
||||||
|
assert_difference 'Event.count', 2 do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: batch_data, headers: @headers)
|
||||||
|
end
|
||||||
|
|
||||||
|
request_ids = Event.last(2).pluck(:request_id)
|
||||||
|
assert_includes request_ids, "event-1"
|
||||||
|
assert_includes request_ids, "event-2"
|
||||||
|
end
|
||||||
|
|
||||||
|
# Duplicate Handling
|
||||||
|
test "skips duplicate events" do
|
||||||
|
# Create event first
|
||||||
|
Event.create_from_waf_payload!("test-event-123", @sample_event_data)
|
||||||
|
|
||||||
|
assert_no_difference 'Event.count' do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles duplicates within batch" do
|
||||||
|
event1 = @sample_event_data.dup
|
||||||
|
event1["request_id"] = "duplicate-test"
|
||||||
|
|
||||||
|
event2 = @sample_event_data.dup
|
||||||
|
event2["request_id"] = "duplicate-test"
|
||||||
|
|
||||||
|
batch_data = {
|
||||||
|
"events" => [event1, event2]
|
||||||
|
}
|
||||||
|
|
||||||
|
assert_difference 'Event.count', 1 do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: batch_data, headers: @headers)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Network Range Processing
|
||||||
|
test "creates tracking network for event IP" do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
|
||||||
|
|
||||||
|
event = Event.last
|
||||||
|
assert_not_nil event.network_range_id
|
||||||
|
|
||||||
|
# Should create /24 tracking network for IPv4
|
||||||
|
tracking_network = event.network_range
|
||||||
|
assert_equal "192.168.1.0/24", tracking_network.network.to_s
|
||||||
|
assert_equal "auto_generated", tracking_network.source
|
||||||
|
assert_equal "IPAPI tracking network", tracking_network.creation_reason
|
||||||
|
end
|
||||||
|
|
||||||
|
test "queues IPAPI enrichment when needed" do
|
||||||
|
event_data = @sample_event_data.dup
|
||||||
|
event_data["request"]["ip"] = "8.8.8.8" # Public IP that needs enrichment
|
||||||
|
|
||||||
|
assert_enqueued_jobs 1, only: [FetchIpapiDataJob] do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
test "skips IPAPI enrichment when recently queried" do
|
||||||
|
# Create tracking network with recent query
|
||||||
|
tracking_network = NetworkRange.create!(
|
||||||
|
network: "192.168.1.0/24",
|
||||||
|
source: "auto_generated",
|
||||||
|
creation_reason: "IPAPI tracking network"
|
||||||
|
)
|
||||||
|
tracking_network.mark_ipapi_queried!("192.168.1.0/24")
|
||||||
|
|
||||||
|
assert_no_enqueued_jobs only: [FetchIpapiDataJob] do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
test "skips IPAPI enrichment when already fetching" do
|
||||||
|
tracking_network = NetworkRange.create!(
|
||||||
|
network: "192.168.1.0/24",
|
||||||
|
source: "auto_generated",
|
||||||
|
creation_reason: "IPAPI tracking network"
|
||||||
|
)
|
||||||
|
tracking_network.mark_as_fetching_api_data!(:ipapi)
|
||||||
|
|
||||||
|
assert_no_enqueued_jobs only: [FetchIpapiDataJob] do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# WAF Policy Evaluation
|
||||||
|
test "evaluates WAF policies when needed" do
|
||||||
|
tracking_network = NetworkRange.create!(
|
||||||
|
network: "192.168.1.0/24",
|
||||||
|
source: "auto_generated",
|
||||||
|
creation_reason: "IPAPI tracking network"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Mock WafPolicyMatcher
|
||||||
|
WafPolicyMatcher.expects(:evaluate_and_mark!).with(tracking_network).returns({
|
||||||
|
generated_rules: [],
|
||||||
|
evaluated_policies: []
|
||||||
|
})
|
||||||
|
|
||||||
|
ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "skips policy evaluation when not needed" do
|
||||||
|
tracking_network = NetworkRange.create!(
|
||||||
|
network: "192.168.1.0/24",
|
||||||
|
source: "auto_generated",
|
||||||
|
creation_reason: "IPAPI tracking network",
|
||||||
|
policies_evaluated_at: 5.minutes.ago
|
||||||
|
)
|
||||||
|
|
||||||
|
# Should not call WafPolicyMatcher
|
||||||
|
WafPolicyMatcher.expects(:evaluate_and_mark!).never
|
||||||
|
|
||||||
|
ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Error Handling
|
||||||
|
test "handles invalid event data format gracefully" do
|
||||||
|
invalid_data = {
|
||||||
|
"invalid" => "data"
|
||||||
|
}
|
||||||
|
|
||||||
|
assert_no_difference 'Event.count' do
|
||||||
|
assert_nothing_raised do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: invalid_data, headers: @headers)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles event creation errors gracefully" do
|
||||||
|
invalid_event_data = @sample_event_data.dup
|
||||||
|
invalid_event_data.delete("request") # Missing required request data
|
||||||
|
|
||||||
|
assert_no_difference 'Event.count' do
|
||||||
|
assert_nothing_raised do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: invalid_event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles network processing errors gracefully" do
|
||||||
|
# Create a tracking network that will cause an error
|
||||||
|
tracking_network = NetworkRange.create!(
|
||||||
|
network: "192.168.1.0/24",
|
||||||
|
source: "auto_generated",
|
||||||
|
creation_reason: "IPAPI tracking network"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Mock WafPolicyMatcher to raise an error
|
||||||
|
WafPolicyMatcher.expects(:evaluate_and_mark!).with(tracking_network).raises(StandardError.new("Policy evaluation failed"))
|
||||||
|
|
||||||
|
# Event should still be created despite policy evaluation error
|
||||||
|
assert_difference 'Event.count', 1 do
|
||||||
|
assert_nothing_raised do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles events without network ranges" do
|
||||||
|
event_data = @sample_event_data.dup
|
||||||
|
event_data["request"]["ip"] = "127.0.0.1" # Private/local IP
|
||||||
|
|
||||||
|
assert_difference 'Event.count', 1 do
|
||||||
|
assert_nothing_raised do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
event = Event.last
|
||||||
|
assert_nil event.network_range_id
|
||||||
|
end
|
||||||
|
|
||||||
|
# Performance Logging
|
||||||
|
test "logs processing metrics" do
|
||||||
|
log_output = StringIO.new
|
||||||
|
logger = Logger.new(log_output)
|
||||||
|
original_logger = Rails.logger
|
||||||
|
Rails.logger = logger
|
||||||
|
|
||||||
|
ProcessWafEventJob.perform_now(event_data: @sample_event_data, headers: @headers)
|
||||||
|
|
||||||
|
log_content = log_output.string
|
||||||
|
assert_match /Processed WAF event test-event-123 in \d+\.\d+ms/, log_content
|
||||||
|
assert_match /Processed 1 WAF events/, log_content
|
||||||
|
|
||||||
|
Rails.logger = original_logger
|
||||||
|
end
|
||||||
|
|
||||||
|
test "logs IPAPI fetch decisions" do
|
||||||
|
log_output = StringIO.new
|
||||||
|
logger = Logger.new(log_output)
|
||||||
|
original_logger = Rails.logger
|
||||||
|
Rails.logger = logger
|
||||||
|
|
||||||
|
# Use a public IP to trigger IPAPI fetch
|
||||||
|
event_data = @sample_event_data.dup
|
||||||
|
event_data["request"]["ip"] = "8.8.8.8"
|
||||||
|
|
||||||
|
ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
|
||||||
|
|
||||||
|
log_content = log_output.string
|
||||||
|
assert_match /Queueing IPAPI fetch for IP 8\.8\.8\.8/, log_content
|
||||||
|
|
||||||
|
Rails.logger = original_logger
|
||||||
|
end
|
||||||
|
|
||||||
|
# IPv6 Support
|
||||||
|
test "creates /64 tracking network for IPv6 addresses" do
|
||||||
|
event_data = @sample_event_data.dup
|
||||||
|
event_data["request"]["ip"] = "2001:db8::1"
|
||||||
|
|
||||||
|
ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
|
||||||
|
|
||||||
|
event = Event.last
|
||||||
|
tracking_network = event.network_range
|
||||||
|
assert_equal "2001:db8::/64", tracking_network.network.to_s
|
||||||
|
end
|
||||||
|
|
||||||
|
# Mixed Batch Processing
|
||||||
|
test "processes mixed valid and invalid events in batch" do
|
||||||
|
valid_event = @sample_event_data.dup
|
||||||
|
valid_event["request_id"] = "valid-event"
|
||||||
|
|
||||||
|
invalid_event = {
|
||||||
|
"invalid" => "data",
|
||||||
|
"request_id" => "invalid-event"
|
||||||
|
}
|
||||||
|
|
||||||
|
batch_data = {
|
||||||
|
"events" => [valid_event, invalid_event]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Should only create the valid event
|
||||||
|
assert_difference 'Event.count', 1 do
|
||||||
|
ProcessWafEventJob.perform_now(event_data: batch_data, headers: @headers)
|
||||||
|
end
|
||||||
|
|
||||||
|
assert_equal "valid-event", Event.last.request_id
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles very large batches efficiently" do
|
||||||
|
events = []
|
||||||
|
100.times do |i|
|
||||||
|
event = @sample_event_data.dup
|
||||||
|
event["request_id"] = "batch-event-#{i}"
|
||||||
|
event["request"]["ip"] = "192.168.#{i / 256}.#{i % 256}"
|
||||||
|
events << event
|
||||||
|
end
|
||||||
|
|
||||||
|
batch_data = {
|
||||||
|
"events" => events
|
||||||
|
}
|
||||||
|
|
||||||
|
start_time = Time.current
|
||||||
|
ProcessWafEventJob.perform_now(event_data: batch_data, headers: @headers)
|
||||||
|
processing_time = Time.current - start_time
|
||||||
|
|
||||||
|
assert_equal 100, Event.count
|
||||||
|
assert processing_time < 5.seconds, "Processing 100 events should take less than 5 seconds"
|
||||||
|
end
|
||||||
|
|
||||||
|
# Integration with Other Jobs
|
||||||
|
test "coordinates with BackfillRecentNetworkIntelligenceJob" do
|
||||||
|
# This would be tested based on how the job enqueues other jobs
|
||||||
|
# Implementation depends on your specific job coordination logic
|
||||||
|
end
|
||||||
|
end
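
The placeholder above is left open on purpose. If ProcessWafEventJob is meant to enqueue BackfillRecentNetworkIntelligenceJob when it creates a new tracking network — an assumption, not something this diff confirms — a minimal sketch of that coordination test could look like this:

# Hypothetical sketch; assumes the job enqueues BackfillRecentNetworkIntelligenceJob
# for freshly created tracking networks.
test "coordinates with BackfillRecentNetworkIntelligenceJob" do
  event_data = @sample_event_data.deep_dup
  event_data["request"]["ip"] = "8.8.8.8" # public IP, so a tracking network is created

  assert_enqueued_with(job: BackfillRecentNetworkIntelligenceJob) do
    ProcessWafEventJob.perform_now(event_data: event_data, headers: @headers)
  end
end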
test/models/dsn_auth_service_test.rb (new file, 68 lines)
@@ -0,0 +1,68 @@
# frozen_string_literal: true

require "test_helper"

class DsnAuthServiceTest < ActiveSupport::TestCase
  self.use_transactional_tests = true

  def setup
    @dsn = Dsn.create!(name: "Test DSN", key: "test-auth-key-1234567890abcdef")
  end

  def teardown
    Dsn.delete_all
  end

  test "should authenticate via query parameter baffle_key" do
    request = ActionDispatch::TestRequest.create
    request.query_parameters = { "baffle_key" => @dsn.key }

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should authenticate via Authorization Bearer header" do
    request = ActionDispatch::TestRequest.create
    request.headers["Authorization"] = "Bearer #{@dsn.key}"

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should authenticate via Basic auth with username as key" do
    request = ActionDispatch::TestRequest.create
    credentials = Base64.strict_encode64("#{@dsn.key}:ignored-password")
    request.headers["Authorization"] = "Basic #{credentials}"

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should fail authentication with disabled DSN" do
    @dsn.update!(enabled: false)

    request = ActionDispatch::TestRequest.create
    request.query_parameters = { "baffle_key" => @dsn.key }

    assert_raises(DsnAuthenticationService::AuthenticationError) do
      DsnAuthenticationService.authenticate(request)
    end
  end

  test "should fail authentication with non-existent key" do
    request = ActionDispatch::TestRequest.create
    request.query_parameters = { "baffle_key" => "non-existent-key" }

    assert_raises(DsnAuthenticationService::AuthenticationError) do
      DsnAuthenticationService.authenticate(request)
    end
  end

  test "should fail authentication with no authentication method" do
    request = ActionDispatch::TestRequest.create

    assert_raises(DsnAuthenticationService::AuthenticationError) do
      DsnAuthenticationService.authenticate(request)
    end
  end
end
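
The three authentication paths above (query parameter, Bearer token, Basic auth with the key as username) plus the failure cases pin down the service's contract. A rough sketch of a service satisfying them — not the implementation shipped in this PR — could look like:

# Hypothetical sketch of the contract exercised by DsnAuthServiceTest;
# the real DsnAuthenticationService in this PR may be structured differently.
class DsnAuthenticationService
  class AuthenticationError < StandardError; end

  def self.authenticate(request)
    key = extract_key(request)
    raise AuthenticationError, "no credentials supplied" if key.blank?

    Dsn.authenticate(key) || raise(AuthenticationError, "invalid or disabled DSN key")
  end

  def self.extract_key(request)
    return request.query_parameters["baffle_key"] if request.query_parameters["baffle_key"].present?

    auth = request.headers["Authorization"].to_s
    case auth
    when /\ABearer (.+)\z/ then Regexp.last_match(1)
    when /\ABasic (.+)\z/ then Base64.decode64(Regexp.last_match(1)).split(":", 2).first
    end
  end
  private_class_method :extract_key
end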
test/models/dsn_simple_test.rb (new file, 140 lines)
@@ -0,0 +1,140 @@
# frozen_string_literal: true

require "test_helper"

class DsnSimpleTest < ActiveSupport::TestCase
  # Don't use any fixtures
  self.use_transactional_tests = true

  def setup
    @dsn = Dsn.new(name: "Test DSN")
  end

  def teardown
    Dsn.delete_all
  end

  test "should be valid with valid attributes" do
    assert @dsn.valid?
  end

  test "should not be valid without name" do
    @dsn.name = nil
    assert_not @dsn.valid?
    assert_includes @dsn.errors[:name], "can't be blank"
  end

  test "should automatically generate key on create" do
    @dsn.save!
    assert_not_nil @dsn.key
    assert_equal 64, @dsn.key.length # hex(32) = 64 characters
    assert_match(/\A[a-f0-9]{64}\z/, @dsn.key)
  end

  test "should not override existing key when saved" do
    @dsn.key = "existing-key-123"
    @dsn.save!
    assert_equal "existing-key-123", @dsn.key
  end

  test "should enforce unique keys" do
    @dsn.save!
    dsn2 = Dsn.new(name: "Another DSN", key: @dsn.key)
    assert_not dsn2.valid?
    assert_includes dsn2.errors[:key], "has already been taken"
  end

  test "should default to enabled" do
    @dsn.save!
    assert @dsn.enabled?
  end

  test "should authenticate with valid key" do
    @dsn.save!
    authenticated_dsn = Dsn.authenticate(@dsn.key)
    assert_equal @dsn, authenticated_dsn
  end

  test "should not authenticate with invalid key" do
    @dsn.save!
    assert_nil Dsn.authenticate("invalid-key")
  end

  test "should not authenticate disabled DSNs" do
    @dsn.save!
    @dsn.update!(enabled: false)
    assert_nil Dsn.authenticate(@dsn.key)
  end

  # URL Generation Tests
  test "should generate full DSN URL in development" do
    @dsn.key = "test-key-1234567890abcdef"
    @dsn.save!

    expected = "http://test-key-1234567890abcdef@localhost"
    assert_equal expected, @dsn.full_dsn_url
  end

  test "should generate API endpoint URL in development" do
    @dsn.save!

    expected = "http://localhost"
    assert_equal expected, @dsn.api_endpoint_url
  end

  test "should use custom host from environment variable" do
    ENV['RAILS_HOST'] = 'baffle.example.com'

    @dsn.key = "custom-key-1234567890abcdef"
    @dsn.save!

    assert_equal "http://custom-key-1234567890abcdef@baffle.example.com", @dsn.full_dsn_url
    assert_equal "http://baffle.example.com", @dsn.api_endpoint_url

    ENV.delete('RAILS_HOST')
  end

  test "should handle long hex keys in URLs" do
    long_key = "c92b7f8ad94ea3400299d8a6ff19e409c2df8c4540022c3167b8ac1002931624"
    @dsn.key = long_key
    @dsn.save!

    expected = "http://#{long_key}@localhost"
    assert_equal expected, @dsn.full_dsn_url
  end

  # Scope Tests
  test "enabled scope should return only enabled DSNs" do
    enabled_dsn = Dsn.create!(name: "Enabled DSN", enabled: true)
    disabled_dsn = Dsn.create!(name: "Disabled DSN", enabled: false)

    enabled_dsns = Dsn.enabled

    assert_includes enabled_dsns, enabled_dsn
    assert_not_includes enabled_dsns, disabled_dsn
  end

  # Security Tests
  test "should generate cryptographically secure keys" do
    keys = []
    5.times do
      dsn = Dsn.create!(name: "Test DSN #{Time.current.to_f}")
      keys << dsn.key
    end

    # All keys should be unique
    assert_equal keys.length, keys.uniq.length

    # All keys should be valid hex
    keys.each do |key|
      assert_equal 64, key.length
      assert_match(/\A[a-f0-9]{64}\z/, key)
    end
  end

  test "should not allow nil keys" do
    @dsn.key = nil
    assert_not @dsn.valid?
    assert_includes @dsn.errors[:key], "can't be blank"
  end
end
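
The key assertions above (64 lowercase hex characters, an explicitly assigned key never overwritten) are consistent with a SecureRandom-based callback along these lines — a fragment sketch, not necessarily the exact model code in this PR:

# Hypothetical fragment: SecureRandom.hex(32) yields 64 lowercase hex characters,
# and ||= leaves an explicitly assigned key untouched.
def generate_key
  self.key ||= SecureRandom.hex(32)
end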
test/models/dsn_test.rb (new file, 162 lines)
@@ -0,0 +1,162 @@
|
|||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
require "test_helper"
|
||||||
|
|
||||||
|
class DsnTest < ActiveSupport::TestCase
|
||||||
|
# Disable fixtures since we're creating test data manually
|
||||||
|
self.use_instantiated_fixtures = false
|
||||||
|
def setup
|
||||||
|
@dsn = Dsn.new(name: "Test DSN")
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should be valid with valid attributes" do
|
||||||
|
assert @dsn.valid?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should not be valid without name" do
|
||||||
|
@dsn.name = nil
|
||||||
|
assert_not @dsn.valid?
|
||||||
|
assert_includes @dsn.errors[:name], "can't be blank"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should automatically generate key on create" do
|
||||||
|
@dsn.save!
|
||||||
|
assert_not_nil @dsn.key
|
||||||
|
assert_equal 64, @dsn.key.length # hex(32) = 64 characters
|
||||||
|
assert_match /\A[a-f0-9]{64}\z/, @dsn.key
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should not override existing key when saved" do
|
||||||
|
@dsn.key = "existing-key-123"
|
||||||
|
@dsn.save!
|
||||||
|
assert_equal "existing-key-123", @dsn.key
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should enforce unique keys" do
|
||||||
|
@dsn.save!
|
||||||
|
dsn2 = Dsn.new(name: "Another DSN", key: @dsn.key)
|
||||||
|
assert_not dsn2.valid?
|
||||||
|
assert_includes dsn2.errors[:key], "has already been taken"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should default to enabled" do
|
||||||
|
@dsn.save!
|
||||||
|
assert @dsn.enabled?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should authenticate with valid key" do
|
||||||
|
@dsn.save!
|
||||||
|
authenticated_dsn = Dsn.authenticate(@dsn.key)
|
||||||
|
assert_equal @dsn, authenticated_dsn
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should not authenticate with invalid key" do
|
||||||
|
@dsn.save!
|
||||||
|
assert_nil Dsn.authenticate("invalid-key")
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should not authenticate disabled DSNs" do
|
||||||
|
@dsn.save!
|
||||||
|
@dsn.update!(enabled: false)
|
||||||
|
assert_nil Dsn.authenticate(@dsn.key)
|
||||||
|
end
|
||||||
|
|
||||||
|
# URL Generation Tests
|
||||||
|
test "should generate full DSN URL in development" do
|
||||||
|
@dsn.key = "test-key-1234567890abcdef"
|
||||||
|
@dsn.save!
|
||||||
|
|
||||||
|
expected = "http://test-key-1234567890abcdef@localhost"
|
||||||
|
assert_equal expected, @dsn.full_dsn_url
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should generate API endpoint URL in development" do
|
||||||
|
@dsn.save!
|
||||||
|
|
||||||
|
expected = "http://localhost"
|
||||||
|
assert_equal expected, @dsn.api_endpoint_url
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should use HTTPS in production environment" do
|
||||||
|
# Temporarily switch to production environment
|
||||||
|
original_env = Rails.env
|
||||||
|
Rails.env = "production"
|
||||||
|
|
||||||
|
@dsn.key = "prod-key-1234567890abcdef"
|
||||||
|
@dsn.save!
|
||||||
|
|
||||||
|
assert_equal "https://prod-key-1234567890abcdef@localhost", @dsn.full_dsn_url
|
||||||
|
assert_equal "https://localhost", @dsn.api_endpoint_url
|
||||||
|
|
||||||
|
# Restore original environment
|
||||||
|
Rails.env = original_env
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should use custom host from environment variable" do
|
||||||
|
ENV['RAILS_HOST'] = 'baffle.example.com'
|
||||||
|
|
||||||
|
@dsn.key = "custom-key-1234567890abcdef"
|
||||||
|
@dsn.save!
|
||||||
|
|
||||||
|
assert_equal "http://custom-key-1234567890abcdef@baffle.example.com", @dsn.full_dsn_url
|
||||||
|
assert_equal "http://baffle.example.com", @dsn.api_endpoint_url
|
||||||
|
|
||||||
|
ENV.delete('RAILS_HOST')
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should use action mailer default host if configured" do
|
||||||
|
Rails.application.config.action_mailer.default_url_options = { host: 'mail.baffle.com' }
|
||||||
|
|
||||||
|
@dsn.key = "mail-key-1234567890abcdef"
|
||||||
|
@dsn.save!
|
||||||
|
|
||||||
|
assert_equal "http://mail-key-1234567890abcdef@mail.baffle.com", @dsn.full_dsn_url
|
||||||
|
assert_equal "http://mail.baffle.com", @dsn.api_endpoint_url
|
||||||
|
|
||||||
|
Rails.application.config.action_mailer.default_url_options = {}
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should handle long hex keys in URLs" do
|
||||||
|
long_key = "c92b7f8ad94ea3400299d8a6ff19e409c2df8c4540022c3167b8ac1002931624"
|
||||||
|
@dsn.key = long_key
|
||||||
|
@dsn.save!
|
||||||
|
|
||||||
|
expected = "http://#{long_key}@localhost"
|
||||||
|
assert_equal expected, @dsn.full_dsn_url
|
||||||
|
end
|
||||||
|
|
||||||
|
# Scope Tests
|
||||||
|
test "enabled scope should return only enabled DSNs" do
|
||||||
|
enabled_dsn = Dsn.create!(name: "Enabled DSN", enabled: true)
|
||||||
|
disabled_dsn = Dsn.create!(name: "Disabled DSN", enabled: false)
|
||||||
|
|
||||||
|
enabled_dsns = Dsn.enabled
|
||||||
|
|
||||||
|
assert_includes enabled_dsns, enabled_dsn
|
||||||
|
assert_not_includes enabled_dsns, disabled_dsn
|
||||||
|
end
|
||||||
|
|
||||||
|
# Security Tests
|
||||||
|
test "should generate cryptographically secure keys" do
|
||||||
|
keys = []
|
||||||
|
10.times do
|
||||||
|
dsn = Dsn.create!(name: "Test DSN #{Time.current.to_f}")
|
||||||
|
keys << dsn.key
|
||||||
|
end
|
||||||
|
|
||||||
|
# All keys should be unique
|
||||||
|
assert_equal keys.length, keys.uniq.length
|
||||||
|
|
||||||
|
# All keys should be valid hex
|
||||||
|
keys.each do |key|
|
||||||
|
assert_equal 64, key.length
|
||||||
|
assert_match /\A[a-f0-9]{64}\z/, key
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should not allow nil keys" do
|
||||||
|
@dsn.key = nil
|
||||||
|
assert_not @dsn.valid?
|
||||||
|
assert_includes @dsn.errors[:key], "can't be blank"
|
||||||
|
end
|
||||||
|
end
|
||||||
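
Taken together, the URL tests in dsn_simple_test.rb and dsn_test.rb describe a host/scheme lookup order: RAILS_HOST first, then Action Mailer's default host, then localhost, with https only in production. A hedged sketch of helpers matching that order follows; the PR's actual Dsn methods (and helper names such as url_host) may differ:

# Hypothetical sketch of the resolution order the URL tests describe.
def full_dsn_url
  "#{scheme}://#{key}@#{url_host}"
end

def api_endpoint_url
  "#{scheme}://#{url_host}"
end

private

def url_host
  ENV["RAILS_HOST"].presence ||
    Rails.application.config.action_mailer.default_url_options&.dig(:host) ||
    "localhost"
end

def scheme
  Rails.env.production? ? "https" : "http"
end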
@@ -5,7 +5,7 @@ require "test_helper"
|
|||||||
class EventTest < ActiveSupport::TestCase
|
class EventTest < ActiveSupport::TestCase
|
||||||
def setup
|
def setup
|
||||||
@sample_payload = {
|
@sample_payload = {
|
||||||
"event_id" => "test-event-123",
|
"request_id" => "test-event-123",
|
||||||
"timestamp" => Time.now.iso8601,
|
"timestamp" => Time.now.iso8601,
|
||||||
"request" => {
|
"request" => {
|
||||||
"ip" => "192.168.1.1",
|
"ip" => "192.168.1.1",
|
||||||
@@ -46,7 +46,7 @@ class EventTest < ActiveSupport::TestCase
|
|||||||
event = Event.create_from_waf_payload!("test-123", @sample_payload)
|
event = Event.create_from_waf_payload!("test-123", @sample_payload)
|
||||||
|
|
||||||
assert event.persisted?
|
assert event.persisted?
|
||||||
assert_equal "test-123", event.event_id
|
assert_equal "test-123", event.request_id
|
||||||
assert_equal "192.168.1.1", event.ip_address
|
assert_equal "192.168.1.1", event.ip_address
|
||||||
assert_equal "/api/test", event.request_path
|
assert_equal "/api/test", event.request_path
|
||||||
assert_equal 200, event.response_status
|
assert_equal 200, event.response_status
|
||||||
@@ -66,7 +66,7 @@ class EventTest < ActiveSupport::TestCase
|
|||||||
test_methods.each_with_index do |method, index|
|
test_methods.each_with_index do |method, index|
|
||||||
payload = @sample_payload.dup
|
payload = @sample_payload.dup
|
||||||
payload["request"]["method"] = method
|
payload["request"]["method"] = method
|
||||||
payload["event_id"] = "test-method-#{method.downcase}"
|
payload["request_id"] = "test-method-#{method.downcase}"
|
||||||
|
|
||||||
event = Event.create_from_waf_payload!("test-method-#{method.downcase}", payload)
|
event = Event.create_from_waf_payload!("test-method-#{method.downcase}", payload)
|
||||||
|
|
||||||
@@ -91,7 +91,7 @@ class EventTest < ActiveSupport::TestCase
|
|||||||
test_actions.each do |action, expected_enum, expected_int|
|
test_actions.each do |action, expected_enum, expected_int|
|
||||||
payload = @sample_payload.dup
|
payload = @sample_payload.dup
|
||||||
payload["waf_action"] = action
|
payload["waf_action"] = action
|
||||||
payload["event_id"] = "test-action-#{action}"
|
payload["request_id"] = "test-action-#{action}"
|
||||||
|
|
||||||
event = Event.create_from_waf_payload!("test-action-#{action}", payload)
|
event = Event.create_from_waf_payload!("test-action-#{action}", payload)
|
||||||
|
|
||||||
@@ -143,7 +143,7 @@ class EventTest < ActiveSupport::TestCase
|
|||||||
|
|
||||||
# Event 1: GET + allow
|
# Event 1: GET + allow
|
||||||
Event.create_from_waf_payload!("get-allow", {
|
Event.create_from_waf_payload!("get-allow", {
|
||||||
"event_id" => "get-allow",
|
"request_id" => "get-allow",
|
||||||
"timestamp" => Time.now.iso8601,
|
"timestamp" => Time.now.iso8601,
|
||||||
"request" => {
|
"request" => {
|
||||||
"ip" => "192.168.1.1",
|
"ip" => "192.168.1.1",
|
||||||
@@ -157,7 +157,7 @@ class EventTest < ActiveSupport::TestCase
|
|||||||
|
|
||||||
# Event 2: POST + allow
|
# Event 2: POST + allow
|
||||||
Event.create_from_waf_payload!("post-allow", {
|
Event.create_from_waf_payload!("post-allow", {
|
||||||
"event_id" => "post-allow",
|
"request_id" => "post-allow",
|
||||||
"timestamp" => Time.now.iso8601,
|
"timestamp" => Time.now.iso8601,
|
||||||
"request" => {
|
"request" => {
|
||||||
"ip" => "192.168.1.1",
|
"ip" => "192.168.1.1",
|
||||||
@@ -171,7 +171,7 @@ class EventTest < ActiveSupport::TestCase
|
|||||||
|
|
||||||
# Event 3: GET + deny
|
# Event 3: GET + deny
|
||||||
Event.create_from_waf_payload!("get-deny", {
|
Event.create_from_waf_payload!("get-deny", {
|
||||||
"event_id" => "get-deny",
|
"request_id" => "get-deny",
|
||||||
"timestamp" => Time.now.iso8601,
|
"timestamp" => Time.now.iso8601,
|
||||||
"request" => {
|
"request" => {
|
||||||
"ip" => "192.168.1.1",
|
"ip" => "192.168.1.1",
|
||||||
@@ -202,7 +202,7 @@ class EventTest < ActiveSupport::TestCase
|
|||||||
# Create event without enum values (simulating old data)
|
# Create event without enum values (simulating old data)
|
||||||
event = Event.create!(
|
event = Event.create!(
|
||||||
project: @project,
|
project: @project,
|
||||||
event_id: "normalization-test",
|
request_id: "normalization-test",
|
||||||
timestamp: Time.current,
|
timestamp: Time.current,
|
||||||
payload: @sample_payload,
|
payload: @sample_payload,
|
||||||
ip_address: "192.168.1.1",
|
ip_address: "192.168.1.1",
|
||||||
@@ -279,7 +279,7 @@ class EventTest < ActiveSupport::TestCase
|
|||||||
timestamps.each_with_index do |timestamp, index|
|
timestamps.each_with_index do |timestamp, index|
|
||||||
payload = @sample_payload.dup
|
payload = @sample_payload.dup
|
||||||
payload["timestamp"] = timestamp
|
payload["timestamp"] = timestamp
|
||||||
payload["event_id"] = "timestamp-test-#{index}"
|
payload["request_id"] = "timestamp-test-#{index}"
|
||||||
|
|
||||||
event = Event.create_from_waf_payload!("timestamp-test-#{index}", payload)
|
event = Event.create_from_waf_payload!("timestamp-test-#{index}", payload)
|
||||||
assert event.timestamp.is_a?(Time), "Timestamp #{index} should be parsed as Time"
|
assert event.timestamp.is_a?(Time), "Timestamp #{index} should be parsed as Time"
|
||||||
@@ -289,7 +289,7 @@ class EventTest < ActiveSupport::TestCase
|
|||||||
|
|
||||||
test "handles missing optional fields gracefully" do
|
test "handles missing optional fields gracefully" do
|
||||||
minimal_payload = {
|
minimal_payload = {
|
||||||
"event_id" => "minimal-test",
|
"request_id" => "minimal-test",
|
||||||
"timestamp" => Time.now.iso8601,
|
"timestamp" => Time.now.iso8601,
|
||||||
"request" => {
|
"request" => {
|
||||||
"ip" => "10.0.0.1",
|
"ip" => "10.0.0.1",
|
||||||
|
|||||||
test/models/network_range_test.rb (new file, 675 lines)
@@ -0,0 +1,675 @@
|
|||||||
|
require "test_helper"
|
||||||
|
|
||||||
|
class NetworkRangeTest < ActiveSupport::TestCase
|
||||||
|
setup do
|
||||||
|
@ipv4_range = NetworkRange.new(network: "192.168.1.0/24")
|
||||||
|
@ipv6_range = NetworkRange.new(network: "2001:db8::/32")
|
||||||
|
@user = users(:jason)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Validations
|
||||||
|
test "should be valid with network address" do
|
||||||
|
assert @ipv4_range.valid?
|
||||||
|
assert @ipv6_range.valid?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should not be valid without network" do
|
||||||
|
@ipv4_range.network = nil
|
||||||
|
assert_not @ipv4_range.valid?
|
||||||
|
assert_includes @ipv4_range.errors[:network], "can't be blank"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should validate network uniqueness" do
|
||||||
|
@ipv4_range.save!
|
||||||
|
duplicate = NetworkRange.new(network: "192.168.1.0/24")
|
||||||
|
assert_not duplicate.valid?
|
||||||
|
assert_includes duplicate.errors[:network], "has already been taken"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should validate source inclusion" do
|
||||||
|
valid_sources = %w[api_imported user_created manual auto_generated inherited geolite_asn geolite_country]
|
||||||
|
valid_sources.each do |source|
|
||||||
|
@ipv4_range.source = source
|
||||||
|
assert @ipv4_range.valid?, "Source #{source} should be valid"
|
||||||
|
end
|
||||||
|
|
||||||
|
@ipv4_range.source = "invalid_source"
|
||||||
|
assert_not @ipv4_range.valid?
|
||||||
|
assert_includes @ipv4_range.errors[:source], "is not included in the list"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should validate ASN numericality" do
|
||||||
|
@ipv4_range.asn = 12345
|
||||||
|
assert @ipv4_range.valid?
|
||||||
|
|
||||||
|
@ipv4_range.asn = 0
|
||||||
|
assert_not @ipv4_range.valid?
|
||||||
|
assert_includes @ipv4_range.errors[:asn], "must be greater than 0"
|
||||||
|
|
||||||
|
@ipv4_range.asn = -1
|
||||||
|
assert_not @ipv4_range.valid?
|
||||||
|
assert_includes @ipv4_range.errors[:asn], "must be greater than 0"
|
||||||
|
|
||||||
|
@ipv4_range.asn = "not_a_number"
|
||||||
|
assert_not @ipv4_range.valid?
|
||||||
|
|
||||||
|
@ipv4_range.asn = nil
|
||||||
|
assert @ipv4_range.valid?
|
||||||
|
end
|
||||||
|
|
||||||
|
# Callbacks
|
||||||
|
test "should set default source before validation" do
|
||||||
|
range = NetworkRange.new(network: "10.0.0.0/8")
|
||||||
|
range.valid?
|
||||||
|
assert_equal "api_imported", range.source
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should not override existing source" do
|
||||||
|
range = NetworkRange.new(network: "10.0.0.0/8", source: "user_created")
|
||||||
|
range.valid?
|
||||||
|
assert_equal "user_created", range.source
|
||||||
|
end
|
||||||
|
|
||||||
|
# Virtual Attributes (CIDR)
|
||||||
|
test "cidr getter returns network as string" do
|
||||||
|
@ipv4_range.save!
|
||||||
|
assert_equal "192.168.1.0/24", @ipv4_range.cidr
|
||||||
|
assert_equal "192.168.1.0/24", @ipv4_range.network.to_s
|
||||||
|
end
|
||||||
|
|
||||||
|
test "cidr setter sets network from string" do
|
||||||
|
range = NetworkRange.new
|
||||||
|
range.cidr = "10.0.0.0/16"
|
||||||
|
assert_equal "10.0.0.0/16", range.network.to_s
|
||||||
|
end
|
||||||
|
|
||||||
|
# Network Properties
|
||||||
|
test "prefix_length returns correct network prefix" do
|
||||||
|
@ipv4_range.save!
|
||||||
|
@ipv6_range.save!
|
||||||
|
|
||||||
|
assert_equal 24, @ipv4_range.prefix_length
|
||||||
|
assert_equal 32, @ipv6_range.prefix_length
|
||||||
|
end
|
||||||
|
|
||||||
|
test "network_address returns network address" do
|
||||||
|
@ipv4_range.save!
|
||||||
|
assert_equal "192.168.1.0", @ipv4_range.network_address
|
||||||
|
end
|
||||||
|
|
||||||
|
test "broadcast_address returns correct broadcast for IPv4" do
|
||||||
|
@ipv4_range.save!
|
||||||
|
assert_equal "192.168.1.255", @ipv4_range.broadcast_address
|
||||||
|
end
|
||||||
|
|
||||||
|
test "broadcast_address returns nil for IPv6" do
|
||||||
|
@ipv6_range.save!
|
||||||
|
assert_nil @ipv6_range.broadcast_address
|
||||||
|
end
|
||||||
|
|
||||||
|
test "family detection works correctly" do
|
||||||
|
@ipv4_range.save!
|
||||||
|
@ipv6_range.save!
|
||||||
|
|
||||||
|
assert_equal 4, @ipv4_range.family
|
||||||
|
assert_equal 6, @ipv6_range.family
|
||||||
|
end
|
||||||
|
|
||||||
|
test "ipv4? and ipv6? predicate methods work" do
|
||||||
|
@ipv4_range.save!
|
||||||
|
@ipv6_range.save!
|
||||||
|
|
||||||
|
assert @ipv4_range.ipv4?
|
||||||
|
assert_not @ipv4_range.ipv6?
|
||||||
|
|
||||||
|
assert @ipv6_range.ipv6?
|
||||||
|
assert_not @ipv6_range.ipv4?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "virtual? works correctly" do
|
||||||
|
range = NetworkRange.new(network: "10.0.0.0/8")
|
||||||
|
assert range.virtual?
|
||||||
|
|
||||||
|
range.save!
|
||||||
|
assert_not range.virtual?
|
||||||
|
end
|
||||||
|
|
||||||
|
# Network Containment
|
||||||
|
test "contains_ip? works correctly" do
|
||||||
|
@ipv4_range.save!
|
||||||
|
|
||||||
|
assert @ipv4_range.contains_ip?("192.168.1.1")
|
||||||
|
assert @ipv4_range.contains_ip?("192.168.1.254")
|
||||||
|
assert_not @ipv4_range.contains_ip?("192.168.2.1")
|
||||||
|
assert_not @ipv4_range.contains_ip?("2001:db8::1")
|
||||||
|
|
||||||
|
# Test IPv6
|
||||||
|
@ipv6_range.save!
|
||||||
|
assert @ipv6_range.contains_ip?("2001:db8::1")
|
||||||
|
assert @ipv6_range.contains_ip?("2001:db8:ffff::ffff")
|
||||||
|
assert_not @ipv6_range.contains_ip?("2001:db9::1")
|
||||||
|
end
|
||||||
|
|
||||||
|
test "contains_network? works correctly" do
|
||||||
|
@ipv4_range.save!
|
||||||
|
|
||||||
|
# More specific network
|
||||||
|
assert @ipv4_range.contains_network?("192.168.1.0/25")
|
||||||
|
assert @ipv4_range.contains_network?("192.168.1.128/25")
|
||||||
|
|
||||||
|
# Same network
|
||||||
|
assert @ipv4_range.contains_network?("192.168.1.0/24")
|
||||||
|
|
||||||
|
# Less specific network
|
||||||
|
assert_not @ipv4_range.contains_network?("192.168.0.0/16")
|
||||||
|
|
||||||
|
# Different network
|
||||||
|
assert_not @ipv4_range.contains_network?("10.0.0.0/8")
|
||||||
|
end
|
||||||
|
|
||||||
|
test "overlaps? works correctly" do
|
||||||
|
@ipv4_range.save!
|
||||||
|
|
||||||
|
# Overlapping networks
|
||||||
|
assert @ipv4_range.overlaps?("192.168.1.0/25") # More specific
|
||||||
|
assert @ipv4_range.overlaps?("192.168.0.0/23") # Less specific
|
||||||
|
assert @ipv4_range.overlaps?("192.168.1.128/25") # Partial overlap
|
||||||
|
|
||||||
|
# Non-overlapping
|
||||||
|
assert_not @ipv4_range.overlaps?("10.0.0.0/8")
|
||||||
|
assert_not @ipv4_range.overlaps?("172.16.0.0/12")
|
||||||
|
end
|
||||||
|
|
||||||
|
# Parent/Child Relationships
|
||||||
|
test "parent_ranges finds containing networks" do
|
||||||
|
# Create parent and child networks
|
||||||
|
parent = NetworkRange.create!(network: "192.168.0.0/16")
|
||||||
|
@ipv4_range.save! # 192.168.1.0/24
|
||||||
|
child = NetworkRange.create!(network: "192.168.1.0/25")
|
||||||
|
|
||||||
|
parents = @ipv4_range.parent_ranges
|
||||||
|
assert_includes parents, parent
|
||||||
|
assert_not_includes parents, child
|
||||||
|
assert_not_includes parents, @ipv4_range
|
||||||
|
|
||||||
|
# Should be ordered by specificity (more specific first)
|
||||||
|
assert_equal parent, parents.first
|
||||||
|
end
|
||||||
|
|
||||||
|
test "child_ranges finds contained networks" do
|
||||||
|
# Create parent and child networks
|
||||||
|
parent = NetworkRange.create!(network: "192.168.0.0/16")
|
||||||
|
@ipv4_range.save! # 192.168.1.0/24
|
||||||
|
child = NetworkRange.create!(network: "192.168.1.0/25")
|
||||||
|
|
||||||
|
children = parent.child_ranges
|
||||||
|
assert_includes children, @ipv4_range
|
||||||
|
assert_includes children, child
|
||||||
|
assert_not_includes children, parent
|
||||||
|
|
||||||
|
# Should be ordered by specificity (less specific first)
|
||||||
|
assert_equal @ipv4_range, children.first
|
||||||
|
end
|
||||||
|
|
||||||
|
test "sibling_ranges finds same-level networks" do
|
||||||
|
# Create sibling networks
|
||||||
|
sibling1 = NetworkRange.create!(network: "192.168.0.0/24")
|
||||||
|
@ipv4_range.save! # 192.168.1.0/24
|
||||||
|
sibling2 = NetworkRange.create!(network: "192.168.2.0/24")
|
||||||
|
|
||||||
|
siblings = @ipv4_range.sibling_ranges
|
||||||
|
assert_includes siblings, sibling1
|
||||||
|
assert_includes siblings, sibling2
|
||||||
|
assert_not_includes siblings, @ipv4_range
|
||||||
|
end
|
||||||
|
|
||||||
|
# Intelligence and Inheritance
|
||||||
|
test "has_intelligence? detects available intelligence data" do
|
||||||
|
range = NetworkRange.new(network: "10.0.0.0/8")
|
||||||
|
assert_not range.has_intelligence?
|
||||||
|
|
||||||
|
range.asn = 12345
|
||||||
|
assert range.has_intelligence?
|
||||||
|
|
||||||
|
range.asn = nil
|
||||||
|
range.company = "Test Company"
|
||||||
|
assert range.has_intelligence?
|
||||||
|
|
||||||
|
range.company = nil
|
||||||
|
range.country = "US"
|
||||||
|
assert range.has_intelligence?
|
||||||
|
|
||||||
|
range.country = nil
|
||||||
|
range.is_datacenter = true
|
||||||
|
assert range.has_intelligence?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "own_intelligence returns correct data structure" do
|
||||||
|
range = NetworkRange.create!(
|
||||||
|
network: "10.0.0.0/8",
|
||||||
|
asn: 12345,
|
||||||
|
asn_org: "Test ASN",
|
||||||
|
company: "Test Company",
|
||||||
|
country: "US",
|
||||||
|
is_datacenter: true,
|
||||||
|
is_proxy: false,
|
||||||
|
is_vpn: false,
|
||||||
|
source: "manual"
|
||||||
|
)
|
||||||
|
|
||||||
|
intelligence = range.own_intelligence
|
||||||
|
assert_equal 12345, intelligence[:asn]
|
||||||
|
assert_equal "Test ASN", intelligence[:asn_org]
|
||||||
|
assert_equal "Test Company", intelligence[:company]
|
||||||
|
assert_equal "US", intelligence[:country]
|
||||||
|
assert_equal true, intelligence[:is_datacenter]
|
||||||
|
assert_equal false, intelligence[:is_proxy]
|
||||||
|
assert_equal false, intelligence[:is_vpn]
|
||||||
|
assert_equal false, intelligence[:inherited]
|
||||||
|
assert_equal "manual", intelligence[:source]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "inherited_intelligence returns own data when available" do
|
||||||
|
child = NetworkRange.create!(
|
||||||
|
network: "192.168.1.0/24",
|
||||||
|
country: "US",
|
||||||
|
company: "Test Company"
|
||||||
|
)
|
||||||
|
|
||||||
|
intelligence = child.inherited_intelligence
|
||||||
|
assert_equal "US", intelligence[:country]
|
||||||
|
assert_equal "Test Company", intelligence[:company]
|
||||||
|
assert_equal false, intelligence[:inherited]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "inherited_intelligence inherits from parent when needed" do
|
||||||
|
parent = NetworkRange.create!(
|
||||||
|
network: "192.168.0.0/16",
|
||||||
|
country: "US",
|
||||||
|
company: "Test Company"
|
||||||
|
)
|
||||||
|
child = NetworkRange.create!(network: "192.168.1.0/24")
|
||||||
|
|
||||||
|
intelligence = child.inherited_intelligence
|
||||||
|
assert_equal "US", intelligence[:country]
|
||||||
|
assert_equal "Test Company", intelligence[:company]
|
||||||
|
assert_equal true, intelligence[:inherited]
|
||||||
|
assert_equal parent.cidr, intelligence[:parent_cidr]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "parent_with_intelligence finds nearest parent with data" do
|
||||||
|
grandparent = NetworkRange.create!(network: "10.0.0.0/8", country: "US")
|
||||||
|
parent = NetworkRange.create!(network: "10.1.0.0/16")
|
||||||
|
child = NetworkRange.create!(network: "10.1.1.0/24")
|
||||||
|
|
||||||
|
found_parent = child.parent_with_intelligence
|
||||||
|
assert_equal grandparent, found_parent
|
||||||
|
assert_not_equal parent, found_parent
|
||||||
|
end
|
||||||
|
|
||||||
|
# API Data Management
|
||||||
|
test "is_fetching_api_data? tracks active fetches" do
|
||||||
|
range = NetworkRange.create!(network: "10.0.0.0/8")
|
||||||
|
|
||||||
|
assert_not range.is_fetching_api_data?(:ipapi)
|
||||||
|
|
||||||
|
range.mark_as_fetching_api_data!(:ipapi)
|
||||||
|
assert range.is_fetching_api_data?(:ipapi)
|
||||||
|
|
||||||
|
range.clear_fetching_status!(:ipapi)
|
||||||
|
assert_not range.is_fetching_api_data?(:ipapi)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should_fetch_api_data? prevents duplicate fetches" do
|
||||||
|
range = NetworkRange.create!(network: "10.0.0.0/8")
|
||||||
|
|
||||||
|
# Should fetch initially
|
||||||
|
assert range.should_fetch_api_data?(:ipapi)
|
||||||
|
|
||||||
|
# Should not fetch while fetching
|
||||||
|
range.mark_as_fetching_api_data!(:ipapi)
|
||||||
|
assert_not range.should_fetch_api_data?(:ipapi)
|
||||||
|
|
||||||
|
# Should fetch again after clearing
|
||||||
|
range.clear_fetching_status!(:ipapi)
|
||||||
|
assert range.should_fetch_api_data?(:ipapi)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "has_ipapi_data_available? checks inheritance" do
|
||||||
|
parent = NetworkRange.create!(network: "10.0.0.0/8")
|
||||||
|
parent.set_network_data(:ipapi, { country: "US" })
|
||||||
|
parent.save!
|
||||||
|
|
||||||
|
child = NetworkRange.create!(network: "10.0.1.0/24")
|
||||||
|
|
||||||
|
assert child.has_ipapi_data_available?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should_fetch_ipapi_data? respects parent fetching status" do
|
||||||
|
parent = NetworkRange.create!(network: "10.0.0.0/8")
|
||||||
|
child = NetworkRange.create!(network: "10.0.1.0/24")
|
||||||
|
|
||||||
|
parent.mark_as_fetching_api_data!(:ipapi)
|
||||||
|
assert_not child.should_fetch_ipapi_data?
|
||||||
|
|
||||||
|
parent.clear_fetching_status!(:ipapi)
|
||||||
|
assert child.should_fetch_ipapi_data?
|
||||||
|
end
|
||||||
|
|
||||||
|
# Network Data Management
|
||||||
|
test "network_data_for and set_network_data work correctly" do
|
||||||
|
range = NetworkRange.create!(network: "10.0.0.0/8")
|
||||||
|
|
||||||
|
assert_equal({}, range.network_data_for(:ipapi))
|
||||||
|
|
||||||
|
data = { country: "US", city: "New York" }
|
||||||
|
range.set_network_data(:ipapi, data)
|
||||||
|
range.save!
|
||||||
|
|
||||||
|
assert_equal data, range.network_data_for(:ipapi)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "has_network_data_from? checks data presence" do
|
||||||
|
range = NetworkRange.create!(network: "10.0.0.0/8")
|
||||||
|
|
||||||
|
assert_not range.has_network_data_from?(:ipapi)
|
||||||
|
|
||||||
|
range.set_network_data(:ipapi, { country: "US" })
|
||||||
|
range.save!
|
||||||
|
|
||||||
|
assert range.has_network_data_from?(:ipapi)
|
||||||
|
end
|
||||||
|
|
||||||
|
# IPAPI Tracking Methods
|
||||||
|
test "find_or_create_tracking_network_for_ip works correctly" do
|
||||||
|
# IPv4 - should create /24
|
||||||
|
tracking_range = NetworkRange.find_or_create_tracking_network_for_ip("192.168.1.100")
|
||||||
|
assert_equal "192.168.1.0/24", tracking_range.network.to_s
|
||||||
|
assert_equal "auto_generated", tracking_range.source
|
||||||
|
assert_equal "IPAPI tracking network", tracking_range.creation_reason
|
||||||
|
|
||||||
|
# IPv6 - should create /64
|
||||||
|
ipv6_tracking = NetworkRange.find_or_create_tracking_network_for_ip("2001:db8::1")
|
||||||
|
assert_equal "2001:db8::/64", ipv6_tracking.network.to_s
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should_fetch_ipapi_for_ip? works correctly" do
|
||||||
|
tracking_range = NetworkRange.create!(network: "192.168.1.0/8")
|
||||||
|
|
||||||
|
# Should fetch initially
|
||||||
|
assert NetworkRange.should_fetch_ipapi_for_ip?("192.168.1.100")
|
||||||
|
|
||||||
|
# Mark as queried recently
|
||||||
|
tracking_range.mark_ipapi_queried!("192.168.1.0/24")
|
||||||
|
assert_not NetworkRange.should_fetch_ipapi_for_ip?("192.168.1.100")
|
||||||
|
|
||||||
|
# Should fetch for old queries
|
||||||
|
tracking_range.network_data['ipapi_queried_at'] = 2.years.ago.to_i
|
||||||
|
tracking_range.save!
|
||||||
|
assert NetworkRange.should_fetch_ipapi_for_ip?("192.168.1.100")
|
||||||
|
end
|
||||||
|
|
||||||
|
test "mark_ipapi_queried! stores query metadata" do
|
||||||
|
range = NetworkRange.create!(network: "192.168.1.0/24")
|
||||||
|
|
||||||
|
range.mark_ipapi_queried!("192.168.1.128/25")
|
||||||
|
|
||||||
|
assert range.network_data['ipapi_queried_at'] > 5.seconds.ago.to_i
|
||||||
|
assert_equal "192.168.1.128/25", range.network_data['ipapi_returned_cidr']
|
||||||
|
end
|
||||||
|
|
||||||
|
# JSON Helper Methods
|
||||||
|
test "abuser_scores_hash handles JSON correctly" do
|
||||||
|
range = NetworkRange.create!(network: "10.0.0.0/8")
|
||||||
|
|
||||||
|
assert_equal({}, range.abuser_scores_hash)
|
||||||
|
|
||||||
|
range.abuser_scores_hash = { ipquality: 85, abuseipdb: 92 }
|
||||||
|
range.save!
|
||||||
|
|
||||||
|
assert_equal({ "ipquality" => 85, "abuseipdb" => 92 }, JSON.parse(range.abuser_scores))
|
||||||
|
assert_equal({ ipquality: 85, abuseipdb: 92 }, range.abuser_scores_hash)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "additional_data_hash handles JSON correctly" do
|
||||||
|
range = NetworkRange.create!(network: "10.0.0.0/8")
|
||||||
|
|
||||||
|
assert_equal({}, range.additional_data_hash)
|
||||||
|
|
||||||
|
range.additional_data_hash = { tags: ["malicious", "botnet"], notes: "Test data" }
|
||||||
|
range.save!
|
||||||
|
|
||||||
|
parsed_data = JSON.parse(range.additional_data)
|
||||||
|
assert_equal ["malicious", "botnet"], parsed_data["tags"]
|
||||||
|
assert_equal "Test data", parsed_data["notes"]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Scopes
|
||||||
|
test "ipv4 and ipv6 scopes work correctly" do
|
||||||
|
ipv4_range = NetworkRange.create!(network: "192.168.1.0/24")
|
||||||
|
ipv6_range = NetworkRange.create!(network: "2001:db8::/32")
|
||||||
|
|
||||||
|
assert_includes NetworkRange.ipv4, ipv4_range
|
||||||
|
assert_not_includes NetworkRange.ipv4, ipv6_range
|
||||||
|
|
||||||
|
assert_includes NetworkRange.ipv6, ipv6_range
|
||||||
|
assert_not_includes NetworkRange.ipv6, ipv4_range
|
||||||
|
end
|
||||||
|
|
||||||
|
test "filtering scopes work correctly" do
|
||||||
|
range1 = NetworkRange.create!(network: "192.168.1.0/24", country: "US", company: "Google", asn: 15169, is_datacenter: true)
|
||||||
|
range2 = NetworkRange.create!(network: "10.0.0.0/8", country: "BR", company: "Amazon", asn: 16509, is_proxy: true)
|
||||||
|
|
||||||
|
assert_includes NetworkRange.by_country("US"), range1
|
||||||
|
assert_not_includes NetworkRange.by_country("US"), range2
|
||||||
|
|
||||||
|
assert_includes NetworkRange.by_company("Google"), range1
|
||||||
|
assert_not_includes NetworkRange.by_company("Google"), range2
|
||||||
|
|
||||||
|
assert_includes NetworkRange.by_asn(15169), range1
|
||||||
|
assert_not_includes NetworkRange.by_asn(15169), range2
|
||||||
|
|
||||||
|
assert_includes NetworkRange.datacenter, range1
|
||||||
|
assert_not_includes NetworkRange.datacenter, range2
|
||||||
|
|
||||||
|
assert_includes NetworkRange.proxy, range2
|
||||||
|
assert_not_includes NetworkRange.proxy, range1
|
||||||
|
end
|
||||||
|
|
||||||
|
# Class Methods
|
||||||
|
test "contains_ip class method finds most specific network" do
|
||||||
|
parent = NetworkRange.create!(network: "192.168.0.0/16")
|
||||||
|
child = NetworkRange.create!(network: "192.168.1.0/24")
|
||||||
|
|
||||||
|
found = NetworkRange.contains_ip("192.168.1.100")
|
||||||
|
assert_equal child, found.first # More specific should come first
|
||||||
|
end
|
||||||
|
|
||||||
|
test "overlapping class method finds overlapping networks" do
|
||||||
|
range1 = NetworkRange.create!(network: "192.168.0.0/16")
|
||||||
|
range2 = NetworkRange.create!(network: "192.168.1.0/24")
|
||||||
|
range3 = NetworkRange.create!(network: "10.0.0.0/8")
|
||||||
|
|
||||||
|
overlapping = NetworkRange.overlapping("192.168.1.0/24")
|
||||||
|
assert_includes overlapping, range1
|
||||||
|
assert_includes overlapping, range2
|
||||||
|
assert_not_includes overlapping, range3
|
||||||
|
end
|
||||||
|
|
||||||
|
test "find_or_create_by_cidr works correctly" do
|
||||||
|
# Creates new record
|
||||||
|
range = NetworkRange.find_or_create_by_cidr("10.0.0.0/8", user: @user, source: "manual")
|
||||||
|
assert range.persisted?
|
||||||
|
assert_equal "10.0.0.0/8", range.network.to_s
|
||||||
|
assert_equal @user, range.user
|
||||||
|
assert_equal "manual", range.source
|
||||||
|
|
||||||
|
# Returns existing record
|
||||||
|
existing = NetworkRange.find_or_create_by_cidr("10.0.0.0/8")
|
||||||
|
assert_equal range.id, existing.id
|
||||||
|
end
|
||||||
|
|
||||||
|
test "find_by_ip_or_network handles both IP and network inputs" do
|
||||||
|
range = NetworkRange.create!(network: "192.168.1.0/24")
|
||||||
|
|
||||||
|
# Find by IP within range
|
||||||
|
found_by_ip = NetworkRange.find_by_ip_or_network("192.168.1.100")
|
||||||
|
assert_includes found_by_ip, range
|
||||||
|
|
||||||
|
# Find by exact network
|
||||||
|
found_by_network = NetworkRange.find_by_ip_or_network("192.168.1.0/24")
|
||||||
|
assert_includes found_by_network, range
|
||||||
|
|
||||||
|
# Return none for invalid input
|
||||||
|
assert_equal NetworkRange.none, NetworkRange.find_by_ip_or_network("")
|
||||||
|
assert_equal NetworkRange.none, NetworkRange.find_by_ip_or_network("invalid")
|
||||||
|
end
|
||||||
|
|
||||||
|
# Analytics Methods
|
||||||
|
test "events_count returns counter cache value" do
|
||||||
|
range = NetworkRange.create!(network: "192.168.1.0/24")
|
||||||
|
|
||||||
|
assert_equal 0, range.events_count
|
||||||
|
|
||||||
|
# Update counter cache manually for testing
|
||||||
|
range.update_column(:events_count, 5)
|
||||||
|
assert_equal 5, range.events_count
|
||||||
|
end
|
||||||
|
|
||||||
|
test "events method finds events within range" do
|
||||||
|
range = NetworkRange.create!(network: "192.168.1.0/24")
|
||||||
|
|
||||||
|
# Create test events
|
||||||
|
matching_event = Event.create!(
|
||||||
|
request_id: "test-1",
|
||||||
|
timestamp: Time.current,
|
||||||
|
payload: {},
|
||||||
|
ip_address: "192.168.1.100"
|
||||||
|
)
|
||||||
|
non_matching_event = Event.create!(
|
||||||
|
request_id: "test-2",
|
||||||
|
timestamp: Time.current,
|
||||||
|
payload: {},
|
||||||
|
ip_address: "10.0.0.1"
|
||||||
|
)
|
||||||
|
|
||||||
|
found_events = range.events
|
||||||
|
assert_includes found_events, matching_event
|
||||||
|
assert_not_includes found_events, non_matching_event
|
||||||
|
end
|
||||||
|
|
||||||
|
test "blocking_rules and active_rules work correctly" do
|
||||||
|
range = NetworkRange.create!(network: "192.168.1.0/24")
|
||||||
|
|
||||||
|
blocking_rule = Rule.create!(
|
||||||
|
rule_type: "network",
|
||||||
|
action: "deny",
|
||||||
|
network_range: range,
|
||||||
|
user: @user,
|
||||||
|
enabled: true
|
||||||
|
)
|
||||||
|
allow_rule = Rule.create!(
|
||||||
|
rule_type: "network",
|
||||||
|
action: "allow",
|
||||||
|
network_range: range,
|
||||||
|
user: @user,
|
||||||
|
enabled: true
|
||||||
|
)
|
||||||
|
disabled_rule = Rule.create!(
|
||||||
|
rule_type: "network",
|
||||||
|
action: "deny",
|
||||||
|
network_range: range,
|
||||||
|
user: @user,
|
||||||
|
enabled: false
|
||||||
|
)
|
||||||
|
|
||||||
|
assert_includes range.blocking_rules, blocking_rule
|
||||||
|
assert_not_includes range.blocking_rules, allow_rule
|
||||||
|
assert_not_includes range.blocking_rules, disabled_rule
|
||||||
|
|
||||||
|
assert_includes range.active_rules, blocking_rule
|
||||||
|
assert_includes range.active_rules, allow_rule
|
||||||
|
assert_not_includes range.active_rules, disabled_rule
|
||||||
|
end
|
||||||
|
|
||||||
|
# Policy Evaluation
|
||||||
|
test "needs_policy_evaluation? works correctly" do
|
||||||
|
range = NetworkRange.create!(network: "192.168.1.0/24")
|
||||||
|
|
||||||
|
# Should evaluate if never evaluated
|
||||||
|
assert range.needs_policy_evaluation?
|
||||||
|
|
||||||
|
# Should evaluate if policies updated since last evaluation
|
||||||
|
range.update!(policies_evaluated_at: 1.hour.ago)
|
||||||
|
WafPolicy.create!(name: "Test Policy", policy_type: "country", targets: ["US"], policy_action: "deny", user: @user)
|
||||||
|
assert range.needs_policy_evaluation?
|
||||||
|
|
||||||
|
# Should not evaluate if up to date
|
||||||
|
range.update!(policies_evaluated_at: 5.minutes.ago)
|
||||||
|
assert_not range.needs_policy_evaluation?
|
||||||
|
end
|
||||||
|
|
||||||
|
# String Representations
|
||||||
|
test "to_s returns cidr" do
|
||||||
|
@ipv4_range.save!
|
||||||
|
assert_equal @ipv4_range.cidr, @ipv4_range.to_s
|
||||||
|
end
|
||||||
|
|
||||||
|
test "to_param parameterizes cidr" do
|
||||||
|
@ipv4_range.save!
|
||||||
|
assert_equal "192.168.1.0_24", @ipv4_range.to_param
|
||||||
|
end
|
||||||
|
|
||||||
|
# Geographic Lookup
|
||||||
|
test "geo_lookup_country! updates country when available" do
|
||||||
|
range = NetworkRange.create!(network: "8.8.8.0/24") # Google's network
|
||||||
|
|
||||||
|
# Mock GeoIpService
|
||||||
|
GeoIpService.expects(:lookup_country).with("8.8.8.0").returns("US")
|
||||||
|
|
||||||
|
range.geo_lookup_country!
|
||||||
|
assert_equal "US", range.reload.country
|
||||||
|
end
|
||||||
|
|
||||||
|
test "geo_lookup_country! handles errors gracefully" do
|
||||||
|
range = NetworkRange.create!(network: "192.168.1.0/24")
|
||||||
|
|
||||||
|
# Mock GeoIpService to raise error
|
||||||
|
GeoIpService.expects(:lookup_country).with("192.168.1.0").raises(StandardError.new("Service error"))
|
||||||
|
|
||||||
|
# Should not raise error but log it
|
||||||
|
assert_nothing_raised do
|
||||||
|
range.geo_lookup_country!
|
||||||
|
end
|
||||||
|
|
||||||
|
assert_nil range.reload.country
|
||||||
|
end
|
||||||
|
|
||||||
|
# Stats Methods
|
||||||
|
test "import_stats_by_source returns statistics" do
|
||||||
|
NetworkRange.create!(network: "10.0.0.0/8", source: "manual")
|
||||||
|
NetworkRange.create!(network: "192.168.1.0/24", source: "api_imported")
|
||||||
|
NetworkRange.create!(network: "172.16.0.0/12", source: "api_imported")
|
||||||
|
|
||||||
|
stats = NetworkRange.import_stats_by_source
|
||||||
|
assert_equal 2, stats.count
|
||||||
|
|
||||||
|
api_stats = stats.find { |s| s.source == "api_imported" }
|
||||||
|
assert_equal 2, api_stats.count
|
||||||
|
end
|
||||||
|
|
||||||
|
test "geolite_coverage_stats returns detailed coverage information" do
|
||||||
|
NetworkRange.create!(network: "10.0.0.0/8", source: "geolite_asn", asn: 12345)
|
||||||
|
NetworkRange.create!(network: "192.168.1.0/24", source: "geolite_country", country: "US")
|
||||||
|
NetworkRange.create!(network: "172.16.0.0/12", source: "geolite_asn", country: "BR")
|
||||||
|
|
||||||
|
stats = NetworkRange.geolite_coverage_stats
|
||||||
|
assert_equal 3, stats[:total_networks]
|
||||||
|
assert_equal 2, stats[:asn_networks]
|
||||||
|
assert_equal 1, stats[:country_networks]
|
||||||
|
assert_equal 2, stats[:with_asn_data]
|
||||||
|
assert_equal 1, stats[:with_country_data]
|
||||||
|
assert_equal 2, stats[:unique_countries]
|
||||||
|
assert_equal 2, stats[:unique_asns]
|
||||||
|
end
|
||||||
|
end
|
||||||
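
The tracking-network tests above fix the bucket sizes (a /24 per IPv4 address, a /64 per IPv6 address) and the metadata stamped on auto-created ranges. A sketch of a class method consistent with those assertions — the PR's actual implementation is not shown here:

# Hypothetical sketch of the lookup the tests describe.
def self.find_or_create_tracking_network_for_ip(ip_string)
  addr = IPAddr.new(ip_string)
  prefix = addr.ipv4? ? 24 : 64
  cidr = "#{addr.mask(prefix)}/#{prefix}"

  find_or_create_by!(network: cidr) do |range|
    range.source = "auto_generated"
    range.creation_reason = "IPAPI tracking network"
  end
end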
@@ -3,25 +3,30 @@
|
|||||||
require "test_helper"
|
require "test_helper"
|
||||||
|
|
||||||
class RuleTest < ActiveSupport::TestCase
|
class RuleTest < ActiveSupport::TestCase
|
||||||
|
|
||||||
# Validation tests
|
# Validation tests
|
||||||
test "should create valid network_v4 rule" do
|
test "should create valid network rule" do
|
||||||
|
network_range = NetworkRange.create!(cidr: "10.0.0.0/8")
|
||||||
rule = Rule.new(
|
rule = Rule.new(
|
||||||
rule_type: "network_v4",
|
waf_rule_type: "network",
|
||||||
action: "deny",
|
waf_action: "deny",
|
||||||
conditions: { cidr: "10.0.0.0/8" },
|
network_range: network_range,
|
||||||
source: "manual"
|
source: "manual",
|
||||||
|
user: users(:one)
|
||||||
)
|
)
|
||||||
assert rule.valid?
|
assert rule.valid?
|
||||||
rule.save!
|
rule.save!
|
||||||
assert_equal 8, rule.priority # Auto-calculated from CIDR prefix
|
assert_equal 8, rule.priority # Auto-calculated from CIDR prefix
|
||||||
end
|
end
|
||||||
|
|
||||||
test "should create valid network_v6 rule" do
|
test "should create valid network rule with IPv6" do
|
||||||
|
network_range = NetworkRange.create!(cidr: "2001:db8::/32")
|
||||||
rule = Rule.new(
|
rule = Rule.new(
|
||||||
rule_type: "network_v6",
|
waf_rule_type: "network",
|
||||||
action: "deny",
|
waf_action: "deny",
|
||||||
conditions: { cidr: "2001:db8::/32" },
|
network_range: network_range,
|
||||||
source: "manual"
|
source: "manual",
|
||||||
|
user: users(:one)
|
||||||
)
|
)
|
||||||
assert rule.valid?
|
assert rule.valid?
|
||||||
rule.save!
|
rule.save!
|
||||||
@@ -30,53 +35,58 @@ class RuleTest < ActiveSupport::TestCase
|
|||||||
|
|
||||||
test "should create valid rate_limit rule" do
|
test "should create valid rate_limit rule" do
|
||||||
rule = Rule.new(
|
rule = Rule.new(
|
||||||
rule_type: "rate_limit",
|
waf_rule_type: "rate_limit",
|
||||||
action: "rate_limit",
|
waf_action: "rate_limit",
|
||||||
conditions: { cidr: "0.0.0.0/0", scope: "global" },
|
conditions: { cidr: "0.0.0.0/0", scope: "global" },
|
||||||
metadata: { limit: 100, window: 60 },
|
metadata: { limit: 100, window: 60 },
|
||||||
source: "manual"
|
source: "manual",
|
||||||
|
user: users(:one)
|
||||||
)
|
)
|
||||||
assert rule.valid?
|
assert rule.valid?
|
||||||
end
|
end
|
||||||
|
|
||||||
test "should create valid path_pattern rule" do
|
test "should create valid path_pattern rule" do
|
||||||
rule = Rule.new(
|
rule = Rule.new(
|
||||||
rule_type: "path_pattern",
|
waf_rule_type: "path_pattern",
|
-      action: "log",
+      waf_action: "log",
       conditions: { patterns: ["/.env", "/.git"] },
-      source: "default"
+      source: "default",
+      user: users(:one)
     )
     assert rule.valid?
   end

-  test "should require rule_type" do
-    rule = Rule.new(action: "deny", conditions: { cidr: "10.0.0.0/8" })
+  test "should require waf_rule_type" do
+    rule = Rule.new(waf_action: "deny", waf_rule_type: nil, conditions: { patterns: ["/test"] }, user: users(:one))
     assert_not rule.valid?
-    assert_includes rule.errors[:rule_type], "can't be blank"
+    assert_includes rule.errors[:waf_rule_type], "can't be blank"
   end

-  test "should require action" do
-    rule = Rule.new(rule_type: "network_v4", conditions: { cidr: "10.0.0.0/8" })
+  test "should require waf_action" do
+    rule = Rule.new(waf_rule_type: "path_pattern", waf_action: nil, conditions: { patterns: ["/test"] }, user: users(:one))
     assert_not rule.valid?
-    assert_includes rule.errors[:action], "can't be blank"
+    assert_includes rule.errors[:waf_action], "can't be blank"
   end

-  test "should validate network_v4 has valid IPv4 CIDR" do
+  test "should validate network has valid CIDR" do
     rule = Rule.new(
-      rule_type: "network_v4",
-      action: "deny",
-      conditions: { cidr: "2001:db8::/32" } # IPv6 in IPv4 rule
+      waf_rule_type: "network",
+      waf_action: "deny",
+      conditions: { cidr: "invalid-cidr" }, # Invalid CIDR
+      user: users(:one)
     )
     assert_not rule.valid?
-    assert_includes rule.errors[:conditions], "cidr must be IPv4 for network_v4 rules"
+    # Network rules now validate differently - they need a network_range
+    assert_includes rule.errors[:network_range], "is required for network rules"
   end

   test "should validate rate_limit has limit and window in metadata" do
     rule = Rule.new(
-      rule_type: "rate_limit",
-      action: "rate_limit",
+      waf_rule_type: "rate_limit",
+      waf_action: "rate_limit",
       conditions: { cidr: "0.0.0.0/0", scope: "global" },
-      metadata: { limit: 100 } # Missing window
+      metadata: { limit: 100 }, # Missing window
+      user: users(:one)
     )
     assert_not rule.valid?
     assert_includes rule.errors[:metadata], "must include 'limit' and 'window' for rate_limit rules"
@@ -84,46 +94,56 @@ class RuleTest < ActiveSupport::TestCase

   # Default value tests
   test "should default enabled to true" do
+    network_range = NetworkRange.create!(cidr: "10.0.0.0/8")
     rule = Rule.create!(
-      rule_type: "network_v4",
-      action: "deny",
-      conditions: { cidr: "10.0.0.0/8" }
+      waf_rule_type: "network",
+      waf_action: "deny",
+      network_range: network_range,
+      user: users(:one)
     )
     assert rule.enabled?
   end

   # Priority calculation tests
   test "should calculate priority from IPv4 CIDR prefix" do
+    network_range = NetworkRange.create!(cidr: "192.168.1.0/24")
     rule = Rule.create!(
-      rule_type: "network_v4",
-      action: "deny",
-      conditions: { cidr: "192.168.1.0/24" }
+      waf_rule_type: "network",
+      waf_action: "deny",
+      network_range: network_range,
+      user: users(:one)
     )
     assert_equal 24, rule.priority
   end

   # Scope tests
   test "active scope returns enabled and non-expired rules" do
-    active = Rule.create!(
-      rule_type: "network_v4",
-      action: "deny",
-      conditions: { cidr: "10.0.0.0/8" },
-      enabled: true
-    )
-
-    disabled = Rule.create!(
-      rule_type: "network_v4",
-      action: "deny",
-      conditions: { cidr: "192.168.0.0/16" },
-      enabled: false
-    )
-
-    expired = Rule.create!(
-      rule_type: "network_v4",
-      action: "deny",
-      conditions: { cidr: "172.16.0.0/12" },
-      enabled: true,
-      expires_at: 1.hour.ago
-    )
+    active_range = NetworkRange.create!(cidr: "10.0.0.0/8")
+    active = Rule.create!(
+      waf_rule_type: "network",
+      waf_action: "deny",
+      network_range: active_range,
+      enabled: true,
+      user: users(:one)
+    )
+
+    disabled_range = NetworkRange.create!(cidr: "192.168.0.0/16")
+    disabled = Rule.create!(
+      waf_rule_type: "network",
+      waf_action: "deny",
+      network_range: disabled_range,
+      enabled: false,
+      user: users(:one)
+    )
+
+    expired_range = NetworkRange.create!(cidr: "172.16.0.0/12")
+    expired = Rule.create!(
+      waf_rule_type: "network",
+      waf_action: "deny",
+      network_range: expired_range,
+      enabled: true,
+      expires_at: 1.hour.ago,
+      user: users(:one)
+    )

     results = Rule.active.to_a
@@ -134,20 +154,24 @@ class RuleTest < ActiveSupport::TestCase

   # Instance method tests
   test "active? returns true for enabled non-expired rule" do
+    network_range = NetworkRange.create!(cidr: "10.0.0.0/8")
     rule = Rule.create!(
-      rule_type: "network_v4",
-      action: "deny",
-      conditions: { cidr: "10.0.0.0/8" },
-      enabled: true
+      waf_rule_type: "network",
+      waf_action: "deny",
+      network_range: network_range,
+      enabled: true,
+      user: users(:one)
     )
     assert rule.active?
   end

   test "disable! sets enabled to false and adds metadata" do
+    network_range = NetworkRange.create!(cidr: "10.0.0.0/8")
     rule = Rule.create!(
-      rule_type: "network_v4",
-      action: "deny",
-      conditions: { cidr: "10.0.0.0/8" }
+      waf_rule_type: "network",
+      waf_action: "deny",
+      network_range: network_range,
+      user: users(:one)
     )

     rule.disable!(reason: "False positive")
@@ -159,20 +183,22 @@ class RuleTest < ActiveSupport::TestCase

   # Agent format tests
   test "to_agent_format returns correct structure" do
+    network_range = NetworkRange.create!(cidr: "10.0.0.0/8")
     rule = Rule.create!(
-      rule_type: "network_v4",
-      action: "deny",
-      conditions: { cidr: "10.0.0.0/8" },
+      waf_rule_type: "network",
+      waf_action: "deny",
+      network_range: network_range,
       expires_at: 1.day.from_now,
       source: "manual",
-      metadata: { reason: "Test" }
+      metadata: { reason: "Test" },
+      user: users(:one)
     )

     format = rule.to_agent_format

     assert_equal rule.id, format[:id]
-    assert_equal "network_v4", format[:rule_type]
-    assert_equal "deny", format[:action]
+    assert_equal "network", format[:waf_rule_type]
+    assert_equal "deny", format[:waf_action]
     assert_equal 8, format[:priority]
     assert_equal true, format[:enabled]
   end

test/models/setting_test.rb (new file, 7 lines)
@@ -0,0 +1,7 @@
require "test_helper"
|
||||||
|
|
||||||
|
class SettingTest < ActiveSupport::TestCase
|
||||||
|
# test "the truth" do
|
||||||
|
# assert true
|
||||||
|
# end
|
||||||
|
end
|
||||||

test/models/waf_policy_test.rb (new file, 474 lines)
@@ -0,0 +1,474 @@
require "test_helper"
|
||||||
|
|
||||||
|
class WafPolicyTest < ActiveSupport::TestCase
|
||||||
|
setup do
|
||||||
|
@user = users(:jason)
|
||||||
|
@policy = WafPolicy.new(
|
||||||
|
name: "Block Malicious IPs",
|
||||||
|
policy_type: "country",
|
||||||
|
targets: ["BR", "CN"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Validations
|
||||||
|
test "should be valid with all required attributes" do
|
||||||
|
assert @policy.valid?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should not be valid without name" do
|
||||||
|
@policy.name = nil
|
||||||
|
assert_not @policy.valid?
|
||||||
|
assert_includes @policy.errors[:name], "can't be blank"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should not be valid without unique name" do
|
||||||
|
@policy.name = waf_policies(:one).name
|
||||||
|
assert_not @policy.valid?
|
||||||
|
assert_includes @policy.errors[:name], "has already been taken"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should validate policy_type inclusion" do
|
||||||
|
@policy.policy_type = "invalid_type"
|
||||||
|
assert_not @policy.valid?
|
||||||
|
assert_includes @policy.errors[:policy_type], "is not included in the list"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should validate policy_action inclusion" do
|
||||||
|
@policy.policy_action = "invalid_action"
|
||||||
|
assert_not @policy.valid?
|
||||||
|
assert_includes @policy.errors[:policy_action], "is not included in the list"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should not be valid without targets" do
|
||||||
|
@policy.targets = []
|
||||||
|
assert_not @policy.valid?
|
||||||
|
assert_includes @policy.errors[:targets], "can't be blank"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should validate targets is an array" do
|
||||||
|
@policy.targets = "not an array"
|
||||||
|
assert_not @policy.valid?
|
||||||
|
assert_includes @policy.errors[:targets], "must be an array"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should validate country targets format" do
|
||||||
|
@policy.policy_type = "country"
|
||||||
|
|
||||||
|
# Valid country codes
|
||||||
|
@policy.targets = ["US", "BR", "CN"]
|
||||||
|
assert @policy.valid?
|
||||||
|
|
||||||
|
# Invalid country codes
|
||||||
|
@policy.targets = ["USA", "123", "B"]
|
||||||
|
assert_not @policy.valid?
|
||||||
|
assert_includes @policy.errors[:targets], "must be valid ISO country codes"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should validate ASN targets format" do
|
||||||
|
@policy.policy_type = "asn"
|
||||||
|
|
||||||
|
# Valid ASNs
|
||||||
|
@policy.targets = [12345, 67890]
|
||||||
|
assert @policy.valid?
|
||||||
|
|
||||||
|
# Invalid ASNs
|
||||||
|
@policy.targets = ["AS12345", -1, 0]
|
||||||
|
assert_not @policy.valid?
|
||||||
|
assert_includes @policy.errors[:targets], "must be valid ASNs"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should validate company targets format" do
|
||||||
|
@policy.policy_type = "company"
|
||||||
|
|
||||||
|
# Valid company names
|
||||||
|
@policy.targets = ["Google", "Amazon Web Services", "Microsoft"]
|
||||||
|
assert @policy.valid?
|
||||||
|
|
||||||
|
# Invalid company names
|
||||||
|
@policy.targets = ["", nil, " "]
|
||||||
|
assert_not @policy.valid?
|
||||||
|
assert_includes @policy.errors[:targets], "must be valid company names"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should validate network_type targets format" do
|
||||||
|
@policy.policy_type = "network_type"
|
||||||
|
|
||||||
|
# Valid network types
|
||||||
|
@policy.targets = ["datacenter", "proxy", "vpn", "standard"]
|
||||||
|
assert @policy.valid?
|
||||||
|
|
||||||
|
# Invalid network types
|
||||||
|
@policy.targets = ["invalid", "malicious", "botnet"]
|
||||||
|
assert_not @policy.valid?
|
||||||
|
assert_includes @policy.errors[:targets], "must be one of: datacenter, proxy, vpn, standard"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should validate redirect configuration" do
|
||||||
|
@policy.policy_action = "redirect"
|
||||||
|
|
||||||
|
# Valid redirect config
|
||||||
|
@policy.additional_data = { "redirect_url" => "https://example.com/blocked" }
|
||||||
|
assert @policy.valid?
|
||||||
|
|
||||||
|
# Missing redirect URL
|
||||||
|
@policy.additional_data = { "other_config" => "value" }
|
||||||
|
assert_not @policy.valid?
|
||||||
|
assert_includes @policy.errors[:additional_data], "must include 'redirect_url' for redirect action"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should validate challenge configuration" do
|
||||||
|
@policy.policy_action = "challenge"
|
||||||
|
|
||||||
|
# Valid challenge types
|
||||||
|
@policy.additional_data = { "challenge_type" => "captcha" }
|
||||||
|
assert @policy.valid?
|
||||||
|
|
||||||
|
@policy.additional_data = { "challenge_type" => "javascript" }
|
||||||
|
assert @policy.valid?
|
||||||
|
|
||||||
|
# Invalid challenge type
|
||||||
|
@policy.additional_data = { "challenge_type" => "invalid" }
|
||||||
|
assert_not @policy.valid?
|
||||||
|
assert_includes @policy.errors[:additional_data], "challenge_type must be one of: captcha, javascript, proof_of_work"
|
||||||
|
|
||||||
|
# No challenge type (should be valid, uses defaults)
|
||||||
|
@policy.additional_data = {}
|
||||||
|
assert @policy.valid?
|
||||||
|
end
|
||||||
|
|
||||||
|
# Defaults and Callbacks
|
||||||
|
test "should default to enabled" do
|
||||||
|
@policy.enabled = nil
|
||||||
|
@policy.save!
|
||||||
|
assert @policy.enabled?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should default targets to empty array" do
|
||||||
|
policy = WafPolicy.new(
|
||||||
|
name: "Test Policy",
|
||||||
|
policy_type: "country",
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user
|
||||||
|
)
|
||||||
|
# before_validation should set defaults
|
||||||
|
policy.valid?
|
||||||
|
assert_equal [], policy.targets
|
||||||
|
end
|
||||||
|
|
||||||
|
test "should default additional_data to empty hash" do
|
||||||
|
policy = WafPolicy.new(
|
||||||
|
name: "Test Policy",
|
||||||
|
policy_type: "country",
|
||||||
|
targets: ["US"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user
|
||||||
|
)
|
||||||
|
policy.valid?
|
||||||
|
assert_equal({}, policy.additional_data)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Policy Type Methods
|
||||||
|
test "policy type predicate methods work correctly" do
|
||||||
|
country_policy = WafPolicy.new(policy_type: "country")
|
||||||
|
assert country_policy.country_policy?
|
||||||
|
assert_not country_policy.asn_policy?
|
||||||
|
assert_not country_policy.company_policy?
|
||||||
|
assert_not country_policy.network_type_policy?
|
||||||
|
|
||||||
|
asn_policy = WafPolicy.new(policy_type: "asn")
|
||||||
|
assert_not asn_policy.country_policy?
|
||||||
|
assert asn_policy.asn_policy?
|
||||||
|
assert_not asn_policy.company_policy?
|
||||||
|
assert_not asn_policy.network_type_policy?
|
||||||
|
|
||||||
|
company_policy = WafPolicy.new(policy_type: "company")
|
||||||
|
assert_not company_policy.country_policy?
|
||||||
|
assert_not company_policy.asn_policy?
|
||||||
|
assert company_policy.company_policy?
|
||||||
|
assert_not company_policy.network_type_policy?
|
||||||
|
|
||||||
|
network_type_policy = WafPolicy.new(policy_type: "network_type")
|
||||||
|
assert_not network_type_policy.country_policy?
|
||||||
|
assert_not network_type_policy.asn_policy?
|
||||||
|
assert_not network_type_policy.company_policy?
|
||||||
|
assert network_type_policy.network_type_policy?
|
||||||
|
end
|
||||||
|
|
||||||
|
# Action Methods
|
||||||
|
test "action predicate methods work correctly" do
|
||||||
|
allow_policy = WafPolicy.new(policy_action: "allow")
|
||||||
|
assert allow_policy.allow_action?
|
||||||
|
assert_not allow_policy.deny_action?
|
||||||
|
assert_not allow_policy.redirect_action?
|
||||||
|
assert_not allow_policy.challenge_action?
|
||||||
|
|
||||||
|
deny_policy = WafPolicy.new(policy_action: "deny")
|
||||||
|
assert_not deny_policy.allow_action?
|
||||||
|
assert deny_policy.deny_action?
|
||||||
|
assert_not deny_policy.redirect_action?
|
||||||
|
assert_not deny_policy.challenge_action?
|
||||||
|
|
||||||
|
redirect_policy = WafPolicy.new(policy_action: "redirect")
|
||||||
|
assert_not redirect_policy.allow_action?
|
||||||
|
assert_not redirect_policy.deny_action?
|
||||||
|
assert redirect_policy.redirect_action?
|
||||||
|
assert_not redirect_policy.challenge_action?
|
||||||
|
|
||||||
|
challenge_policy = WafPolicy.new(policy_action: "challenge")
|
||||||
|
assert_not challenge_policy.allow_action?
|
||||||
|
assert_not challenge_policy.deny_action?
|
||||||
|
assert_not challenge_policy.redirect_action?
|
||||||
|
assert challenge_policy.challenge_action?
|
||||||
|
end
|
||||||
|
|
||||||
|
# Policy action methods (to avoid Rails conflicts)
|
||||||
|
test "policy action predicate methods work correctly" do
|
||||||
|
policy = WafPolicy.new(policy_action: "deny")
|
||||||
|
assert policy.deny_policy_action?
|
||||||
|
assert_not policy.allow_policy_action?
|
||||||
|
assert_not policy.redirect_policy_action?
|
||||||
|
assert_not policy.challenge_policy_action?
|
||||||
|
end
|
||||||
|
|
||||||
|
# Lifecycle Methods
|
||||||
|
test "active? works correctly" do
|
||||||
|
# Active policy
|
||||||
|
active_policy = WafPolicy.new(enabled: true, expires_at: nil)
|
||||||
|
assert active_policy.active?
|
||||||
|
|
||||||
|
# Enabled but expired
|
||||||
|
expired_policy = WafPolicy.new(enabled: true, expires_at: 1.day.ago)
|
||||||
|
assert_not expired_policy.active?
|
||||||
|
|
||||||
|
# Disabled with future expiration
|
||||||
|
disabled_policy = WafPolicy.new(enabled: false, expires_at: 1.day.from_now)
|
||||||
|
assert_not disabled_policy.active?
|
||||||
|
|
||||||
|
# Disabled with no expiration
|
||||||
|
disabled_no_exp = WafPolicy.new(enabled: false, expires_at: nil)
|
||||||
|
assert_not disabled_no_exp.active?
|
||||||
|
|
||||||
|
# Enabled with future expiration
|
||||||
|
future_exp = WafPolicy.new(enabled: true, expires_at: 1.day.from_now)
|
||||||
|
assert future_exp.active?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "expired? works correctly" do
|
||||||
|
assert_not WafPolicy.new(expires_at: nil).expired?
|
||||||
|
assert_not WafPolicy.new(expires_at: 1.day.from_now).expired?
|
||||||
|
assert WafPolicy.new(expires_at: 1.day.ago).expired?
|
||||||
|
assert WafPolicy.new(expires_at: Time.current).expired?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "activate! enables policy" do
|
||||||
|
@policy.enabled = false
|
||||||
|
@policy.save!
|
||||||
|
|
||||||
|
@policy.activate!
|
||||||
|
assert @policy.reload.enabled?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "deactivate! disables policy" do
|
||||||
|
@policy.enabled = true
|
||||||
|
@policy.save!
|
||||||
|
|
||||||
|
@policy.deactivate!
|
||||||
|
assert_not @policy.reload.enabled?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "expire! sets expiration to now" do
|
||||||
|
@policy.expire!
|
||||||
|
assert @policy.reload.expires_at <= Time.current
|
||||||
|
end
|
||||||
|
|
||||||
|
# Scopes
|
||||||
|
test "enabled scope returns only enabled policies" do
|
||||||
|
enabled_policy = WafPolicy.create!(
|
||||||
|
name: "Enabled Policy",
|
||||||
|
policy_type: "country",
|
||||||
|
targets: ["US"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user,
|
||||||
|
enabled: true
|
||||||
|
)
|
||||||
|
disabled_policy = WafPolicy.create!(
|
||||||
|
name: "Disabled Policy",
|
||||||
|
policy_type: "country",
|
||||||
|
targets: ["US"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user,
|
||||||
|
enabled: false
|
||||||
|
)
|
||||||
|
|
||||||
|
enabled_policies = WafPolicy.enabled
|
||||||
|
assert_includes enabled_policies, enabled_policy
|
||||||
|
assert_not_includes enabled_policies, disabled_policy
|
||||||
|
end
|
||||||
|
|
||||||
|
test "active scope returns only active policies" do
|
||||||
|
active_policy = WafPolicy.create!(
|
||||||
|
name: "Active Policy",
|
||||||
|
policy_type: "country",
|
||||||
|
targets: ["US"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user,
|
||||||
|
enabled: true,
|
||||||
|
expires_at: 1.day.from_now
|
||||||
|
)
|
||||||
|
expired_policy = WafPolicy.create!(
|
||||||
|
name: "Expired Policy",
|
||||||
|
policy_type: "country",
|
||||||
|
targets: ["US"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user,
|
||||||
|
enabled: true,
|
||||||
|
expires_at: 1.day.ago
|
||||||
|
)
|
||||||
|
disabled_policy = WafPolicy.create!(
|
||||||
|
name: "Disabled Policy",
|
||||||
|
policy_type: "country",
|
||||||
|
targets: ["US"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user,
|
||||||
|
enabled: false
|
||||||
|
)
|
||||||
|
|
||||||
|
active_policies = WafPolicy.active
|
||||||
|
assert_includes active_policies, active_policy
|
||||||
|
assert_not_includes active_policies, expired_policy
|
||||||
|
assert_not_includes active_policies, disabled_policy
|
||||||
|
end
|
||||||
|
|
||||||
|
# Class Factory Methods
|
||||||
|
test "create_country_policy works correctly" do
|
||||||
|
policy = WafPolicy.create_country_policy(
|
||||||
|
["US", "CA"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user,
|
||||||
|
name: "Custom Name"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert policy.persisted?
|
||||||
|
assert_equal "Custom Name", policy.name
|
||||||
|
assert_equal "country", policy.policy_type
|
||||||
|
assert_equal "deny", policy.policy_action
|
||||||
|
assert_equal ["US", "CA"], policy.targets
|
||||||
|
assert_equal @user, policy.user
|
||||||
|
end
|
||||||
|
|
||||||
|
test "create_asn_policy works correctly" do
|
||||||
|
policy = WafPolicy.create_asn_policy(
|
||||||
|
[12345, 67890],
|
||||||
|
policy_action: "challenge",
|
||||||
|
user: @user
|
||||||
|
)
|
||||||
|
|
||||||
|
assert policy.persisted?
|
||||||
|
assert_equal "challenge ASNs 12345, 67890", policy.name
|
||||||
|
assert_equal "asn", policy.policy_type
|
||||||
|
assert_equal "challenge", policy.policy_action
|
||||||
|
assert_equal [12345, 67890], policy.targets
|
||||||
|
end
|
||||||
|
|
||||||
|
test "create_company_policy works correctly" do
|
||||||
|
policy = WafPolicy.create_company_policy(
|
||||||
|
["Google", "Amazon"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user
|
||||||
|
)
|
||||||
|
|
||||||
|
assert policy.persisted?
|
||||||
|
assert_equal "deny Google, Amazon", policy.name
|
||||||
|
assert_equal "company", policy.policy_type
|
||||||
|
assert_equal ["Google", "Amazon"], policy.targets
|
||||||
|
end
|
||||||
|
|
||||||
|
test "create_network_type_policy works correctly" do
|
||||||
|
policy = WafPolicy.create_network_type_policy(
|
||||||
|
["datacenter", "proxy"],
|
||||||
|
policy_action: "redirect",
|
||||||
|
user: @user,
|
||||||
|
additional_data: { redirect_url: "https://example.com/blocked" }
|
||||||
|
)
|
||||||
|
|
||||||
|
assert policy.persisted?
|
||||||
|
assert_equal "redirect datacenter, proxy", policy.name
|
||||||
|
assert_equal "network_type", policy.policy_type
|
||||||
|
assert_equal ["datacenter", "proxy"], policy.targets
|
||||||
|
end
|
||||||
|
|
||||||
|
# Redirect/Challenge Specific Methods
|
||||||
|
test "redirect_url and redirect_status methods work" do
|
||||||
|
policy = WafPolicy.new(
|
||||||
|
policy_action: "redirect",
|
||||||
|
additional_data: {
|
||||||
|
"redirect_url" => "https://example.com/blocked",
|
||||||
|
"redirect_status" => 301
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert_equal "https://example.com/blocked", policy.redirect_url
|
||||||
|
assert_equal 301, policy.redirect_status
|
||||||
|
|
||||||
|
# Default status
|
||||||
|
policy.additional_data = { "redirect_url" => "https://example.com/blocked" }
|
||||||
|
assert_equal 302, policy.redirect_status
|
||||||
|
end
|
||||||
|
|
||||||
|
test "challenge_type and challenge_message methods work" do
|
||||||
|
policy = WafPolicy.new(
|
||||||
|
policy_action: "challenge",
|
||||||
|
additional_data: {
|
||||||
|
"challenge_type" => "javascript",
|
||||||
|
"challenge_message" => "Please verify you are human"
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert_equal "javascript", policy.challenge_type
|
||||||
|
assert_equal "Please verify you are human", policy.challenge_message
|
||||||
|
|
||||||
|
# Default challenge type
|
||||||
|
policy.additional_data = {}
|
||||||
|
assert_equal "captcha", policy.challenge_type
|
||||||
|
end
|
||||||
|
|
||||||
|
# Statistics
|
||||||
|
test "generated_rules_count works" do
|
||||||
|
@policy.save!
|
||||||
|
|
||||||
|
# Initially no rules
|
||||||
|
assert_equal 0, @policy.generated_rules_count
|
||||||
|
|
||||||
|
# Create some rules
|
||||||
|
network_range = NetworkRange.create!(ip_range: "192.168.1.0/24")
|
||||||
|
@policy.create_rule_for_network_range(network_range)
|
||||||
|
|
||||||
|
assert_equal 1, @policy.generated_rules_count
|
||||||
|
end
|
||||||
|
|
||||||
|
test "effectiveness_stats returns correct data" do
|
||||||
|
@policy.save!
|
||||||
|
|
||||||
|
stats = @policy.effectiveness_stats
|
||||||
|
|
||||||
|
assert_equal 0, stats[:total_rules_generated]
|
||||||
|
assert_equal 0, stats[:active_rules]
|
||||||
|
assert_equal 0, stats[:rules_last_7_days]
|
||||||
|
assert_equal "country", stats[:policy_type]
|
||||||
|
assert_equal "deny", stats[:policy_action]
|
||||||
|
assert_equal 2, stats[:targets_count]
|
||||||
|
end
|
||||||
|
|
||||||
|
# String representations
|
||||||
|
test "to_s returns name" do
|
||||||
|
assert_equal @policy.name, @policy.to_s
|
||||||
|
end
|
||||||
|
|
||||||
|
test "to_param parameterizes name" do
|
||||||
|
@policy.name = "Block Brazil & China"
|
||||||
|
expected = "block-brazil-china"
|
||||||
|
assert_equal expected, @policy.to_param
|
||||||
|
end
|
||||||
|
end
|
||||||

test/services/dsn_authentication_service_test.rb (new file, 158 lines)
@@ -0,0 +1,158 @@
# frozen_string_literal: true

require "test_helper"

class DsnAuthenticationServiceTest < ActiveSupport::TestCase
  def setup
    @dsn = Dsn.create!(name: "Test DSN", key: "test-auth-key-1234567890abcdef")
  end

  test "should authenticate via query parameter baffle_key" do
    request = ActionDispatch::TestRequest.create
    request.query_parameters = { "baffle_key" => @dsn.key }

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should authenticate via query parameter sentry_key" do
    request = ActionDispatch::TestRequest.create
    request.query_parameters = { "sentry_key" => @dsn.key }

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should authenticate via query parameter glitchtip_key" do
    request = ActionDispatch::TestRequest.create
    request.query_parameters = { "glitchtip_key" => @dsn.key }

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should authenticate via X-Baffle-Auth header" do
    request = ActionDispatch::TestRequest.create
    request.headers["X-Baffle-Auth"] = "Baffle baffle_key=#{@dsn.key}, baffle_version=1"

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should authenticate via X-Sentry-Auth header" do
    request = ActionDispatch::TestRequest.create
    request.headers["X-Sentry-Auth"] = "Sentry sentry_key=#{@dsn.key}, sentry_version=7"

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should authenticate via Authorization Bearer header" do
    request = ActionDispatch::TestRequest.create
    request.headers["Authorization"] = "Bearer #{@dsn.key}"

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should authenticate via Basic auth with username as key" do
    request = ActionDispatch::TestRequest.create
    credentials = Base64.strict_encode64("#{@dsn.key}:ignored-password")
    request.headers["Authorization"] = "Basic #{credentials}"

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should prioritize query parameter over other methods" do
    request = ActionDispatch::TestRequest.create
    request.query_parameters = { "baffle_key" => @dsn.key }
    request.headers["Authorization"] = "Bearer wrong-key"

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should fail authentication with disabled DSN" do
    @dsn.update!(enabled: false)

    request = ActionDispatch::TestRequest.create
    request.query_parameters = { "baffle_key" => @dsn.key }

    assert_raises(DsnAuthenticationService::AuthenticationError) do
      DsnAuthenticationService.authenticate(request)
    end
  end

  test "should fail authentication with non-existent key" do
    request = ActionDispatch::TestRequest.create
    request.query_parameters = { "baffle_key" => "non-existent-key" }

    assert_raises(DsnAuthenticationService::AuthenticationError) do
      DsnAuthenticationService.authenticate(request)
    end
  end

  test "should fail authentication with no authentication method" do
    request = ActionDispatch::TestRequest.create

    assert_raises(DsnAuthenticationService::AuthenticationError) do
      DsnAuthenticationService.authenticate(request)
    end
  end

  test "should handle malformed Authorization header" do
    request = ActionDispatch::TestRequest.create
    request.headers["Authorization"] = "InvalidHeader"

    assert_nil DsnAuthenticationService.authenticate(request)
  end

  test "should handle malformed Basic auth" do
    request = ActionDispatch::TestRequest.create
    request.headers["Authorization"] = "Basic invalid-base64"

    assert_nil DsnAuthenticationService.authenticate(request)
  end

  test "should handle malformed X-Baffle-Auth header" do
    request = ActionDispatch::TestRequest.create
    request.headers["X-Baffle-Auth"] = "Invalid format"

    assert_nil DsnAuthenticationService.authenticate(request)
  end

  test "should handle empty query parameters" do
    request = ActionDispatch::TestRequest.create
    request.query_parameters = { "baffle_key" => "" }

    assert_nil DsnAuthenticationService.authenticate(request)
  end

  test "should extract key from complex X-Baffle-Auth header" do
    request = ActionDispatch::TestRequest.create
    request.headers["X-Baffle-Auth"] = "Baffle baffle_key=#{@dsn.key}, baffle_version=1, baffle_client=ruby-2.0.0"

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should handle URL-style DSN in Basic auth" do
    # This simulates using the full DSN URL: https://key@domain.com
    request = ActionDispatch::TestRequest.create
    credentials = Base64.strict_encode64("#{@dsn.key}:")
    request.headers["Authorization"] = "Basic #{credentials}"

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal @dsn, authenticated_dsn
  end

  test "should handle special characters in DSN keys" do
    special_dsn = Dsn.create!(name: "Special DSN", key: "special-key-with-dashes_123")
    request = ActionDispatch::TestRequest.create
    request.headers["Authorization"] = "Bearer #{special_dsn.key}"

    authenticated_dsn = DsnAuthenticationService.authenticate(request)
    assert_equal special_dsn, authenticated_dsn
  end
end

test/services/waf_policy_matcher_test.rb (new file, 530 lines)
@@ -0,0 +1,530 @@
require "test_helper"
|
||||||
|
|
||||||
|
class WafPolicyMatcherTest < ActiveSupport::TestCase
|
||||||
|
setup do
|
||||||
|
@user = users(:jason)
|
||||||
|
@network_range = NetworkRange.create!(
|
||||||
|
network: "192.168.1.0/24",
|
||||||
|
country: "BR",
|
||||||
|
company: "Test Company",
|
||||||
|
asn: 12345,
|
||||||
|
is_datacenter: false
|
||||||
|
)
|
||||||
|
@matcher = WafPolicyMatcher.new(network_range: @network_range)
|
||||||
|
end
|
||||||
|
|
||||||
|
teardown do
|
||||||
|
WafPolicy.delete_all
|
||||||
|
Rule.delete_all
|
||||||
|
NetworkRange.delete_all
|
||||||
|
end
|
||||||
|
|
||||||
|
# Initialization
|
||||||
|
test "initializes with network range" do
|
||||||
|
assert_equal @network_range, @matcher.network_range
|
||||||
|
assert_equal [], @matcher.matching_policies
|
||||||
|
assert_equal [], @matcher.generated_rules
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles nil network range" do
|
||||||
|
matcher = WafPolicyMatcher.new(network_range: nil)
|
||||||
|
assert_nil matcher.network_range
|
||||||
|
end
|
||||||
|
|
||||||
|
# Policy Matching
|
||||||
|
test "find_matching_policies returns policies that match network range" do
|
||||||
|
# Create policies that should match
|
||||||
|
brazil_policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
test_asn_policy = WafPolicy.create_asn_policy([12345], 'challenge', @user, "Challenge ASN")
|
||||||
|
test_company_policy = WafPolicy.create_company_policy(['Test Company'], 'redirect', @user, "Redirect Company")
|
||||||
|
|
||||||
|
# Create policies that should not match
|
||||||
|
us_policy = WafPolicy.create_country_policy(['US'], 'deny', @user, "Block US")
|
||||||
|
other_asn_policy = WafPolicy.create_asn_policy([67890], 'deny', @user, "Block Other ASN")
|
||||||
|
|
||||||
|
matching_policies = @matcher.find_matching_policies
|
||||||
|
|
||||||
|
assert_includes matching_policies, brazil_policy
|
||||||
|
assert_includes matching_policies, test_asn_policy
|
||||||
|
assert_includes matching_policies, test_company_policy
|
||||||
|
assert_not_includes matching_policies, us_policy
|
||||||
|
assert_not_includes matching_policies, other_asn_policy
|
||||||
|
end
|
||||||
|
|
||||||
|
test "find_matching_policies sorts by policy type priority" do
|
||||||
|
# Create different policy types with same creation time
|
||||||
|
base_time = 1.hour.ago
|
||||||
|
|
||||||
|
# Country policy (highest priority)
|
||||||
|
country_policy = WafPolicy.create!(
|
||||||
|
name: "Country Policy",
|
||||||
|
policy_type: "country",
|
||||||
|
targets: ["BR"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user,
|
||||||
|
created_at: base_time
|
||||||
|
)
|
||||||
|
|
||||||
|
# ASN policy (second priority)
|
||||||
|
asn_policy = WafPolicy.create!(
|
||||||
|
name: "ASN Policy",
|
||||||
|
policy_type: "asn",
|
||||||
|
targets: [12345],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user,
|
||||||
|
created_at: base_time
|
||||||
|
)
|
||||||
|
|
||||||
|
# Company policy (third priority)
|
||||||
|
company_policy = WafPolicy.create!(
|
||||||
|
name: "Company Policy",
|
||||||
|
policy_type: "company",
|
||||||
|
targets: ["Test Company"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user,
|
||||||
|
created_at: base_time
|
||||||
|
)
|
||||||
|
|
||||||
|
# Network type policy (lowest priority)
|
||||||
|
network_type_policy = WafPolicy.create!(
|
||||||
|
name: "Network Type Policy",
|
||||||
|
policy_type: "network_type",
|
||||||
|
targets: ["standard"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user,
|
||||||
|
created_at: base_time
|
||||||
|
)
|
||||||
|
|
||||||
|
matching_policies = @matcher.find_matching_policies
|
||||||
|
|
||||||
|
# Should be ordered by priority: country > asn > company > network_type
|
||||||
|
assert_equal country_policy, matching_policies[0]
|
||||||
|
assert_equal asn_policy, matching_policies[1]
|
||||||
|
assert_equal company_policy, matching_policies[2]
|
||||||
|
assert_equal network_type_policy, matching_policies[3]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "find_matching_policies sorts by creation date for same priority" do
|
||||||
|
# Create two country policies with different creation times
|
||||||
|
older_policy = WafPolicy.create!(
|
||||||
|
name: "Older Policy",
|
||||||
|
policy_type: "country",
|
||||||
|
targets: ["BR"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user,
|
||||||
|
created_at: 2.hours.ago
|
||||||
|
)
|
||||||
|
|
||||||
|
newer_policy = WafPolicy.create!(
|
||||||
|
name: "Newer Policy",
|
||||||
|
policy_type: "country",
|
||||||
|
targets: ["BR"],
|
||||||
|
policy_action: "deny",
|
||||||
|
user: @user,
|
||||||
|
created_at: 1.hour.ago
|
||||||
|
)
|
||||||
|
|
||||||
|
matching_policies = @matcher.find_matching_policies
|
||||||
|
|
||||||
|
# Newer policy should come first
|
||||||
|
assert_equal newer_policy, matching_policies[0]
|
||||||
|
assert_equal older_policy, matching_policies[1]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "find_matching_policies skips inactive policies" do
|
||||||
|
# Create active policy
|
||||||
|
active_policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Active Policy")
|
||||||
|
|
||||||
|
# Create disabled policy
|
||||||
|
disabled_policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Disabled Policy")
|
||||||
|
disabled_policy.update!(enabled: false)
|
||||||
|
|
||||||
|
# Create expired policy
|
||||||
|
expired_policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Expired Policy")
|
||||||
|
expired_policy.update!(expires_at: 1.hour.ago)
|
||||||
|
|
||||||
|
matching_policies = @matcher.find_matching_policies
|
||||||
|
|
||||||
|
assert_includes matching_policies, active_policy
|
||||||
|
assert_not_includes matching_policies, disabled_policy
|
||||||
|
assert_not_includes matching_policies, expired_policy
|
||||||
|
end
|
||||||
|
|
||||||
|
test "find_matching_policies returns empty array for nil network range" do
|
||||||
|
matcher = WafPolicyMatcher.new(network_range: nil)
|
||||||
|
matching_policies = matcher.find_matching_policies
|
||||||
|
assert_equal [], matching_policies
|
||||||
|
end
|
||||||
|
|
||||||
|
# Rule Generation
|
||||||
|
test "generate_rules creates rules for matching policies" do
|
||||||
|
brazil_policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
@matcher.instance_variable_set(:@matching_policies, [brazil_policy])
|
||||||
|
|
||||||
|
generated_rules = @matcher.generate_rules
|
||||||
|
|
||||||
|
assert_equal 1, generated_rules.length
|
||||||
|
rule = generated_rules.first
|
||||||
|
assert_equal brazil_policy, rule.waf_policy
|
||||||
|
assert_equal @network_range, rule.network_range
|
||||||
|
assert_equal "deny", rule.action
|
||||||
|
end
|
||||||
|
|
||||||
|
test "generate_rules handles multiple matching policies" do
|
||||||
|
policies = [
|
||||||
|
WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil"),
|
||||||
|
WafPolicy.create_asn_policy([12345], 'challenge', @user, "Challenge ASN"),
|
||||||
|
WafPolicy.create_company_policy(['Test Company'], 'redirect', @user, "Redirect Company")
|
||||||
|
]
|
||||||
|
@matcher.instance_variable_set(:@matching_policies, policies)
|
||||||
|
|
||||||
|
generated_rules = @matcher.generate_rules
|
||||||
|
|
||||||
|
assert_equal 3, generated_rules.length
|
||||||
|
assert_equal "deny", generated_rules[0].action
|
||||||
|
assert_equal "challenge", generated_rules[1].action
|
||||||
|
assert_equal "redirect", generated_rules[2].action
|
||||||
|
end
|
||||||
|
|
||||||
|
test "generate_rules returns existing rules instead of duplicates" do
|
||||||
|
policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
|
||||||
|
# Create existing rule
|
||||||
|
existing_rule = Rule.create!(
|
||||||
|
rule_type: "network",
|
||||||
|
action: "deny",
|
||||||
|
network_range: @network_range,
|
||||||
|
waf_policy: policy,
|
||||||
|
user: @user,
|
||||||
|
enabled: true
|
||||||
|
)
|
||||||
|
|
||||||
|
@matcher.instance_variable_set(:@matching_policies, [policy])
|
||||||
|
generated_rules = @matcher.generate_rules
|
||||||
|
|
||||||
|
assert_equal 1, generated_rules.length
|
||||||
|
assert_equal existing_rule, generated_rules.first
|
||||||
|
end
|
||||||
|
|
||||||
|
test "generate_rules handles policy that fails to create rule" do
|
||||||
|
policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
|
||||||
|
# Mock policy to return nil for rule creation (e.g., expired policy)
|
||||||
|
policy.expects(:create_rule_for_network_range).with(@network_range).returns(nil)
|
||||||
|
|
||||||
|
@matcher.instance_variable_set(:@matching_policies, [policy])
|
||||||
|
generated_rules = @matcher.generate_rules
|
||||||
|
|
||||||
|
assert_equal [], generated_rules
|
||||||
|
end
|
||||||
|
|
||||||
|
test "generate_rules returns empty array for no matching policies" do
|
||||||
|
@matcher.instance_variable_set(:@matching_policies, [])
|
||||||
|
generated_rules = @matcher.generate_rules
|
||||||
|
assert_equal [], generated_rules
|
||||||
|
end
|
||||||
|
|
||||||
|
# Combined Operations
|
||||||
|
test "match_and_generate_rules does both operations" do
|
||||||
|
brazil_policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
|
||||||
|
result = @matcher.match_and_generate_rules
|
||||||
|
|
||||||
|
assert_equal 1, result[:matching_policies].length
|
||||||
|
assert_equal 1, result[:generated_rules].length
|
||||||
|
assert_includes result[:matching_policies], brazil_policy
|
||||||
|
assert_equal brazil_policy, result[:generated_rules].first.waf_policy
|
||||||
|
end
|
||||||
|
|
||||||
|
# Class Methods
|
||||||
|
test "self.process_network_range creates matcher and processes" do
|
||||||
|
policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
|
||||||
|
result = WafPolicyMatcher.process_network_range(@network_range)
|
||||||
|
|
||||||
|
assert_equal 1, result[:matching_policies].length
|
||||||
|
assert_equal 1, result[:generated_rules].length
|
||||||
|
end
|
||||||
|
|
||||||
|
test "self.evaluate_and_mark! processes and marks as evaluated" do
|
||||||
|
policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
|
||||||
|
original_evaluated_at = @network_range.policies_evaluated_at
|
||||||
|
result = WafPolicyMatcher.evaluate_and_mark!(@network_range)
|
||||||
|
|
||||||
|
assert_equal 1, result[:matching_policies].length
|
||||||
|
assert_equal 1, result[:generated_rules].length
|
||||||
|
|
||||||
|
@network_range.reload
|
||||||
|
assert @network_range.policies_evaluated_at > original_evaluated_at
|
||||||
|
end
|
||||||
|
|
||||||
|
test "self.evaluate_and_mark! handles nil network range" do
|
||||||
|
result = WafPolicyMatcher.evaluate_and_mark!(nil)
|
||||||
|
assert_equal({ matching_policies: [], generated_rules: [] }, result)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "self.batch_process_network_ranges processes multiple ranges" do
|
||||||
|
# Create multiple network ranges
|
||||||
|
range1 = NetworkRange.create!(network: "192.168.1.0/24", country: "BR")
|
||||||
|
range2 = NetworkRange.create!(network: "192.168.2.0/24", country: "US")
|
||||||
|
|
||||||
|
# Create policies
|
||||||
|
brazil_policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
us_policy = WafPolicy.create_country_policy(['US'], 'deny', @user, "Block US")
|
||||||
|
|
||||||
|
results = WafPolicyMatcher.batch_process_network_ranges([range1, range2])
|
||||||
|
|
||||||
|
assert_equal 2, results.length
|
||||||
|
assert_equal range1, results[0][:network_range]
|
||||||
|
assert_equal range2, results[1][:network_range]
|
||||||
|
assert_equal 1, results[0][:matching_policies].length
|
||||||
|
assert_equal 1, results[1][:matching_policies].length
|
||||||
|
end
|
||||||
|
|
||||||
|
test "self.process_ranges_without_policy_rules finds ranges needing evaluation" do
|
||||||
|
# Create range with intelligence but no rules
|
||||||
|
intelligent_range = NetworkRange.create!(
|
||||||
|
network: "192.168.1.0/24",
|
||||||
|
country: "BR",
|
||||||
|
asn: 12345
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create range with no intelligence
|
||||||
|
dumb_range = NetworkRange.create!(
|
||||||
|
network: "192.168.2.0/24"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create range with existing rules
|
||||||
|
range_with_rules = NetworkRange.create!(
|
||||||
|
network: "192.168.3.0/24",
|
||||||
|
country: "US"
|
||||||
|
)
|
||||||
|
policy = WafPolicy.create_country_policy(['US'], 'deny', @user, "Block US")
|
||||||
|
Rule.create!(
|
||||||
|
rule_type: "network",
|
||||||
|
action: "deny",
|
||||||
|
network_range: range_with_rules,
|
||||||
|
waf_policy: policy,
|
||||||
|
user: @user
|
||||||
|
)
|
||||||
|
|
||||||
|
results = WafPolicyMatcher.process_ranges_without_policy_rules(limit: 10)
|
||||||
|
|
||||||
|
# Should only process the intelligent range without rules
|
||||||
|
assert_equal 1, results.length
|
||||||
|
assert_equal intelligent_range, results[0][:network_range]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "self.reprocess_all_for_policy finds potential ranges for policy" do
|
||||||
|
# Create country policy
|
||||||
|
brazil_policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
|
||||||
|
# Create matching and non-matching ranges
|
||||||
|
brazil_range = NetworkRange.create!(network: "192.168.1.0/24", country: "BR")
|
||||||
|
us_range = NetworkRange.create!(network: "192.168.2.0/24", country: "US")
|
||||||
|
|
||||||
|
results = WafPolicyMatcher.reprocess_all_for_policy(brazil_policy)
|
||||||
|
|
||||||
|
assert_equal 1, results.length
|
||||||
|
assert_equal brazil_range, results[0][:network_range]
|
||||||
|
assert_not_nil results[0][:generated_rule]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "self.reprocess_all_for_policy handles different policy types" do
|
||||||
|
# Test ASN policy
|
||||||
|
asn_policy = WafPolicy.create_asn_policy([12345], 'deny', @user, "Block ASN")
|
||||||
|
asn_range = NetworkRange.create!(network: "192.168.1.0/24", asn: 12345)
|
||||||
|
|
||||||
|
# Test company policy
|
||||||
|
company_policy = WafPolicy.create_company_policy(['Test Corp'], 'deny', @user, "Block Company")
|
||||||
|
company_range = NetworkRange.create!(network: "192.168.2.0/24", company: "Test Corporation")
|
||||||
|
|
||||||
|
# Test network type policy
|
||||||
|
network_type_policy = WafPolicy.create_network_type_policy(['datacenter'], 'deny', @user, "Block Datacenter")
|
||||||
|
dc_range = NetworkRange.create!(network: "192.168.3.0/24", is_datacenter: true)
|
||||||
|
|
||||||
|
asn_results = WafPolicyMatcher.reprocess_all_for_policy(asn_policy)
|
||||||
|
company_results = WafPolicyMatcher.reprocess_all_for_policy(company_policy)
|
||||||
|
network_type_results = WafPolicyMatcher.reprocess_all_for_policy(network_type_policy)
|
||||||
|
|
||||||
|
assert_equal 1, asn_results.length
|
||||||
|
assert_equal 1, company_results.length
|
||||||
|
assert_equal 1, network_type_results.length
|
||||||
|
end
|
||||||
|
|
||||||
|
# Statistics and Reporting
|
||||||
|
test "self.matching_policies_for_network_range returns matching policies" do
|
||||||
|
policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
|
||||||
|
matching_policies = WafPolicyMatcher.matching_policies_for_network_range(@network_range)
|
||||||
|
|
||||||
|
assert_equal 1, matching_policies.length
|
||||||
|
assert_includes matching_policies, policy
|
||||||
|
end
|
||||||
|
|
||||||
|
test "self.policy_effectiveness_stats returns correct statistics" do
|
||||||
|
policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
|
||||||
|
# Create some rules for the policy
|
||||||
|
range1 = NetworkRange.create!(network: "192.168.1.0/24", country: "BR")
|
||||||
|
range2 = NetworkRange.create!(network: "192.168.2.0/24", country: "BR")
|
||||||
|
|
||||||
|
rule1 = Rule.create!(
|
||||||
|
rule_type: "network",
|
||||||
|
action: "deny",
|
||||||
|
network_range: range1,
|
||||||
|
waf_policy: policy,
|
||||||
|
user: @user
|
||||||
|
)
|
||||||
|
rule2 = Rule.create!(
|
||||||
|
rule_type: "network",
|
||||||
|
action: "deny",
|
||||||
|
network_range: range2,
|
||||||
|
waf_policy: policy,
|
||||||
|
user: @user,
|
||||||
|
enabled: false # Disabled rule
|
||||||
|
)
|
||||||
|
|
||||||
|
stats = WafPolicyMatcher.policy_effectiveness_stats(policy, days: 30)
|
||||||
|
|
||||||
|
assert_equal policy.name, stats[:policy_name]
|
||||||
|
assert_equal "country", stats[:policy_type]
|
||||||
|
assert_equal "deny", stats[:action]
|
||||||
|
assert_equal 2, stats[:rules_generated]
|
||||||
|
assert_equal 1, stats[:active_rules] # Only enabled rules
|
||||||
|
assert_equal 2, stats[:networks_protected]
|
||||||
|
assert_equal 30, stats[:period_days]
|
||||||
|
assert_equal 2.0 / 30, stats[:generation_rate]
|
||||||
|
end
|
||||||
|
|
||||||
|
# Network Intelligence Matching
|
||||||
|
test "matches country policies based on network range country" do
|
||||||
|
range_with_country = NetworkRange.create!(network: "192.168.1.0/24", country: "BR")
|
||||||
|
range_without_country = NetworkRange.create!(network: "192.168.2.0/24")
|
||||||
|
|
||||||
|
brazil_policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
|
||||||
|
matcher1 = WafPolicyMatcher.new(network_range: range_with_country)
|
||||||
|
matcher2 = WafPolicyMatcher.new(network_range: range_without_country)
|
||||||
|
|
||||||
|
matching1 = matcher1.find_matching_policies
|
||||||
|
matching2 = matcher2.find_matching_policies
|
||||||
|
|
||||||
|
assert_includes matching1, brazil_policy
|
||||||
|
assert_not_includes matching2, brazil_policy
|
||||||
|
end
|
||||||
|
|
||||||
|
test "matches network type policies based on intelligence flags" do
|
||||||
|
dc_range = NetworkRange.create!(network: "192.168.1.0/24", is_datacenter: true)
|
||||||
|
proxy_range = NetworkRange.create!(network: "192.168.2.0/24", is_proxy: true)
|
||||||
|
standard_range = NetworkRange.create!(network: "192.168.3.0/24") # All flags false
|
||||||
|
|
||||||
|
dc_policy = WafPolicy.create_network_type_policy(['datacenter'], 'deny', @user, "Block Datacenter")
|
||||||
|
proxy_policy = WafPolicy.create_network_type_policy(['proxy'], 'deny', @user, "Block Proxy")
|
||||||
|
standard_policy = WafPolicy.create_network_type_policy(['standard'], 'deny', @user, "Block Standard")
|
||||||
|
|
||||||
|
dc_matcher = WafPolicyMatcher.new(network_range: dc_range)
|
||||||
|
proxy_matcher = WafPolicyMatcher.new(network_range: proxy_range)
|
||||||
|
standard_matcher = WafPolicyMatcher.new(network_range: standard_range)
|
||||||
|
|
||||||
|
assert_includes dc_matcher.find_matching_policies, dc_policy
|
||||||
|
assert_includes proxy_matcher.find_matching_policies, proxy_policy
|
||||||
|
assert_includes standard_matcher.find_matching_policies, standard_policy
|
||||||
|
end
|
||||||
|
|
||||||
|
# Inheritance Support
|
||||||
|
test "matches policies based on inherited intelligence" do
|
||||||
|
# Create parent network with intelligence
|
||||||
|
parent = NetworkRange.create!(
|
||||||
|
network: "192.168.0.0/16",
|
||||||
|
country: "BR",
|
||||||
|
company: "Test Corp"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create child network without its own intelligence
|
||||||
|
child = NetworkRange.create!(network: "192.168.1.0/24")
|
||||||
|
|
||||||
|
brazil_policy = WafPolicy.create_country_policy(['BR'], 'deny', @user, "Block Brazil")
|
||||||
|
company_policy = WafPolicy.create_company_policy(['Test Corp'], 'challenge', @user, "Challenge Corp")
|
||||||
|
|
||||||
|
matcher = WafPolicyMatcher.new(network_range: child)
|
||||||
|
matching_policies = matcher.find_matching_policies
|
||||||
|
|
||||||
|
# Should match based on inherited intelligence
|
||||||
|
assert_includes matching_policies, brazil_policy
|
||||||
|
assert_includes matching_policies, company_policy
|
||||||
|
end
|
||||||
|
|
||||||
|
# Performance and Edge Cases
|
||||||
|
test "handles large numbers of policies efficiently" do
|
||||||
|
# Create many policies
|
||||||
|
policies = []
|
||||||
|
100.times do |i|
|
||||||
|
policies << WafPolicy.create_country_policy(
|
||||||
|
["US"], "deny", @user, "Policy #{i}"
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
# Only one should match (our network is BR, not US)
|
||||||
|
matching_policies = @matcher.find_matching_policies
|
||||||
|
assert_equal 0, matching_policies.length
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles policies with complex additional_data" do
|
||||||
|
redirect_policy = WafPolicy.create!(
|
||||||
|
name: "Complex Redirect",
|
||||||
|
policy_type: "country",
|
||||||
|
targets: ["BR"],
|
||||||
|
policy_action: "redirect",
|
||||||
|
user: @user,
|
||||||
|
additional_data: {
|
||||||
|
"redirect_url" => "https://example.com/blocked",
|
||||||
|
"redirect_status" => 301,
|
||||||
|
"custom_headers" => {
|
||||||
|
"X-Block-Reason" => "Country blocked"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
rule = redirect_policy.create_rule_for_network_range(@network_range)
|
||||||
|
|
||||||
|
assert_not_nil rule
|
||||||
|
assert_equal "redirect", rule.action
|
||||||
|
assert rule.metadata['redirect_url'].present?
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles company name case-insensitive matching" do
|
||||||
|
range = NetworkRange.create!(
|
||||||
|
network: "192.168.1.0/24",
|
||||||
|
company: "Google LLC"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Policies with different case variations
|
||||||
|
policy1 = WafPolicy.create_company_policy(['google'], 'deny', @user, "Block Google")
|
||||||
|
policy2 = WafPolicy.create_company_policy(['GOOGLE LLC'], 'deny', @user, "Block Google LLC")
|
||||||
|
policy3 = WafPolicy.create_company_policy(['Microsoft'], 'deny', @user, "Block Microsoft")
|
||||||
|
|
||||||
|
matcher = WafPolicyMatcher.new(network_range: range)
|
||||||
|
matching_policies = matcher.find_matching_policies
|
||||||
|
|
||||||
|
assert_includes matching_policies, policy1
|
||||||
|
assert_includes matching_policies, policy2
|
||||||
|
assert_not_includes matching_policies, policy3
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles partial company name matching" do
|
||||||
|
range = NetworkRange.create!(
|
||||||
|
network: "192.168.1.0/24",
|
||||||
|
company: "Amazon Web Services"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Policy with partial match
|
||||||
|
policy = WafPolicy.create_company_policy(['Amazon'], 'deny', @user, "Block Amazon")
|
||||||
|
|
||||||
|
matcher = WafPolicyMatcher.new(network_range: range)
|
||||||
|
matching_policies = matcher.find_matching_policies
|
||||||
|
|
||||||
|
assert_includes matching_policies, policy
|
||||||
|
end
|
||||||
|
end
|
||||||