Accepts incoming events and correctly parses them into events. GeoLite2 integration complete

This commit is contained in:
Dan Milne
2025-11-04 00:11:10 +11:00
parent 0cbd462e7c
commit 5ff166613e
49 changed files with 4489 additions and 322 deletions

View File

@@ -0,0 +1,50 @@
# frozen_string_literal: true

# ExpiredRulesCleanupJob - Disables rules that have expired
#
# Runs on a recurring (hourly) schedule. Any rule that is still enabled but
# whose expires_at is in the past gets flipped to disabled; agents see the
# disabled state on their next sync and drop the rule from their local
# evaluation tables.
#
# Schedule: Every hour (configured in initializer or cron)
class ExpiredRulesCleanupJob < ApplicationJob
  queue_as :default

  # Returns the number of rules disabled in this run.
  def perform
    rules_to_disable = Rule.enabled.expired
    Rails.logger.info "ExpiredRulesCleanupJob: Found #{rules_to_disable.count} expired rules"
    return if rules_to_disable.empty?

    # Single UPDATE for the whole batch; touch updated_at so syncing agents
    # pick up the change.
    disabled_count = rules_to_disable.update_all(
      enabled: false,
      updated_at: Time.current
    )
    Rails.logger.info "ExpiredRulesCleanupJob: Disabled #{disabled_count} expired rules"

    # Optionally: purge long-disabled rules once their retention window passes.
    cleanup_old_disabled_rules if should_cleanup_old_rules?
    disabled_count
  end

  private

  # Restrict the purge to the 01:00 run so deletion happens at most once a day.
  def should_cleanup_old_rules?
    Time.current.hour == 1
  end

  # Hard-deletes disabled rules untouched for 30+ days (kept until then as an
  # audit trail).
  def cleanup_old_disabled_rules
    stale = Rule.disabled.where("updated_at < ?", 30.days.ago)
    return unless stale.any?

    removed = stale.delete_all
    Rails.logger.info "ExpiredRulesCleanupJob: Deleted #{removed} old disabled rules (>30 days)"
  end
end

View File

@@ -0,0 +1,117 @@
# frozen_string_literal: true

# PathScannerDetectorJob - Detects IPs hitting scanner paths and auto-bans them
#
# Scans recent events for source IPs probing well-known scanner/bot paths
# (/.env, /.git, /wp-admin, ...). Each offending IP gets a temporary deny
# rule that expires after 24 hours.
#
# Schedule: Every 5 minutes (configured in initializer or cron)
class PathScannerDetectorJob < ApplicationJob
  queue_as :default

  # Common paths that scanners/bots hit
  SCANNER_PATHS = %w[
    /.env
    /.git
    /wp-admin
    /wp-login.php
    /phpMyAdmin
    /phpmyadmin
    /.aws
    /.ssh
    /admin
    /administrator
    /.config
    /backup
    /db_backup
    /.DS_Store
    /web.config
  ].freeze

  # Minimum hits to be considered a scanner
  MIN_SCANNER_HITS = 3

  # Look back window
  LOOKBACK_WINDOW = 5.minutes

  # Ban duration
  BAN_DURATION = 24.hours

  # Returns the number of scanner IPs found (whether or not a new rule was
  # created for each).
  def perform
    offenders = detect_scanner_ips
    Rails.logger.info "PathScannerDetectorJob: Found #{offenders.count} scanner IPs"
    offenders.each { |offender| create_ban_rule(offender) }
    offenders.count
  end

  private

  # Aggregates recent events by IP and keeps those with enough scanner-path
  # hits. Returns an array of { ip:, hit_count:, paths: } hashes.
  #
  # NOTE(review): GROUP_CONCAT is MySQL/SQLite syntax while create_ban_rule's
  # `conditions ->> 'cidr'` lookup is Postgres JSON syntax — confirm which
  # database this runs against.
  def detect_scanner_ips
    aggregated = Event
      .where("timestamp > ?", LOOKBACK_WINDOW.ago)
      .where("request_path IN (?)", SCANNER_PATHS)
      .group(:ip_address)
      .select("ip_address, COUNT(*) as hit_count, GROUP_CONCAT(DISTINCT request_path) as paths")
      .having("COUNT(*) >= ?", MIN_SCANNER_HITS)

    aggregated.map do |row|
      {
        ip: row.ip_address,
        hit_count: row.hit_count,
        paths: row.paths.to_s.split(",")
      }
    end
  end

  # Creates a 24h deny rule for the offender's /32, unless an active network
  # rule for that exact CIDR already exists. Returns the new Rule, or nil on
  # duplicate/invalid-IP/validation failure.
  def create_ban_rule(ip_data)
    ip = ip_data[:ip]

    # Skip if an active rule already covers this exact address.
    duplicate = Rule.active.network_rules.find_by(
      "conditions ->> 'cidr' = ?", "#{ip}/32"
    )
    if duplicate
      Rails.logger.info "PathScannerDetectorJob: Rule already exists for #{ip}, skipping"
      return
    end

    # Pick the rule type from the address family (raises on malformed input,
    # handled below).
    parsed = IPAddr.new(ip)
    family = parsed.ipv4? ? "network_v4" : "network_v6"

    rule = Rule.create!(
      rule_type: family,
      action: "deny",
      conditions: { cidr: "#{ip}/32" },
      priority: 32,
      expires_at: BAN_DURATION.from_now,
      source: "auto:scanner_detected",
      enabled: true,
      metadata: {
        reason: "Scanner detected: hit #{ip_data[:paths].join(', ')}",
        hit_count: ip_data[:hit_count],
        paths: ip_data[:paths],
        detected_at: Time.current.iso8601,
        auto_generated: true
      }
    )
    Rails.logger.info "PathScannerDetectorJob: Created ban rule #{rule.id} for #{ip} (expires: #{rule.expires_at})"
    rule
  rescue IPAddr::InvalidAddressError => e
    Rails.logger.error "PathScannerDetectorJob: Invalid IP address #{ip}: #{e.message}"
    nil
  rescue ActiveRecord::RecordInvalid => e
    Rails.logger.error "PathScannerDetectorJob: Failed to create rule for #{ip}: #{e.message}"
    nil
  end
end

View File

@@ -28,6 +28,15 @@ class ProcessWafEventJob < ApplicationJob
# Create the WAF event record
event = Event.create_from_waf_payload!(event_id, single_event_data, project)
# Enrich with geo-location data if missing
if event.ip_address.present? && event.country_code.blank?
begin
event.enrich_geo_location!
rescue => e
Rails.logger.warn "Failed to enrich geo location for event #{event.id}: #{e.message}"
end
end
# Trigger analytics processing
ProcessWafAnalyticsJob.perform_later(project_id: project_id, event_id: event.id)

View File

@@ -0,0 +1,66 @@
# frozen_string_literal: true

# UpdateGeoIpDatabaseJob - Keeps the on-disk MaxMind GeoIP database fresh.
#
# Schedule this job to run weekly to keep the GeoIP database updated
# Use: UpdateGeoIpDatabaseJob.set(wait: 1.week).perform_later
# Or set up in config/schedule.rb for recurring execution
class UpdateGeoIpDatabaseJob < ApplicationJob
  queue_as :default

  # Checks whether the database is missing/stale/corrupt and, if so, asks
  # GeoIpService to replace it. Errors are logged, never raised.
  #
  # @param force_update [Boolean] when true, update unconditionally — bypasses
  #   both the staleness check and the auto-update config switch.
  def perform(force_update: false)
    # BUGFIX: the old guard `return unless auto_update_enabled?` ran before
    # force_update was consulted, so a forced run was silently skipped when
    # auto-update was disabled. A forced run must always proceed.
    return unless force_update || auto_update_enabled?

    Rails.logger.info "Starting GeoIP database update check"
    if force_update || should_update_database?
      if GeoIpService.update_database!
        Rails.logger.info "GeoIP database successfully updated"
      else
        Rails.logger.error "Failed to update GeoIP database"
      end
    else
      Rails.logger.info "GeoIP database is up to date, no update needed"
    end
    # No cleanup needed with file-system approach
  rescue => e
    Rails.logger.error "Error in UpdateGeoIpDatabaseJob: #{e.message}"
    Rails.logger.error e.backtrace.join("\n")
  end

  private

  # Config switch for scheduled (non-forced) runs.
  def auto_update_enabled?
    Rails.application.config.maxmind.auto_update_enabled
  end

  # True when the database file is missing, older than the configured
  # max_age_days, or fails to open as a valid MaxMind DB.
  def should_update_database?
    config = Rails.application.config.maxmind
    database_path = default_database_path

    # Check if database file exists
    return true unless File.exist?(database_path)

    # Check if database is outdated
    return true if File.mtime(database_path) < config.max_age_days.days.ago

    # Check if database file is readable and valid
    begin
      # Open the database to verify it's valid; close it so the probe does
      # not leak a file handle (the old code left the reader open).
      reader = MaxMind::DB.new(database_path)
      reader.close
      false
    rescue => e
      Rails.logger.warn "GeoIP database file appears to be corrupted: #{e.message}"
      true
    end
  end

  # Full path to the database file, from the maxmind config.
  def default_database_path
    config = Rails.application.config.maxmind
    File.join(config.storage_path, config.database_filename)
  end
end