diff --git a/app/assets/stylesheets/application.css b/app/assets/stylesheets/application.css
index 1ddd232..aafb680 100644
--- a/app/assets/stylesheets/application.css
+++ b/app/assets/stylesheets/application.css
@@ -31,3 +31,35 @@
font-size: 0.875rem;
font-weight: 500;
}
+
+/* Tom Select overrides for better visibility */
+.ts-wrapper {
+ visibility: visible !important;
+ display: block !important;
+}
+
+.ts-wrapper .ts-control {
+ min-height: 38px;
+ padding: 6px 8px;
+ border: 1px solid #d1d5db;
+ border-radius: 6px;
+ background-color: white;
+}
+
+.ts-wrapper .ts-control input {
+ font-size: 14px;
+}
+
+.ts-wrapper.multi .ts-control > div {
+ background-color: #3b82f6;
+ color: white;
+ border-radius: 4px;
+ padding: 2px 6px;
+ margin: 2px;
+ font-size: 12px;
+}
+
+.ts-wrapper.multi .ts-control > div .remove {
+ color: white;
+ margin-left: 4px;
+}
diff --git a/app/controllers/data_imports_controller.rb b/app/controllers/data_imports_controller.rb
new file mode 100644
index 0000000..d5f5119
--- /dev/null
+++ b/app/controllers/data_imports_controller.rb
@@ -0,0 +1,131 @@
+class DataImportsController < ApplicationController
+ before_action :require_admin!
+ before_action :set_data_import, only: [:show, :destroy, :progress]
+
+ def index
+ @data_imports = DataImport.all
+
+ # Apply filters
+ @data_imports = @data_imports.where(import_type: params[:import_type]) if params[:import_type].present?
+ @data_imports = @data_imports.where(status: params[:status]) if params[:status].present?
+    @data_imports = @data_imports.where("filename ILIKE ?", "%#{DataImport.sanitize_sql_like(params[:filename])}%") if params[:filename].present?
+
+ @pagy, @data_imports = pagy(@data_imports.order(created_at: :desc))
+ end
+
+ def new
+ @data_import = DataImport.new
+ end
+
+ def create
+ # Save uploaded file and queue import job
+ uploaded_file = params[:data_import][:file]
+ if uploaded_file.nil?
+ @data_import = DataImport.new
+ flash.now[:alert] = "Please select a file to import"
+ render :new, status: :unprocessable_entity
+ return
+ end
+
+ # Validate file type
+ unless valid_file?(uploaded_file)
+ @data_import = DataImport.new
+ flash.now[:alert] = "Invalid file type. Please upload a .csv or .zip file."
+ render :new, status: :unprocessable_entity
+ return
+ end
+
+ # Determine import type based on filename
+ import_type = detect_import_type_from_filename(uploaded_file.original_filename)
+
+ # Create the DataImport record with the attached file
+ @data_import = DataImport.create!(
+ import_type: import_type,
+ filename: uploaded_file.original_filename,
+ status: 'pending'
+ )
+
+ # Attach the file using Active Storage
+ @data_import.file.attach(uploaded_file)
+
+ # Queue appropriate import job - pass the entire DataImport object
+ if import_type == 'asn'
+ GeoliteAsnImportJob.perform_later(@data_import)
+ else
+ GeoliteCountryImportJob.perform_later(@data_import)
+ end
+
+ redirect_to @data_import, notice: "Import has been queued and will begin processing shortly."
+ rescue => e
+ Rails.logger.error "Error creating import: #{e.message}"
+ Rails.logger.error e.backtrace.join("\n")
+
+ @data_import = DataImport.new if @data_import.nil?
+ flash.now[:alert] = "Error processing file: #{e.message}"
+ render :new, status: :unprocessable_entity
+ end
+
+ def show
+ # Show will display import details and progress
+ end
+
+ def progress
+ # JSON endpoint for real-time progress updates
+ render json: {
+ id: @data_import.id,
+ status: @data_import.status,
+ progress_percentage: @data_import.progress_percentage,
+ processed_records: @data_import.processed_records,
+ total_records: @data_import.total_records,
+ failed_records: @data_import.failed_records,
+ duration: @data_import.duration,
+ records_per_second: @data_import.records_per_second,
+ import_stats: @data_import.import_stats,
+ error_message: @data_import.error_message,
+ started_at: @data_import.started_at,
+ completed_at: @data_import.completed_at
+ }
+ end
+
+ def destroy
+ if @data_import.processing?
+ redirect_to @data_import, alert: "Cannot delete an import that is currently processing."
+ else
+ @data_import.destroy
+ redirect_to data_imports_path, notice: "Import was successfully deleted."
+ end
+ end
+
+ private
+
+ def set_data_import
+ @data_import = DataImport.find(params[:id])
+ end
+
+ def data_import_params
+ # No parameters needed since we detect everything automatically
+ {}
+ end
+
+ def valid_file?(uploaded_file)
+ return false unless uploaded_file.respond_to?(:original_filename)
+
+ filename = uploaded_file.original_filename.downcase
+ filename.end_with?('.csv', '.zip')
+ end
+
+ def detect_import_type_from_filename(filename)
+ # Try to detect based on filename first
+ if filename.downcase.include?('asn')
+ 'asn'
+ elsif filename.downcase.include?('country')
+ 'country'
+ else
+ 'country' # Default fallback
+ end
+ end
+
+ def require_admin!
+ redirect_to root_path, alert: "Access denied. Admin privileges required." unless current_user&.admin?
+ end
+end
\ No newline at end of file
diff --git a/app/controllers/events_controller.rb b/app/controllers/events_controller.rb
index 7a546b2..ead1215 100644
--- a/app/controllers/events_controller.rb
+++ b/app/controllers/events_controller.rb
@@ -9,7 +9,8 @@ class EventsController < ApplicationController
# Apply filters
@events = @events.by_ip(params[:ip]) if params[:ip].present?
@events = @events.by_waf_action(params[:waf_action]) if params[:waf_action].present?
- @events = @events.where(country_code: params[:country]) if params[:country].present?
+ @events = @events.joins("JOIN network_ranges ON events.ip_address <<= network_ranges.network")
+ .where("network_ranges.country = ?", params[:country]) if params[:country].present?
# Network-based filters
@events = @events.by_company(params[:company]) if params[:company].present?
diff --git a/app/javascript/application.js b/app/javascript/application.js
index 0d7b494..bd940eb 100644
--- a/app/javascript/application.js
+++ b/app/javascript/application.js
@@ -1,3 +1,4 @@
// Configure your import map in config/importmap.rb. Read more: https://github.com/rails/importmap-rails
import "@hotwired/turbo-rails"
import "controllers"
+import "tom-select"
diff --git a/app/javascript/controllers/country_selector_controller.js b/app/javascript/controllers/country_selector_controller.js
new file mode 100644
index 0000000..9eb9086
--- /dev/null
+++ b/app/javascript/controllers/country_selector_controller.js
@@ -0,0 +1,145 @@
+import { Controller } from "@hotwired/stimulus"
+
+export default class extends Controller {
+ static targets = [ "select" ]
+ static values = {
+ options: Array,
+ placeholder: String
+ }
+
+ connect() {
+ // Check if the element is visible, if not, wait for it to become visible
+ if (this.isHidden()) {
+ // Element is hidden, set up a MutationObserver to watch for visibility changes
+ this.observer = new MutationObserver(() => {
+ if (!this.isHidden()) {
+ this.initializeTomSelect()
+ this.observer.disconnect()
+ }
+ })
+
+ this.observer.observe(this.element, {
+ attributes: true,
+ attributeFilter: ['class']
+ })
+
+ // Also check periodically as a fallback
+ this.checkInterval = setInterval(() => {
+ if (!this.isHidden()) {
+ this.initializeTomSelect()
+ this.cleanup()
+ }
+ }, 500)
+ } else {
+ // Element is already visible, initialize immediately
+ this.initializeTomSelect()
+ }
+ }
+
+ isHidden() {
+ return this.element.offsetParent === null || this.element.classList.contains('hidden')
+ }
+
+ cleanup() {
+ if (this.observer) {
+ this.observer.disconnect()
+ this.observer = null
+ }
+ if (this.checkInterval) {
+ clearInterval(this.checkInterval)
+ this.checkInterval = null
+ }
+ }
+
+ initializeTomSelect() {
+ if (!this.hasSelectTarget) {
+ console.log('No select target found')
+ return
+ }
+
+ // Check if Tom Select is available
+ if (typeof TomSelect === 'undefined') {
+ console.log('Tom Select is not loaded')
+ return
+ }
+
+ // If TomSelect is already initialized, destroy it first
+ if (this.tomSelect) {
+ this.tomSelect.destroy()
+ }
+
+ console.log('Initializing Tom Select with options:', this.optionsValue.length, 'countries')
+ console.log('First few country options:', this.optionsValue.slice(0, 3))
+
+ // Prepare options for Tom Select
+ const options = this.optionsValue.map(([display, value]) => ({
+ value: value,
+ text: display,
+ // Add searchable fields for better search
+ search: display + ' ' + value
+ }))
+
+ // Get currently selected values from the hidden select
+ const selectedValues = Array.from(this.selectTarget.selectedOptions).map(option => option.value)
+
+ try {
+ // Initialize Tom Select
+ this.tomSelect = new TomSelect(this.selectTarget, {
+ options: options,
+ items: selectedValues,
+ plugins: ['remove_button'],
+ maxItems: null,
+ maxOptions: 1000,
+ create: false,
+ placeholder: this.placeholderValue || "Search and select countries...",
+ searchField: ['text', 'search'],
+ searchConjunction: 'or',
+ onItemAdd: function() {
+ // Clear the search input after selecting an item
+ this.setTextboxValue('');
+ this.refreshOptions();
+ },
+ render: {
+ option: function(data, escape) {
+ return `
+ ${escape(data.text)}
+
`
+ },
+ item: function(data, escape) {
+ return `
+ ${escape(data.text)}
+
`
+ }
+ },
+ dropdownParent: 'body',
+ copyClassesToDropdown: false
+ })
+
+ console.log('Tom Select successfully initialized for country selector')
+
+ // Make sure the wrapper is visible
+ setTimeout(() => {
+ if (this.tomSelect && this.tomSelect.wrapper) {
+ this.tomSelect.wrapper.style.visibility = 'visible'
+ this.tomSelect.wrapper.style.display = 'block'
+ console.log('Tom Select wrapper made visible')
+ }
+ }, 100)
+
+ } catch (error) {
+ console.error('Error initializing Tom Select:', error)
+ }
+ }
+
+ // Public method to reinitialize if needed
+ reinitialize() {
+ this.initializeTomSelect()
+ }
+
+ disconnect() {
+ this.cleanup()
+ if (this.tomSelect) {
+ this.tomSelect.destroy()
+ }
+ }
+}
\ No newline at end of file
diff --git a/app/javascript/controllers/data_import_progress_controller.js b/app/javascript/controllers/data_import_progress_controller.js
new file mode 100644
index 0000000..e99000f
--- /dev/null
+++ b/app/javascript/controllers/data_import_progress_controller.js
@@ -0,0 +1,76 @@
+import { Controller } from "@hotwired/stimulus"
+
+export default class extends Controller {
+ static targets = ["progressBar", "totalRecords", "processedRecords", "failedRecords", "recordsPerSecond"]
+ static values = {
+ importId: Number,
+ refreshInterval: { type: Number, default: 2000 }
+ }
+
+ connect() {
+ if (this.hasImportIdValue) {
+ this.startUpdating()
+ }
+ }
+
+ disconnect() {
+ this.stopUpdating()
+ }
+
+ startUpdating() {
+ this.updateProgress()
+ this.interval = setInterval(() => {
+ this.updateProgress()
+ }, this.refreshIntervalValue)
+ }
+
+ stopUpdating() {
+ if (this.interval) {
+ clearInterval(this.interval)
+ }
+ }
+
+ async updateProgress() {
+ try {
+ const response = await fetch(`/data_imports/${this.importIdValue}/progress`)
+ const data = await response.json()
+
+ this.updateProgressBar(data.progress_percentage)
+ this.updateStats(data)
+
+ // If completed or failed, reload the page
+ if (data.status === 'completed' || data.status === 'failed') {
+ setTimeout(() => {
+ window.location.reload()
+ }, 2000)
+ this.stopUpdating()
+ }
+ } catch (error) {
+ console.error('Error updating progress:', error)
+ }
+ }
+
+ updateProgressBar(percentage) {
+ if (this.hasProgressBarTarget) {
+ this.progressBarTarget.style.width = `${percentage}%`
+ }
+ }
+
+  updateStats(data) {
+    if (this.hasTotalRecordsTarget) {
+      // Fields are null in the JSON payload before processing starts; coalesce to 0
+      this.totalRecordsTarget.textContent = (data.total_records ?? 0).toLocaleString()
+    }
+
+    if (this.hasProcessedRecordsTarget) {
+      this.processedRecordsTarget.textContent = (data.processed_records ?? 0).toLocaleString()
+    }
+
+    if (this.hasFailedRecordsTarget) {
+      this.failedRecordsTarget.textContent = (data.failed_records ?? 0).toLocaleString()
+    }
+
+    if (this.hasRecordsPerSecondTarget) {
+      this.recordsPerSecondTarget.textContent = (data.records_per_second ?? 0).toLocaleString()
+    }
+  }
+}
\ No newline at end of file
diff --git a/app/jobs/geolite_asn_import_job.rb b/app/jobs/geolite_asn_import_job.rb
new file mode 100644
index 0000000..d3f79cd
--- /dev/null
+++ b/app/jobs/geolite_asn_import_job.rb
@@ -0,0 +1,101 @@
+class GeoliteAsnImportJob < ApplicationJob
+ queue_as :default
+
+ # No retry needed for CSV processing - either works or fails immediately
+
+ def perform(data_import)
+ Rails.logger.info "Starting GeoLite ASN import job for DataImport #{data_import.id}"
+
+ # Check if file is attached
+ unless data_import.file.attached?
+ Rails.logger.error "No file attached to DataImport #{data_import.id}"
+ data_import.fail!("No file attached")
+ return
+ end
+
+ # Download the file to a temporary location
+ temp_file = download_to_temp_file(data_import.file.blob)
+ if temp_file.nil?
+ Rails.logger.error "Failed to download file from storage"
+ data_import.fail!("Failed to download file from storage")
+ return
+ end
+
+ Rails.logger.info "File downloaded to: #{temp_file}"
+ Rails.logger.info "File exists: #{File.exist?(temp_file)}"
+ Rails.logger.info "File size: #{File.size(temp_file)} bytes" if File.exist?(temp_file)
+
+ # Mark as processing
+ data_import.start_processing!
+
+ importer = nil
+ begin
+ Rails.logger.info "Creating GeoliteAsnImporter"
+ importer = GeoliteAsnImporter.new(temp_file, data_import: data_import)
+ Rails.logger.info "Calling importer.import"
+ result = importer.import
+
+ # Update final stats
+ data_import.update_progress(
+ processed: result[:processed_records],
+ failed: result[:failed_records],
+ stats: {
+ total_records: result[:total_records],
+ errors: result[:errors].last(10), # Keep last 10 errors
+ completed_at: Time.current
+ }
+ )
+
+ data_import.complete!
+
+ # Log completion
+ Rails.logger.info "GeoLite ASN import completed: #{result[:processed_records]} processed, #{result[:failed_records]} failed"
+
+ rescue => e
+ Rails.logger.error "GeoLite ASN import failed: #{e.message}"
+ Rails.logger.error e.backtrace.join("\n")
+
+ # Update final stats even on failure
+ if importer
+ data_import.update_progress(
+ processed: importer.instance_variable_get(:@processed_records),
+ failed: importer.instance_variable_get(:@failed_records),
+ stats: {
+ total_records: importer.instance_variable_get(:@total_records),
+ current_file: File.basename(temp_file),
+ errors: importer.instance_variable_get(:@errors).last(10),
+ failed_at: Time.current
+ }
+ )
+ end
+
+ data_import.fail!(e.message)
+ raise
+ ensure
+ # Cleanup temporary files
+ File.delete(temp_file) if temp_file && File.exist?(temp_file)
+ end
+ end
+
+ private
+
+ def download_to_temp_file(blob)
+ # Create a temporary file with the original filename
+ temp_file = Tempfile.new([blob.filename.to_s])
+ temp_file.binmode
+
+ # Download the blob content
+ blob.open do |file|
+ temp_file.write(file.read)
+ end
+
+ temp_file.close
+ temp_file.path
+ rescue => e
+ Rails.logger.error "Error downloading file: #{e.message}"
+ Rails.logger.error e.backtrace.join("\n")
+ temp_file&.close
+ temp_file&.unlink
+ nil
+ end
+end
\ No newline at end of file
diff --git a/app/jobs/geolite_country_import_job.rb b/app/jobs/geolite_country_import_job.rb
new file mode 100644
index 0000000..29774dd
--- /dev/null
+++ b/app/jobs/geolite_country_import_job.rb
@@ -0,0 +1,101 @@
+class GeoliteCountryImportJob < ApplicationJob
+ queue_as :default
+
+ # No retry needed for CSV processing - either works or fails immediately
+
+ def perform(data_import)
+ Rails.logger.info "Starting GeoLite Country import job for DataImport #{data_import.id}"
+
+ # Check if file is attached
+ unless data_import.file.attached?
+ Rails.logger.error "No file attached to DataImport #{data_import.id}"
+ data_import.fail!("No file attached")
+ return
+ end
+
+ # Download the file to a temporary location
+ temp_file = download_to_temp_file(data_import.file.blob)
+ if temp_file.nil?
+ Rails.logger.error "Failed to download file from storage"
+ data_import.fail!("Failed to download file from storage")
+ return
+ end
+
+ Rails.logger.info "File downloaded to: #{temp_file}"
+ Rails.logger.info "File exists: #{File.exist?(temp_file)}"
+ Rails.logger.info "File size: #{File.size(temp_file)} bytes" if File.exist?(temp_file)
+
+ # Mark as processing
+ data_import.start_processing!
+
+ importer = nil
+ begin
+ Rails.logger.info "Creating GeoliteCountryImporter"
+ importer = GeoliteCountryImporter.new(temp_file, data_import: data_import)
+ Rails.logger.info "Calling importer.import"
+ result = importer.import
+
+ # Update final stats
+ data_import.update_progress(
+ processed: result[:processed_records],
+ failed: result[:failed_records],
+ stats: {
+ total_records: result[:total_records],
+ errors: result[:errors].last(10), # Keep last 10 errors
+ completed_at: Time.current
+ }
+ )
+
+ data_import.complete!
+
+ # Log completion
+ Rails.logger.info "GeoLite Country import completed: #{result[:processed_records]} processed, #{result[:failed_records]} failed"
+
+ rescue => e
+ Rails.logger.error "GeoLite Country import failed: #{e.message}"
+ Rails.logger.error e.backtrace.join("\n")
+
+ # Update final stats even on failure
+ if importer
+ data_import.update_progress(
+ processed: importer.instance_variable_get(:@processed_records),
+ failed: importer.instance_variable_get(:@failed_records),
+ stats: {
+ total_records: importer.instance_variable_get(:@total_records),
+ current_file: File.basename(temp_file),
+ errors: importer.instance_variable_get(:@errors).last(10),
+ failed_at: Time.current
+ }
+ )
+ end
+
+ data_import.fail!(e.message)
+ raise
+ ensure
+ # Cleanup temporary files
+ File.delete(temp_file) if temp_file && File.exist?(temp_file)
+ end
+ end
+
+ private
+
+ def download_to_temp_file(blob)
+ # Create a temporary file with the original filename
+ temp_file = Tempfile.new([blob.filename.to_s])
+ temp_file.binmode
+
+ # Download the blob content
+ blob.open do |file|
+ temp_file.write(file.read)
+ end
+
+ temp_file.close
+ temp_file.path
+ rescue => e
+ Rails.logger.error "Error downloading file: #{e.message}"
+ Rails.logger.error e.backtrace.join("\n")
+ temp_file&.close
+ temp_file&.unlink
+ nil
+ end
+end
\ No newline at end of file
diff --git a/app/jobs/process_waf_analytics_job.rb b/app/jobs/process_waf_analytics_job.rb
index 320e718..6e6f50f 100644
--- a/app/jobs/process_waf_analytics_job.rb
+++ b/app/jobs/process_waf_analytics_job.rb
@@ -44,16 +44,20 @@ class ProcessWafAnalyticsJob < ApplicationJob
end
def analyze_geographic_distribution(event)
- return unless event.country_code.present?
+ return unless event.has_geo_data?
- # Check if this country is unusual globally
+ country_code = event.lookup_country
+ return unless country_code.present?
+
+ # Check if this country is unusual globally by joining through network ranges
country_events = Event
- .where(country_code: event.country_code)
+ .joins("JOIN network_ranges ON events.ip_address <<= network_ranges.network")
+ .where("network_ranges.country = ?", country_code)
.where(timestamp: 1.hour.ago..Time.current)
# If this is the first event from this country or unusual spike
if country_events.count == 1 || country_events.count > 100
- Rails.logger.info "Unusual geographic activity from #{event.country_code}"
+ Rails.logger.info "Unusual geographic activity from #{country_code}"
end
end
diff --git a/app/jobs/process_waf_event_job.rb b/app/jobs/process_waf_event_job.rb
index d62ac86..90cd62d 100644
--- a/app/jobs/process_waf_event_job.rb
+++ b/app/jobs/process_waf_event_job.rb
@@ -26,12 +26,14 @@ class ProcessWafEventJob < ApplicationJob
# Create the WAF event record
event = Event.create_from_waf_payload!(event_id, single_event_data)
- # Enrich with geo-location data if missing
- if event.ip_address.present? && event.country_code.blank?
+ # Log geo-location data status (uses NetworkRange delegation)
+ if event.ip_address.present?
begin
- event.enrich_geo_location!
+ unless event.has_geo_data?
+ Rails.logger.debug "No geo data available for event #{event.id} with IP #{event.ip_address}"
+ end
rescue => e
- Rails.logger.warn "Failed to enrich geo location for event #{event.id}: #{e.message}"
+ Rails.logger.warn "Failed to check geo data for event #{event.id}: #{e.message}"
end
end
diff --git a/app/jobs/process_waf_policies_job.rb b/app/jobs/process_waf_policies_job.rb
index 74a0263..b4ed900 100644
--- a/app/jobs/process_waf_policies_job.rb
+++ b/app/jobs/process_waf_policies_job.rb
@@ -7,7 +7,7 @@
class ProcessWafPoliciesJob < ApplicationJob
queue_as :waf_policies
- retry_on StandardError, wait: :exponentially_longer, attempts: 3
+ retry_on StandardError, wait: 5.seconds, attempts: 3
def perform(network_range_id:, event_id: nil)
# Find the network range
diff --git a/app/models/data_import.rb b/app/models/data_import.rb
new file mode 100644
index 0000000..da75709
--- /dev/null
+++ b/app/models/data_import.rb
@@ -0,0 +1,96 @@
+class DataImport < ApplicationRecord
+ has_one_attached :file
+
+ validates :import_type, presence: true, inclusion: { in: %w[asn country] }
+ validates :status, presence: true, inclusion: { in: %w[pending processing completed failed] }
+ validates :filename, presence: true
+
+ attribute :import_stats, default: -> { {} }
+
+ # Scopes
+ scope :recent, -> { order(created_at: :desc) }
+ scope :by_type, ->(type) { where(import_type: type) }
+ scope :by_status, ->(status) { where(status: status) }
+ scope :completed, -> { where(status: 'completed') }
+ scope :failed, -> { where(status: 'failed') }
+ scope :processing, -> { where(status: 'processing') }
+ scope :pending, -> { where(status: 'pending') }
+
+ # State management
+ def pending?
+ status == 'pending'
+ end
+
+ def processing?
+ status == 'processing'
+ end
+
+ def completed?
+ status == 'completed'
+ end
+
+ def failed?
+ status == 'failed'
+ end
+
+ def start_processing!
+ update!(
+ status: 'processing',
+ started_at: Time.current
+ )
+ end
+
+ def complete!
+ updates = {
+ status: 'completed',
+ completed_at: Time.current
+ }
+    updates[:total_records] = processed_records if total_records.to_i.zero? # to_i: nil-safe before first count
+ update!(updates)
+ end
+
+ def fail!(error_message = nil)
+ update!(
+ status: 'failed',
+ completed_at: Time.current,
+ error_message: error_message
+ )
+ end
+
+  def progress_percentage
+    if total_records.to_i.zero? # to_i: column may still be nil before counting
+      processing? ? 0.1 : 0 # Show minimal progress for processing jobs
+    else
+      (processed_records.to_f / total_records * 100).round(2)
+    end
+  end
+
+ def duration
+ return 0 unless started_at
+ end_time = completed_at || Time.current
+ duration_seconds = (end_time - started_at).round(2)
+ duration_seconds.negative? ? 0 : duration_seconds
+ end
+
+ def records_per_second
+ # Handle very fast imports that complete in less than 1 second
+ if duration.zero?
+ # Use time since started if no duration available yet
+ time_elapsed = started_at ? (Time.current - started_at) : 0
+ return 0 if time_elapsed < 1
+ (processed_records.to_f / time_elapsed).round(2)
+ else
+ (processed_records.to_f / duration).round(2)
+ end
+ end
+
+ def update_progress(processed: nil, failed: nil, total_records: nil, stats: nil)
+ updates = {}
+ updates[:processed_records] = processed if processed
+ updates[:failed_records] = failed if failed
+ updates[:total_records] = total_records if total_records
+ updates[:import_stats] = stats if stats
+
+ update!(updates) if updates.any?
+ end
+end
\ No newline at end of file
diff --git a/app/models/event.rb b/app/models/event.rb
index 05b098d..1e9519d 100644
--- a/app/models/event.rb
+++ b/app/models/event.rb
@@ -25,6 +25,10 @@ class Event < ApplicationRecord
# Serialize segment IDs as array for easy manipulation in Railssqit
serialize :request_segment_ids, type: Array, coder: JSON
+ # Tags are stored as JSON arrays with PostgreSQL jsonb type
+ # This provides direct array access and efficient indexing
+ attribute :tags, :json, default: -> { [] }
+
validates :event_id, presence: true, uniqueness: true
validates :timestamp, presence: true
@@ -36,6 +40,21 @@ class Event < ApplicationRecord
scope :allowed, -> { where(waf_action: :allow) }
scope :rate_limited, -> { where(waf_action: 'rate_limit') }
+  # Tag scopes: tags is jsonb (see attribute above), so jsonb operators apply
+  scope :with_tag, ->(tag) { where("tags @> ?", [tag.to_s].to_json) }
+
+  scope :with_any_tags, ->(tags) {
+    return none if tags.blank?
+    tag_array = Array(tags).map(&:to_s)
+    where("jsonb_exists_any(tags, ARRAY[?])", tag_array)
+  }
+
+  scope :with_all_tags, ->(tags) {
+    return none if tags.blank?
+    tag_array = Array(tags).map(&:to_s)
+    where("tags @> ?", tag_array.to_json)
+  }
+
# Network-based filtering scopes
scope :by_company, ->(company) {
joins("JOIN network_ranges ON events.ip_address <<= network_ranges.network")
@@ -234,7 +253,8 @@ class Event < ApplicationRecord
end
def tags
- payload&.dig("tags") || {}
+ # Use the dedicated tags column (array), fallback to payload during transition
+ super.presence || (payload&.dig("tags") || [])
end
def headers
@@ -281,6 +301,25 @@ class Event < ApplicationRecord
URI.parse(request_url).hostname rescue nil
end
+ # Tag helper methods
+ def add_tag(tag)
+ tag_str = tag.to_s
+ self.tags = (tags + [tag_str]).uniq unless tags.include?(tag_str)
+ end
+
+ def remove_tag(tag)
+ tag_str = tag.to_s
+ self.tags = tags - [tag_str] if tags.include?(tag_str)
+ end
+
+ def has_tag?(tag)
+ tags.include?(tag.to_s)
+ end
+
+ def tag_list
+ tags.join(', ')
+ end
+
# Normalize headers to lower case keys during import phase
def normalize_headers(headers)
return {} unless headers.is_a?(Hash)
diff --git a/app/models/network_range.rb b/app/models/network_range.rb
index 066b37f..5ad8e31 100644
--- a/app/models/network_range.rb
+++ b/app/models/network_range.rb
@@ -7,7 +7,7 @@
# and classification flags (datacenter, proxy, VPN).
class NetworkRange < ApplicationRecord
# Sources for network range creation
- SOURCES = %w[api_imported user_created manual auto_generated inherited].freeze
+ SOURCES = %w[api_imported user_created manual auto_generated inherited geolite_asn geolite_country].freeze
# Associations
has_many :rules, dependent: :destroy
@@ -29,6 +29,9 @@ class NetworkRange < ApplicationRecord
scope :vpn, -> { where(is_vpn: true) }
scope :user_created, -> { where(source: 'user_created') }
scope :api_imported, -> { where(source: 'api_imported') }
+ scope :geolite_imported, -> { where(source: ['geolite_asn', 'geolite_country']) }
+ scope :geolite_asn, -> { where(source: 'geolite_asn') }
+ scope :geolite_country, -> { where(source: 'geolite_country') }
scope :with_events, -> { where("events_count > 0") }
scope :most_active, -> { order(events_count: :desc) }
@@ -295,4 +298,44 @@ class NetworkRange < ApplicationRecord
# The inherited_intelligence method will pick up the new parent data
end
end
+
+ # Import-related class methods
+ def self.import_stats_by_source
+ group(:source)
+ .select(:source, 'COUNT(*) as count', 'MIN(created_at) as first_import', 'MAX(updated_at) as last_update')
+ .order(:source)
+ end
+
+ def self.geolite_coverage_stats
+ {
+ total_networks: geolite_imported.count,
+ asn_networks: geolite_asn.count,
+ country_networks: geolite_country.count,
+ with_asn_data: geolite_imported.where.not(asn: nil).count,
+ with_country_data: geolite_imported.where.not(country: nil).count,
+ with_proxy_data: geolite_imported.where(is_proxy: true).count,
+ unique_countries: geolite_imported.distinct.count(:country),
+ unique_asns: geolite_imported.distinct.count(:asn),
+ ipv4_networks: geolite_imported.ipv4.count,
+ ipv6_networks: geolite_imported.ipv6.count
+ }
+ end
+
+  # Look up ranges containing the given IP, or matching the given CIDR
+  # network. Returns an empty relation for blank or unparseable input.
+  def self.find_by_ip_or_network(query)
+    return none if query.blank?
+
+    # IPAddr.new parses both single IPs ("1.2.3.4") and CIDR networks
+    # ("10.0.0.0/8"); a bare IP is just a /32 (or /128) network, so the
+    # containment operator (>>=) answers both lookup styles in one query.
+    # The former fallback branch re-parsed the identical string and was
+    # therefore unreachable dead code.
+    begin
+      ip = IPAddr.new(query)
+      where("network >>= ?", ip.to_s)
+    rescue IPAddr::InvalidAddressError
+      none
+    end
+  end
end
\ No newline at end of file
diff --git a/app/models/waf_policy.rb b/app/models/waf_policy.rb
index 20ab7a9..46fc5a0 100644
--- a/app/models/waf_policy.rb
+++ b/app/models/waf_policy.rb
@@ -122,7 +122,7 @@ validate :targets_must_be_array
network_range: network_range,
waf_policy: self,
user: user,
- source: "policy:#{name}",
+ source: "policy",
metadata: build_rule_metadata(network_range),
priority: network_range.prefix_length
)
diff --git a/app/policies/waf_policy_policy.rb b/app/policies/waf_policy_policy.rb
new file mode 100644
index 0000000..1ace306
--- /dev/null
+++ b/app/policies/waf_policy_policy.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+class WafPolicyPolicy < ApplicationPolicy
+ def index?
+ true # All authenticated users can view policies
+ end
+
+ def show?
+ true # All authenticated users can view policy details
+ end
+
+ def new?
+ user.admin? || user.editor?
+ end
+
+ def create?
+ user.admin? || user.editor?
+ end
+
+ def edit?
+ user.admin? || (user.editor? && record.user == user)
+ end
+
+ def update?
+ user.admin? || (user.editor? && record.user == user)
+ end
+
+ def destroy?
+ user.admin? || (user.editor? && record.user == user)
+ end
+
+ def activate?
+ user.admin? || (user.editor? && record.user == user)
+ end
+
+ def deactivate?
+ user.admin? || (user.editor? && record.user == user)
+ end
+
+ def new_country?
+ create?
+ end
+
+ def create_country?
+ create?
+ end
+
+ class Scope < ApplicationPolicy::Scope
+ def resolve
+ if user.admin?
+ scope.all
+ else
+ # Non-admin users can only see their own policies
+ scope.where(user: user)
+ end
+ end
+ end
+end
\ No newline at end of file
diff --git a/app/services/country_helper.rb b/app/services/country_helper.rb
new file mode 100644
index 0000000..c76f7a2
--- /dev/null
+++ b/app/services/country_helper.rb
@@ -0,0 +1,151 @@
+# frozen_string_literal: true
+
+# CountryHelper - Service for country display utilities
+#
+# Provides methods to convert ISO country codes to display names,
+# generate country flags, and format country data for UI components.
+class CountryHelper
+ # Convert ISO code to display name
+ def self.display_name(iso_code)
+ return iso_code if iso_code.blank?
+
+ country = ISO3166::Country[iso_code]
+ country.local_name
+ rescue
+ iso_code
+ end
+
+  # Convert ISO code to flag emoji ('A' 0x41 must map to U+1F1E6 🇦)
+  def self.flag_emoji(iso_code)
+    return "" if iso_code.blank? || iso_code.length != 2
+
+    # Offset 0x1F1A5 = 0x1F1E6 - 0x41; chr needs UTF-8 for astral codepoints
+    iso_code.upcase.codepoints.map { |code| (code + 0x1F1A5).chr(Encoding::UTF_8) }.join
+  rescue
+    ""
+  end
+
+ # Display name with flag
+ def self.display_with_flag(iso_code)
+ return iso_code if iso_code.blank?
+
+ "#{display_name(iso_code)} (#{iso_code})"
+ end
+
+ # Check if ISO code is valid
+ def self.valid_iso_code?(iso_code)
+ return false if iso_code.blank?
+
+ ISO3166::Country[iso_code].present?
+ rescue
+ false
+ end
+
+ # Get all countries for select dropdowns
+ # Returns array of [display_name, iso_code] pairs
+ def self.all_for_select
+ ISO3166::Country.all.map do |country|
+ # Try different name sources in order of preference
+ # Use the proper countries gem methods
+ name = country.local_name.presence ||
+ country.iso_short_name.presence ||
+ country.common_name.presence ||
+ country.alpha2
+ display_name = "#{name} (#{country.alpha2})"
+ [display_name, country.alpha2]
+ end.sort_by { |name, _| name }
+ rescue => e
+ puts "Error in CountryHelper.all_for_select: #{e.message}"
+ puts e.backtrace
+ []
+ end
+
+
+ # Get countries by common regions for quick selection
+ def self.by_region
+ {
+ 'Americas' => [
+ 'US', 'CA', 'MX', 'BR', 'AR', 'CL', 'CO', 'PE', 'VE'
+ ],
+ 'Europe' => [
+ 'GB', 'DE', 'FR', 'IT', 'ES', 'NL', 'BE', 'CH', 'AT', 'SE',
+ 'NO', 'DK', 'FI', 'PL', 'CZ', 'HU', 'RO', 'GR', 'PT'
+ ],
+ 'Asia Pacific' => [
+ 'CN', 'JP', 'KR', 'IN', 'SG', 'AU', 'NZ', 'TH', 'MY', 'ID',
+ 'PH', 'VN', 'HK', 'TW'
+ ],
+ 'Middle East & Africa' => [
+ 'ZA', 'EG', 'NG', 'KE', 'SA', 'AE', 'IL', 'TR', 'IR'
+ ]
+ }
+ rescue
+ {}
+ end
+
+ # Get countries for specific region with display names
+ def self.countries_for_region(region_name)
+ country_codes = by_region[region_name] || []
+
+ country_codes.map do |code|
+ {
+ code: code,
+ name: display_name(code),
+ display: display_with_flag(code)
+ }
+ end
+ end
+
+ # Format multiple country targets for display
+ def self.format_targets(targets)
+ return [] if targets.blank?
+
+ targets.map do |target|
+ {
+ code: target,
+ name: display_name(target),
+ display: display_with_flag(target)
+ }
+ end
+ end
+
+ # Get popular countries for quick blocking (common threat sources)
+ def self.popular_for_blocking
+ [
+ { code: 'CN', name: 'China', display: '🇨🇳 China', reason: 'High bot/scanner activity' },
+ { code: 'RU', name: 'Russia', display: '🇷🇺 Russia', reason: 'State-sponsored attacks' },
+ { code: 'IN', name: 'India', display: '🇮🇳 India', reason: 'High spam volume' },
+ { code: 'BR', name: 'Brazil', display: '🇧🇷 Brazil', reason: 'Scanner activity' },
+ { code: 'IR', name: 'Iran', display: '🇮🇷 Iran', reason: 'Attacks on critical infrastructure' },
+ { code: 'KP', name: 'North Korea', display: '🇰🇵 North Korea', reason: 'State-sponsored hacking' }
+ ]
+ end
+
+ # Search countries by name or code
+ def self.search(query)
+ return [] if query.blank?
+
+ query = query.downcase
+ ISO3166::Country.all.select do |country|
+ country.alpha2.downcase.include?(query) ||
+ country.local_name.downcase.include?(query)
+ end.first(20).map { |c| [display_with_flag(c.alpha2), c.alpha2] }
+ rescue
+ []
+ end
+
+ # Country statistics for analytics
+ def self.usage_statistics(country_codes)
+ return {} if country_codes.blank?
+
+ stats = {}
+ country_codes.each do |code|
+ stats[code] = {
+ name: display_name(code),
+ flag: flag_emoji(code),
+ display: display_with_flag(code)
+ }
+ end
+ stats
+ end
+end
\ No newline at end of file
diff --git a/app/services/geolite_asn_importer.rb b/app/services/geolite_asn_importer.rb
new file mode 100644
index 0000000..de73716
--- /dev/null
+++ b/app/services/geolite_asn_importer.rb
@@ -0,0 +1,182 @@
+require 'csv'
+
+# Imports GeoLite2-ASN block files (raw CSV or the distributed ZIP archive)
+# into NetworkRange rows via upsert, reporting progress to a DataImport record.
+class GeoliteAsnImporter
+  # Reserved for future batched upserts; not referenced yet.
+  BATCH_SIZE = 1000
+
+  # file_path   - path to a GeoLite2-ASN CSV or ZIP file
+  # data_import - DataImport record used for progress reporting
+  def initialize(file_path, data_import:)
+    @file_path = file_path
+    @data_import = data_import
+    @total_records = 0
+    @processed_records = 0
+    @failed_records = 0
+    @errors = []
+  end
+
+  # Runs the import and returns a summary hash:
+  # { total_records:, processed_records:, failed_records:, errors: }.
+  def import
+    Rails.logger.info "Starting import for file: #{@file_path}"
+    Rails.logger.info "File exists: #{File.exist?(@file_path)}"
+    Rails.logger.info "File size: #{File.size(@file_path)} bytes" if File.exist?(@file_path)
+
+    # Trust magic bytes rather than the file extension.
+    is_zip_file = check_if_zip_file
+    Rails.logger.info "File is zip: #{is_zip_file}"
+
+    if is_zip_file
+      import_from_zip
+    else
+      import_csv_file(@file_path)
+    end
+
+    {
+      total_records: @total_records,
+      processed_records: @processed_records,
+      failed_records: @failed_records,
+      errors: @errors
+    }
+  end
+
+  private
+
+  # True when the file starts with the ZIP local-file-header magic ("PK\x03\x04").
+  def check_if_zip_file
+    File.open(@file_path, 'rb') do |file|
+      header = file.read(4)
+      return header == "PK\x03\x04"
+    end
+  rescue => e
+    Rails.logger.error "Error checking if file is zip: #{e.message}"
+    false
+  end
+
+  # Streams the ZIP from memory and imports every "*Blocks*.csv" entry.
+  # Re-raises after logging so the caller can mark the DataImport failed.
+  def import_from_zip
+    require 'zip'
+    require 'stringio'
+
+    Rails.logger.info "Processing zip file directly: #{@file_path}"
+
+    zip_content = File.binread(@file_path)
+
+    Zip::File.open_buffer(StringIO.new(zip_content)) do |zip_file|
+      zip_file.each do |entry|
+        next unless entry.name.include?('Blocks') && entry.name.end_with?('.csv')
+
+        Rails.logger.info "Processing ASN block file from zip: #{entry.name}"
+        process_csv_from_zip(zip_file, entry)
+      end
+    end
+  rescue => e
+    Rails.logger.error "Error processing ZIP file: #{e.message}"
+    Rails.logger.error e.backtrace.join("\n")
+    raise
+  end
+
+  # Parses one CSV entry straight out of the open ZIP archive.
+  def process_csv_from_zip(zip_file, entry)
+    zip_file.get_input_stream(entry) do |io|
+      content = io.read
+
+      CSV.parse(content, headers: true, header_converters: :symbol, encoding: 'UTF-8') do |row|
+        process_row(row)
+      end
+    end
+  end
+
+  # Imports a CSV file from disk.
+  def import_csv_file(csv_file)
+    CSV.foreach(csv_file, headers: true, header_converters: :symbol, encoding: 'UTF-8') do |row|
+      process_row(row)
+    end
+  end
+
+  # Counts, imports and error-tracks a single CSV row, then updates progress
+  # exactly once. (The original called update_progress_if_needed twice on the
+  # error path of the disk import; this also removes the zip/disk duplication.)
+  def process_row(row)
+    @total_records += 1
+
+    begin
+      import_record(row)
+      @processed_records += 1
+    rescue => e
+      @failed_records += 1
+      @errors << "Row #{@total_records}: #{e.message} - Data: #{row.to_h}"
+    end
+
+    update_progress_if_needed
+  end
+
+  # Upserts one ASN block row into NetworkRange.
+  # Raises when required columns are missing or the network is not a valid
+  # IP/CIDR string.
+  def import_record(row)
+    network = row[:network]
+    asn = row[:autonomous_system_number]&.to_i
+    asn_org = row[:autonomous_system_organization]&.strip
+
+    unless network && asn && asn_org
+      raise "Missing required fields: network=#{network}, asn=#{asn}, asn_org=#{asn_org}"
+    end
+
+    # Validate network format; raises IPAddr::InvalidAddressError if invalid.
+    IPAddr.new(network)
+
+    NetworkRange.upsert(
+      {
+        network: network,
+        asn: asn,
+        asn_org: asn_org,
+        source: 'geolite_asn',
+        updated_at: Time.current
+      },
+      unique_by: :index_network_ranges_on_network_unique
+    )
+  end
+
+  # Pushes counters to the DataImport every 100 handled (ok + failed) rows.
+  def update_progress_if_needed
+    return unless (@processed_records + @failed_records) % 100 == 0
+
+    @data_import.update_progress(
+      processed: @processed_records,
+      failed: @failed_records,
+      total_records: @total_records,
+      stats: {
+        total_records: @total_records,
+        current_file: File.basename(@file_path),
+        recent_errors: @errors.last(5)
+      }
+    )
+  end
+end
\ No newline at end of file
diff --git a/app/services/geolite_country_importer.rb b/app/services/geolite_country_importer.rb
new file mode 100644
index 0000000..bc39f39
--- /dev/null
+++ b/app/services/geolite_country_importer.rb
@@ -0,0 +1,288 @@
+require 'csv'
+
+# Imports GeoLite2-Country block files (raw CSV or the distributed ZIP
+# archive) into NetworkRange rows. Locations CSVs are loaded first so each
+# network block can be joined to country/continent data via geoname_id.
+class GeoliteCountryImporter
+  # Reserved for future batched upserts; not referenced yet.
+  BATCH_SIZE = 1000
+
+  # Encodings attempted, in order, when parsing MaxMind CSV content.
+  CSV_ENCODINGS = ['UTF-8', 'ISO-8859-1', 'Windows-1252'].freeze
+
+  # file_path   - path to a GeoLite2-Country CSV or ZIP file
+  # data_import - DataImport record used for progress reporting
+  def initialize(file_path, data_import:)
+    @file_path = file_path
+    @data_import = data_import
+    @total_records = 0
+    @processed_records = 0
+    @failed_records = 0
+    @errors = []
+    # geoname_id (String) => location attributes hash
+    @locations_cache = {}
+  end
+
+  # Runs the import and returns a summary hash:
+  # { total_records:, processed_records:, failed_records:, errors: }.
+  def import
+    Rails.logger.info "Starting import for file: #{@file_path}"
+    Rails.logger.info "File exists: #{File.exist?(@file_path)}"
+    Rails.logger.info "File size: #{File.size(@file_path)} bytes" if File.exist?(@file_path)
+
+    # Trust magic bytes rather than the file extension.
+    is_zip_file = check_if_zip_file
+    Rails.logger.info "File is zip: #{is_zip_file}"
+
+    if is_zip_file
+      Rails.logger.info "Calling import_from_zip"
+      import_from_zip
+    else
+      Rails.logger.info "Calling regular import (not zip)"
+      load_locations_data
+      import_csv_file(@file_path)
+    end
+
+    {
+      total_records: @total_records,
+      processed_records: @processed_records,
+      failed_records: @failed_records,
+      errors: @errors
+    }
+  end
+
+  private
+
+  # True when the file starts with the ZIP local-file-header magic ("PK\x03\x04").
+  def check_if_zip_file
+    File.open(@file_path, 'rb') do |file|
+      header = file.read(4)
+      return header == "PK\x03\x04"
+    end
+  rescue => e
+    Rails.logger.error "Error checking if file is zip: #{e.message}"
+    false
+  end
+
+  # Loads location data from the ZIP, then imports every "*Blocks*.csv" entry.
+  # Re-raises after logging so the caller can mark the DataImport failed.
+  def import_from_zip
+    require 'zip'
+    require 'stringio'
+
+    Rails.logger.info "Processing zip file directly: #{@file_path}"
+
+    zip_content = File.binread(@file_path)
+
+    Zip::File.open_buffer(StringIO.new(zip_content)) do |zip_file|
+      Rails.logger.info "Files in zip:"
+      zip_file.each do |entry|
+        Rails.logger.info "  - #{entry.name} (#{entry.size} bytes)"
+      end
+
+      # Locations must be cached before blocks so geoname lookups resolve.
+      load_locations_data_from_zip(zip_file)
+
+      zip_file.each do |entry|
+        next unless entry.name.include?('Blocks') && entry.name.end_with?('.csv')
+
+        Rails.logger.info "Processing block file from zip: #{entry.name}"
+        process_csv_from_zip(zip_file, entry)
+      end
+    end
+  rescue => e
+    Rails.logger.error "Error processing ZIP file: #{e.message}"
+    Rails.logger.error e.backtrace.join("\n")
+    raise
+  end
+
+  # Parses one block-CSV entry out of the open ZIP archive and imports its rows.
+  def process_csv_from_zip(zip_file, entry)
+    zip_file.get_input_stream(entry) do |io|
+      rows = parse_with_encoding_fallback(io.read, entry.name)
+      rows.each { |row| process_row(row) }
+    end
+  end
+
+  # Caches all Locations CSVs found in the ZIP. The English file is loaded
+  # first and cache_location is first-writer-wins, so English names take
+  # precedence. (The original loaded English first but then let later
+  # language files overwrite those entries, defeating the prioritization.)
+  def load_locations_data_from_zip(zip_file)
+    location_entries = zip_file.select do |entry|
+      entry.name.include?('Locations') && entry.name.end_with?('.csv')
+    end
+    location_entries.sort_by! { |entry| entry.name.include?('Locations-en') ? 0 : 1 }
+
+    location_entries.each do |entry|
+      Rails.logger.info "Loading locations from: #{entry.name}"
+      zip_file.get_input_stream(entry) do |io|
+        rows = parse_with_encoding_fallback(io.read, entry.name)
+        rows.each { |row| cache_location(row) }
+      end
+    end
+
+    Rails.logger.info "Loaded #{@locations_cache.size} location records"
+  end
+
+  # Stores one locations row, keyed by geoname_id. First writer wins so the
+  # preferred (English) file, loaded first, is not overwritten.
+  def cache_location(row)
+    geoname_id = row[:geoname_id]
+    return unless geoname_id
+
+    @locations_cache[geoname_id] ||= {
+      country_iso_code: row[:country_iso_code],
+      country_name: row[:country_name],
+      continent_code: row[:continent_code],
+      continent_name: row[:continent_name],
+      is_in_european_union: row[:is_in_european_union]
+    }
+  end
+
+  # Parses CSV content, retrying with fallback encodings. Parsing fully
+  # before iterating prevents double-counting rows when a bad encoding
+  # failed mid-stream and the parse was retried (a bug in the original,
+  # which counted rows during each attempt). Raises the final
+  # CSV::InvalidEncodingError when every encoding fails.
+  def parse_with_encoding_fallback(content, label)
+    CSV_ENCODINGS.each_with_index do |encoding, index|
+      begin
+        rows = CSV.parse(content, headers: true, header_converters: :symbol, encoding: encoding)
+        Rails.logger.info "Successfully parsed #{label} with #{encoding} encoding"
+        return rows
+      rescue CSV::InvalidEncodingError => e
+        Rails.logger.warn "Failed to parse #{label} with #{encoding} encoding: #{e.message}"
+        raise e if index == CSV_ENCODINGS.size - 1
+      end
+    end
+  end
+
+  # Imports a block CSV file from disk.
+  def import_csv_file(csv_file)
+    CSV.foreach(csv_file, headers: true, header_converters: :symbol, encoding: 'UTF-8') do |row|
+      process_row(row)
+    end
+  end
+
+  # Counts, imports and error-tracks a single CSV row, then updates progress
+  # exactly once. (The original called update_progress_if_needed twice on the
+  # error path of the disk import.)
+  def process_row(row)
+    @total_records += 1
+
+    begin
+      import_record(row)
+      @processed_records += 1
+    rescue => e
+      @failed_records += 1
+      @errors << "Row #{@total_records}: #{e.message} - Data: #{row.to_h}"
+    end
+
+    update_progress_if_needed
+  end
+
+  # Upserts one country block row into NetworkRange, joining location data
+  # through geoname_id (falling back to registered_country_geoname_id).
+  # Raises when network is missing or not a valid IP/CIDR string.
+  def import_record(row)
+    network = row[:network]
+    geoname_id = row[:geoname_id]
+    registered_country_geoname_id = row[:registered_country_geoname_id]
+    is_anonymous_proxy = row[:is_anonymous_proxy] == '1'
+    is_satellite_provider = row[:is_satellite_provider] == '1'
+    is_anycast = row[:is_anycast] == '1'
+
+    raise "Missing required field: network" unless network
+
+    # Validate network format; raises IPAddr::InvalidAddressError if invalid.
+    IPAddr.new(network)
+
+    location_data = @locations_cache[geoname_id] ||
+                    @locations_cache[registered_country_geoname_id] ||
+                    {}
+
+    additional_data = {
+      geoname_id: geoname_id,
+      registered_country_geoname_id: registered_country_geoname_id,
+      represented_country_geoname_id: row[:represented_country_geoname_id],
+      continent_code: location_data[:continent_code],
+      continent_name: location_data[:continent_name],
+      country_name: location_data[:country_name],
+      is_in_european_union: location_data[:is_in_european_union],
+      is_satellite_provider: is_satellite_provider,
+      is_anycast: is_anycast
+    }.compact
+
+    NetworkRange.upsert(
+      {
+        network: network,
+        country: location_data[:country_iso_code],
+        is_proxy: is_anonymous_proxy,
+        source: 'geolite_country',
+        additional_data: additional_data,
+        updated_at: Time.current
+      },
+      unique_by: :index_network_ranges_on_network_unique
+    )
+  end
+
+  # Pushes counters to the DataImport every 100 handled (ok + failed) rows.
+  def update_progress_if_needed
+    return unless (@processed_records + @failed_records) % 100 == 0
+
+    @data_import.update_progress(
+      processed: @processed_records,
+      failed: @failed_records,
+      total_records: @total_records,
+      stats: {
+        total_records: @total_records,
+        current_file: File.basename(@file_path),
+        locations_loaded: @locations_cache.size,
+        recent_errors: @errors.last(5)
+      }
+    )
+  end
+
+  # Caches locations from CSV files on disk (non-ZIP import path).
+  def load_locations_data
+    find_locations_files.each do |locations_file|
+      CSV.foreach(locations_file, headers: true, header_converters: :symbol, encoding: 'UTF-8') do |row|
+        cache_location(row)
+      end
+    end
+  end
+
+  # Finds Locations CSVs next to the import file, preferring the English one.
+  def find_locations_files
+    base_dir = File.dirname(@file_path)
+
+    if @file_path.end_with?('.zip')
+      base_name = File.basename(@file_path, '.zip')
+      [
+        File.join(base_dir, "#{base_name}-Locations-en.csv"),
+        Dir[File.join(base_dir, "#{base_name}-Locations-*.csv")].first
+      ].compact.select { |file| File.exist?(file) }
+    else
+      Dir[File.join(base_dir, "*Locations*.csv")].select { |file| File.exist?(file) }
+    end
+  end
+end
\ No newline at end of file
diff --git a/app/services/network_range_generator.rb b/app/services/network_range_generator.rb
new file mode 100644
index 0000000..b52e9de
--- /dev/null
+++ b/app/services/network_range_generator.rb
@@ -0,0 +1,159 @@
+# frozen_string_literal: true
+
+# Service for automatically creating network ranges for unmatched IPs
+class NetworkRangeGenerator
+ include ActiveModel::Model
+ include ActiveModel::Attributes
+
+ # Minimum network sizes for different IP types
+ IPV4_MIN_SIZE = 24 # /24 = 256 IPs
+ IPV6_MIN_SIZE = 64 # /64 = 2^64 IPs (standard IPv6 allocation)
+
+ # Special network ranges to avoid
+ RESERVED_RANGES = [
+ IPAddr.new('10.0.0.0/8'), # Private
+ IPAddr.new('172.16.0.0/12'), # Private
+ IPAddr.new('192.168.0.0/16'), # Private
+ IPAddr.new('127.0.0.0/8'), # Loopback
+ IPAddr.new('169.254.0.0/16'), # Link-local
+ IPAddr.new('224.0.0.0/4'), # Multicast
+ IPAddr.new('240.0.0.0/4'), # Reserved
+ IPAddr.new('::1/128'), # IPv6 loopback
+ IPAddr.new('fc00::/7'), # IPv6 private
+ IPAddr.new('fe80::/10'), # IPv6 link-local
+ IPAddr.new('ff00::/8') # IPv6 multicast
+ ].freeze
+
+ # Special network ranges to avoid
+ RESERVED_RANGES = [
+ IPAddr.new('10.0.0.0/8'), # Private
+ IPAddr.new('172.16.0.0/12'), # Private
+ IPAddr.new('192.168.0.0/16'), # Private
+ IPAddr.new('127.0.0.0/8'), # Loopback
+ IPAddr.new('169.254.0.0/16'), # Link-local
+ IPAddr.new('224.0.0.0/4'), # Multicast
+ IPAddr.new('240.0.0.0/4'), # Reserved
+ IPAddr.new('::1/128'), # IPv6 loopback
+ IPAddr.new('fc00::/7'), # IPv6 private
+ IPAddr.new('fe80::/10'), # IPv6 link-local
+ IPAddr.new('ff00::/8') # IPv6 multicast
+ ].freeze
+
+
+ class << self
+ # Find or create a network range for the given IP address
+ def find_or_create_for_ip(ip_address, user: nil)
+ ip_str = ip_address.to_s
+ ip_obj = ip_address.is_a?(IPAddr) ? ip_address : IPAddr.new(ip_str)
+
+ # Check if IP already matches existing ranges
+ existing_range = NetworkRange.contains_ip(ip_str).first
+ if existing_range
+ # If we have an existing range and it's a /32 (single IP),
+ # create a larger network range instead for better analytics
+ if existing_range.masklen == 32
+ # Don't overwrite manually created or imported ranges
+ unless %w[manual user_created api_imported].include?(existing_range.source)
+ return create_appropriate_network(ip_obj, user: user)
+ end
+ end
+ return existing_range
+ end
+
+ # Create the appropriate network range for this IP
+ create_appropriate_network(ip_obj, user: user)
+ end
+
+ # Get the appropriate minimum network size for an IP
+ def minimum_network_size(ip_address)
+ return IPV6_MIN_SIZE if ip_address.ipv6?
+
+ # For IPv4, use larger networks for known datacenter/ranges
+ if datacenter_ip?(ip_address)
+ 20 # /20 = 4096 IPs for large providers
+ else
+ IPV4_MIN_SIZE # /24 = 256 IPs for general use
+ end
+ end
+
+ # Check if IP is in a datacenter range
+ def datacenter_ip?(ip_address)
+ # Known major cloud provider ranges
+ cloud_ranges = [
+ IPAddr.new('3.0.0.0/8'), # AWS
+ IPAddr.new('52.0.0.0/8'), # AWS
+ IPAddr.new('54.0.0.0/8'), # AWS
+ IPAddr.new('13.0.0.0/8'), # AWS
+ IPAddr.new('104.16.0.0/12'), # Cloudflare
+ IPAddr.new('172.64.0.0/13'), # Cloudflare
+ IPAddr.new('104.24.0.0/14'), # Cloudflare
+ IPAddr.new('172.68.0.0/14'), # Cloudflare
+ IPAddr.new('108.170.0.0/16'), # Google
+ IPAddr.new('173.194.0.0/16'), # Google
+ IPAddr.new('209.85.0.0/16'), # Google
+ IPAddr.new('157.240.0.0/16'), # Facebook/Meta
+ IPAddr.new('31.13.0.0/16'), # Facebook/Meta
+ IPAddr.new('69.63.0.0/16'), # Facebook/Meta
+ IPAddr.new('173.252.0.0/16'), # Facebook/Meta
+ IPAddr.new('20.0.0.0/8'), # Microsoft Azure
+ IPAddr.new('40.64.0.0/10'), # Microsoft Azure
+ IPAddr.new('40.96.0.0/11'), # Microsoft Azure
+ IPAddr.new('40.112.0.0/12'), # Microsoft Azure
+ IPAddr.new('40.123.0.0/16'), # Microsoft Azure
+ IPAddr.new('40.124.0.0/14'), # Microsoft Azure
+ IPAddr.new('40.126.0.0/15'), # Microsoft Azure
+ ]
+
+ cloud_ranges.any? { |range| range.include?(ip_address) }
+ end
+
+ private
+
+ # Create the appropriate network range containing the IP
+ def create_appropriate_network(ip_address, user: nil)
+ prefix_length = minimum_network_size(ip_address)
+
+ # Create the network range with the IP at the center if possible
+ network_cidr = create_network_with_ip(ip_address, prefix_length)
+
+ # Check if network already exists
+ existing = NetworkRange.find_by(network: network_cidr)
+ return existing if existing
+
+ # Create new network range
+ NetworkRange.create!(
+ network: network_cidr,
+ source: 'auto_generated',
+ creation_reason: "auto-generated for unmatched IP traffic",
+ user: user,
+ company: nil, # Will be filled by enrichment job
+ asn: nil,
+ country: nil,
+ is_datacenter: datacenter_ip?(ip_address),
+ is_vpn: false,
+ is_proxy: false
+ )
+ end
+
+ # Create a network CIDR that contains the given IP with specified prefix length
+ def create_network_with_ip(ip_address, prefix_length)
+ # Convert IP to integer and apply mask
+ ip_int = ip_address.to_i
+
+ if ip_address.ipv6?
+ # For IPv6, mask to prefix length
+ mask = (2**128 - 1) ^ ((2**(128 - prefix_length)) - 1)
+ network_int = ip_int & mask
+ result = IPAddr.new(network_int, Socket::AF_INET6).mask(prefix_length)
+ else
+ # For IPv4, mask to prefix length
+ mask = (2**32 - 1) ^ ((2**(32 - prefix_length)) - 1)
+ network_int = ip_int & mask
+ result = IPAddr.new(network_int, Socket::AF_INET).mask(prefix_length)
+ end
+
+ # Return the CIDR notation
+ result.to_s
+ end
+ end
+end
\ No newline at end of file
diff --git a/app/services/waf_policy_matcher.rb b/app/services/waf_policy_matcher.rb
new file mode 100644
index 0000000..e06d8b4
--- /dev/null
+++ b/app/services/waf_policy_matcher.rb
@@ -0,0 +1,177 @@
+# frozen_string_literal: true
+
+# WafPolicyMatcher - Service to match NetworkRanges against active WafPolicies
+#
+# This service provides efficient matching of network ranges against firewall policies
+# and can generate rules when matches are found.
+class WafPolicyMatcher
+ include ActiveModel::Model
+ include ActiveModel::Attributes
+
+ attr_accessor :network_range
+ attr_reader :matching_policies, :generated_rules
+
+ def initialize(network_range:)
+ @network_range = network_range
+ @matching_policies = []
+ @generated_rules = []
+ end
+
+ # Find all active policies that match the given network range
+ def find_matching_policies
+ return [] unless network_range.present?
+
+ @matching_policies = active_policies.select do |policy|
+ policy.matches_network_range?(network_range)
+ end
+
+ # Sort by priority: country > asn > company > network_type, then by creation date
+ @matching_policies.sort_by do |policy|
+ priority_score = case policy.policy_type
+ when 'country'
+ 1
+ when 'asn'
+ 2
+ when 'company'
+ 3
+ when 'network_type'
+ 4
+ else
+ 99
+ end
+
+ [priority_score, policy.created_at]
+ end
+ end
+
+ # Generate rules from matching policies
+ def generate_rules
+ return [] if matching_policies.empty?
+
+ @generated_rules = matching_policies.map do |policy|
+ # Check if rule already exists for this network range and policy
+ existing_rule = Rule.find_by(
+ network_range: network_range,
+ waf_policy: policy,
+ enabled: true
+ )
+
+ if existing_rule
+ Rails.logger.debug "Rule already exists for network_range #{network_range.cidr} and policy #{policy.name}"
+ existing_rule
+ else
+ rule = policy.create_rule_for_network_range(network_range)
+ if rule
+ Rails.logger.info "Generated rule for network_range #{network_range.cidr} from policy #{policy.name}"
+ end
+ rule
+ end
+ end.compact
+ end
+
+ # Find and generate rules in one step
+ def match_and_generate_rules
+ find_matching_policies
+ generate_rules
+ end
+
+ # Class methods for batch processing
+ def self.process_network_range(network_range)
+ matcher = new(network_range: network_range)
+ matcher.match_and_generate_rules
+ end
+
+ def self.batch_process_network_ranges(network_ranges)
+ results = []
+
+ network_ranges.each do |network_range|
+ matcher = new(network_range: network_range)
+ result = matcher.match_and_generate_rules
+ results << {
+ network_range: network_range,
+ matching_policies: matcher.matching_policies,
+ generated_rules: matcher.generated_rules
+ }
+ end
+
+ results
+ end
+
+ # Process network ranges that need policy evaluation
+ def self.process_ranges_without_policy_rules(limit: 100)
+ # Find network ranges that don't have policy-generated rules
+ # but have intelligence data that could match policies
+ ranges_needing_evaluation = NetworkRange
+ .left_joins(:rules)
+ .where("rules.id IS NULL OR rules.waf_policy_id IS NULL")
+ .where("(country IS NOT NULL OR asn IS NOT NULL OR company IS NOT NULL OR is_datacenter = true OR is_proxy = true OR is_vpn = true)")
+ .limit(limit)
+ .includes(:rules)
+
+ batch_process_network_ranges(ranges_needing_evaluation)
+ end
+
+ # Re-evaluate all network ranges for policy changes
+ def self.reprocess_all_for_policy(waf_policy)
+ # Find all network ranges that could potentially match this policy
+ potential_ranges = case waf_policy.policy_type
+ when 'country'
+ NetworkRange.where(country: waf_policy.targets)
+ when 'asn'
+ NetworkRange.where(asn: waf_policy.targets)
+ when 'network_type'
+ NetworkRange.where(
+ "is_datacenter = ? OR is_proxy = ? OR is_vpn = ?",
+ waf_policy.targets.include?('datacenter'),
+ waf_policy.targets.include?('proxy'),
+ waf_policy.targets.include?('vpn')
+ )
+ when 'company'
+ # For company matching, we need to do text matching
+ NetworkRange.where("company ILIKE ANY (array[?])",
+ waf_policy.targets.map { |c| "%#{c}%" })
+ else
+ NetworkRange.none
+ end
+
+ results = []
+ potential_ranges.find_each do |network_range|
+ matcher = new(network_range: network_range)
+ if waf_policy.matches_network_range?(network_range)
+ rule = waf_policy.create_rule_for_network_range(network_range)
+ results << { network_range: network_range, generated_rule: rule } if rule
+ end
+ end
+
+ results
+ end
+
+ # Statistics and reporting
+ def self.matching_policies_for_network_range(network_range)
+ matcher = new(network_range: network_range)
+ matcher.find_matching_policies
+ end
+
+ def self.policy_effectiveness_stats(waf_policy, days: 30)
+ cutoff_date = days.days.ago
+
+ rules = waf_policy.generated_rules.where('created_at > ?', cutoff_date)
+
+ {
+ policy_name: waf_policy.name,
+ policy_type: waf_policy.policy_type,
+ action: waf_policy.action,
+ rules_generated: rules.count,
+ active_rules: rules.active.count,
+ networks_protected: rules.joins(:network_range).count('distinct network_ranges.id'),
+ period_days: days,
+ generation_rate: rules.count.to_f / days
+ }
+ end
+
+ private
+
+ def active_policies
+ @active_policies ||= WafPolicy.active
+ end
+end
\ No newline at end of file
diff --git a/app/views/data_imports/_progress_card.html.erb b/app/views/data_imports/_progress_card.html.erb
new file mode 100644
index 0000000..3da98b2
--- /dev/null
+++ b/app/views/data_imports/_progress_card.html.erb
@@ -0,0 +1,80 @@
+
+
+
+
Import Progress
+ <%# Reuse the status_badge helper - need to define it here since it's a partial %>
+ <% def status_badge(status) %>
+ <% case status %>
+ <% when 'pending' %>
+
+ <%= status.capitalize %>
+
+ <% when 'processing' %>
+
+ <%= status.capitalize %>
+
+ <% when 'completed' %>
+
+ <%= status.capitalize %>
+
+ <% when 'failed' %>
+
+ <%= status.capitalize %>
+
+ <% else %>
+
+ <%= status.capitalize %>
+
+ <% end %>
+ <% end %>
+ <%= status_badge(@data_import.status) %>
+
+
+
+
+
+ <%= number_with_delimiter(@data_import.processed_records) %> of <%= number_with_delimiter(@data_import.total_records) %> records
+ <%= @data_import.progress_percentage %>%
+
+
+
+
+
+
+
+
+ <%= number_with_delimiter(@data_import.total_records) %>
+
+
Total Records
+
+
+
+ <%= number_with_delimiter(@data_import.processed_records) %>
+
+
Processed
+
+
+
+ <%= number_with_delimiter(@data_import.failed_records) %>
+
+
Failed
+
+
+
+ <%= number_with_delimiter(@data_import.records_per_second) %>
+
+
Records/Sec
+
+
+
+
+
+<%# Auto-refresh logic for completed/failed imports %>
+<% if @data_import.completed? || @data_import.failed? %>
+
+<% end %>
\ No newline at end of file
diff --git a/app/views/data_imports/index.html.erb b/app/views/data_imports/index.html.erb
new file mode 100644
index 0000000..4bdcf69
--- /dev/null
+++ b/app/views/data_imports/index.html.erb
@@ -0,0 +1,273 @@
+<%# Helper methods %>
+<% def status_badge_class(status) %>
+ <% case status %>
+ <% when 'pending' %>
+ bg-gray-100 text-gray-800
+ <% when 'processing' %>
+ bg-blue-100 text-blue-800
+ <% when 'completed' %>
+ bg-green-100 text-green-800
+ <% when 'failed' %>
+ bg-red-100 text-red-800
+ <% else %>
+ bg-gray-100 text-gray-800
+ <% end %>
+<% end %>
+
+
+
+
+
+
+
+
GeoLite2 Data Imports
+
+ Manage and monitor your GeoLite2 database imports.
+
+
+ <%= link_to "New Import", new_data_import_path, class: "inline-flex items-center px-4 py-2 border border-transparent text-sm font-medium rounded-md shadow-sm text-white bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500" %>
+
+
+
+
+
+
+
+ <%= form_with(url: data_imports_path, method: :get, local: true) do |f| %>
+
+
+ <%= f.label :import_type, "Import Type", class: "block text-sm font-medium text-gray-700 mb-1" %>
+ <%= f.select :import_type,
+ options_for_select([['All Types', ''], ['ASN', 'asn'], ['Country', 'country']], params[:import_type]),
+ { }, { class: "block w-full rounded-md border-gray-300 shadow-sm focus:border-blue-500 focus:ring-blue-500 sm:text-sm" } %>
+
+
+ <%= f.label :status, "Status", class: "block text-sm font-medium text-gray-700 mb-1" %>
+ <%= f.select :status,
+ options_for_select([['All Statuses', ''], ['Pending', 'pending'], ['Processing', 'processing'], ['Completed', 'completed'], ['Failed', 'failed']], params[:status]),
+ { }, { class: "block w-full rounded-md border-gray-300 shadow-sm focus:border-blue-500 focus:ring-blue-500 sm:text-sm" } %>
+
+
+ <%= f.label :filename, "Filename", class: "block text-sm font-medium text-gray-700 mb-1" %>
+ <%= f.text_field :filename, value: params[:filename], class: "block w-full rounded-md border-gray-300 shadow-sm focus:border-blue-500 focus:ring-blue-500 sm:text-sm", placeholder: "Search filename..." %>
+
+
+ <%= f.submit "Filter", class: "w-full flex justify-center py-2 px-4 border border-transparent rounded-md shadow-sm text-sm font-medium text-white bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500" %>
+
+
+ <% end %>
+
+
+
+
+
+
+
+
+
+
<%= DataImport.count %>
+
Total Imports
+
+
+
+
+
+
+
+
+
<%= DataImport.completed.count %>
+
Completed
+
+
+
+
+
+
+
+
+
<%= DataImport.processing.count %>
+
Processing
+
+
+
+
+
+
+
+
+
<%= DataImport.failed.count %>
+
Failed
+
+
+
+
+
+
+
+
+
+
+
+
+ Filename
+
+
+ Type
+
+
+ Status
+
+
+ Progress
+
+
+ Created
+
+
+ Duration
+
+
+ Actions
+
+
+
+
+ <% if @data_imports.any? %>
+ <% @data_imports.each do |data_import| %>
+
+
+ <%= link_to data_import, class: "flex items-center text-blue-600 hover:text-blue-900 hover:underline" do %>
+
+
+
+ <%= truncate(data_import.filename, length: 40) %>
+ <% end %>
+
+
+
+ <%= data_import.import_type.upcase %>
+
+
+
+
+ <%= data_import.status.capitalize %>
+
+
+
+ <%= link_to data_import, class: "block hover:bg-gray-50 -mx-2 px-2 py-1 rounded" do %>
+ <% if data_import.processing? || data_import.total_records > 0 %>
+
+
+
+ <% if data_import.processed_records > 0 %>
+ <% if data_import.total_records > 0 && data_import.processed_records >= data_import.total_records %>
+ <%= number_with_delimiter(data_import.processed_records) %> total
+ <% else %>
+ <%= number_with_delimiter(data_import.processed_records) %> imported
+ <% end %>
+ <% else %>
+ Initializing...
+ <% end %>
+
+
+ <% else %>
+ Not started
+ <% end %>
+ <% end %>
+
+
+ <%= data_import.created_at.strftime('%Y-%m-%d %H:%M') %>
+
+
+ <% if data_import.duration > 0 %>
+ <%= distance_of_time_in_words(data_import.duration) %>
+ <% else %>
+ -
+ <% end %>
+
+
+ <% unless data_import.processing? %>
+ <%= link_to "Delete", data_import, method: :delete,
+ data: {
+ confirm: "Are you sure you want to delete this import?"
+ },
+ class: "text-red-600 hover:text-red-900" %>
+ <% else %>
+ Processing...
+ <% end %>
+
+
+ <% end %>
+ <% else %>
+
+
+
+
+
+ No imports found
+
+ <% if params[:import_type].present? || params[:status].present? || params[:filename].present? %>
+ Try adjusting your search filters or
+ <% else %>
+ Get started by uploading your first
+ <% end %>
+ <%= link_to "GeoLite2 import", new_data_import_path, class: "text-blue-600 hover:text-blue-500" %>.
+
+
+
+ <% end %>
+
+
+
+
+
+ <% if @pagy.pages > 1 %>
+
+
+
+
+ Showing
+ <%= @pagy.from %>
+ to
+ <%= @pagy.to %>
+ of
+ <%= @pagy.count %>
+ results
+
+
+
+ <%= pagy_nav_tailwind(@pagy) %>
+
+
+
+ <% end %>
+
+
\ No newline at end of file
diff --git a/app/views/data_imports/new.html.erb b/app/views/data_imports/new.html.erb
new file mode 100644
index 0000000..a3d0a59
--- /dev/null
+++ b/app/views/data_imports/new.html.erb
@@ -0,0 +1,162 @@
+
+
+
+
Import GeoLite2 Data
+
+ Upload GeoLite2-ASN-CSV or GeoLite2-Country-CSV files to import network range data.
+
+
+
+
+ <%= form_with(model: @data_import, local: true, class: "space-y-6") do |form| %>
+ <% if @data_import.errors.any? %>
+
+
+
+
+
+ There were <%= pluralize(@data_import.errors.count, "error") %> with your submission:
+
+
+
+ <% @data_import.errors.full_messages.each do |message| %>
+ <%= message %>
+ <% end %>
+
+
+
+
+
+ <% end %>
+
+
+
+ <%= form.label :file, "Select File", class: "block text-sm font-medium text-gray-700" %>
+
+
+
+
+
+
+
+ Upload a file
+ <%= form.file_field :file, id: "data_import_file", class: "sr-only", accept: ".csv,.zip", required: true %>
+
+
or drag and drop
+
+
+ CSV or ZIP files up to 500MB
+
+
+
+
+
+
+
+
+
+
+
Automatic Detection
+
+
The system will automatically detect whether your file contains ASN or Country data based on:
+
+ Filename (containing "ASN" or "Country")
+ Column headers in the CSV file
+
+
+
+
+
+
+
+
+
Supported File Formats
+
+
+
+
GeoLite2-ASN-CSV
+
Contains network ranges with ASN and organization information
+
Expected files: GeoLite2-ASN-Blocks-IPv4.csv, GeoLite2-ASN-Blocks-IPv6.csv
+
+
+
+
GeoLite2-Country-CSV
+
Contains network ranges with country geolocation data
+
Expected files: GeoLite2-Country-Blocks-IPv4.csv, GeoLite2-Country-Blocks-IPv6.csv, GeoLite2-Country-Locations-*.csv
+
+
+
+
+
+
+ <%= form.submit "Start Import", class: "w-full flex justify-center py-2 px-4 border border-transparent rounded-md shadow-sm text-sm font-medium text-white bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500 cursor-pointer transition-colors" %>
+
+ <% end %>
+
+
+
+
+
\ No newline at end of file
diff --git a/app/views/data_imports/show.html.erb b/app/views/data_imports/show.html.erb
new file mode 100644
index 0000000..ecad839
--- /dev/null
+++ b/app/views/data_imports/show.html.erb
@@ -0,0 +1,222 @@
+<%# Helper methods %>
+<%# NOTE(review): defining a method inside an ERB template injects it into the shared
+    view context at render time, where it can leak across templates and re-renders.
+    Prefer moving status_badge into a view helper (e.g. DataImportsHelper) — TODO. %>
+<%# status_badge(status) — emits a capitalized label for a DataImport status string;
+    one branch per known status ('pending'/'processing'/'completed'/'failed') plus a
+    generic fallback, each rendering status.capitalize. %>
+<% def status_badge(status) %>
+ <% case status %>
+ <% when 'pending' %>
+
+ <%= status.capitalize %>
+
+ <% when 'processing' %>
+
+ <%= status.capitalize %>
+
+ <% when 'completed' %>
+
+ <%= status.capitalize %>
+
+ <% when 'failed' %>
+
+ <%= status.capitalize %>
+
+ <% else %>
+
+ <%= status.capitalize %>
+
+ <% end %>
+<% end %>
+
+
+
+
+
+
+
+
Import Details
+
+ <%= @data_import.filename %>
+
+
+
+ <%= link_to "← Back to Imports", data_imports_path, class: "inline-flex items-center px-3 py-2 border border-gray-300 shadow-sm text-sm leading-4 font-medium rounded-md text-gray-700 bg-white hover:bg-gray-50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500" %>
+ <% unless @data_import.processing? %>
+ <%# Delete action, hidden while an import is running. This app loads Turbo via
+     javascript_importmap_tags (no rails-ujs), so link_to's legacy method:/data-confirm
+     options are ignored and the old link issued a plain GET. Turbo's data attributes
+     (turbo_method/turbo_confirm) make the click send a confirmed DELETE request. %>
+ <%= link_to "Delete", @data_import,
+ data: {
+ turbo_method: :delete,
+ turbo_confirm: "Are you sure you want to delete this import record?"
+ },
+ class: "inline-flex items-center px-3 py-2 border border-red-300 shadow-sm text-sm leading-4 font-medium rounded-md text-red-700 bg-white hover:bg-red-50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-red-500" %>
+ <% end %>
+
+
+
+
+
+
+
+
+
+
Import Progress
+ <%= status_badge(@data_import.status) %>
+
+
+
+
+
+
+ <% if @data_import.total_records > 0 && @data_import.processed_records >= @data_import.total_records %>
+ <%= number_with_delimiter(@data_import.processed_records) %> total records
+ <% elsif @data_import.total_records > 0 %>
+ <%= number_with_delimiter(@data_import.processed_records) %> records processed
+ <% else %>
+ Initializing...
+ <% end %>
+
+ <%= @data_import.progress_percentage %>%
+
+
+
+
+
+
+
+
+ <%= number_with_delimiter(@data_import.total_records) %>
+
+
Total Records
+
+
+
+ <%= number_with_delimiter(@data_import.processed_records) %>
+
+
Processed
+
+
+
+ <%= number_with_delimiter(@data_import.failed_records) %>
+
+
Failed
+
+
+
+ <%= number_with_delimiter(@data_import.records_per_second) %>
+
+
Records/Sec
+
+
+
+
+
+
+
+
+
Import Information
+
+
+
+
+
Import Type
+ <%= @data_import.import_type %>
+
+
+
Filename
+ <%= @data_import.filename %>
+
+
+
Started
+
+ <% if @data_import.processing? && @data_import.started_at %>
+ <%= time_ago_in_words(@data_import.started_at) %> ago
+ (<%= @data_import.started_at.strftime('%Y-%m-%d %H:%M:%S') %>)
+ <% elsif @data_import.processing? %>
+ Initializing...
+ <% elsif @data_import.started_at %>
+ <%= time_ago_in_words(@data_import.started_at) %> ago
+ (<%= @data_import.started_at.strftime('%Y-%m-%d %H:%M:%S') %>)
+ <% else %>
+ Not started
+ <% end %>
+
+
+
+
Duration
+
+ <% if @data_import.duration > 0 %>
+ <%= distance_of_time_in_words(@data_import.duration) %>
+ <% else %>
+ N/A
+ <% end %>
+
+
+
+
Completed
+
+ <% if @data_import.completed? && @data_import.completed_at %>
+ <%= time_ago_in_words(@data_import.completed_at) %> ago
+ (<%= @data_import.completed_at.strftime('%Y-%m-%d %H:%M:%S') %>)
+ <% elsif @data_import.completed? %>
+ Just now
+ <% elsif @data_import.processing? %>
+ In progress...
+ <% else %>
+ Not completed
+ <% end %>
+
+
+
+
+
+
+
+ <% if @data_import.error_message.present? || @data_import.import_stats['errors']&.any? %>
+
+
+
Error Details
+
+
+ <% if @data_import.error_message.present? %>
+
+
General Error
+
<%= @data_import.error_message %>
+
+ <% end %>
+
+ <% if @data_import.import_stats['errors']&.any? %>
+
+
Recent Errors (<%= @data_import.import_stats['errors'].size %>)
+
+
+ <% @data_import.import_stats['errors'].each do |error| %>
+ <%= error %>
+ <% end %>
+
+
+
+ <% end %>
+
+
+ <% end %>
+
+
+ <% if @data_import.import_stats&.any? && (@data_import.import_stats.except('errors', 'completed_at')).any? %>
+
+
+
Additional Statistics
+
+
+
+ <% @data_import.import_stats.except('errors', 'completed_at').each do |key, value| %>
+
+
<%= key.to_s.humanize %>
+ <%= value.is_a?(Hash) ? value.inspect : value %>
+
+ <% end %>
+
+
+
+ <% end %>
+
+
diff --git a/app/views/layouts/application.html.erb b/app/views/layouts/application.html.erb
index cb1dd9b..31e6791 100644
--- a/app/views/layouts/application.html.erb
+++ b/app/views/layouts/application.html.erb
@@ -20,6 +20,10 @@
<%# Includes all stylesheet files in app/assets/stylesheets %>
<%= stylesheet_link_tag :app, "data-turbo-track": "reload" %>
+
+ <%# Tom Select CSS for enhanced multi-select %>
+
+
<%= javascript_importmap_tags %>