# examples:
#   periodic_cleanup:
#     class: CleanSoftDeletedRecordsJob
#     queue: background
#     args: [ 1000, { batch_size: 500 } ]
#     schedule: every hour
#   periodic_cleanup_with_command:
#     command: "SoftDeletedRecord.due.delete_all"
#     priority: 2
#     schedule: at 5am every day

# Previously had clear_solid_queue_finished_jobs, but that task is no longer needed
# now that preserve_finished_jobs: false is set in queue.yml

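# Note: Solid Queue parses the schedule values below with Fugit, so both
# natural-language forms ("every hour") and cron expressions ("5 * * * *") work.
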
# Clean up failed jobs older than 1 day
cleanup_failed_jobs:
  command: "SolidQueue::FailedExecution.where('created_at < ?', 1.day.ago).delete_all"
  queue: background
  schedule: every 6 hours

# Disable expired rules automatically
expired_rules_cleanup:
  class: ExpiredRulesCleanupJob
  queue: default
  schedule: every hour

# Clean up old events based on retention setting
cleanup_old_events:
  class: CleanupOldEventsJob
  queue: background
  schedule: every hour

# Export events from PostgreSQL to Parquet files for fast analytics
export_events_to_parquet:
  class: ExportEventsToParquetJob
  queue: default
  schedule: every minute

# Consolidate completed hours into day files
consolidate_parquet_hourly:
  class: ConsolidateParquetHourlyJob
  queue: default
  schedule: "5 * * * *" # At 5 minutes past every hour

# Consolidate completed week into archive (Monday 00:05)
consolidate_parquet_weekly:
  class: ConsolidateParquetWeeklyJob
  queue: default
  schedule: "5 0 * * 1" # Monday at 00:05