Skip to content

Commit

Permalink
Merge pull request #102 from co-cddo/add-update-data-button
Browse files Browse the repository at this point in the history
Add update data button
  • Loading branch information
RobNicholsGDS authored Dec 2, 2024
2 parents 6b37a2a + 4a3581e commit ba8119f
Show file tree
Hide file tree
Showing 17 changed files with 157 additions and 35 deletions.
3 changes: 3 additions & 0 deletions Gemfile
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,9 @@ gem "faraday"
gem "oj" # JSON parser
gem "rabl"

# Background job processing
gem "delayed_job_active_record"

group :development, :test do
# See https://guides.rubyonrails.org/debugging_rails_applications.html#debugging-with-the-debug-gem
gem "debug", platforms: %i[mri mingw x64_mingw]
Expand Down
6 changes: 6 additions & 0 deletions Gemfile.lock
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,11 @@ GEM
debug (1.8.0)
irb (>= 1.5.0)
reline (>= 0.3.1)
delayed_job (4.1.13)
activesupport (>= 3.0, < 9.0)
delayed_job_active_record (4.1.11)
activerecord (>= 3.0, < 9.0)
delayed_job (>= 3.0, < 5)
diff-lcs (1.5.0)
erubi (1.12.0)
factory_bot (6.2.1)
Expand Down Expand Up @@ -297,6 +302,7 @@ DEPENDENCIES
bootsnap
dartsass-rails
debug
delayed_job_active_record
factory_bot_rails
faker
faraday
Expand Down
1 change: 1 addition & 0 deletions Procfile
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
web: bundle exec puma -C config/puma.rb
release: rake db:migrate
worker: rake jobs:work
9 changes: 9 additions & 0 deletions app/controllers/agreements_controller.rb
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,15 @@ def show
@agreement = Agreement.find_by(record_id: params[:id])
end

# Queues a background data refresh when manual population is enabled
# via the ALLOW_MANUAL_POPULATE environment variable; otherwise reports
# that the feature is disabled. Redirects to the root page either way.
def populate
  unless ENV["ALLOW_MANUAL_POPULATE"] == "true"
    redirect_to(root_path, alert: "Data update job disabled")
    return
  end

  UpdateDataJob.perform_later
  redirect_to(root_path, notice: "Data update job queued. It will take 5-10 minutes for the process to complete")
end

private

def sort_by
Expand Down
9 changes: 9 additions & 0 deletions app/jobs/update_data_job.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Background job that refreshes all register data from the configured
# source by delegating to the UpdateDataFromSource service object.
# Enqueued from AgreementsController#populate; runs on the default queue.
class UpdateDataJob < ApplicationJob
  queue_as :default

  # Positional args are accepted for Active Job compatibility but unused.
  def perform(*_args)
    # Fixed typo in both log messages ("Updata Data Job" -> "Update Data Job").
    Rails.logger.info "Update Data Job started"
    UpdateDataFromSource.call
    Rails.logger.info "Update Data Job completed"
  end
end
33 changes: 33 additions & 0 deletions app/services/update_data_from_source.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# Service object that repopulates every register model from the upstream
# data source and then rebuilds the pg_search multisearch index.
class UpdateDataFromSource
  # Entry point: wipes the search cache, reloads each model, and
  # reindexes. Returns the result of the final rebuild loop.
  def self.call
    # Drop every cached search document before reloading data.
    PgSearch::Document.delete_all

    data_models = [
      Agreement,
      ControlPerson,
      Processor,
      Power,
      PowerAgreement,
      PowerControlPerson,
      AgreementControlPerson,
      AgreementProcessor,
    ]

    data_models.each { |data_model| data_model.populate }

    # Rebuild the search database
    # ---------------------------
    # A per-class rebuild would normally handle cleanup, but single table
    # inheritance breaks it: every search record is keyed by the table name,
    # which is always "data_tables", so rebuilding one class first deletes
    # the records belonging to classes rebuilt earlier. Instead, clear all
    # DataTable search records once up front, then rebuild each class with
    # cleanup disabled.
    PgSearch::Document.delete_by(searchable_type: "DataTable")

    data_models
      .select { |data_model| data_model.respond_to?(:multisearchable) }
      .each { |data_model| PgSearch::Multisearch.rebuild(data_model, clean_up: false) }
  end
end
2 changes: 2 additions & 0 deletions app/views/agreements/index.html.erb
Original file line number Diff line number Diff line change
Expand Up @@ -95,3 +95,5 @@
<h2 class="govuk-heading-m">Records not found</h2>
<p class="govuk-body">No records were found that match the current filter</p>
<% end %>

<%= govuk_button_to("Update data from source", populate_agreements_path, warning: true) if ENV["ALLOW_MANUAL_POPULATE"] == 'true' %>
5 changes: 5 additions & 0 deletions bin/delayed_job
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/usr/bin/env ruby

# Boots the full Rails environment, then hands ARGV to delayed_job's CLI
# so workers can be managed as daemons (e.g. `bin/delayed_job start`).
# Standard binstub generated by the delayed_job gem.
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'config', 'environment'))
require 'delayed/command'
Delayed::Command.new(ARGV).daemonize
2 changes: 2 additions & 0 deletions config/application.rb
Original file line number Diff line number Diff line change
Expand Up @@ -23,5 +23,7 @@ class Application < Rails::Application

# Data source options are :airtable and :rapid
config.data_source = ENV.fetch("DATA_SOURCE", :rapid).to_sym

config.active_job.queue_adapter = :delayed_job
end
end
4 changes: 2 additions & 2 deletions config/environments/heroku.rb
Original file line number Diff line number Diff line change
Expand Up @@ -63,8 +63,8 @@
# config.cache_store = :mem_cache_store

# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "dea_register_frontend_production"
config.active_job.queue_adapter = :delayed_job
config.active_job.queue_name_prefix = "dea_register_frontend_production"

config.action_mailer.perform_caching = false

Expand Down
4 changes: 2 additions & 2 deletions config/environments/production.rb
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,8 @@
# config.cache_store = :mem_cache_store

# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "dea_register_frontend_production"
config.active_job.queue_adapter = :delayed_job
config.active_job.queue_name_prefix = "dea_register_frontend_production"

config.action_mailer.perform_caching = false

Expand Down
4 changes: 3 additions & 1 deletion config/routes.rb
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,9 @@
# Defines the root path route ("/")
root "agreements#index"

resources :agreements, only: [:show]
resources :agreements, only: [:show] do
post :populate, on: :collection
end
resources :agreements, only: %i[show index], constraints: { format: :json }
resources :powers, only: %i[index show]
resources :processors, only: %i[index show]
Expand Down
22 changes: 22 additions & 0 deletions db/migrate/20241202124358_create_delayed_jobs.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Creates the delayed_jobs backing table for the delayed_job_active_record
# queue backend (standard layout produced by the gem's generator).
class CreateDelayedJobs < ActiveRecord::Migration[7.0]
  def self.up
    create_table :delayed_jobs do |t|
      t.integer :priority, default: 0, null: false # Lower values run first
      t.integer :attempts, default: 0, null: false # Retry counter before the job fails for good
      t.text :handler, null: false # YAML-serialised payload describing the work
      t.text :last_error # Message/backtrace from the most recent failure
      t.datetime :run_at # Earliest time the job may be picked up
      t.datetime :locked_at # Set when a worker claims the job
      t.datetime :failed_at # Set once retries are exhausted (by default the row is deleted instead)
      t.string :locked_by # Identifier of the worker holding the lock
      t.string :queue # Named queue this job belongs to
      t.timestamps null: true
    end

    add_index :delayed_jobs, %i[priority run_at], name: "delayed_jobs_priority"
  end

  def self.down
    drop_table :delayed_jobs
  end
end
17 changes: 16 additions & 1 deletion db/schema.rb

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

30 changes: 1 addition & 29 deletions db/seeds.rb
Original file line number Diff line number Diff line change
Expand Up @@ -11,35 +11,7 @@

start_time = Time.zone.now

# Clear the search cache
PgSearch::Document.delete_all

models = [
Agreement,
ControlPerson,
Processor,
Power,
PowerAgreement,
PowerControlPerson,
AgreementControlPerson,
AgreementProcessor,
]

models.each(&:populate)

# Rebuild the search database
# ---------------------------
# Rebuilding for each class would normally do this, but the single table inheritance causes a problem
# because the search record identifies each item by the table name which is always "data_tables".
# This means that as you rebuild each class, the process will first remove the search records
# for the classes that have previously been built.
# To fix this first all the DataTable data is cleared, and then each class is rebuilt without
# cleanup.
PgSearch::Document.delete_by(searchable_type: "DataTable")

models.each do |model|
PgSearch::Multisearch.rebuild(model, clean_up: false) if model.respond_to?(:multisearchable)
end
UpdateDataFromSource.call

# Load historic update logs from previous system
update_records = YAML.load_file(
Expand Down
19 changes: 19 additions & 0 deletions spec/jobs/update_data_job_spec.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
require "rails_helper"

# Verifies that UpdateDataJob can be enqueued and that performing it
# delegates the actual work to the UpdateDataFromSource service.
RSpec.describe UpdateDataJob, type: :job do
  include ActiveJob::TestHelper

  before do
    # Use the in-memory test adapter so jobs are captured, not executed,
    # until perform_enqueued_jobs is called explicitly.
    ActiveJob::Base.queue_adapter = :test
  end

  describe "#perform" do
    it "calls update data from source process" do
      expect { described_class.perform_later }.to have_enqueued_job

      # Stub the service before draining the queue: this both avoids
      # hitting the real data source and asserts the delegation happens.
      expect(UpdateDataFromSource).to receive(:call).and_return(true)

      perform_enqueued_jobs
    end
  end
end
22 changes: 22 additions & 0 deletions spec/requests/agreements_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -234,4 +234,26 @@
expect(json.dig("agreement", "links", "index")).to include(agreements_path(format: :json))
end
end

# Request specs for the manual "update data" endpoint: the job must only
# be enqueued when ALLOW_MANUAL_POPULATE is set to "true".
describe "POST /agreements/populate" do
  before do
    # Capture enqueued jobs instead of executing them.
    ActiveJob::Base.queue_adapter = :test
  end

  it "does not trigger update and redirects to root" do
    expect { post(populate_agreements_path) }.not_to have_enqueued_job
    expect(response).to redirect_to(root_path)
  end

  context "with environment variable set" do
    before do
      # Pass all other ENV lookups through to the real ENV first: stubbing
      # only `.with("ALLOW_MANUAL_POPULATE")` would make every unrelated
      # ENV["..."] read during the request raise an unexpected-message error.
      allow(ENV).to receive(:[]).and_call_original
      allow(ENV).to receive(:[]).with("ALLOW_MANUAL_POPULATE").and_return("true")
    end

    it "triggers update and redirects to root" do
      expect { post(populate_agreements_path) }.to have_enqueued_job

      expect(response).to redirect_to(root_path)
    end
  end
end
end

0 comments on commit ba8119f

Please sign in to comment.