diff --git a/.atlas b/.atlas
index caf3c7c74..9014c1825 100644
--- a/.atlas
+++ b/.atlas
@@ -1 +1 @@
-0.0.156
+0.0.157
diff --git a/.version b/.version
index dbe590065..80803faf1 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-2.8.1
+2.8.8
diff --git a/Gemfile b/Gemfile
index 1641e341d..ab3edd2ee 100644
--- a/Gemfile
+++ b/Gemfile
@@ -79,6 +79,7 @@ group :development, :test do
# See https://guides.rubyonrails.org/debugging_rails_applications.html#debugging-with-the-debug-gem
gem 'debug', platforms: %i[mri mingw x64_mingw]
gem 'faker'
+ gem 'fix-db-schema-conflicts'
gem 'rspec'
gem 'rspec-rails'
diff --git a/Gemfile.lock b/Gemfile.lock
index 9b9c46c14..49d81a848 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -85,7 +85,7 @@ GEM
addressable (2.8.7)
public_suffix (>= 2.0.2, < 7.0)
ast (2.4.2)
- atlas_rb (0.0.61)
+ atlas_rb (0.0.64)
faraday (~> 2.7)
faraday-follow_redirects (~> 0.3.0)
faraday-multipart (~> 1)
@@ -178,6 +178,8 @@ GEM
faraday-net_http (3.3.0)
net-http
ffi (1.17.0-x86_64-linux-gnu)
+ fix-db-schema-conflicts (3.1.1)
+ rubocop (>= 0.38.0)
fugit (1.11.1)
et-orbi (~> 1, >= 1.2.11)
raabro (~> 1.4)
@@ -341,6 +343,9 @@ GEM
actionpack (>= 5.2)
railties (>= 5.2)
rexml (3.3.7)
+ roo (2.10.1)
+ nokogiri (~> 1)
+ rubyzip (>= 1.3.0, < 3.0.0)
rsolr (2.6.0)
builder (>= 2.1.2)
faraday (>= 0.9, < 3, != 2.0.0)
@@ -457,6 +462,7 @@ DEPENDENCIES
enumerations
factory_bot_rails
faker
+ fix-db-schema-conflicts
good_job
hamlit
importmap-rails
@@ -469,6 +475,7 @@ DEPENDENCIES
puma
rails
rails-controller-testing
+ roo
rsolr (>= 1.0, < 3)
rspec
rspec-rails
diff --git a/app/controllers/loads_controller.rb b/app/controllers/loads_controller.rb
index 193209bfc..c2acc08b4 100644
--- a/app/controllers/loads_controller.rb
+++ b/app/controllers/loads_controller.rb
@@ -1,11 +1,16 @@
# frozen_string_literal: true
-require 'zip'
-require 'roo'
-
+# TODO: revisit how ingests relate to load_report. Currently any file that
+# appears valid becomes an Ingest and is queued for loading, which inflates the
+# load_report success rate to nearly 100% regardless of actual job outcomes.
+# A load can also appear failed overall even though its valid jobs succeeded.
class LoadsController < ApplicationController
def index
- @ingests = Ingest.order(created_at: :desc)
+ @load_reports = LoadReport.order(created_at: :desc)
+ end
+
+ def show
+ @load_report = LoadReport.find(params[:id])
end
def create
@@ -25,39 +30,70 @@ def create
private
def process_zip(zip)
- Zip::File.open(zip) do |zip_file|
- manifest_file = zip_file.find_entry('manifest.xlsx')
- if manifest_file
- process_spreadsheet(manifest_file, zip_file)
- else
- redirect_to loads_path, alert: 'Manifest file not found in ZIP.'
+ failures = []
+ load_report = nil
+
+ begin
+ Zip::File.open(zip) do |zip_file|
+ manifest_file = zip_file.find_entry('manifest.xlsx')
+ if manifest_file
+ load_report = process_spreadsheet(manifest_file, zip_file, failures)
+ else
+ failures << 'Manifest file not found in ZIP.'
+ end
end
+ rescue Zip::Error => e
+ failures << "Error processing ZIP file: #{e.message}"
+ end
+
+ if failures.empty?
+ load_report&.finish_load
+ redirect_to loads_path, notice: 'ZIP file processed successfully.'
+ else
+ load_report&.fail_load
+ redirect_to loads_path, alert: "Errors occurred during processing: #{failures.join(', ')}"
end
- rescue Zip::Error => e
- redirect_to loads_path, alert: "Error processing ZIP file: #{e.message}"
end
- def process_spreadsheet(xlsx_file, zip_file)
+ def process_spreadsheet(xlsx_file, zip_file, failures)
spreadsheet_content = xlsx_file.get_input_stream.read
spreadsheet = Roo::Spreadsheet.open(StringIO.new(spreadsheet_content), extension: :xlsx)
+ load_report = LoadReport.create!(status: :in_progress)
+ load_report.start_load
- spreadsheet.each_with_index do |row, index|
- next if index.zero?
- pid = row[0]
- file_name = row[1]
- if pid && file_name
- xml_entry = zip_file.find_entry(file_name)
- if xml_entry
- ingest = Ingest.create_from_spreadsheet_row(row)
- xml_content = xml_entry.get_input_stream.read
- UpdateMetadataJob.perform_later(pid, xml_content, ingest.id)
+ header_row = spreadsheet.row(1)
+ header_hash = {}
+ header_row.each_with_index do |cell, index|
+ header_hash[cell] = index
+ end
+ if header_hash.key?("PIDs") && header_hash.key?("MODS XML File Path")
+ pid_column = header_hash["PIDs"]
+ file_path_column = header_hash["MODS XML File Path"]
+ spreadsheet.each_with_index do |row, index|
+ next if index.zero?
+ pid = row[pid_column]
+ file_name = row[file_path_column]
+ if pid && file_name
+ xml_entry = zip_file.find_entry(file_name)
+ if xml_entry
+ ingest = Ingest.create_from_spreadsheet_row(pid, file_name, load_report.id)
+ xml_content = xml_entry.get_input_stream.read
+ UpdateMetadataJob.perform_later(pid, xml_content, ingest.id)
+ else
+ failures << "#{file_name} file not found in ZIP"
+ end
else
- redirect_to loads_path, alert: "#{file_name} file not found in ZIP: "; return
+ failures << "Missing PID or filename in row #{index + 1}"
end
end
+
+ load_report
+ else
+ failures << "Cannot find header labels"
+ load_report
end
- redirect_to loads_path, notice: 'ZIP file processed successfully.'
rescue StandardError => e
- redirect_to loads_path, alert: "Error processing spreadsheet: #{e.message}"
+ failures << "Error processing spreadsheet: #{e.message}"
+ load_report
end
end
diff --git a/app/models/ingest.rb b/app/models/ingest.rb
index 3919cc5d7..0cffaaec8 100644
--- a/app/models/ingest.rb
+++ b/app/models/ingest.rb
@@ -1,17 +1,20 @@
# frozen_string_literal: true
class Ingest < ApplicationRecord
- enum status: { pending: 0, completed: 1, failed: 2 }
+ belongs_to :load_report
+
+ enum :status, { pending: 0, completed: 1, failed: 2 }
validates :pid, presence: true
validates :xml_filename, presence: true
validates :status, presence: true
- def self.create_from_spreadsheet_row(row)
+ def self.create_from_spreadsheet_row(pid, file_name, load_report_id)
create!(
- pid: row[0],
- xml_filename: row[1],
- status: :pending
+ pid: pid,
+ xml_filename: file_name,
+ status: :pending,
+ load_report: LoadReport.find(load_report_id)
)
end
end
diff --git a/app/models/load_report.rb b/app/models/load_report.rb
new file mode 100644
index 000000000..7faa536c2
--- /dev/null
+++ b/app/models/load_report.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+class LoadReport < ApplicationRecord
+ has_many :ingests
+
+ enum :status, { in_progress: 0, completed: 1, failed: 2 }
+
+ validates :status, presence: true
+
+ def start_load
+ update(status: :in_progress, started_at: Time.now)
+ end
+
+ def finish_load
+ update(status: :completed, finished_at: Time.now)
+ end
+
+ def fail_load
+ update(status: :failed, finished_at: Time.now)
+ end
+
+ def success_rate
+ return 0 if ingests.empty?
+ ((ingests.completed.count.to_f / ingests.count.to_f) * 100).round(2)
+ end
+end
diff --git a/app/views/loads/index.html.erb b/app/views/loads/index.html.erb
deleted file mode 100644
index 7fcb06247..000000000
--- a/app/views/loads/index.html.erb
+++ /dev/null
@@ -1,35 +0,0 @@
-
Ingest Management
-
-Upload ZIP File
-
-<%= form_tag loads_path, multipart: true do %>
-
- <%= file_field_tag :file, accept: 'application/zip,application/x-zip-compressed' %>
-
-
- <%= submit_tag "Upload and Process" %>
-
-<% end %>
-
-Ingests
-
-
-
-
- PID |
- XML Filename |
- Status |
- Created At |
-
-
-
- <% @ingests.each do |ingest| %>
-
- <%= ingest.pid %> |
- <%= ingest.xml_filename %> |
- <%= ingest.status %> |
- <%= ingest.created_at %> |
-
- <% end %>
-
-
diff --git a/app/views/loads/index.html.haml b/app/views/loads/index.html.haml
new file mode 100644
index 000000000..3f05d71c2
--- /dev/null
+++ b/app/views/loads/index.html.haml
@@ -0,0 +1,30 @@
+%h2.py-3
+ Ingest Management
+
+.upload-form
+ %h3.py-3
+ Upload ZIP File
+ = form_tag loads_path, multipart: true do
+ .input-group.mb-3
+ %input#file.form-control{name: "file", type: "file", accept: 'application/zip,application/x-zip-compressed'}/
+ = submit_tag "Upload", class: 'btn btn-primary'
+%div
+ %h2.py-3
+ Load Reports
+ %table.table
+ %thead
+ %tr
+ %th Status
+ %th Started At
+ %th Finished At
+ %th Success Rate
+ %th View
+ %tbody
+ - @load_reports.each do |load_report|
+ %tr
+ %td= load_report.status
+ %td= load_report.started_at
+ %td= load_report.finished_at
+ %td= load_report.success_rate
+ %td
+ %a.btn.btn-sm.btn-primary{:href => load_path(load_report.id)} View
diff --git a/app/views/loads/show.html.haml b/app/views/loads/show.html.haml
new file mode 100644
index 000000000..390577319
--- /dev/null
+++ b/app/views/loads/show.html.haml
@@ -0,0 +1,17 @@
+%h2.py-3
+ Load Report
+%table.table
+ %thead
+ %tr
+ %th PID
+ %th XML Filename
+ %th Status
+ %th Created At
+ %tbody
+ - @load_report.ingests.each do |ingest|
+ %tr
+ %td= ingest.pid
+ %td= ingest.xml_filename
+ %td= ingest.status
+ %td= ingest.created_at
+
diff --git a/config/routes.rb b/config/routes.rb
index a56cfa92d..d18bf1051 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -27,12 +27,11 @@
resources :communities
resources :collections
+ resources :loads
resources :works
# xml
get '/xml/editor/:id' => 'xml#editor', as: 'xml_editor'
put '/xml/validate' => 'xml#validate'
put '/xml/update' => 'xml#update'
-
- resources :loads, only: [:index, :create]
end
diff --git a/db/migrate/20240926172305_create_load_reports.rb b/db/migrate/20240926172305_create_load_reports.rb
new file mode 100644
index 000000000..230df48c2
--- /dev/null
+++ b/db/migrate/20240926172305_create_load_reports.rb
@@ -0,0 +1,11 @@
+class CreateLoadReports < ActiveRecord::Migration[7.2]
+ def change
+ create_table :load_reports do |t|
+ t.integer :status, null: false, default: 0
+ t.datetime :started_at
+ t.datetime :finished_at
+
+ t.timestamps
+ end
+ end
+end
diff --git a/db/migrate/20240926172306_create_ingests.rb b/db/migrate/20240926172306_create_ingests.rb
index c0f275124..3096989ef 100644
--- a/db/migrate/20240926172306_create_ingests.rb
+++ b/db/migrate/20240926172306_create_ingests.rb
@@ -4,6 +4,7 @@ def change
t.string :pid, null: false
t.string :xml_filename, null: false
t.integer :status, null: false, default: 0
+ t.references :load_report, foreign_key: true, null: false
t.timestamps
end
diff --git a/db/schema.rb b/db/schema.rb
index f673b5d87..5d407e3be 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -10,95 +10,94 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema[7.2].define(version: 2024_09_09_175347) do
+ActiveRecord::Schema[7.2].define(version: 2024_09_26_172306) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
- enable_extension "uuid-ossp"
create_table "bookmarks", id: :serial, force: :cascade do |t|
- t.integer "user_id", null: false
- t.string "user_type"
+ t.datetime "created_at", precision: nil, null: false
t.string "document_id"
t.string "document_type"
t.binary "title"
- t.datetime "created_at", precision: nil, null: false
t.datetime "updated_at", precision: nil, null: false
+ t.integer "user_id", null: false
+ t.string "user_type"
t.index ["document_id"], name: "index_bookmarks_on_document_id"
t.index ["user_id"], name: "index_bookmarks_on_user_id"
end
create_table "good_job_batches", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
+ t.integer "callback_priority"
+ t.text "callback_queue_name"
t.datetime "created_at", null: false
- t.datetime "updated_at", null: false
t.text "description"
- t.jsonb "serialized_properties"
- t.text "on_finish"
- t.text "on_success"
- t.text "on_discard"
- t.text "callback_queue_name"
- t.integer "callback_priority"
- t.datetime "enqueued_at"
t.datetime "discarded_at"
+ t.datetime "enqueued_at"
t.datetime "finished_at"
+ t.text "on_discard"
+ t.text "on_finish"
+ t.text "on_success"
+ t.jsonb "serialized_properties"
+ t.datetime "updated_at", null: false
end
create_table "good_job_executions", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
- t.datetime "created_at", null: false
- t.datetime "updated_at", null: false
t.uuid "active_job_id", null: false
- t.text "job_class"
- t.text "queue_name"
- t.jsonb "serialized_params"
- t.datetime "scheduled_at"
- t.datetime "finished_at"
+ t.datetime "created_at", null: false
+ t.interval "duration"
t.text "error"
- t.integer "error_event", limit: 2
t.text "error_backtrace", array: true
+ t.integer "error_event", limit: 2
+ t.datetime "finished_at"
+ t.text "job_class"
t.uuid "process_id"
- t.interval "duration"
+ t.text "queue_name"
+ t.datetime "scheduled_at"
+ t.jsonb "serialized_params"
+ t.datetime "updated_at", null: false
t.index ["active_job_id", "created_at"], name: "index_good_job_executions_on_active_job_id_and_created_at"
t.index ["process_id", "created_at"], name: "index_good_job_executions_on_process_id_and_created_at"
end
create_table "good_job_processes", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.datetime "created_at", null: false
- t.datetime "updated_at", null: false
- t.jsonb "state"
t.integer "lock_type", limit: 2
+ t.jsonb "state"
+ t.datetime "updated_at", null: false
end
create_table "good_job_settings", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
t.datetime "created_at", null: false
- t.datetime "updated_at", null: false
t.text "key"
+ t.datetime "updated_at", null: false
t.jsonb "value"
t.index ["key"], name: "index_good_job_settings_on_key", unique: true
end
create_table "good_jobs", id: :uuid, default: -> { "gen_random_uuid()" }, force: :cascade do |t|
- t.text "queue_name"
- t.integer "priority"
- t.jsonb "serialized_params"
- t.datetime "scheduled_at"
- t.datetime "performed_at"
- t.datetime "finished_at"
- t.text "error"
- t.datetime "created_at", null: false
- t.datetime "updated_at", null: false
t.uuid "active_job_id"
+ t.uuid "batch_callback_id"
+ t.uuid "batch_id"
t.text "concurrency_key"
- t.text "cron_key"
- t.uuid "retried_good_job_id"
+ t.datetime "created_at", null: false
t.datetime "cron_at"
- t.uuid "batch_id"
- t.uuid "batch_callback_id"
- t.boolean "is_discrete"
+ t.text "cron_key"
+ t.text "error"
+ t.integer "error_event", limit: 2
t.integer "executions_count"
+ t.datetime "finished_at"
+ t.boolean "is_discrete"
t.text "job_class"
- t.integer "error_event", limit: 2
t.text "labels", array: true
- t.uuid "locked_by_id"
t.datetime "locked_at"
+ t.uuid "locked_by_id"
+ t.datetime "performed_at"
+ t.integer "priority"
+ t.text "queue_name"
+ t.uuid "retried_good_job_id"
+ t.datetime "scheduled_at"
+ t.jsonb "serialized_params"
+ t.datetime "updated_at", null: false
t.index ["active_job_id", "created_at"], name: "index_good_jobs_on_active_job_id_and_created_at"
t.index ["batch_callback_id"], name: "index_good_jobs_on_batch_callback_id", where: "(batch_callback_id IS NOT NULL)"
t.index ["batch_id"], name: "index_good_jobs_on_batch_id", where: "(batch_id IS NOT NULL)"
@@ -115,54 +114,44 @@
t.index ["scheduled_at"], name: "index_good_jobs_on_scheduled_at", where: "(finished_at IS NULL)"
end
- create_table "metadata_mods", force: :cascade do |t|
- t.jsonb "json_attributes"
+ create_table "ingests", force: :cascade do |t|
t.datetime "created_at", null: false
+ t.bigint "load_report_id", null: false
+ t.string "pid", null: false
+ t.integer "status", default: 0, null: false
t.datetime "updated_at", null: false
- t.string "valkyrie_id"
- end
-
- create_table "minter_states", id: :serial, force: :cascade do |t|
- t.string "namespace", default: "default", null: false
- t.string "template", null: false
- t.text "counters"
- t.bigint "seq", default: 0
- t.binary "rand"
- t.datetime "created_at", precision: nil, null: false
- t.datetime "updated_at", precision: nil, null: false
- t.index ["namespace"], name: "index_minter_states_on_namespace", unique: true
+ t.string "xml_filename", null: false
+ t.index ["load_report_id"], name: "index_ingests_on_load_report_id"
end
- create_table "orm_resources", id: :uuid, default: -> { "uuid_generate_v4()" }, force: :cascade do |t|
- t.jsonb "metadata", default: {}, null: false
- t.datetime "created_at", precision: nil, null: false
- t.datetime "updated_at", precision: nil, null: false
- t.string "internal_resource"
- t.integer "lock_version"
- t.index ["internal_resource"], name: "index_orm_resources_on_internal_resource"
- t.index ["metadata"], name: "index_orm_resources_on_metadata", using: :gin
- t.index ["metadata"], name: "index_orm_resources_on_metadata_jsonb_path_ops", opclass: :jsonb_path_ops, using: :gin
- t.index ["updated_at"], name: "index_orm_resources_on_updated_at"
+ create_table "load_reports", force: :cascade do |t|
+ t.datetime "created_at", null: false
+ t.datetime "finished_at"
+ t.datetime "started_at"
+ t.integer "status", default: 0, null: false
+ t.datetime "updated_at", null: false
end
create_table "searches", id: :serial, force: :cascade do |t|
+ t.datetime "created_at", precision: nil, null: false
t.binary "query_params"
+ t.datetime "updated_at", precision: nil, null: false
t.integer "user_id"
t.string "user_type"
- t.datetime "created_at", precision: nil, null: false
- t.datetime "updated_at", precision: nil, null: false
t.index ["user_id"], name: "index_searches_on_user_id"
end
create_table "users", force: :cascade do |t|
+ t.datetime "created_at", null: false
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
- t.string "reset_password_token"
- t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
- t.datetime "created_at", null: false
+ t.datetime "reset_password_sent_at"
+ t.string "reset_password_token"
t.datetime "updated_at", null: false
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
end
+
+ add_foreign_key "ingests", "load_reports"
end
diff --git a/spec/controllers/loads_controller_spec.rb b/spec/controllers/loads_controller_spec.rb
index 5b0554ba6..f33c4a687 100644
--- a/spec/controllers/loads_controller_spec.rb
+++ b/spec/controllers/loads_controller_spec.rb
@@ -6,6 +6,8 @@
let(:community) { AtlasRb::Community.create(nil, '/home/cerberus/web/spec/fixtures/files/community-mods.xml') }
let(:collection) { AtlasRb::Collection.create(community['id'], '/home/cerberus/web/spec/fixtures/files/collection-mods.xml') }
let(:work) { AtlasRb::Work.create(collection['id'], '/home/cerberus/web/spec/fixtures/files/work-mods.xml') }
+ let(:zip) { fixture_file_upload('/home/cerberus/web/spec/fixtures/files/metadata_existing_files.zip', 'application/zip') }
+ let(:zip_s) { fixture_file_upload('/home/cerberus/web/spec/fixtures/files/metadata_existing_file.zip', 'application/zip') }
describe 'noid test' do
it 'lets spec set the noid' do
@@ -15,4 +17,126 @@
expect(AtlasRb::Work.find('123')).to be_present
end
end
+
+ describe 'create popups' do
+ it 'processes the zip file successfully' do
+ post :create, params: { file: zip }
+ expect(response).to redirect_to(loads_path)
+ expect(flash[:notice]).to eq("ZIP file processed successfully.")
+ expect(flash[:alert]).to be_nil
+ end
+ end
+
+ describe 'creates LoadReports and Ingests' do
+ it 'creates a LoadReport' do
+ expect {
+ post :create, params: { file: zip }
+ }.to change(LoadReport, :count).by(1)
+ end
+
+ it 'creates Ingests' do
+ expect {
+ post :create, params: { file: zip }
+ }.to change(Ingest, :count).by(5)
+ end
+ end
+
+ describe 'updates metadata' do
+ it 'updates existing work with new metadata' do
+ AtlasRb::Work.metadata(work['id'], { 'noid' => 'neu:test123' })
+ found_work = AtlasRb::Work.find('neu:test123')
+ expect(found_work).to be_present
+ initial_xml_content = AtlasRb::Work.mods(found_work['id'], 'xml')
+ expect(initial_xml_content).to be_present
+ post :create, params: { file: zip_s }
+
+ updated_work = AtlasRb::Work.find('neu:test123')
+ expect(updated_work).to be_present
+ updated_xml_content = AtlasRb::Work.mods(updated_work['id'], 'xml')
+ expect(updated_xml_content).to be_present
+
+ expect(updated_xml_content).not_to eq(initial_xml_content)
+ end
+ end
+
+ describe 'error handling' do
+ it 'handles missing file' do
+ post :create
+ expect(response).to redirect_to(loads_path)
+ expect(flash[:alert]).to eq('No file uploaded. Please select a ZIP file.')
+ end
+
+ it 'handles non-zip file' do
+ non_zip = fixture_file_upload('/home/cerberus/web/spec/fixtures/files/spongebob.png', 'text/plain')
+ post :create, params: { file: non_zip }
+ expect(response).to redirect_to(loads_path)
+ expect(flash[:alert]).to include('Invalid file type')
+ end
+
+ it 'handles zip without manifest' do
+ zip_without_manifest = fixture_file_upload('/home/cerberus/web/spec/fixtures/files/zip_without_manifest.zip', 'application/zip')
+ post :create, params: { file: zip_without_manifest }
+ expect(response).to redirect_to(loads_path)
+ expect(flash[:alert]).to include('Manifest file not found in ZIP')
+ end
+
+ it 'handles missing XML file' do
+ zip_with_missing_xml = fixture_file_upload('/home/cerberus/web/spec/fixtures/files/zip_with_missing_xml.zip', 'application/zip')
+ post :create, params: { file: zip_with_missing_xml }
+ expect(response).to redirect_to(loads_path)
+ expect(flash[:alert]).to include('file not found in ZIP')
+ end
+
+ it 'handles missing PIDs in spreadsheet' do
+ zip_with_missing_pids = fixture_file_upload('/home/cerberus/web/spec/fixtures/files/zip_with_missing_pids.zip', 'application/zip')
+ post :create, params: { file: zip_with_missing_pids }
+ expect(response).to redirect_to(loads_path)
+ expect(flash[:alert]).to include('Missing PID or filename in row')
+ end
+
+ it 'handles Zip::Error' do
+ allow(Zip::File).to receive(:open).and_raise(Zip::Error.new("Corrupted zip file"))
+
+ post :create, params: { file: zip }
+
+ expect(response).to redirect_to(loads_path)
+ expect(flash[:alert]).to include("Error processing ZIP file: Corrupted zip file")
+ end
+
+ it 'handles StandardError during spreadsheet processing' do
+ allow(Roo::Spreadsheet).to receive(:open).and_raise(StandardError.new("Invalid spreadsheet format"))
+
+ post :create, params: { file: zip }
+
+ expect(response).to redirect_to(loads_path)
+ expect(flash[:alert]).to include("Error processing spreadsheet: Invalid spreadsheet format")
+ end
+
+ it 'handles missing headers in spreadsheet' do
+ zip_with_missing_headers = fixture_file_upload('/home/cerberus/web/spec/fixtures/files/no_header.zip', 'application/zip')
+
+ post :create, params: { file: zip_with_missing_headers }
+ expect(response).to redirect_to(loads_path)
+ expect(flash[:alert]).to include("Cannot find header labels")
+ end
+ end
+
+ describe "index" do
+ it "orders LoadReports" do
+ oldest_report = LoadReport.create!(status: :completed, created_at: 2.days.ago)
+ older_report = LoadReport.create!(status: :completed, created_at: 1.day.ago)
+ newer_report = LoadReport.create!(status: :completed, created_at: 1.hour.ago)
+ newest_report = LoadReport.create!(status: :completed, created_at: 1.minute.ago)
+
+ get :index
+
+ expect(assigns(:load_reports).to_a).to eq([newest_report, newer_report, older_report, oldest_report])
+ end
+ end
+
+ it "renders html template" do
+ get :index
+ expect(response).to render_template("loads/index")
+ end
end
+
diff --git a/spec/fixtures/files/.version b/spec/fixtures/files/.version
new file mode 100644
index 000000000..9f8d8a916
--- /dev/null
+++ b/spec/fixtures/files/.version
@@ -0,0 +1 @@
+2.8.3
diff --git a/spec/fixtures/files/metadata_existing_file.zip b/spec/fixtures/files/metadata_existing_file.zip
new file mode 100644
index 000000000..07fe50b54
Binary files /dev/null and b/spec/fixtures/files/metadata_existing_file.zip differ
diff --git a/spec/fixtures/files/metadata_existing_files.zip b/spec/fixtures/files/metadata_existing_files.zip
index 07fe50b54..79d6c5ed6 100644
Binary files a/spec/fixtures/files/metadata_existing_files.zip and b/spec/fixtures/files/metadata_existing_files.zip differ
diff --git a/spec/fixtures/files/no_header.zip b/spec/fixtures/files/no_header.zip
new file mode 100644
index 000000000..03d209337
Binary files /dev/null and b/spec/fixtures/files/no_header.zip differ
diff --git a/spec/fixtures/files/zip_with_missing_pids.zip b/spec/fixtures/files/zip_with_missing_pids.zip
new file mode 100644
index 000000000..11dcf99ac
Binary files /dev/null and b/spec/fixtures/files/zip_with_missing_pids.zip differ
diff --git a/spec/fixtures/files/zip_with_missing_xml.zip b/spec/fixtures/files/zip_with_missing_xml.zip
new file mode 100644
index 000000000..291f6a436
Binary files /dev/null and b/spec/fixtures/files/zip_with_missing_xml.zip differ
diff --git a/spec/fixtures/files/zip_without_manifest.zip b/spec/fixtures/files/zip_without_manifest.zip
new file mode 100644
index 000000000..ed95ea4dd
Binary files /dev/null and b/spec/fixtures/files/zip_without_manifest.zip differ
diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb
index 347adbb92..a9a6b6fc2 100644
--- a/spec/rails_helper.rb
+++ b/spec/rails_helper.rb
@@ -49,17 +49,12 @@
end
RSpec.configure do |config|
config.before(:suite) do
- # TODO: Atlas wipe <- fixed! with below
- DatabaseCleaner[:active_record, db: :atlas_test].strategy = :deletion
- DatabaseCleaner[:active_record, db: :atlas_test].clean
- c = RSolr.connect(:url => 'http://solr:8983/solr/blacklight-test')
- c.delete_by_query '*:*'
- c.commit
+ AtlasRb::Reset.clean
end
config.include Devise::Test::ControllerHelpers, type: :controller
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
- config.fixture_path = Rails.root.join('spec/fixtures').to_s
+ config.fixture_paths = [Rails.root.join('spec/fixtures').to_s]
# Adding config includes for ViewComponent Test Helpers
config.include ViewComponent::TestHelpers, type: :component