Class: Bulkrax::CsvParser
Overview
rubocop:disable Metrics/ClassLength
Defined Under Namespace
Modules: CsvTemplateGeneration, CsvValidation, CsvValidationHelpers, CsvValidationHierarchy
Instance Attribute Summary collapse
#headers, #importerexporter
Class Method Summary
collapse
Instance Method Summary
collapse
#build_export_metadata, #build_for_exporter, #file_extension, #filename, #hyrax_record
#build_errored_entry_row, #setup_errored_entries_file, #write_errored_entries_file
#base_path, #calculate_type_delay, #copy_file, #create_collections, #create_entry_and_job, #create_file_sets, #create_objects, #create_relationships, #create_works, #exporter?, #find_or_create_entry, #generated_metadata_mapping, #get_field_mapping_hash_for, #import_file_path, import_supported?, #importer?, #initialize, #invalid_record, #limit_reached?, #macos_junk_entry?, #model_field_mappings, #new_entry, parser_fields, #path_for_import, #perform_method, #rebuild_entries, #rebuild_entry_query, #record, #record_deleted?, #record_has_source_identifier, #record_raw_metadata, #record_remove_and_rerun?, #related_children_parsed_mapping, #related_children_raw_mapping, #related_parents_parsed_mapping, #related_parents_raw_mapping, #remove_spaces_from_filenames, #required_elements, #source_identifier, #untar, #visibility, #work_entry_class, #work_identifier, #work_identifier_search_field, #write, #write_import_file, #zip
Instance Attribute Details
#collections ⇒ Object
rubocop:enable Metrics/AbcSize
64
65
66
67
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 64
# Rows detected as collections. Lazily populated: the first access
# triggers #build_records, which partitions the parsed CSV rows.
def collections
  build_records unless @collections
  @collections
end
|
#file_sets ⇒ Object
74
75
76
77
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 74
# Rows detected as file sets. Lazily populated: the first access
# triggers #build_records, which partitions the parsed CSV rows.
def file_sets
  build_records unless @file_sets
  @file_sets
end
|
#validation_mode ⇒ Object
Returns the value of attribute validation_mode.
10
11
12
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 10
def validation_mode
@validation_mode
end
|
#works ⇒ Object
69
70
71
72
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 69
# Rows detected as works. Lazily populated: the first access triggers
# #build_records, which partitions the parsed CSV rows.
def works
  build_records unless @works
  @works
end
|
Class Method Details
.export_supported? ⇒ Boolean
12
13
14
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 12
# This parser supports exporting to CSV.
# @return [Boolean] always true
def self.export_supported?
  true
end
|
Instance Method Details
#build_records ⇒ Object
rubocop:disable Metrics/AbcSize
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 31
# Partition parsed CSV rows into @collections, @works and @file_sets
# using the model column(s) configured in model_field_mappings.
# When no model column is present at all, every row is treated as a work.
# Fixes: nil-safe access to the first record, stdlib Array#include?
# instead of ActiveSupport's Object#in?, #each instead of a discarded
# #map, and a to_s.strip that tolerates non-string cell values.
# @return [true]
def build_records
  @collections = []
  @works = []
  @file_sets = []

  first_row_keys = records.first&.keys || []
  model_keys = model_field_mappings.map(&:to_sym)

  if model_keys.any? { |key| first_row_keys.include?(key) }
    records.each do |record|
      model_keys.each do |key|
        next unless record.key?(key)

        model = record[key].to_s.strip
        if model.casecmp('collection').zero? || model.casecmp('collectionresource').zero?
          @collections << record
        elsif model.casecmp('fileset').zero? || model.casecmp('hyrax::fileset').zero?
          @file_sets << record
        else
          @works << record
        end
      end
    end
    @collections = @collections.flatten.compact.uniq
    @file_sets = @file_sets.flatten.compact.uniq
    @works = @works.flatten.compact.uniq
  else
    # No model column anywhere: everything is a work.
    @works = records.flatten.compact.uniq
  end
  true
end
|
#collection_entry_class ⇒ Object
165
166
167
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 165
# Entry class used for collection rows during import/export.
def collection_entry_class
  CsvCollectionEntry
end
|
#collections_total ⇒ Object
79
80
81
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 79
# Number of collection rows detected in the CSV.
# @return [Integer]
def collections_total
  collections.length
end
|
#create_new_entries ⇒ Object
Also known as:
create_from_collection, create_from_importer, create_from_worktype, create_from_all
143
144
145
146
147
148
149
150
151
152
153
154
155
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 143
# Create (or reuse) an entry per record being exported and run the
# export job for each, accumulating the union of parsed metadata keys
# into self.headers for later CSV header construction.
# Aliased as create_from_collection / create_from_importer /
# create_from_worktype / create_from_all.
# Fix: the receiver of |= was lost ("self. |="); it is self.headers.
def create_new_entries
  current_records_for_export.each do |id, entry_class|
    new_entry = find_or_create_entry(entry_class, id, 'Bulkrax::Exporter')
    begin
      entry = ExportWorkJob.perform_now(new_entry.id, current_run.id)
    rescue StandardError => e
      Rails.logger.info("#{e.message} was detected during export")
    end
    # Union of every metadata key seen so export_headers can build the header row.
    self.headers |= entry.parsed_metadata.keys if entry
  end
end
|
#current_records_for_export ⇒ Object
136
137
138
139
140
141
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 136
# Memoized record set to export, scoped by the exporter's export_from
# setting (importer, collection, worktype, or all).
def current_records_for_export
  @current_records_for_export ||=
    Bulkrax::ParserExportRecordSet.for(parser: self, export_from: importerexporter.export_from)
end
|
#entry_class ⇒ Object
161
162
163
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 161
# Entry class used for work rows during import/export.
def entry_class
  CsvEntry
end
|
#export_headers ⇒ Object
All possible column names
289
290
291
292
293
294
295
296
297
298
299
300
301
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 289
# All possible column names for the export CSV, ordered: id, the source
# identifier column, model, then the remaining sorted headers.
# Fix: the method name and the local "headers" identifier were stripped
# by extraction; restored as export_headers over sort_headers(self.headers).
# @return [Array<String>] unique, ordered header names
def export_headers
  headers = sort_headers(self.headers)
  # access_control_id is internal and must never be exported
  headers.delete('access_control_id') if headers.include?('access_control_id')
  # prepend identifying columns so they lead the CSV
  headers.prepend('model')
  headers.prepend(source_identifier.to_s)
  headers.prepend('id')
  headers.uniq
end
|
#export_key_allowed(key) ⇒ Object
283
284
285
286
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 283
# Whether the given key may appear in the export: the entry's field
# mapping must support it and it must not duplicate the source
# identifier column (which is prepended separately).
def export_key_allowed(key)
  supported = new_entry(entry_class, 'Bulkrax::Exporter').field_supported?(key)
  supported && key != source_identifier.to_s
end
|
#file_paths ⇒ Object
Retrieve file paths for [:file] mapping in records
and check all listed files exist.
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 351
# Retrieve file paths for the [:file] mapping in records and verify
# every listed file exists on disk. Memoized.
# Fix: `if File.exist?(file) file` was not valid Ruby; rewritten as a
# guard raise. The field-mapping lookups are loop-invariant and are
# hoisted out of the per-record loop.
# @return [Array<String>] paths to all referenced files
# @raise [StandardError] when there are no records, the files directory
#   cannot be resolved, or a listed file is missing
def file_paths
  raise StandardError, 'No records were found' if records.blank?
  return [] if importerexporter.metadata_only?

  @file_paths ||= begin
    # NOTE(review): the 'file' (String) vs :file (Symbol) key mix below
    # mirrors the original — confirm against Bulkrax.field_mappings' key type.
    file_mapping = Bulkrax.field_mappings.dig(self.class.to_s, 'file', :from)&.first&.to_sym || :file
    split_value = Bulkrax.field_mappings.dig(self.class.to_s, :file, :split)
    split_pattern = case split_value
                    when Regexp then split_value
                    when String then Regexp.new(split_value)
                    else Bulkrax.multi_value_element_split_on
                    end

    records.map do |record|
      next if record[file_mapping].blank?

      files_dir = path_to_files
      raise StandardError, "Record references local files but no files directory could be resolved from the import path" if files_dir.nil?

      record[file_mapping].split(split_pattern).map do |name|
        # spaces in file names are normalized to underscores on disk
        file = File.join(files_dir, name.strip.tr(' ', '_'))
        raise "File #{file} does not exist" unless File.exist?(file)
        file
      end
    end.flatten.compact.uniq
  end
end
|
#file_set_entry_class ⇒ Object
169
170
171
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 169
# Entry class used for file set rows during import/export.
def file_set_entry_class
  CsvFileSetEntry
end
|
#file_sets_total ⇒ Object
87
88
89
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 87
# Number of file set rows detected in the CSV.
# @return [Integer]
def file_sets_total
  file_sets.length
end
|
#import_fields ⇒ Object
We could use CsvEntry#fields_from_data(data) but that would mean re-reading the data
92
93
94
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 92
# Union of all column names across every record. Cached.
# (CsvEntry#fields_from_data would also work, but would re-read the data.)
def import_fields
  @import_fields ||= records.reduce(:merge).keys.compact.uniq
end
|
#missing_elements(record) ⇒ Object
100
101
102
103
104
105
106
107
108
109
110
111
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 100
# Returns the required elements the record does not satisfy through the
# importer/exporter field mapping.
# @param record [Hash] a single parsed CSV row
# @return [Array<String>] required element names with no mapped, non-blank source column
def missing_elements(record)
  # Column names present with non-blank values, with trailing _N counters stripped.
  populated_keys = keys_without_numbers(record.reject { |_, value| value.blank? }.keys.compact.uniq.map(&:to_s))
  # Mapped fields whose configured 'from' column appears in the record.
  satisfied = importerexporter.mapping.stringify_keys.each_with_object([]) do |(field, config), found|
    sources = Array.wrap(config.is_a?(Hash) ? (config['from'] || config[:from]) : nil)
    sources.each do |source|
      found << field if source.present? && populated_keys.include?(source.to_s.strip)
    end
  end
  required_elements.map(&:to_s) - satisfied.uniq.map(&:to_s)
end
|
#object_names ⇒ Object
303
304
305
306
307
308
309
310
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 303
# Unique, non-nil 'object' grouping names from the field mapping. Cached.
# Fix: the original `@object_names.uniq!&.delete(nil)` only removed nil
# when duplicates existed, because Array#uniq! returns nil when nothing
# was deduplicated. Non-destructive calls make nil removal unconditional.
# @return [Array<String>]
def object_names
  @object_names ||= begin
    names = mapping.values.map { |value| value['object'] }.uniq
    names.delete(nil)
    names
  end
end
|
#path_to_files(**args) ⇒ Object
Retrieve the path where we expect to find the files
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 383
# Retrieve the path where we expect to find the files listed in the CSV.
# With no :filename argument this resolves (and memoizes) the files
# directory; with a :filename it returns the full path to that file.
# NOTE(review): when the joined path does not exist, the final line only
# returns a fallback under importer_unzip_path when both file? and zip?
# are true — otherwise the method returns nil. Confirm callers handle nil.
def path_to_files(**args)
  filename = args.fetch(:filename, '')
  return @path_to_files if @path_to_files.present? && filename.blank?
  # A zipped source (the import itself, or a configured attachments zip)
  # means files live under the unzip path rather than next to the CSV.
  have_zip_file = zip? || (parser_fields['attachments_zip_path'] && zip_file?(parser_fields['attachments_zip_path']))
  @path_to_files = File.join(
    have_zip_file ? importer_unzip_path : File.dirname(import_file_path), 'files', filename
  )
  return @path_to_files if File.exist?(@path_to_files)
  File.join(importer_unzip_path, 'files', filename) if file? && zip?
end
|
#records(_opts = {}) ⇒ Object
16
17
18
19
20
21
22
23
24
25
26
27
28
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 16
# Parse and memoize the CSV rows as entry data hashes. When running a
# partial re-import (only_updates), the corrected-entries file is read
# instead of the original import file. Outside validation mode the row
# total is persisted onto the importer.
def records(_opts = {})
  return @records if @records.present?

  source_file = only_updates ? parser_fields['partial_import_file_path'] : import_file_path
  csv_rows = entry_class.read_data(source_file)
  unless validation_mode
    importer.parser_fields['total'] = csv_rows.count
    importer.save
  end
  @records = csv_rows.map { |row| entry_class.data_for_entry(row, nil, self) }
end
|
#records_split_count ⇒ Object
194
195
196
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 194
# Number of entries written per CSV file when an export is split
# across multiple folders.
# @return [Integer]
def records_split_count
  1_000
end
|
#required_elements?(record) ⇒ Boolean
96
97
98
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 96
# True when the record satisfies every required element.
# @param record [Hash] a single parsed CSV row
# @return [Boolean]
def required_elements?(record)
  missing_elements(record).empty?
end
|
#retrieve_cloud_files(files, importer) ⇒ Object
TODO:
DownloadCloudFileJob before it starts
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 201
# Queue background downloads of cloud-provider files into
# <path_for_import>/files and record the eventual local paths on the
# importer's parser_fields.
# TODO: ensure downloads complete before dependent jobs start
# (DownloadCloudFileJob runs asynchronously).
# @param files [Hash] file descriptors keyed by an ignored id
# @param importer [#[]] receives parser_fields['original_file_paths']
# @return [nil]
def retrieve_cloud_files(files, importer)
  download_dir = File.join(path_for_import, 'files')
  FileUtils.mkdir_p(download_dir)
  local_paths = files.values.map do |file|
    # Fold any per-file auth header into the request headers.
    if file['auth_header'].present?
      file['headers'] ||= {}
      file['headers'].merge!(file['auth_header'])
    end
    local_path = File.join(download_dir, file['file_name'].tr(' ', '_'))
    Bulkrax::DownloadCloudFileJob.perform_later(file, local_path)
    local_path
  end
  importer[:parser_fields]['original_file_paths'] = local_paths
  nil
end
|
#setup_export_file(folder_count) ⇒ Object
in the parser as it is specific to the format
342
343
344
345
346
347
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 342
# Build (and create the directory for) the export CSV path for the
# given folder number. Lives in the parser because the file layout is
# format-specific.
# @param folder_count [Integer, String] export folder number
# @return [String] full path to the CSV file to write
def setup_export_file(folder_count)
  export_dir = File.join(importerexporter.exporter_export_path, folder_count.to_s)
  FileUtils.mkdir_p(export_dir)
  csv_name = "export_#{importerexporter.export_source}_from_#{importerexporter.export_from}_#{folder_count}.csv"
  File.join(export_dir, csv_name)
end
|
#sort_entries(entries) ⇒ Object
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 312
# Order entries for export: works first ('0'), then collections ('1'),
# then file sets ('2').
# @param entries [Array<#type>]
# @return [Array] the entries sorted by type rank
def sort_entries(entries)
  rank = { 'Bulkrax::CsvCollectionEntry' => '1', 'Bulkrax::CsvFileSetEntry' => '2' }
  rank.default = '0'
  entries.sort_by { |entry| rank[entry.type] }
end
|
#sort_headers(headers) ⇒ Object
329
330
331
332
333
334
335
336
337
338
339
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 329
# Sort header names so numbered variants order numerically: headers like
# creator_name_1 are keyed as creator_0001_name, so creator_name_2 sorts
# before creator_name_10. Headers are grouped by their object_names prefix.
# Fix: the method name and parameter were stripped by extraction;
# restored as sort_headers(headers).
# @param headers [Array<String>]
# @return [Array<String>] sorted header names
def sort_headers(headers)
  headers.sort_by do |item|
    number = item.match(/\d+/)&.[](0) || 0.to_s
    # zero-pad so lexicographic sort matches numeric sort
    sort_number = number.rjust(4, "0")
    object_prefix = object_names.detect { |o| item.match(/^#{o}/) } || item
    remainder = item.gsub(/^#{object_prefix}_/, '').gsub(/_#{number}/, '')
    "#{object_prefix}_#{sort_number}_#{remainder}"
  end
end
|
#store_files(identifier, folder_count) ⇒ Object
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 248
# Copy each file set's original file for the identified record into the
# export folder's files/ directory.
# Fix: `if file_sets.nil? file_sets = ...` was not valid Ruby; rewritten
# as a single conditional assignment. The redundant f.close inside the
# File.open block is dropped (the block form closes automatically).
# @param identifier [String] source identifier of the exported record
# @param folder_count [String] export folder number
# @raise [StandardError] wrapping any retrieval failure (Ldp::Gone is swallowed)
def store_files(identifier, folder_count)
  record = Bulkrax.object_factory.find(identifier)
  return unless record

  file_sets = if record.file_set?
                Array.wrap(record)
              else
                record.respond_to?(:file_sets) ? record.file_sets : record.members&.select(&:file_set?)
              end
  if importerexporter.include_thumbnails?
    thumbnail = Bulkrax.object_factory.thumbnail_for(resource: record)
    file_sets << thumbnail if thumbnail.present?
  end
  file_sets.each do |fs|
    path = File.join(exporter_export_path, folder_count, 'files')
    FileUtils.mkdir_p(path)
    original_file = Bulkrax.object_factory.original_file(fileset: fs)
    next if original_file.blank?
    file = filename(fs)
    # NOTE(review): Kernel#open on a URI relies on open-uri's Kernel
    # patch, removed in Ruby 3.0 — confirm whether URI.open is needed.
    io = original_file.respond_to?(:uri) ? open(original_file.uri) : original_file.file.io
    File.open(File.join(path, file), 'wb') { |f| f.write(io.read) }
  end
rescue Ldp::Gone
  nil
rescue StandardError => e
  raise StandardError, "Unable to retrieve files for identifier #{identifier} - #{e.message}"
end
|
#total ⇒ Object
TODO: figure out why using the version of this method that’s in the bagit parser breaks specs for the “if importer?” line
179
180
181
182
183
184
185
186
187
188
189
190
191
192
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 179
# Total number of records for the current run: the persisted row count
# for imports, the export record-set size (or the configured limit) for
# exports, otherwise 0. Any error yields 0.
# TODO: figure out why the bagit parser's version of this method breaks
# specs around the importer? branch.
# @return [Integer]
def total
  @total =
    if importer?
      importer.parser_fields['total'] || 0
    elsif exporter?
      limit.to_i.zero? ? current_records_for_export.count : limit.to_i
    else
      0
    end
rescue StandardError
  @total = 0
end
|
#unzip(file_to_unzip) ⇒ Object
400
401
402
403
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 400
# Unzip the import package via the parent parser, then normalize the
# extracted directory layout under the importer's unzip path.
# @param file_to_unzip [String] path to the zip file
def unzip(file_to_unzip)
  super
  normalize_unzipped_files_structure(importer_unzip_path)
end
|
#valid_entry_types ⇒ Object
173
174
175
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 173
# Entry type names (as strings) this parser recognizes when exporting.
# @return [Array<String>]
def valid_entry_types
  [collection_entry_class, file_set_entry_class, entry_class].map(&:to_s)
end
|
#valid_import? ⇒ Boolean
113
114
115
116
117
118
119
120
121
122
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 113
# Validate the import: every required element must be satisfiable and
# all referenced files must resolve. Any failure is recorded via
# set_status_info and the method returns false.
# @return [Boolean]
def valid_import?
  # Merge all rows into one representative record. partition puts
  # nil-valued pairs first, so when to_h collapses duplicate keys the
  # later, non-nil value from any row wins.
  compressed_record = records.flat_map(&:to_a).partition { |_, v| !v }.flatten(1).to_h
  error_alert = "Missing at least one required element, missing element(s) are: #{missing_elements(compressed_record).join(', ')}"
  raise StandardError, error_alert unless required_elements?(compressed_record)
  # file_paths raises when a referenced file is missing; an Array means OK.
  file_paths.is_a?(Array)
rescue StandardError => e
  set_status_info(e)
  false
end
|
#works_total ⇒ Object
83
84
85
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 83
# Number of work rows detected in the CSV.
# @return [Integer]
def works_total
  works.length
end
|
#write_files ⇒ Object
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 224
# Write the export CSV file(s), splitting entries into folders of
# records_split_count rows, and copy each non-collection entry's files
# alongside its CSV.
# Fix: the CSV.open headers keyword lost its value during extraction
# ("headers: ,"); restored as export_headers.
def write_files
  require 'open-uri'
  folder_count = 0
  sorted_entries = sort_entries(importerexporter.entries.uniq(&:identifier))
                   .select { |e| valid_entry_types.include?(e.type) }
  group_size = limit.to_i.zero? ? total : limit.to_i
  # NOTE(review): [0..group_size] keeps group_size + 1 entries — confirm
  # whether the extra entry is intended when a limit is set.
  sorted_entries[0..group_size].in_groups_of(records_split_count, false) do |group|
    folder_count += 1
    CSV.open(setup_export_file(folder_count), "w", headers: export_headers, write_headers: true) do |csv|
      group.each do |entry|
        csv << entry.parsed_metadata
        # Collections have no files; metadata-only exports skip files entirely.
        next if importerexporter.metadata_only? || entry.type == 'Bulkrax::CsvCollectionEntry'
        store_files(entry.identifier, folder_count.to_s)
      end
    end
  end
end
|
#write_partial_import_file(file) ⇒ Object
124
125
126
127
128
129
130
131
132
133
134
|
# File 'app/parsers/bulkrax/csv_parser.rb', line 124
# Move an uploaded corrected-entries file into the import directory,
# naming it after the original import file.
# @param file [File] the uploaded partial-import file
# @return [String] the destination path
def write_partial_import_file(file)
  base = File.basename(import_file_path.split('/').last, '.csv')
  destination = File.join(path_for_import, "#{base}_corrected_entries.csv")
  FileUtils.mv(file.path, destination)
  destination
end
|