Class: Exwiw::Runner
- Inherits: Object
- Class hierarchy: Object → Exwiw::Runner
- Defined in:
- lib/exwiw/runner.rb
Instance Method Summary collapse
-
#initialize(connection_config:, output_dir:, config_dir:, dump_target:, logger:) ⇒ Runner
constructor
A new instance of Runner.
- #run ⇒ Object
Constructor Details
#initialize(connection_config:, output_dir:, config_dir:, dump_target:, logger:) ⇒ Runner
Returns a new instance of Runner.
# Builds a Runner holding everything a dump needs: the database
# connection settings, where to write SQL files, where table configs
# live, which record to dump, and a logger for progress output.
#
# @param connection_config [Object] database connection settings
# @param output_dir [String] directory the SQL files are written to
# @param config_dir [String] directory containing table config files
# @param dump_target [Object] the record/table selection to dump
# @param logger [Logger] receives progress and debug messages
def initialize(
  connection_config:,
  output_dir:,
  config_dir:,
  dump_target:,
  logger:
)
  {
    connection_config: connection_config,
    output_dir: output_dir,
    config_dir: config_dir,
    dump_target: dump_target,
    logger: logger
  }.each do |name, value|
    instance_variable_set(:"@#{name}", value)
  end
end
Instance Method Details
#run ⇒ Object
# Runs the dump end-to-end: builds the adapter, loads table configs,
# orders tables by dependency, then writes one INSERT SQL file per
# matching table (and a DELETE SQL file when the adapter supports it)
# into @output_dir.
#
# @return [void]
def run
  adapter = Adapter.build(@connection_config, @logger)
  configs = load_table_config(adapter.class.table_config_class)
  table_by_name = configs.each_with_object({}) { |config, hash| hash[config.name] = config }

  # Validate the dump target only when a config for its table exists.
  target = table_by_name[@dump_target.table_name]
  adapter.validate_as_dump_target!(target) if target

  @logger.info("Determining table processing order...")
  ordered_table_names = DetermineTableProcessingOrder.run(configs.select { |c| adapter.dumpable?(c) })

  # mkdir_p is a no-op when the directory already exists, so no
  # Dir.exist? guard is needed.
  FileUtils.mkdir_p(@output_dir)

  total_size = ordered_table_names.size
  ordered_table_names.each_with_index do |table_name, idx|
    @logger.info("Processing table '#{table_name}'... (#{idx + 1}/#{total_size})")
    table = table_by_name.fetch(table_name)
    query_ast = adapter.build_query(table, @dump_target, table_by_name)
    results = adapter.execute(query_ast)

    if results.size.zero?
      @logger.info(" No records matched. skip this table.")
      next
    end

    # INSERT files are numbered in processing (dependency) order;
    # DELETE files are numbered in reverse so they replay dependents-first.
    write_insert_statements(adapter, table, table_name, results, idx + 1)
    if adapter.supports_bulk_delete?
      write_delete_statements(adapter, table, table_name, query_ast, total_size - idx)
    end
  end
end

# Generates the bulk INSERT statement(s) for +results+ (chunked when the
# table declares a bulk_insert_chunk_size) and writes them to
# "insert-NNN-<table>.<ext>".
private def write_insert_statements(adapter, table, table_name, results, position)
  @logger.debug(" Generate INSERT statement...")
  chunk_size = table.bulk_insert_chunk_size
  chunks = chunk_size ? results.each_slice(chunk_size).to_a : [results]
  insert_sql = chunks.map { |chunk_rows| adapter.to_bulk_insert(chunk_rows, table) }.join("\n")
  @logger.info(" Generated INSERT statement for #{results.size} records (#{chunks.size} statement(s)).")
  write_sql_file("insert", position, table_name, adapter.output_extension, insert_sql)
end

# Generates the bulk DELETE statement for the rows selected by
# +query_ast+ and writes it to "delete-NNN-<table>.<ext>". The full SQL
# is only logged at debug level.
private def write_delete_statements(adapter, table, table_name, query_ast, position)
  @logger.debug(" Generate DELETE statement...")
  delete_sql = adapter.to_bulk_delete(query_ast, table)
  if @logger.debug?
    @logger.debug(" Generated DELETE statement:\n#{delete_sql}")
  else
    @logger.info(" Generated DELETE statement.")
  end
  write_sql_file("delete", position, table_name, adapter.output_extension, delete_sql)
end

# Writes +sql+ to "<prefix>-NNN-<table_name>.<extension>" in @output_dir,
# zero-padding the position to 3 digits so files sort lexicographically.
private def write_sql_file(prefix, position, table_name, extension, sql)
  filename = "#{prefix}-#{position.to_s.rjust(3, '0')}-#{table_name}.#{extension}"
  File.open(File.join(@output_dir, filename), 'w') do |file|
    file.puts(sql)
  end
end