Class: ConfigOMat::Op::LoadMetaConfig

Inherits: LifecycleVM::OpBase < Object

Defined in: lib/config_o_mat/shared/op/load_meta_config.rb

Defined Under Namespace

Modules: DeepMerge
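
The DeepMerge module is what lets #call layer multiple configuration files over the built-in defaults. Below is a minimal, hypothetical sketch of recursive-merge behavior, assuming nested hashes are combined key by key and later scalar values win; the actual semantics are defined by the module itself, and the config keys shown are illustrative.

  # Illustrative stand-in for DeepMerge behavior; not the real implementation.
  def sketch_deep_merge(base, other)
    base.merge(other) do |_key, old_val, new_val|
      old_val.is_a?(Hash) && new_val.is_a?(Hash) ? sketch_deep_merge(old_val, new_val) : new_val
    end
  end

  defaults = { log_level: :info,  services: { web: { templates: ['web.conf'] } } }
  override = { log_level: :debug, services: { web: { systemd_unit: 'web' } } }

  sketch_deep_merge(defaults, override)
  # => { log_level: :debug,
  #      services: { web: { templates: ["web.conf"], systemd_unit: "web" } } }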

Constant Summary

LOG_TYPES = %i[stdout file].freeze
LOG_CONFIG_KEYS = %i[log_level log_type log_file].freeze

Instance Method Summary

#call ⇒ Object

Instance Method Details

#call ⇒ Object



# File 'lib/config_o_mat/shared/op/load_meta_config.rb', line 69

def call
  default_config = {
    refresh_interval: 5,
    client_id: env.fetch('INVOCATION_ID') { SecureRandom.uuid },
    retry_count: 3,
    retry_wait: 2,
    services: [],
    templates: [],
    profiles: [],
    region: nil,
    gc_compact: 0,
    gc_stat: 0
  }

  # TODO: I would like to make this configurable. I think the trick
  # Sequel uses for its model classes (< Sequel::Model(source_dataset))
  # might be appropriate for how this system works?
  # do: Op::LoadMetaConfig(parser: Yaml.method(&:safe_load))
  parser = proc { |file| YAML.safe_load(file, symbolize_names: true) }
  file_ending = '.conf'

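  # The *.conf files are gathered in sorted (lexicographic) order, so files that
  # sort later are merged on top of earlier ones below.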
  files =
    Dir.children(configuration_directory)
       .lazy
       .select { |f| f.end_with?(file_ending) }
       .map { |f| File.join(configuration_directory, f) }
       .select { |f| File.file?(f) }
       .to_a
       .sort!

  loaded_files =
    files.each_with_object({}) do |file, hash|
      hash[file] = parser.call(File.read(file))
    rescue StandardError => e
      error file, e
    end

  # If we couldn't load a configuration file then it's probably not worth
  # exploding on any objects we fail to initialize -- that's an obvious
  # consequence and the additional noise might mask the root cause.
  return if errors?

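  # Layer each parsed file on top of the defaults; DeepMerge combines nested
  # hashes key by key rather than replacing them wholesale.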
  merged_config =
    loaded_files.each_with_object(default_config) do |(_filename, config), memo|
      memo.deep_merge!(config)
    end

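  # Builds a name => definition map for one config section, validating each
  # entry and recording a per-entry error instead of aborting the whole load.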
  instantiate = proc do |key, klass|
    merged_config[key].each_with_object({}) do |(name, obj), defs|
      definition = klass.new(obj)
      definition.validate!
      defs[name] = definition
    rescue StandardError => e
      error key, { name => e }
    end
  end

  logger&.info(:log_config, configuration: merged_config.slice(*LOG_CONFIG_KEYS))

  self.service_defs = instantiate.call(:services, Service)
  self.template_defs = instantiate.call(:templates, Template)
  self.profile_defs = instantiate.call(:profiles, Profile)
  self.fallback_s3_bucket = merged_config[:fallback_s3_bucket]

  if profile_defs.values.any? { |pd| pd.s3_fallback } && (fallback_s3_bucket.nil? || fallback_s3_bucket.empty?)
    error :fallback_s3_bucket, 'must be present to use s3_fallback on profiles'
  end

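  # Optionally expose Facter facts as a built-in profile. A string value for
  # :facter names the profile; any other truthy value uses the :facter key.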
  facter = merged_config[:facter]
  if facter
    facter_key = facter.kind_of?(String) ? facter.to_sym : :facter
    if profile_defs.key?(facter_key)
      error :facter, "conflicts with profile #{facter_key}"
    else
      profile_defs[facter_key] = ConfigOMat::FacterProfile.new
    end
  end

  self.logger = LogsForMyFamily::Logger.new if !logger

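  # Resolve the log type and level from the merged config, validating them
  # against LOG_TYPES and the levels supported by LogsForMyFamily::Logger.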
  log_type = merged_config[:log_type]&.to_sym || :stdout
  error :log_type, "must be one of #{LOG_TYPES.map(&:to_s)}" if log_type && !LOG_TYPES.include?(log_type)

  log_level = merged_config[:log_level]&.to_sym
  if log_level && !LogsForMyFamily::Logger::LEVELS.include?(log_level)
    error :log_level, "must be one of #{LogsForMyFamily::Logger::LEVELS}"
  end

  backend =
    if log_type == :file
      log_file = merged_config[:log_file] || 'configurator.log'
      if logs_directory.nil?
        error :log_type, 'must set logs directory with -l or $LOGS_DIRECTORY to set log_type to file'
      else
        FileLogWriter.new(File.join(logs_directory, log_file))
      end
    else
      StdoutLogWriter.new
    end

  # If we couldn't initialize our logger (or anything else) then bail here before
  # we try to use it.
  return if errors?

  logger.filter_level(log_level) if log_level
  logger.backends = [backend]

  # Re-log our merged config with our configured logger.
  logger.info(:parsed_config, configuration: merged_config)

  self.refresh_interval = merged_config[:refresh_interval]
  self.client_id = merged_config[:client_id]
  self.retry_count = merged_config[:retry_count]
  self.retries_left = retry_count
  self.retry_wait = merged_config[:retry_wait]
  self.region = merged_config[:region]
  self.gc_stat = merged_config[:gc_stat]
  self.gc_compact = merged_config[:gc_compact]

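  # Invert the service -> templates mapping so each template maps to the set of
  # services that reference it, flagging references to undefined templates.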
  self.dependencies = service_defs.each_with_object({}) do |(name, service), template_to_services|
    service.templates.each do |template|
      template = template.to_sym
      if !template_defs.key?(template)
        error :services, { name => "references undefined template #{template}" }
      else
        # Listing the same template multiple times is acceptable. Since we allow
        # merging config files, and this deep merges the service dependency list,
        # it's quite possible that a service could inadvertently declare the same
        # dependency twice in a way that's not easy to untangle.
        template_to_services[template] ||= Set.new
        template_to_services[template] << name
      end
    end
  end

  self.systemd_interface = SystemdInterface.new(DBus.system_bus)
end
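
Taken together, #call reads every *.conf file in the configuration directory, parses it as YAML with symbolized keys, deep-merges the results over the defaults, and then instantiates the services, templates, and profiles sections. Below is a hedged sketch of one such file and of the parsing step; the top-level keys mirror those consumed above, while the option names inside each definition are illustrative and ultimately owned by the Service, Template, and Profile classes.

  require 'yaml'

  # Hypothetical contents of a file such as 00_base.conf.
  example_conf = <<~YAML
    refresh_interval: 10
    region: us-east-1
    log_level: info
    log_type: stdout
    profiles:
      app:
        application: my-application
        environment: production
        profile: main
    templates:
      app_conf:
        src: app.conf.erb
        dst: app.conf
    services:
      web:
        systemd_unit: web
        templates:
          - app_conf
  YAML

  # The same parsing #call applies to every *.conf file it finds.
  YAML.safe_load(example_conf, symbolize_names: true)

Because files are merged in sorted order, settings can be split across several files, for example a base file plus a host-specific override that only changes log_level.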