Commit 0814284b authored by Jérémie Gaidamour's avatar Jérémie Gaidamour
Browse files

[dev] improved puppet configuration generators

- handle cleanly the case of uva/uvb that is not yet on the reference api
- handle cleanly the case of ceph at rennes
- cleaned up variable names
- removed unneeded code
- simplified file management
- link to the git repository via an environment variable
- rakefile
parent c71e479e
......@@ -16,7 +16,9 @@ production: true
# needed by puppet generators
servers:
ceph[0-3]:
ceph:
nodes:
ceph[0-3]:
g5ksubnet:
network: 10.156.0.0/14
......
# Rakefile driving the puppet configuration generators.
# Each generator script produces one puppet module's configuration;
# the default task regenerates all of them.
generators = %w[kadeployg5k conmang5k lanpowerg5k]

task :default => generators.map(&:to_sym)

# Define one task per generator; each simply shells out to the
# corresponding Ruby script in this directory.
generators.each do |name|
  desc "Generate #{name} configuration"
  task name.to_sym do
    ruby "#{name}.rb"
  end
end
\ No newline at end of file
......@@ -104,12 +104,22 @@ rennes:
sleep: '4'
command_delay: 15
ceph:
lanpower:
sleep: '4'
conman:
device: /usr/lib/conman/exec/ipmiconsole.exp
sophia:
sol:
conman:
device: /usr/lib/conman/exec/sun-elom.exp
lanpower:
command_delay: '60'
suno:
conman:
device: /usr/lib/conman/exec/ipmiconsole.exp
uva:
lanpower:
command_delay: '60'
uvb:
lanpower:
command_delay: '60'
#!/usr/bin/ruby
# This script generates conmang5k/files/<site_uid>/conman.conf from input/ and conf/conman.yaml
# This script generates conmang5k/files/<site_uid>/conman.conf from input/, conf/console.yaml and conf/console-password.yaml
require 'pp'
require 'erb'
require 'fileutils'
require 'pathname'
require '../lib/input_loader'
require '../lib/hash/hash.rb'
global_hash = load_yaml_file_hierarchy("../input/grid5000/")
$output_dir = 'output'
$output_dir = ENV['puppet_repo'] || 'output'
pwd_hash = YAML::load_file('./conf/console-password.yaml')
conf_hash = YAML::load_file('./conf/console.yaml')
conf_hash = conf_hash.expand_square_brackets()
# Input
refapi = load_yaml_file_hierarchy("../input/grid5000/")
config = YAML::load_file('./conf/console.yaml')
credentials = YAML::load_file('./conf/console-password.yaml')
def write_conman_file(site_uid, site, conf, passwd)
erb = ERB.new(File.read("templates/conman.erb"))
output_file = File.join($output_dir, 'conmang5k', 'files', site_uid, 'conman.conf')
# Create directory hierarchy
dirname = File.dirname(output_file)
FileUtils.mkdir_p(dirname) unless File.directory?(dirname)
# Apply ERB template and save
File.open(output_file, "w+") { |f|
f.write(erb.result(binding))
}
# Apply ERB template and save result to file
def write_conman_file(site_uid, site_refapi, site_config, site_credentials)
output = ERB.new(File.read("templates/conman.erb")).result(binding)
output_file = Pathname("#{$output_dir}/modules/conmang5k/files/#{site_uid}/conman.conf")
output_file.dirname.mkpath()
File.write(output_file, output)
end
# Loop over Grid'5000 sites
global_hash["sites"].each { |site_uid, site|
write_conman_file(site_uid, site, conf_hash[site_uid], pwd_hash[site_uid])
# Loop over each site
refapi["sites"].each { |site_uid, site_refapi|
write_conman_file(site_uid, site_refapi, config[site_uid], credentials[site_uid])
}
......@@ -6,12 +6,12 @@
require 'pp'
require 'erb'
require 'fileutils'
require 'pathname'
require '../lib/input_loader'
require '../lib/hash/hash.rb'
global_hash = load_yaml_file_hierarchy("../input/grid5000/")
output_dir = 'output'
$output_dir = ENV['puppet_repo'] || 'output'
# Compute cluster prefix
# input: cluster_list = ['graoully', 'graphene', 'griffon', ...]
......@@ -114,13 +114,9 @@ end
} # site['clusters'].each
output_file = File.join(output_dir, 'kadeployg5k', 'files', site_uid, "server_conf#{suffix.tr('-', '_')}", 'clusters.conf')
dirname = File.dirname(output_file)
FileUtils.mkdir_p(dirname) unless File.directory?(dirname)
File.open(output_file, 'w') do |f|
f.write(clusters_conf.to_yaml)
end
output_file = Pathname("#{$output_dir}/modules/kadeployg5k/files/#{site_uid}/server_conf#{suffix.tr('-', '_')}/clusters.conf")
output_file.dirname.mkpath()
File.write(output_file, clusters_conf.to_yaml)
#
# Generate <cluster_uid>-cluster.conf files
......@@ -133,16 +129,11 @@ end
site['clusters'].each { |cluster_uid, cluster|
data = data = conf[site_uid][cluster_uid]
cluster_yaml = ERB.new(File.read('templates/kadeployg5k.conf.erb')).result(binding)
output = ERB.new(File.read('templates/kadeployg5k.conf.erb')).result(binding)
output_file = File.join(output_dir, 'kadeployg5k', 'files', site_uid, "server_conf#{suffix.tr('-', '_')}", "#{cluster_uid}-cluster.conf")
dirname = File.dirname(output_file)
FileUtils.mkdir_p(dirname) unless File.directory?(dirname)
File.open(output_file, 'w') do |f|
f.write(cluster_yaml)
end
output_file = Pathname("#{$output_dir}/modules/kadeployg5k/files/#{site_uid}/server_conf#{suffix.tr('-', '_')}/#{cluster_uid}-cluster.conf")
output_file.dirname.mkpath()
File.write(output_file, output)
}
......
......@@ -4,43 +4,55 @@
require 'pp'
require 'yaml'
require 'fileutils'
require 'pathname'
require '../lib/input_loader'
require '../lib/hash/hash.rb'
$output_dir = 'output'
$output_dir = ENV['puppet_repo'] || 'output'
password = YAML::load_file('conf/console-password.yaml')
console = YAML::load_file('conf/console.yaml')
config = YAML::load_file('conf/console.yaml')
credentials = YAML::load_file('conf/console-password.yaml')
refapi = load_yaml_file_hierarchy("../input/grid5000/")
refapi = load_yaml_file_hierarchy("../input/grid5000/")
refapi['sites'].each { |site_uid, site_refapi|
h = {'clusters' => {} } # output hash
refapi["sites"].each { |site_uid, site|
# Generate config for both cluster and server entries of the refapi
site_refapi['servers'] ||= {}
cluster_list = site_refapi['clusters'].keys | site_refapi['servers'].keys | config[site_uid].keys | credentials[site_uid].keys
h = {'clusters' => {} }
site['clusters'].sort.each { |cluster_uid, cluster_refapi|
cluster_console = console[site_uid][cluster_uid]['lanpower'] rescue nil
cluster_password = password[site_uid].fetch(cluster_uid)
cluster_list.sort.each { |cluster_uid|
cluster_refapi = site_refapi['clusters'][cluster_uid].fetch('nodes') rescue site_refapi['servers'][cluster_uid].fetch('nodes') rescue nil
cluster_config = config[site_uid][cluster_uid]['lanpower'] rescue nil
cluster_credentials = credentials[site_uid].fetch(cluster_uid) rescue nil
# error handling:
# - refapi is optional for this generator but every cluster should still be on the ref api => display a warning message
# - credentials are mandatory and the cluster is skipped if info is missing
# - config is optional as the cluster might use the default configuration
puts "Warning: #{site_uid} - #{cluster_uid} not found in the reference api" if cluster_refapi.nil?
if cluster_credentials.nil?
puts "Warning: #{site_uid} - #{cluster_uid} not found in console-password.yaml... skipped"
next
end
# clusters:
# griffon:
# bmc: "ipmi"
# user: ""
# password: ""
# suffix: "-bmc"
# sleep: "6"
# clusters:
# griffon:
# bmc: "ipmi"
# user: ""
# password: ""
# suffix: "-bmc"
# sleep: "6"
cluster_hash = {}
cluster_hash['bmc'] = cluster_console.fetch('bmc') rescue 'ipmi'
cluster_hash['user'] = cluster_password.split(' ')[0]
cluster_hash['password'] = cluster_password.split(' ')[1]
cluster_hash['sleep'] = cluster_console.fetch('sleep') rescue '6'
cluster_hash['command_delay'] = cluster_console.fetch('command_delay') rescue nil
cluster_hash['bmc'] = cluster_config.fetch('bmc') rescue 'ipmi'
cluster_hash['user'] = cluster_credentials.split(' ')[0]
cluster_hash['password'] = cluster_credentials.split(' ')[1]
cluster_hash['sleep'] = cluster_config.fetch('sleep') rescue '6'
cluster_hash['command_delay'] = cluster_config.fetch('command_delay') rescue nil
cluster_hash['suffix'] = cluster_console.fetch('suffix') rescue nil
cluster_hash['suffix'] = cluster_refapi['nodes']["#{cluster_uid}-1"]['network_adapters']['bmc'].fetch('network_address').split('.')[0].gsub("#{cluster_uid}-1",'') rescue '-bmc' unless cluster_hash['suffix']
cluster_hash['suffix'] = cluster_config.fetch('suffix') rescue nil
cluster_hash['suffix'] = cluster_refapi["#{cluster_uid}-1"]['network_adapters']['bmc'].fetch('network_address').split('.')[0].gsub("#{cluster_uid}-1",'') rescue '-bmc' if cluster_hash['suffix'].nil?
cluster_hash.reject!{ |k,v| v == nil }
......@@ -49,11 +61,8 @@ refapi["sites"].each { |site_uid, site|
} # clusters.each
# Write output file
output_file = File.join($output_dir, 'lanpowerg5k', 'files', site_uid, 'lanpower.yaml')
dirname = File.dirname(output_file)
FileUtils.mkdir_p(dirname) unless File.directory?(dirname)
output_file = Pathname("#{$output_dir}/modules/lanpowerg5k/files/#{site_uid}/lanpower.yaml")
output_file.dirname.mkpath()
write_yaml(output_file, h)
}
......@@ -23,36 +23,49 @@ server logfile="/var/log/conman.log"
#
# * from conf/console-password.yaml: passwd hash
[site['clusters'], { 'servers' => { 'nodes' => site['servers'] || {} } }].each { |clusters|
clusters.sort.each { |cluster_uid, cluster|
cluster['nodes'].each_sort_by_node_uid { |node_uid, node|
# Generate conman config for both cluster and server entries of the refapi
site_refapi['servers'] ||= {}
cluster_list = site_refapi['clusters'].keys | site_refapi['servers'].keys | site_config.keys | site_credentials.keys
cluster_list.sort.each { |cluster_uid|
cluster_refapi = site_refapi['clusters'][cluster_uid].fetch('nodes') rescue site_refapi['servers'][cluster_uid].fetch('nodes') rescue nil
cluster_config = site_config[cluster_uid]['conman'] rescue nil
cluster_credentials = site_credentials.fetch(cluster_uid) rescue nil
# error handling:
# - refapi is mandatory for this generator (to get the list of nodes)
# - credentials are mandatory and the cluster is skipped if info is missing
# - config is optional as the cluster might use the default configuration
if cluster_refapi.nil?
puts "Warning: #{site_uid} - #{cluster_uid} not found in the reference api"
next
end
if cluster_credentials.nil?
puts "Warning: #{site_uid} - #{cluster_uid} not found in console-password.yaml... skipped"
next
end
cluster_uid = /[A-Za-z]*/.match(node_uid).to_s if cluster_uid == 'servers' # deduce cluster_uid from node_uid for 'servers' entries
password = passwd[node_uid] || passwd[cluster_uid] # this allows setting passwords on a cluster or node basis
puts "#{node_uid}; password not found" unless password
bmc = node['network_adapters']['bmc'].fetch('network_address').split('.')[0] rescue node_uid + '-bmc'
dev = conf[node_uid]['conman'].fetch('device') rescue nil
dev = conf[cluster_uid]['conman'].fetch('device') rescue '/usr/lib/conman/exec/ipmitool.exp' if dev == nil
if dev != 'ipmi'
# External process-based connection (default)
# Example : console name="node_uid" dev="/usr/lib/conman/exec/ipmitool.exp node_uid-bmc login password"
dev_args = "#{bmc} #{password}"
else
# IPMI Serial-Over-LAN connection
# Example : console name="node_uid" dev="ipmi:node_uid-bmc" ipmiopts="U:login,P:password,W:workaround_flag"
dev = "#{dev}:#{bmc}"
login, pass = password.split(' ')
opt = conf[node_uid]['conman'].fetch('workaround_flag') rescue nil
opt = conf[cluster_uid]['conman'].fetch('workaround_flag') rescue nil if opt == nil
ipmiopts = "U:#{login},P:#{pass}"
ipmiopts += ",W:#{opt}" if opt != nil
end
cluster_refapi.each_sort_by_node_uid { |node_uid, node|
dev = cluster_config.fetch('device') rescue '/usr/lib/conman/exec/ipmitool.exp'
bmc = node['network_adapters']['bmc'].fetch('network_address').split('.')[0] rescue node_uid + '-bmc'
if dev != 'ipmi'
# External process-based connection (default)
# Example : console name="node_uid" dev="/usr/lib/conman/exec/ipmitool.exp node_uid-bmc login password"
dev_args = "#{bmc} #{cluster_credentials}"
else
# IPMI Serial-Over-LAN connection
# Example : console name="node_uid" dev="ipmi:node_uid-bmc" ipmiopts="U:login,P:password,W:workaround_flag"
dev = "#{dev}:#{bmc}"
login, pass = cluster_credentials.split(' ')
opt = cluster_config.fetch('workaround_flag') rescue nil
ipmiopts = "U:#{login},P:#{pass}"
ipmiopts += ",W:#{opt}" if opt != nil
end
%>console name="<%= node_uid %>" dev="<%= dev %><%= " #{dev_args}" if dev_args %>"<%= " ipmiopts=\"#{ipmiopts}\"" if ipmiopts %>
<% } %>
<% } %>
<% } %>
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment