From 0b4c25cde75809e62a175adc565682bfc0738b9d Mon Sep 17 00:00:00 2001
From: Baptiste Jonglez <baptiste.jonglez@imag.fr>
Date: Thu, 3 Dec 2020 17:21:06 +0100
Subject: [PATCH] [gen/wiki] Fix wiki links to exotic clusters by simplifying
 hardware page headings

The page heading for each cluster currently contains the queue and the job
type, for instance:

== pyxis (exotic job type) ==
== graoully (production queue) ==
== drac (testing queue, exotic job type) ==

As a result, anchor links are horrible, because Mediawiki generates them
automatically from the full heading text.  For instance:

  https://www.grid5000.fr/w/Nancy:Hardware#graoully_.28production_queue.29

We try to reproduce these horrible links in several places in the wiki
generation code, which is error-prone.  In addition, this code does not
take the exotic job type into account, so all links to exotic clusters
are currently broken.
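
For reference, Mediawiki's legacy anchor encoding replaces spaces with
underscores and encodes other special characters as ".XX" hex escapes
("(" becomes ".28", ")" becomes ".29").  A minimal Ruby sketch of the
transformation the generators try to replicate (the mediawiki_anchor
helper below is purely illustrative, not a function of this repository):

  # Illustrative approximation of Mediawiki's legacy anchor encoding:
  # spaces become underscores, other special characters become ".XX"
  # (the percent-encoded byte with "%" replaced by ".").
  def mediawiki_anchor(heading)
    heading.strip.tr(' ', '_').gsub(/[^A-Za-z0-9_.:-]/) { |c| format('.%02X', c.ord) }
  end

  mediawiki_anchor('graoully (production queue)')
  # => "graoully_.28production_queue.29"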

Fix this by keeping only the cluster name in the heading: this way, we no
longer need to guess the link anchors generated by Mediawiki.  The
removed information (queue and job type, i.e. the "access condition") is
moved to the table presenting the cluster characteristics.
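
With the simplified headings, the anchor is simply the cluster name, so
the example link above becomes:

  https://www.grid5000.fr/w/Nancy:Hardware#graoully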
---
 lib/refrepo/gen/wiki/generators/hardware.rb      |  8 +++-----
 lib/refrepo/gen/wiki/generators/site_hardware.rb | 11 ++++++-----
 2 files changed, 9 insertions(+), 10 deletions(-)

diff --git a/lib/refrepo/gen/wiki/generators/hardware.rb b/lib/refrepo/gen/wiki/generators/hardware.rb
index 3a39f6956d..72a629ce1f 100644
--- a/lib/refrepo/gen/wiki/generators/hardware.rb
+++ b/lib/refrepo/gen/wiki/generators/hardware.rb
@@ -381,9 +381,7 @@ class G5KHardwareGenerator < WikiGenerator
               ((!d['reservation'].nil? && d['reservation']) ? '[[Disk_reservation|*]]' : '')
             }.join(', ') + ")"
           end
-          queues = cluster_hash['queues'] - ['admin', 'default']
-          queue_t = (queues.nil? || (queues.empty? ? '' : "_.28" + queues[0].gsub(' ', '_') + ' queue.29'))
-          nodes_data << { 'uid' => node_uid, 'data' => { 'main' => maindisk_t, 'hdd' => hdd_t, 'ssd' => ssd_t, 'reservation' => reservable_disks, 'queue' => queue_t } }
+          nodes_data << { 'uid' => node_uid, 'data' => { 'main' => maindisk_t, 'hdd' => hdd_t, 'ssd' => ssd_t, 'reservation' => reservable_disks } }
         end
         nd = nodes_data.group_by { |d| d['data'] }
         nd.each do |data, nodes|
@@ -396,7 +394,7 @@ class G5KHardwareGenerator < WikiGenerator
           end
           table_data << [
             "[[#{site_uid.capitalize}:Hardware|#{site_uid.capitalize}]]",
-              "[[#{site_uid.capitalize}:Hardware##{cluster_uid}#{data['queue']}|#{nodesetname}]]",
+              "[[#{site_uid.capitalize}:Hardware##{cluster_uid}|#{nodesetname}]]",
               nodes.length,
               data['main'],
               data['hdd'],
@@ -445,7 +443,7 @@ class G5KHardwareGenerator < WikiGenerator
         network_interfaces.sort.to_h.each { |num, interfaces|
           table_data << [
             "[[#{site_uid.capitalize}:Network|#{site_uid.capitalize}]]",
-            "[[#{site_uid.capitalize}:Hardware##{cluster_uid}" + (interfaces['queues'] == '' ? '' : "_.28#{queues.gsub(' ', '_')}.29") + "|#{cluster_uid}" + (network_interfaces.size==1 ? '' : '-' + G5K.nodeset(num)) + "]]",
+            "[[#{site_uid.capitalize}:Hardware##{cluster_uid}" + "|#{cluster_uid}" + (network_interfaces.size==1 ? '' : '-' + G5K.nodeset(num)) + "]]",
             num.count,
             interfaces['25g_count'].zero? ? '' : interfaces['25g_count'],
             interfaces['10g_count'].zero? ? '' : interfaces['10g_count'],
diff --git a/lib/refrepo/gen/wiki/generators/site_hardware.rb b/lib/refrepo/gen/wiki/generators/site_hardware.rb
index 92cdd88eeb..98fa6c605d 100644
--- a/lib/refrepo/gen/wiki/generators/site_hardware.rb
+++ b/lib/refrepo/gen/wiki/generators/site_hardware.rb
@@ -77,14 +77,13 @@ class SiteHardwareGenerator < WikiGenerator
     hardware[site].sort.to_h.each { |cluster_uid, cluster_hash|
       cluster_nodes = cluster_hash.keys.flatten.count
       queue = cluster_hash.map { |k, v| v['queue']}.first
-      queue_str = cluster_hash.map { |k, v| v['queue_str']}.first
       access_conditions = []
       access_conditions << "<b>#{queue}</b>&nbsp;queue" if queue != ''
       access_conditions << '<b>exotic</b>&nbsp;job&nbsp;type' if cluster_hash.map { |k, v| v['exotic']}.first
       table_columns = (with_sites == true ? ['Site'] : []) + ['Cluster',  'Access Condition', 'Date of arrival', { attributes: 'data-sort-type="number"', text: 'Nodes' }, 'CPU', { attributes: 'data-sort-type="number"', text: 'Cores' }, { attributes: 'data-sort-type="number"', text: 'Memory' }, { attributes: 'data-sort-type="number"', text: 'Storage' }, { attributes: 'data-sort-type="number"', text: 'Network' }] + ((site_accelerators.zero? && with_sites == false) ? [] : ['Accelerators'])
       data = partition(cluster_hash)
       table_data <<  (with_sites == true ? ["[[#{site.capitalize}:Hardware|#{site.capitalize}]]"] : []) + [
-        (with_sites == true ? "[[#{site.capitalize}:Hardware##{cluster_uid}" + (queue_str == '' ? '' : "_.28#{queue_str.gsub(' ', '_')}.29") + "|#{cluster_uid}]]" : "[[##{cluster_uid}" + (queue_str == '' ? '' : "_.28#{queue_str.gsub(' ', '_')}.29") + "|#{cluster_uid}]]"),
+        (with_sites == true ? "[[#{site.capitalize}:Hardware##{cluster_uid}" + "|#{cluster_uid}]]" : "[[##{cluster_uid}" + "|#{cluster_uid}]]"),
         access_conditions.join(",<br/>"),
         cell_data(data, 'date'),
         cluster_nodes,
@@ -120,7 +119,7 @@ class SiteHardwareGenerator < WikiGenerator
       access_conditions << "exotic job type" if cluster_hash.map { |k, v| v['exotic']}.first
       table_columns = ['Cluster',  'Queue', 'Date of arrival', { attributes: 'data-sort-type="number"', text: 'Nodes' }, 'CPU', { attributes: 'data-sort-type="number"', text: 'Cores' }, { attributes: 'data-sort-type="number"', text: 'Memory' }, { attributes: 'data-sort-type="number"', text: 'Storage' }, { attributes: 'data-sort-type="number"', text: 'Network' }] + (site_accelerators.zero? ? [] : ['Accelerators'])
 
-      text_data <<  ["\n== #{cluster_uid}" + (access_conditions.empty? ? '' : " (#{access_conditions.join(", ")})") + " ==\n"]
+      text_data <<  ["\n== #{cluster_uid} ==\n"]
       text_data << ["'''#{cluster_nodes} #{G5K.pluralize(cluster_nodes, 'node')}, #{cluster_cpus} #{G5K.pluralize(cluster_cpus, 'cpu')}, #{cluster_cores} #{G5K.pluralize(cluster_cores, 'core')}" + (subclusters == true ? ",''' split as follows due to differences between nodes " : "''' ") + "([https://public-api.grid5000.fr/stable/sites/#{site}/clusters/#{cluster_uid}/nodes.json?pretty=1 json])"]
 
       cluster_hash.sort.to_h.each_with_index { |(num, h), i|
@@ -140,14 +139,16 @@ class SiteHardwareGenerator < WikiGenerator
         elsif h['mic_str'] != ''
           accelerators = 'Xeon Phi'
         end
-        hash = {
+        hash = {}
+        hash['Access condition'] = access_conditions.join(", ") if not access_conditions.empty?
+        hash.merge!({
           'Model' => h['model'],
           'Date of arrival' => h['date'],
           'CPU' => h['processor_description'],
           'Memory' => h['ram_size'] + (!h['pmem_size'].nil? ? " + #{h['pmem_size']} [[PMEM]]" : ''),
           'Storage' => h['storage_description'],
           'Network' => h['network_description'],
-        }
+        })
         hash[accelerators] = h['accelerators_long'] if accelerators
         text_data << MW::generate_hash_table(hash)
       }
-- 
GitLab