diff --git a/lib/refrepo/gen/wiki/generators/hardware.rb b/lib/refrepo/gen/wiki/generators/hardware.rb
index 3a39f6956d56da11425bb18266d06b98c2d7fbd9..72a629ce1f51f204c314d1cb2524f8d18f3fc4a1 100644
--- a/lib/refrepo/gen/wiki/generators/hardware.rb
+++ b/lib/refrepo/gen/wiki/generators/hardware.rb
@@ -381,9 +381,7 @@ class G5KHardwareGenerator < WikiGenerator
               ((!d['reservation'].nil? && d['reservation']) ? '[[Disk_reservation|*]]' : '')
             }.join(', ') + ")"
           end
-          queues = cluster_hash['queues'] - ['admin', 'default']
-          queue_t = (queues.nil? || (queues.empty? ? '' : "_.28" + queues[0].gsub(' ', '_') + ' queue.29'))
-          nodes_data << { 'uid' => node_uid, 'data' => { 'main' => maindisk_t, 'hdd' => hdd_t, 'ssd' => ssd_t, 'reservation' => reservable_disks, 'queue' => queue_t } }
+          nodes_data << { 'uid' => node_uid, 'data' => { 'main' => maindisk_t, 'hdd' => hdd_t, 'ssd' => ssd_t, 'reservation' => reservable_disks } }
         end
         nd = nodes_data.group_by { |d| d['data'] }
         nd.each do |data, nodes|
@@ -396,7 +394,7 @@ class G5KHardwareGenerator < WikiGenerator
           end
           table_data << [
             "[[#{site_uid.capitalize}:Hardware|#{site_uid.capitalize}]]",
-              "[[#{site_uid.capitalize}:Hardware##{cluster_uid}#{data['queue']}|#{nodesetname}]]",
+              "[[#{site_uid.capitalize}:Hardware##{cluster_uid}|#{nodesetname}]]",
               nodes.length,
               data['main'],
               data['hdd'],
@@ -445,7 +443,7 @@ class G5KHardwareGenerator < WikiGenerator
         network_interfaces.sort.to_h.each { |num, interfaces|
           table_data << [
             "[[#{site_uid.capitalize}:Network|#{site_uid.capitalize}]]",
-            "[[#{site_uid.capitalize}:Hardware##{cluster_uid}" + (interfaces['queues'] == '' ? '' : "_.28#{queues.gsub(' ', '_')}.29") + "|#{cluster_uid}" + (network_interfaces.size==1 ? '' : '-' + G5K.nodeset(num)) + "]]",
+            "[[#{site_uid.capitalize}:Hardware##{cluster_uid}|#{cluster_uid}" + (network_interfaces.size==1 ? '' : '-' + G5K.nodeset(num)) + "]]",
             num.count,
             interfaces['25g_count'].zero? ? '' : interfaces['25g_count'],
             interfaces['10g_count'].zero? ? '' : interfaces['10g_count'],
diff --git a/lib/refrepo/gen/wiki/generators/site_hardware.rb b/lib/refrepo/gen/wiki/generators/site_hardware.rb
index 92cdd88eeb75457f3e44122177210da08bb2bc82..98fa6c605d3108000d7c157ae3f3e502a3952c8e 100644
--- a/lib/refrepo/gen/wiki/generators/site_hardware.rb
+++ b/lib/refrepo/gen/wiki/generators/site_hardware.rb
@@ -77,14 +77,13 @@ class SiteHardwareGenerator < WikiGenerator
     hardware[site].sort.to_h.each { |cluster_uid, cluster_hash|
       cluster_nodes = cluster_hash.keys.flatten.count
       queue = cluster_hash.map { |k, v| v['queue']}.first
-      queue_str = cluster_hash.map { |k, v| v['queue_str']}.first
       access_conditions = []
       access_conditions << "<b>#{queue}</b>&nbsp;queue" if queue != ''
       access_conditions << '<b>exotic</b>&nbsp;job&nbsp;type' if cluster_hash.map { |k, v| v['exotic']}.first
       table_columns = (with_sites == true ? ['Site'] : []) + ['Cluster',  'Access Condition', 'Date of arrival', { attributes: 'data-sort-type="number"', text: 'Nodes' }, 'CPU', { attributes: 'data-sort-type="number"', text: 'Cores' }, { attributes: 'data-sort-type="number"', text: 'Memory' }, { attributes: 'data-sort-type="number"', text: 'Storage' }, { attributes: 'data-sort-type="number"', text: 'Network' }] + ((site_accelerators.zero? && with_sites == false) ? [] : ['Accelerators'])
       data = partition(cluster_hash)
       table_data <<  (with_sites == true ? ["[[#{site.capitalize}:Hardware|#{site.capitalize}]]"] : []) + [
-        (with_sites == true ? "[[#{site.capitalize}:Hardware##{cluster_uid}" + (queue_str == '' ? '' : "_.28#{queue_str.gsub(' ', '_')}.29") + "|#{cluster_uid}]]" : "[[##{cluster_uid}" + (queue_str == '' ? '' : "_.28#{queue_str.gsub(' ', '_')}.29") + "|#{cluster_uid}]]"),
+        (with_sites == true ? "[[#{site.capitalize}:Hardware##{cluster_uid}|#{cluster_uid}]]" : "[[##{cluster_uid}|#{cluster_uid}]]"),
         access_conditions.join(",<br/>"),
         cell_data(data, 'date'),
         cluster_nodes,
@@ -120,7 +119,7 @@ class SiteHardwareGenerator < WikiGenerator
       access_conditions << "exotic job type" if cluster_hash.map { |k, v| v['exotic']}.first
       table_columns = ['Cluster',  'Queue', 'Date of arrival', { attributes: 'data-sort-type="number"', text: 'Nodes' }, 'CPU', { attributes: 'data-sort-type="number"', text: 'Cores' }, { attributes: 'data-sort-type="number"', text: 'Memory' }, { attributes: 'data-sort-type="number"', text: 'Storage' }, { attributes: 'data-sort-type="number"', text: 'Network' }] + (site_accelerators.zero? ? [] : ['Accelerators'])
 
-      text_data <<  ["\n== #{cluster_uid}" + (access_conditions.empty? ? '' : " (#{access_conditions.join(", ")})") + " ==\n"]
+      text_data <<  ["\n== #{cluster_uid} ==\n"]
       text_data << ["'''#{cluster_nodes} #{G5K.pluralize(cluster_nodes, 'node')}, #{cluster_cpus} #{G5K.pluralize(cluster_cpus, 'cpu')}, #{cluster_cores} #{G5K.pluralize(cluster_cores, 'core')}" + (subclusters == true ? ",''' split as follows due to differences between nodes " : "''' ") + "([https://public-api.grid5000.fr/stable/sites/#{site}/clusters/#{cluster_uid}/nodes.json?pretty=1 json])"]
 
       cluster_hash.sort.to_h.each_with_index { |(num, h), i|
@@ -140,14 +139,16 @@ class SiteHardwareGenerator < WikiGenerator
         elsif h['mic_str'] != ''
           accelerators = 'Xeon Phi'
         end
-        hash = {
+        hash = {}
+        hash['Access condition'] = access_conditions.join(", ") if not access_conditions.empty?
+        hash.merge!({
           'Model' => h['model'],
           'Date of arrival' => h['date'],
           'CPU' => h['processor_description'],
           'Memory' => h['ram_size'] + (!h['pmem_size'].nil? ? " + #{h['pmem_size']} [[PMEM]]" : ''),
           'Storage' => h['storage_description'],
           'Network' => h['network_description'],
-        }
+        })
         hash[accelerators] = h['accelerators_long'] if accelerators
         text_data << MW::generate_hash_table(hash)
       }