@@ -50,7 +50,8 @@ async function lp_solve(text)
     return { score, vars };
 }
 
-async function optimize_initial({ osd_tree, pg_count, pg_size = 3, pg_minsize = 2, max_combinations = 10000, parity_space = 1, ordered = false })
+async function optimize_initial({ osd_tree, pg_count, pg_size = 3, pg_minsize = 2, hier_sizes = null,
+    max_combinations = 10000, parity_space = 1, ordered = false, seq_layout = false })
 {
     if (!pg_count || !osd_tree)
     {
@@ -58,7 +59,7 @@ async function optimize_initial({ osd_tree, pg_count, pg_size = 3, pg_minsize =
     }
     const all_weights = Object.assign({}, ...Object.values(osd_tree));
     const total_weight = Object.values(all_weights).reduce((a, c) => Number(a) + Number(c), 0);
-    const all_pgs = Object.values(random_combinations(osd_tree, pg_size, max_combinations, parity_space > 1));
+    const all_pgs = Object.values(random_hier_combinations(osd_tree, hier_sizes || [ pg_size, 1 ], max_combinations, parity_space > 1, seq_layout));
     const pg_per_osd = {};
     for (const pg of all_pgs)
     {
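
A quick usage sketch of the new parameters (inside an async caller). The osd_tree below is made up, and actually running this needs whatever external solver lp_solve() wraps; the point is only that omitting hier_sizes falls back to [ pg_size, 1 ] (pg_size failure domains, one OSD each), while hier_sizes and seq_layout are passed straight through to random_hier_combinations():

const res = await optimize_initial({
    osd_tree: { host1: { 1: 1, 2: 1 }, host2: { 3: 1, 4: 1 }, host3: { 5: 1, 6: 1 } },
    pg_count: 16,
    pg_size: 3,
    hier_sizes: [ 3, 1 ],  // explicit spelling of the default: 3 hosts, 1 OSD per host
    seq_layout: false,     // layout only matters when more than one OSD is taken per failure domain
});
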
@@ -216,39 +217,45 @@ function calc_intersect_weights(old_pg_size, pg_size, pg_count, prev_weights, al
     return move_weights;
 }
 
-function add_valid_previous(osd_tree, prev_weights, all_pgs)
+function build_parent_per_leaf(osd_tree, res = {}, parents = [])
 {
-    // Add previous combinations that are still valid
-    const hosts = Object.keys(osd_tree).sort();
-    const host_per_osd = {};
-    for (const host in osd_tree)
+    for (const item in osd_tree)
     {
-        for (const osd in osd_tree[host])
-        {
-            host_per_osd[osd] = host;
-        }
+        if (osd_tree[item] instanceof Object)
+            build_parent_per_leaf(osd_tree[item], res, [ ...parents, item ]);
+        else
+            res[item] = parents;
     }
+    return res;
+}
+
+function add_valid_previous(osd_tree, prev_weights, all_pgs, hier_sizes)
+{
+    // Add previous combinations that are still valid
+    const parent_per_osd = build_parent_per_leaf(osd_tree);
     skip_pg: for (const pg_name in prev_weights)
     {
-        const seen_hosts = {};
+        const seen = [];
         const pg = pg_name.substr(3).split(/_/);
         for (const osd of pg)
         {
-            if (!host_per_osd[osd] || seen_hosts[host_per_osd[osd]])
-            {
+            if (!parent_per_osd[osd])
                 continue skip_pg;
+            for (let i = 0; i < parent_per_osd[osd].length; i++)
+            {
+                seen[parent_per_osd[osd][i]]++;
+                if (seen[parent_per_osd[osd][i]] > hier_sizes[i])
+                    continue skip_pg;
             }
-            seen_hosts[host_per_osd[osd]] = true;
         }
         if (!all_pgs[pg_name])
         {
             all_pgs[pg_name] = pg;
         }
     }
 }
 
 // Try to minimize data movement
-async function optimize_change({ prev_pgs: prev_int_pgs, osd_tree, pg_size = 3, pg_minsize = 2, max_combinations = 10000, parity_space = 1, ordered = false })
+async function optimize_change({ prev_pgs: prev_int_pgs, osd_tree, pg_size = 3, pg_minsize = 2,
+    hier_sizes = null, max_combinations = 10000, parity_space = 1, ordered = false, seq_layout = false })
 {
     if (!osd_tree)
     {
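
To illustrate build_parent_per_leaf(), here is what it returns for a small made-up two-level tree (the result follows directly from the code above; the data itself is invented):

build_parent_per_leaf({ host1: { 1: 1, 2: 1 }, host2: { 3: 1 } });
// => { '1': [ 'host1' ], '2': [ 'host1' ], '3': [ 'host2' ] }

add_valid_previous() then walks each previous PG over this map and only keeps the PG if every OSD is still known and no single ancestor at hierarchy level i is used more than hier_sizes[i] times.
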
@@ -273,10 +280,10 @@ async function optimize_change({ prev_pgs: prev_int_pgs, osd_tree, pg_size = 3,
     }
     const old_pg_size = prev_int_pgs[0].length;
     // Get all combinations
-    let all_pgs = random_combinations(osd_tree, pg_size, max_combinations, parity_space > 1);
+    let all_pgs = random_hier_combinations(osd_tree, hier_sizes || [ pg_size, 1 ], max_combinations, parity_space > 1, seq_layout);
    if (old_pg_size == pg_size)
     {
-        add_valid_previous(osd_tree, prev_weights, all_pgs);
+        add_valid_previous(osd_tree, prev_weights, all_pgs, hier_sizes || [ pg_size, 1 ]);
     }
     all_pgs = Object.values(all_pgs);
     const pg_per_osd = {};
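
Since add_valid_previous() now receives the same hier_sizes as the generator, previously used combinations are only re-admitted if they still satisfy the per-level limits. A sketch of a re-optimization call (prev_pgs and osd_tree are invented, and as above running it needs the external LP solver):

const res = await optimize_change({
    prev_pgs: [ [ 1, 3, 5 ], [ 2, 4, 6 ] ],  // previous PG -> OSD mapping
    osd_tree: { host1: { 1: 1, 2: 1 }, host2: { 3: 1, 4: 1 }, host3: { 5: 1, 6: 1 } },
    pg_size: 3,
    hier_sizes: [ 3, 1 ],  // same limits as the initial run, so still-valid old PGs are kept
});
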
@@ -502,41 +509,147 @@ function put_aligned_pgs(aligned_pgs, int_pgs, prev_int_pgs, keygen)
     }
 }
 
-// Convert multi-level osd_tree = { level: number|string, id?: string, size?: number, children?: osd_tree }[]
+// Convert multi-level tree_node = { level: number|string, id?: string, size?: number, children?: tree_node[] }
 // levels = { string: number }
-// to a two-level osd_tree suitable for all_combinations()
-function flatten_tree(osd_tree, levels, failure_domain_level, osd_level, domains = {}, i = { i: 1 })
+// to a multi-level OSD tree suitable for random_hier_combinations()
+// (or in case of just 2 levels - for all_combinations() / random_combinations())
+//
+// Example:
+// tree_node = { level: 'dc', children: [ { level: 'rack', children: [ { level: 'host', children: [ { level: 'osd', size: 10 } ] } ] } ] }
+// extract_levels = [ 'rack', 'osd' ]
+// level_defs = { dc: 1, rack: 2, host: 3, osd: 4 }
+//
+// Result:
+// { rack0: { osd1: 10 } }
+function extract_tree_levels(tree_node, extract_levels, level_defs, new_tree = { idx: 1, items: {} })
 {
-    osd_level = levels[osd_level] || osd_level;
-    failure_domain_level = levels[failure_domain_level] || failure_domain_level;
-    for (const node of osd_tree)
+    const next_level = Number(level_defs[extract_levels[0]] || extract_levels[0]) || 0;
+    const level_name = level_defs[extract_levels[0]] ? extract_levels[0] : 'l'+extract_levels[0]+'_';
+    const is_leaf = extract_levels.length == 1;
+    if ((level_defs[tree_node.level] || tree_node.level) >= next_level)
     {
-        if ((levels[node.level] || node.level) < failure_domain_level)
+        if (!is_leaf)
         {
-            flatten_tree(node.children||[], levels, failure_domain_level, osd_level, domains, i);
+            // Insert a (possibly fake) level
+            const nt = { idx: 1, items: {} };
+            new_tree.items[level_name+(new_tree.idx++)] = nt.items;
+            extract_tree_levels(tree_node, extract_levels.slice(1), level_defs, nt);
         }
         else
         {
-            domains['dom'+(i.i++)] = extract_osds([ node ], levels, osd_level);
+            // Insert a leaf node
+            const leaf_id = tree_node.id || (level_name+(new_tree.idx++));
+            new_tree.items[leaf_id] = tree_node.size;
         }
     }
-    return domains;
+    else
+    {
+        for (const child_node of tree_node.children||[])
+        {
+            extract_tree_levels(child_node, extract_levels, level_defs, new_tree);
+        }
+    }
+    return new_tree.items;
 }
 
-function extract_osds(osd_tree, levels, osd_level, osds = {})
+// generate random PGs with hierarchical failure domains, i.e. for example 3 DC each with 2 HOSTS
+// osd_tree = { level3_id: { level2_id: { level1_id: scalar_value } }, ... }
+// osd_tree may contain arbitrary number of levels, but level count must be the same across the whole tree
+// size_per_level = number of items to select on each level, for example [3, 2, 1].
+// must have the same number of items as the osd_tree level count.
+// count = PG count to generate
+// ordered = don't treat (x,y) and (y,x) as equal
+// seq_layout = true for the [DC1,DC1,DC2,DC2,DC3,DC3] layout, false for [DC1,DC2,DC3,DC1,DC2,DC3] layout
+function random_hier_combinations(osd_tree, size_per_level, count, ordered, seq_layout)
 {
-    for (const node of osd_tree)
+    let seed = 0x5f020e43;
+    const rng = () =>
+    {
+        seed ^= seed << 13;
+        seed ^= seed >> 17;
+        seed ^= seed << 5;
+        return seed + 2147483648;
+    };
+    const get_max_level = (o) =>
+    {
+        let lvl = 0;
+        while (o instanceof Object)
+        {
+            for (const k in o)
+            {
+                lvl++;
+                o = o[k];
+                break;
+            }
+        }
+        return lvl;
+    };
+    const max_level = get_max_level(osd_tree);
+    const gen_pg = (select) =>
     {
-        if ((levels[node.level] || node.level) >= osd_level)
+        let pg = [ osd_tree ];
+        for (let level = 0; level < max_level; level++)
         {
-            osds[node.id] = node.size;
+            let npg = [];
+            for (let i = 0; i < pg.length; i++)
+            {
+                const keys = pg[i] instanceof Object ? Object.keys(pg[i]) : [];
+                const max_keys = keys.length < size_per_level[level] ? keys.length : size_per_level[level];
+                for (let j = 0; j < max_keys; j++)
+                {
+                    const r = select(level, i, j, (ordered ? keys.length : (keys.length - (max_keys - j - 1))));
+                    const el = pg[i][keys[r]] instanceof Object ? pg[i][keys[r]] : keys[r];
+                    npg[seq_layout ? i*size_per_level[level]+j : j*pg.length+i] = el;
+                    keys.splice(ordered ? r : 0, ordered ? 1 : (r+1));
+                }
+                for (let j = max_keys; j < size_per_level[level]; j++)
+                    npg[seq_layout ? i*size_per_level[level]+j : j*pg.length+i] = NO_OSD;
+            }
+            pg = npg;
         }
-        else
+        return pg;
+    };
+    const r = {};
+    // Generate random combinations including each OSD at least once
+    let has_next = true;
+    let ctr = [];
+    while (has_next)
+    {
+        let pg = gen_pg((level, i, j, n) =>
+        {
+            if (i == 0 && j == 0)
+            {
+                // Select a pre-determined OSD in the first position on each level
+                const r = ctr[level] == null || ctr[level][1] != n ? 0 : ctr[level][0];
+                ctr[level] = [ r, n ];
+                return r;
+            }
+            return rng() % n;
+        });
+        for (let i = ctr.length-1; i >= 0; i--)
         {
-            extract_osds(node.children||[], levels, osd_level, osds);
+            ctr[i][0]++;
+            if (ctr[i][0] < ctr[i][1])
+                break;
+            else
+                ctr[i] = null;
         }
+        has_next = ctr[0] != null;
+        const cyclic_pgs = [ pg ];
+        if (ordered)
+            for (let i = 1; i < pg.size; i++)
+                cyclic_pgs.push([ ...pg.slice(i), ...pg.slice(0, i) ]);
+        for (const pg of cyclic_pgs)
+            r['pg_'+pg.join('_')] = pg;
     }
-    return osds;
+    // Generate purely random combinations
+    while (count > 0)
+    {
+        let pg = gen_pg((l, i, j, n) => rng() % n);
+        r['pg_'+pg.join('_')] = pg;
+        count--;
+    }
+    return r;
 }
 
 // ordered = don't treat (x,y) and (y,x) as equal
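
For a sense of how random_hier_combinations() is meant to be called, here is a sketch with a made-up three-level tree (dc -> host -> OSD weight); extract_tree_levels() can build a tree with exactly these levels from the node/children form documented above:

const tree = {
    dc1: { host11: { 1: 1, 2: 1 }, host12: { 3: 1 } },
    dc2: { host21: { 4: 1 }, host22: { 5: 1 } },
    dc3: { host31: { 6: 1 }, host32: { 7: 1 } },
};
// 3 DCs, 1 host per DC, 1 OSD per host => PGs of size 3, each replica in a different DC
const pgs = random_hier_combinations(tree, [ 3, 1, 1 ], 10, false, false);
// pgs maps PG names to OSD id arrays, e.g. { 'pg_1_4_6': [ '1', '4', '6' ], ... }
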
@@ -752,11 +865,12 @@ module.exports = {
     pg_weights_space_efficiency,
     pg_list_space_efficiency,
     pg_per_osd_space_efficiency,
-    flatten_tree,
+    extract_tree_levels,
     lp_solve,
     make_int_pgs,
     align_pgs,
     random_combinations,
+    random_hier_combinations,
     all_combinations,
 };