// Экспорт схемы PostgreSQL БД в HTML, GraphViz, SQL
// Упоротые случаи не покрывает, только вменяемые
// (c) Vitaliy Filippov 2019+
// Версия: 2020-09-07

// Установить модуль pg: npm install pg

// Создать config.json формата:
// { db: { host, database, user, password, port }, skip_tables?: RegExp, skip_triggers?: RegExp, skip_functions?: RegExp }

// HTML:
// nodejs export-schema.js config.json html > schema.html
// В docx потом можно перегнать так (open/libreoffice):
// soffice --headless --infilter=writerglobal8_HTML --convert-to docx:"MS Word 2007 XML" --outdir . schema.html

// Graphviz:
// nodejs export-schema.js config.json dot > schema.dot
// Потом перегнать в SVG:
// dot -Tsvg schema.dot

// SQL (воссоздать начальные определения таблиц, индексов, триггеров, функций и агрегатов без pg_dump-ного мусора):
// nodejs export-schema.js config.json sql <DIRECTORY>
// fs.promises exists since Node 10; fall back to a local polyfill on older runtimes.
const fsp = require('fs').promises || require('../lib/fsp.js');
const pg = require('pg');

// Catalog query: one row per (table, column) of the 'public' schema, with a
// resolved SQL type name, length, default, comments and foreign-key info.
// `ver` is the numeric server version (server_version_num); PostgreSQL 12
// dropped pg_attrdef.adsrc, so newer servers read the default expression via
// pg_get_expr() instead. Columns whose default is nextval('..._id_seq') are
// reported as serial/bigserial with an empty default.
const columninfo = (ver) => `select distinct on (c.relname, a.attnum, a.attname, a.attnotnull, t.typname)
c.relname as table,
a.attname as name,
a.attnotnull as "notnull",
(a.atttypmod-4) as "len",
(case when `+(ver >= 120000 ? 'pg_get_expr(d.adbin, d.adrelid)' : 'd.adsrc')+` like 'nextval(''%_id_seq%''::regclass)' then
(case when t.typname='int8' then 'bigserial'
when t.typname='int4'
then 'serial'
else t.typname end)
else
(case when t.typname='int8' then 'bigint'
when t.typname='int4' then 'int'
when t.typname='int2' then 'smallint'
when t.typname='_int4' then 'int[]'
else t.typname end)
end) as "type",
(case when `+(ver >= 120000 ? 'pg_get_expr(d.adbin, d.adrelid)' : 'd.adsrc')+` like 'nextval(''%_id_seq%''::regclass)' then ''
else `+(ver >= 120000 ? 'pg_get_expr(d.adbin, d.adrelid)' : 'd.adsrc')+` end) as "default",
pg_catalog.obj_description(c.oid) table_comment,
pg_catalog.col_description(c.oid, a.attnum) column_comment,
fkc.relname ref_table,
(select jsonb_agg(fka.attname) from pg_attribute fka where fka.attrelid=fk.confrelid and fka.attnum=any(fk.confkey)) ref_fields,
fk.confupdtype ref_update,
fk.confdeltype ref_delete
from pg_namespace n
inner join pg_class c on c.relnamespace=n.oid
inner join pg_attribute a on a.attrelid=c.oid and a.attnum>0 and a.attisdropped=false
inner join pg_type t on t.oid=a.atttypid
left join pg_attrdef d on d.adrelid=a.attrelid and d.adnum=a.attnum
left join pg_constraint fk on fk.conrelid=c.oid and fk.conkey=array[a.attnum] and fk.contype='f'
left join pg_class fkc on fkc.oid=fk.confrelid
where n.nspname='public' and c.relkind='r'
order by c.relname, a.attnum, a.attname, a.attnotnull, t.typname`;
// Catalog query: all indexes of the 'public' schema with their CREATE INDEX
// statements (the noisy default " USING btree" clause is stripped) and a flag
// telling whether the index backs the table's primary key.
const indexinfo = `select i.tablename, replace(i.indexdef, ' USING btree (', ' (') indexdef, x.indisprimary
from pg_indexes i
inner join pg_namespace n on n.nspname='public' and n.nspname=i.schemaname
inner join pg_class c on c.relnamespace=n.oid and c.relname=i.indexname
inner join pg_index x on x.indexrelid=c.oid`;
// Catalog query: all non-internal triggers of the 'public' schema. "sql" is a
// complete re-creation script: the trigger function definition followed by the
// CREATE TRIGGER statement.
const triggerinfo = `select
c.relname as table,
t.tgname as name,
(pg_get_functiondef(p.oid) || e';\n\n' || pg_get_triggerdef(t.oid) || ';\n') as sql
from pg_trigger t, pg_class c, pg_namespace n, pg_proc p
where n.nspname='public' and c.relnamespace=n.oid
and c.oid=t.tgrelid and p.oid=t.tgfoid and t.tgisinternal=false`;
// Catalog query: user functions of the 'public' schema, excluding trigger
// functions (exported with their triggers) and functions used as aggregate
// transition/final functions (exported with their aggregates).
// PostgreSQL 11 replaced pg_proc.proisagg with prokind, hence the `ver` switch.
const funcinfo = (ver) => `select p.proname as name, pg_get_functiondef(p.oid) || ';\n' as sql
from pg_proc p
inner join pg_namespace n on n.nspname='public' and n.oid=p.pronamespace
left join pg_trigger t on t.tgfoid=p.oid
left join pg_aggregate a on p.oid in (a.aggtransfn, a.aggfinalfn, a.aggmtransfn, a.aggminvtransfn, a.aggmfinalfn)
where `+(ver >= 110000 ? `p.prokind != 'a'` : `p.proisagg=false`)+` and t.oid is null and a.aggfnoid is null`;
// Catalog query: user aggregates of the 'public' schema. "sql" reconstructs
// the definitions of all support functions followed by a CREATE AGGREGATE
// statement. Combine/serial/deserial support functions only exist since
// PostgreSQL 9.6, hence the `ver` switch.
const agginfo = (ver) => `select
p.proname as name,
pg_get_functiondef(a.aggtransfn)
||case when a.aggfinalfn=0 then '' else e';\n\n'||pg_get_functiondef(a.aggfinalfn) end
||case when a.aggmtransfn=0 then '' else e';\n\n'||pg_get_functiondef(a.aggmtransfn) end
||case when a.aggminvtransfn=0 then '' else e';\n\n'||pg_get_functiondef(a.aggminvtransfn) end
||case when a.aggmfinalfn=0 then '' else e';\n\n'||pg_get_functiondef(a.aggmfinalfn) end
||e';\n\ncreate aggregate '||n.nspname||'.'||p.proname||'('||format_type(a.aggtranstype, null)||') (sfunc = '||a.aggtransfn
||', stype = '||format_type(a.aggtranstype, null)
||case when a.aggtransspace=0 then '' else ', sspace = '||a.aggtransspace end
||case when a.aggfinalfn=0 then '' else ', finalfunc = '||a.aggfinalfn end
||case when a.agginitval is null then '' else ', initcond = '||a.agginitval end
||case when a.aggmtranstype=0 then '' else ', mstype = '||format_type(a.aggmtranstype, null) end
||case when a.aggmtransspace=0 then '' else ', msspace = '||a.aggmtransspace end
||case when a.aggmtransfn=0 then '' else ', msfunc = '||a.aggmtransfn end
||case when a.aggminvtransfn=0 then '' else ', invfunc = '||a.aggminvtransfn end
||case when a.aggmfinalfn=0 then '' else ', mfinalfunc = '||a.aggmfinalfn end
||case when a.aggminitval is null then '' else ', minitcond = '||a.aggminitval end
||case when op.oprname is null then '' else ', sortop = '||op.oprname end
`+(ver >= 90600 ? `||case when a.aggcombinefn=0 then '' else ', combinefunc = '||a.aggcombinefn end
||case when a.aggserialfn=0 then '' else ', serialfunc = '||a.aggserialfn end
||case when a.aggdeserialfn=0 then '' else ', deserialfunc = '||a.aggdeserialfn end
` : '')+`||e');\n' as sql
from pg_proc p
join pg_namespace n on p.pronamespace = n.oid
join pg_aggregate a on a.aggfnoid = p.oid
left join pg_operator op on op.oid = a.aggsortop
where n.nspname = 'public'`;
// FIX above: the deserialfunc clause previously emitted a.aggcombinefn
// (copy-paste bug), producing a wrong DESERIALFUNC in CREATE AGGREGATE.
// Entry point: reads config.json, connects to the database, collects schema
// metadata (columns, indexes, triggers, functions, aggregates) and dispatches
// to the requested export format: html (default), crudstub, dot or sql.
async function run()
{
    const config = JSON.parse(await fsp.readFile(process.argv[2] || '../config.json', { encoding: 'utf-8' }));
    const format = process.argv[3] || 'html';
    const dbh = new pg.Client(config.db);
    await dbh.connect();
    // Numeric server version (e.g. 120003 for 12.3), used to select
    // version-specific catalog queries
    const pg_ver = parseInt((await dbh.query('show server_version_num')).rows[0]['server_version_num'], 10);
    const columns = (await dbh.query(columninfo(pg_ver))).rows;
    let indexes = (await dbh.query(indexinfo)).rows;
    const triggers = (await dbh.query(triggerinfo)).rows;
    const funcs = (await dbh.query(funcinfo(pg_ver))).rows;
    // FIX: agginfo is a (ver) => sql builder like columninfo/funcinfo; it was
    // previously passed to query() without being called
    const aggs = (await dbh.query(agginfo(pg_ver))).rows;
    // Group the flat column rows into per-table objects
    const tables = {};
    let skip = config.skip_tables && new RegExp(config.skip_tables, 'i');
    for (const col of columns)
    {
        if (skip && skip.exec(col.table))
        {
            continue;
        }
        if (!tables[col.table])
        {
            tables[col.table] = {
                name: col.table,
                comment: col.table_comment,
                columns: {},
                indexes: [],
                triggers: [],
                primary_key: [],
            };
        }
        tables[col.table].columns[col.name] = {
            name: col.name,
            comment: col.column_comment,
            type: col.type.toLowerCase(),
            len: col.len,
            notnull: col.notnull,
            default: col.default,
            ref_table: col.ref_table,
            ref_fields: col.ref_fields,
            ref_update: col.ref_update,
            ref_delete: col.ref_delete,
        };
    }
    indexes = indexes.sort((a, b) => a.indexdef.localeCompare(b.indexdef));
    for (const idx of indexes)
    {
        if (!tables[idx.tablename])
        {
            continue;
        }
        if (idx.indisprimary)
        {
            // A primary key index looks like "create unique index ... (cols)";
            // remember its columns instead of keeping the index definition
            let m = /^create\s*unique\s*index\s*\S+\s*on\s*\S+\s*\(\s*(.*)\s*\)\s*$/i.exec(idx.indexdef);
            if (m)
            {
                tables[idx.tablename].primary_key = m[1].split(/\s*,\s*/);
                continue;
            }
        }
        tables[idx.tablename].indexes.push(idx.indexdef);
    }
    for (const f of funcs)
    {
        f.sql = f.sql.replace(/\r\n/g, '\n');
    }
    skip = config.skip_triggers && new RegExp(config.skip_triggers, 'i');
    for (const t of triggers)
    {
        // Make SQL compatible with PostgreSQL < 11
        t.sql = t.sql.replace(/ EXECUTE FUNCTION /, ' EXECUTE PROCEDURE ');
        if (tables[t.table] && (!skip || !skip.exec(t.name)))
        {
            tables[t.table].triggers.push(t.sql);
        }
    }
    if (format == 'html')
    {
        console.log(exportHTML(tables));
    }
    else if (format == 'crudstub')
    {
        await generateCRUDStubs(tables);
    }
    else if (format == 'dot')
    {
        // Graphviz
        console.log(exportDOT(tables));
    }
    else if (format == 'sql')
    {
        // Простой экспорт схемы в SQL
        const dir = process.argv[4] || './sql-export';
        let stat = null;
        try
        {
            stat = await fsp.stat(dir);
        }
        catch (e)
        {
            // A missing directory is reported below instead of crashing
        }
        // FIX: isDirectory is a method; the bare property reference was always
        // truthy, so the old `!stat.isDirectory` check could never fire
        if (!stat || !stat.isDirectory())
        {
            console.error('Укажите директорию следующим параметром:\n nodejs export-schema.js config.json sql ./sql-export-dir');
            process.exit(-1);
        }
        let all_funcs = [ ...funcs, ...aggs ];
        if (config.skip_functions)
        {
            let re = new RegExp(config.skip_functions, 'i');
            all_funcs = all_funcs.filter(f => !re.exec(f.name));
        }
        await exportSQL(tables, all_funcs, dir);
    }
    else
    {
        console.error('Неизвестный формат экспорта: '+format);
    }
    process.exit(0);
}
// Generates boilerplate CRUDService subclasses: one file under ../lib/api/ per
// table that has an "id" column (patch_state excluded), then prints the
// require/registration lines for all generated services to stdout.
async function generateCRUDStubs(tables)
{
    let services = '', imports = '';
    for (const tableName in tables)
    {
        const table = tables[tableName];
        if (tableName == 'patch_state' || !table.columns.id)
        {
            continue;
        }
        // snake_case table name -> CamelCase class name
        const className = tableName.replace(/(?:^|_)([a-z])/g, (m, c) => c.toUpperCase());
        let joins = '';
        const fieldLines = [];
        for (const col of Object.values(table.columns))
        {
            if (col.name == 'id')
            {
                continue;
            }
            let fieldType = col.type;
            if (col.ref_table)
            {
                // Foreign key -> a 'ref' field plus a join descriptor
                fieldType = 'ref';
                joins += ` ${col.name.replace('_id', '')}: { service: '${col.ref_table}', object_field: '${col.name}' },\n`;
            }
            else if (fieldType == 'varchar' || fieldType == 'text')
            {
                fieldType = 'string';
            }
            else if ((col.comment||'').indexOf('UNIX время') >= 0)
            {
                // The column comment marks UNIX timestamps
                fieldType = 'unixtime';
            }
            else if (fieldType == 'int' || fieldType == 'smallint' || fieldType == 'bigint')
            {
                fieldType = 'int';
            }
            else if (fieldType == 'jsonb')
            {
                fieldType = 'json';
            }
            else if (fieldType == 'boolean')
            {
                fieldType = 'bool';
            }
            fieldLines.push(` ${col.name}: '${fieldType}',\n`);
        }
        const cols = fieldLines.join('');
        const code =
`const UserError = require('../UserError.js');
const { CRUDService } = require('../CRUDService.js');

class ${className} extends CRUDService
{
}

${className}.prototype.table = '${tableName}';
${className}.prototype.maxLimit = 100;
${className}.prototype.fieldTypes = {
${cols}};
${className}.prototype.joins = {
${joins}};

module.exports = ${className};
`;
        await fsp.writeFile('../lib/api/'+className+'.js', code);
        imports += `const ${className} = require('./api/${className}.js');\n`;
        services += ` ${tableName}: ${className},\n`;
    }
    console.log(imports + services);
}
// Renders the schema as an HTML document: one <h3> heading plus one <table>
// per DB table, ordered so that referenced tables come before referencing ones.
function exportHTML(tables)
{
    const seen = {}, seq = [];
    Object.keys(tables).forEach(name => add(tables, name, seen, seq));
    // th td margin: 0cm для либреофиса
    const parts = [`<style>
th, td { margin: 0cm; }
h3 { font-size: 20px; margin: 10px 0; }
table { font-size: 15px; width: 1000px; border-collapse: collapse; }
th { background: #f0f0f0; }
td, th { border: 1px solid #ccc; text-align: left; padding: .2em; }
th:first-child { width: 15%; }
th:nth-child(2) { width: 25%; }
</style>`];
    for (const name of seq)
    {
        const tbl = tables[name];
        parts.push(`<h3>${h(tbl.name)} — ${h(tbl.comment)}</h3>\n`);
        parts.push(`<table width="100%">\n<tr><th width="15%">Колонка</th><th width="25%">Тип</th><th>Комментарий</th></tr>\n`);
        for (const col of Object.values(tbl.columns))
        {
            parts.push(`<tr><td>${h(col.name)}</td>`);
            parts.push(`<td>${h(col.ref_table ? 'Ссылка на '+col.ref_table : col.type)}</td>`);
            parts.push(`<td>${h(col.name == 'id' ? 'Первичный ключ' : col.comment)}</td></tr>\n`);
        }
        parts.push(`</table>\n`);
    }
    return parts.join('');
}
// Renders the schema as a GraphViz digraph: one HTML-like "note" node per DB
// table and one edge per foreign key (deduplicated per referenced table),
// ordered so that referenced tables are declared first.
function exportDOT(tables)
{
    const seen = {}, seq = [];
    Object.keys(tables).forEach(name => add(tables, name, seen, seq));
    let dot = `digraph G {
graph [rankdir=LR]
node [shape=note fontsize=10 fontname=Arial style=filled fillcolor=white]
edge [color="#4080ff" len=0.5]
`;
    for (const name of seq)
    {
        const tbl = tables[name];
        const edges = {};
        dot += `${tbl.name} [color="#8090a0",fillcolor="#f0f4f8",label=<
<table border="0" cellborder="0" cellspacing="0">
<tr><td colspan="3">${h(tbl.name)} - ${h(tbl.comment)}</td></tr>
<tr><td align="left" width="15%"><b>Колонка</b></td><td align="left" width="25%"><b>Тип</b></td><td align="left"><b>Комментарий</b></td></tr>
`;
        for (const col of Object.values(tbl.columns))
        {
            dot += `<tr><td align="left">${h(col.name)}</td>`;
            dot += `<td align="left">${h(col.ref_table ? 'Ссылка на '+col.ref_table : col.type)}</td>`;
            dot += `<td align="left">${h(col.name == 'id' ? 'Первичный ключ' : col.comment)}</td></tr>\n`;
            if (col.ref_table)
            {
                // FIXME: colorize the arrows
                edges[col.ref_table] = `${tbl.name} -> ${col.ref_table}\n`;
            }
        }
        dot += `</table>
>
,tooltip="${h(tbl.name)}"]
`;
        dot += Object.values(edges).join('');
    }
    dot += "}\n";
    return dot;
}
// Writes the schema into <dir>: one <table>.sql file per table (CREATE TABLE
// with inline FK references, indexes and comments), a <table>_triggers.sql
// file when the table has triggers, and one <name>.sql file per
// function/aggregate.
async function exportSQL(tables, funcs, dir)
{
    // pg_constraint.confupdtype / confdeltype action codes
    const ACTION = { a: 'no action', r: 'restrict', c: 'cascade', n: 'set null', d: 'set default' };
    // FIX: single quotes must be doubled inside SQL string literals, otherwise
    // a comment containing an apostrophe produced broken SQL
    const quote = (s) => String(s).replace(/'/g, "''");
    for (const t in tables)
    {
        const tbl = tables[t];
        let code = `create table ${tbl.name} (\n`;
        for (const c in tbl.columns)
        {
            const col = tbl.columns[c];
            code += " "+col.name+' '+col.type +
                (col.type == 'varchar' ? '('+col.len+')' : '') +
                (col.notnull ? ' not null' : '') +
                // Single-column PK is declared inline on its column
                (tbl.primary_key.length == 1 && tbl.primary_key[0] === col.name ? ' primary key' : '') +
                (col.ref_table
                ? ' references '+col.ref_table+' ('+col.ref_fields.join(', ')+
                    ') on delete '+ACTION[col.ref_delete]+' on update '+ACTION[col.ref_update]
                : '') + ",\n";
        }
        if (tbl.primary_key.length > 1)
        {
            // Composite key: keep the last column's trailing comma
            code += " primary key ("+tbl.primary_key.join(', ')+")\n";
        }
        else
        {
            // Strip the trailing ",\n" after the last column
            code = code.substr(0, code.length-2)+"\n";
        }
        code += ");\n\n";
        if (tbl.indexes.length > 0)
        {
            code += tbl.indexes.join(";\n")+";\n\n";
        }
        if (tbl.comment)
        {
            code += `comment on table ${tbl.name} is '${quote(tbl.comment)}';\n`;
        }
        for (const c in tbl.columns)
        {
            const col = tbl.columns[c];
            if (col.comment)
            {
                code += `comment on column ${tbl.name}.${c} is '${quote(col.comment)}';\n`;
            }
        }
        await fsp.writeFile(dir+'/'+tbl.name+'.sql', code.trim()+'\n');
        if (tbl.triggers.length)
        {
            await fsp.writeFile(dir+'/'+tbl.name+'_triggers.sql', tbl.triggers.join("\n\n"));
        }
    }
    for (const f of funcs)
    {
        await fsp.writeFile(dir+'/'+f.name+'.sql', f.sql);
    }
}
// Depth-first ordering helper: appends table <t> to <seq> after every table it
// references through foreign keys, so referenced tables always come first.
// <seen> prevents duplicates and breaks reference cycles.
function add(tables, t, seen, seq)
{
    // FIX: also skip unknown tables - a foreign key may point at a table that
    // was filtered out by skip_tables; previously this threw a TypeError.
    if (seen[t] || !tables[t])
    {
        return;
    }
    seen[t] = true;
    for (const c in tables[t].columns)
    {
        const col = tables[t].columns[c];
        if (col.ref_table && !seen[col.ref_table])
        {
            add(tables, col.ref_table, seen, seq);
        }
    }
    seq.push(t);
}
// Kick off the export; print any unhandled error instead of crashing silently
run().catch(console.error);
// Escapes a string for safe embedding into HTML output and GraphViz HTML-like
// labels; null/undefined values are rendered as the literal text 'null'.
function h(s)
{
    // FIX: the replacement map must contain HTML entities - the identity
    // mappings (e.g. '<': '<') made the escaper a no-op and produced broken
    // markup for values containing <, >, &, ' or ".
    const r = { '<': '&lt;', '>': '&gt;', '&': '&amp;', '\'': '&#39;', '"': '&quot;' };
    return s == null ? 'null' : s.replace(/[<>&'"]/g, m => r[m]);
}