From 50abf970cd8141a0c8268628c56baf97d264c146 Mon Sep 17 00:00:00 2001 From: Daniel Faria Gomes Date: Mon, 4 May 2020 16:15:09 -0300 Subject: [PATCH 1/2] Removing Changelog - Changes are now described in the Releases page --- Changelog.md | 32 -------------------------------- 1 file changed, 32 deletions(-) delete mode 100644 Changelog.md diff --git a/Changelog.md b/Changelog.md deleted file mode 100644 index 7111828..0000000 --- a/Changelog.md +++ /dev/null @@ -1,32 +0,0 @@ -## 2011-07-01 Daniel Carneiro - -* Initial Release - -1.32 -Added new function markAll to mark all archives. Good when you re-create a database with all changes applied to schema. -1.33 -Added new function to create dbseeds (like ruby migrations). -Updated funtion schema:recreate_oracle and schema:recreate_mysql, now creating insert DDL for core data. -1.34 -bug fix encoding problems -bug fix metadata files generation -1.35 -Added new function to validate if a constraint is enabled or not (Works only in Oracle) -1.36 -bug fix schema:recreate_mysql duplicating primary key statement -1.37 -bug fix dbchange:markall in mysql -1.40 -bug fix - Single quotes duplicated in insert into query -bug-fix - String fields not encapsulated with single quotes when value is number -1.41 -bug fix - schema:recreate_mysql not escaping strings in defaultValue -1.42 -bug fix - dbchange:down executing empty down statements -Added support for postgres (version >= 8.4) -1.43 -bug fixes in postgres support -1.58 -bug fix in oracle indexes read -1.59 -transforming warnings in errors \ No newline at end of file From 0a656e6d15442c3cfd32b77458e35d61b5922bfa Mon Sep 17 00:00:00 2001 From: Daniel Faria Gomes Date: Mon, 4 May 2020 19:22:55 -0300 Subject: [PATCH 2/2] Adding postgres policy / row level security support --- .../reader/DatabaseReaderChanger.groovy | 46 +- .../reader/PostgresDatabaseReader.groovy | 704 ++++++++++-------- .../bluesoft/bee/importer/JsonImporter.groovy | 327 ++++---- .../com/bluesoft/bee/model/ObjectType.groovy | 72 +- .../br/com/bluesoft/bee/model/Schema.groovy | 173 ++--- .../br/com/bluesoft/bee/model/Table.groovy | 238 +++--- .../bee/model/message/MessageType.groovy | 56 +- .../bee/model/postgres/PostgresPolicy.groovy | 71 ++ .../bee/model/postgres/PostgresTable.groovy | 82 ++ .../schema/BeePostgresSchemaCreator.groovy | 204 ++--- .../BeePostgresSchemaCreatorAction.groovy | 106 +-- .../bee/schema/BeeSchemaCreator.groovy | 662 ++++++++-------- .../schema/BeeSchemaGeneratorAction.groovy | 148 ++-- .../schema/BeeSchemaValidatorAction.groovy | 99 +-- .../bluesoft/bee/util/VersionHelper.groovy | 25 +- .../bee/util/VersionHelperTest.groovy | 58 +- 16 files changed, 1686 insertions(+), 1385 deletions(-) create mode 100644 src/main/groovy/br/com/bluesoft/bee/model/postgres/PostgresPolicy.groovy create mode 100644 src/main/groovy/br/com/bluesoft/bee/model/postgres/PostgresTable.groovy diff --git a/src/main/groovy/br/com/bluesoft/bee/database/reader/DatabaseReaderChanger.groovy b/src/main/groovy/br/com/bluesoft/bee/database/reader/DatabaseReaderChanger.groovy index 156175a..f357709 100644 --- a/src/main/groovy/br/com/bluesoft/bee/database/reader/DatabaseReaderChanger.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/database/reader/DatabaseReaderChanger.groovy @@ -1,32 +1,30 @@ package br.com.bluesoft.bee.database.reader -import java.text.MessageFormat; - -import br.com.bluesoft.bee.model.Options; +import br.com.bluesoft.bee.model.Options import br.com.bluesoft.bee.util.RDBMS -import br.com.bluesoft.bee.util.RDBMSUtil; +import 
br.com.bluesoft.bee.util.RDBMSUtil +import java.text.MessageFormat class DatabaseReaderChanger { - public static final String MENSAGEM_DE_ERRO_BANCO_NAO_SUPORTADO = "Banco de dados {0} não suportado" - - public static DatabaseReader getDatabaseReader(Options options, sql) { - def databaseReader - RDBMS banco = RDBMSUtil.getRDBMS(options) - - if (banco == RDBMS.MYSQL) { - String databaseName = RDBMSUtil.getMySqlDatabaseName(options) - databaseReader = new MySqlDatabaseReader(sql, databaseName) - } else if(banco == RDBMS.ORACLE) { - databaseReader = new OracleDatabaseReader(sql) - } else if(banco == RDBMS.POSTGRES) { - databaseReader = new PostgresDatabaseReader(sql) - } else { - def mensagemDeErro = MessageFormat.format(MENSAGEM_DE_ERRO_BANCO_NAO_SUPORTADO, banco) - throw new IllegalArgumentException(mensagemDeErro) - } - return databaseReader; - } - + public static final String MENSAGEM_DE_ERRO_BANCO_NAO_SUPORTADO = "Banco de dados {0} não suportado" + + public static DatabaseReader getDatabaseReader(Options options, sql) { + def databaseReader + RDBMS banco = RDBMSUtil.getRDBMS(options) + + if (banco == RDBMS.MYSQL) { + String databaseName = RDBMSUtil.getMySqlDatabaseName(options) + databaseReader = new MySqlDatabaseReader(sql, databaseName) + } else if (banco == RDBMS.ORACLE) { + databaseReader = new OracleDatabaseReader(sql) + } else if (banco == RDBMS.POSTGRES) { + databaseReader = new PostgresDatabaseReader(sql) + } else { + def mensagemDeErro = MessageFormat.format(MENSAGEM_DE_ERRO_BANCO_NAO_SUPORTADO, banco) + throw new IllegalArgumentException(mensagemDeErro) + } + return databaseReader; + } } diff --git a/src/main/groovy/br/com/bluesoft/bee/database/reader/PostgresDatabaseReader.groovy b/src/main/groovy/br/com/bluesoft/bee/database/reader/PostgresDatabaseReader.groovy index 1446a91..093b754 100644 --- a/src/main/groovy/br/com/bluesoft/bee/database/reader/PostgresDatabaseReader.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/database/reader/PostgresDatabaseReader.groovy @@ -32,81 +32,92 @@ */ package br.com.bluesoft.bee.database.reader - -import br.com.bluesoft.bee.model.* +import br.com.bluesoft.bee.model.Constraint +import br.com.bluesoft.bee.model.Index +import br.com.bluesoft.bee.model.IndexColumn +import br.com.bluesoft.bee.model.Procedure +import br.com.bluesoft.bee.model.Schema +import br.com.bluesoft.bee.model.Sequence +import br.com.bluesoft.bee.model.TableColumn +import br.com.bluesoft.bee.model.Trigger +import br.com.bluesoft.bee.model.View +import br.com.bluesoft.bee.model.postgres.PostgresPolicy +import br.com.bluesoft.bee.model.postgres.PostgresTable import br.com.bluesoft.bee.util.VersionHelper class PostgresDatabaseReader implements DatabaseReader { - def sql - - PostgresDatabaseReader (def sql) { - this.sql = sql - } - - Schema getSchema(objectName = null) { - def schema = new Schema() - schema.databaseVersion = getDatabaseVersion() - schema.tables = getTables(objectName, schema.databaseVersion) - schema.sequences = getSequences(objectName) - schema.views = getViews(objectName) - schema.procedures = getProcedures(objectName) - schema.packages = getPackages(objectName) - schema.triggers = getTriggers(objectName) - return schema - } - - def getDatabaseVersion() { - def databaseVersion = null - try { - databaseVersion = sql.rows('show server_version')[0].get('server_version') - } catch (Exception e) { - databaseVersion = sql.rows("select setting from pg_settings where name = 'server_version'")[0].get('setting') - } - return databaseVersion - } - - def 
getTables(objectName, databaseVersion) {
-		def tables = fillTables(objectName)
-		fillColumns(tables, objectName)
-		fillIndexes(tables, objectName, databaseVersion)
-		fillCostraints(tables, objectName)
-		fillCostraintsColumns(tables, objectName)
-		return tables
-	}
-
-	static final def TABLES_QUERY = '''
-		select t.table_name, 'N'as temporary
+    def sql
+
+    PostgresDatabaseReader(def sql) {
+        this.sql = sql
+    }
+
+    Schema getSchema(objectName = null) {
+        def schema = new Schema()
+        schema.databaseVersion = getDatabaseVersion()
+        schema.tables = getTables(objectName, schema.databaseVersion)
+        schema.sequences = getSequences(objectName)
+        schema.views = getViews(objectName)
+        schema.procedures = getProcedures(objectName)
+        schema.packages = getPackages(objectName)
+        schema.triggers = getTriggers(objectName)
+        return schema
+    }
+
+    def getDatabaseVersion() {
+        def databaseVersion = null
+        try {
+            databaseVersion = sql.rows('show server_version')[0].get('server_version')
+        } catch (Exception e) {
+            databaseVersion = sql.rows("select setting from pg_settings where name = 'server_version'")[0].get('setting')
+        }
+        return databaseVersion
+    }
+
+    def getTables(objectName, databaseVersion) {
+        def tables = fillTables(objectName)
+        fillColumns(tables, objectName)
+        fillIndexes(tables, objectName, databaseVersion)
+        fillCostraints(tables, objectName)
+        fillCostraintsColumns(tables, objectName)
+        fillPolicies(tables, objectName)
+        return tables
+    }
+
+    static final def TABLES_QUERY = '''
+		select t.table_name, pt.rowsecurity
 		from information_schema.tables t
+		inner join pg_catalog.pg_tables pt on t.table_name = pt.tablename and t.table_schema = pt.schemaname
 		where t.table_type = 'BASE TABLE'
 		and table_schema not in ('pg_catalog', 'information_schema')
 		order by table_name
 	'''
-	static final def TABLES_QUERY_BY_NAME = '''
-		select t.table_name, 'N'as temporary
+    static final def TABLES_QUERY_BY_NAME = '''
+		select t.table_name, pt.rowsecurity
 		from information_schema.tables t
+		inner join pg_catalog.pg_tables pt on t.table_name = pt.tablename and t.table_schema = pt.schemaname
 		where t.table_type = 'BASE TABLE'
 		and table_schema not in ('pg_catalog', 'information_schema')
 		and t.table_name = ?
 		order by table_name
 	'''
-	private def fillTables(objectName) {
-		def tables = [:]
-		def rows
-		if(objectName) {
-			rows = sql.rows(TABLES_QUERY_BY_NAME, [objectName])
-		} else {
-			rows = sql.rows(TABLES_QUERY)
-		}
-		rows.each({
-			def name = it.table_name.toLowerCase()
-			def temporary = it.temporary == 'Y' ? 
true : false - def comment = '' - tables[name] = new Table(name:name, temporary:temporary, comment:comment) - }) - return tables - } - - - static final def TABLES_COLUMNS_QUERY = ''' + + private def fillTables(objectName) { + def tables = [:] + def rows + if (objectName) { + rows = sql.rows(TABLES_QUERY_BY_NAME, [objectName]) + } else { + rows = sql.rows(TABLES_QUERY) + } + rows.each({ + def name = it.table_name.toLowerCase() + def rowSecurity = it.rowsecurity + tables[name] = new PostgresTable(name: name, rowSecurity: rowSecurity, temporary: false, comment: '') + }) + return tables + } + + static final def TABLES_COLUMNS_QUERY = ''' select ic.table_name, ic.column_name, ic.data_type, ic.is_nullable as nullable, case when (ic.numeric_precision_radix is not null) then ic.numeric_precision @@ -121,7 +132,7 @@ class PostgresDatabaseReader implements DatabaseReader { order by ic.table_name, ic.ordinal_position; ''' - static final def TABLES_COLUMNS_QUERY_BY_NAME = ''' + static final def TABLES_COLUMNS_QUERY_BY_NAME = ''' select ic.table_name, ic.column_name, ic.data_type, ic.is_nullable as nullable, case when (ic.numeric_precision_radix is not null) then ic.numeric_precision @@ -136,40 +147,41 @@ class PostgresDatabaseReader implements DatabaseReader { and ic.table_name = ? order by ic.table_name, ic.ordinal_position; ''' - private def fillColumns(tables, objectName) { - def rows - if(objectName) { - rows = sql.rows(TABLES_COLUMNS_QUERY_BY_NAME, [objectName]) - } else { - rows = sql.rows(TABLES_COLUMNS_QUERY) - } - rows.each({ - def table = tables[it.table_name.toLowerCase()] - def column = new TableColumn() - column.name = it.column_name.toLowerCase() - column.type = getColumnType(it.data_type) - column.size = it.data_size - column.scale = it.data_scale == null ? 0 : it.data_scale - column.nullable = it.nullable == 'NO' ? false : true - def defaultValue = it.data_default - if(defaultValue) { - column.defaultValue = defaultValue?.trim()?.toUpperCase() == 'NULL' ? null : defaultValue?.trim() - } - table.columns[column.name] = column - }) - } - - private def getColumnType(String oracleColumnType){ - switch (oracleColumnType.toLowerCase()) { - case "varchar2": - return "varchar" - break - default: - return oracleColumnType.toLowerCase() - } - } - - final static def INDEXES_QUERY = ''' + + private def fillColumns(tables, objectName) { + def rows + if (objectName) { + rows = sql.rows(TABLES_COLUMNS_QUERY_BY_NAME, [objectName]) + } else { + rows = sql.rows(TABLES_COLUMNS_QUERY) + } + rows.each({ + def table = tables[it.table_name.toLowerCase()] + def column = new TableColumn() + column.name = it.column_name.toLowerCase() + column.type = getColumnType(it.data_type) + column.size = it.data_size + column.scale = it.data_scale == null ? 0 : it.data_scale + column.nullable = it.nullable == 'NO' ? false : true + def defaultValue = it.data_default + if (defaultValue) { + column.defaultValue = defaultValue?.trim()?.toUpperCase() == 'NULL' ? 
null : defaultValue?.trim() + } + table.columns[column.name] = column + }) + } + + private def getColumnType(String oracleColumnType) { + switch (oracleColumnType.toLowerCase()) { + case "varchar2": + return "varchar" + break + default: + return oracleColumnType.toLowerCase() + } + } + + final static def INDEXES_QUERY = ''' select ct.relname as table_name, ci.relname as index_name, i.indisunique as uniqueness, am.amname as index_type, pg_get_indexdef(ci.oid, (i.keys).n, false) as column_name, case am.amcanorder @@ -191,8 +203,8 @@ class PostgresDatabaseReader implements DatabaseReader { where ct.relname !~ '^(pg_|sql_)' order by table_name, index_name, column_name; ''' - - final static def INDEXES_QUERY_BY_NAME = ''' + + final static def INDEXES_QUERY_BY_NAME = ''' select ct.relname as table_name, ci.relname as index_name, i.indisunique as uniqueness, am.amname as index_type, pg_get_indexdef(ci.oid, (i.keys).n, false) as column_name, case am.amcanorder @@ -216,7 +228,7 @@ class PostgresDatabaseReader implements DatabaseReader { order by table_name, index_name, column_name; ''' - final static def INDEXES_QUERY_9_6 = ''' + final static def INDEXES_QUERY_9_6 = ''' select ct.relname as table_name, ci.relname as index_name, i.indisunique as uniqueness, am.amname as index_type, pg_get_indexdef(ci.oid, (i.keys).n, false) as column_name, case when pg_index_column_has_property(ci.oid,1, 'asc') then 'asc' else 'desc' end as descend @@ -231,9 +243,9 @@ class PostgresDatabaseReader implements DatabaseReader { join pg_am am on (ci.relam = am.oid) where ct.relname !~ '^(pg_|sql_)' order by table_name, index_name, column_name; - ''' + ''' - final static def INDEXES_QUERY_BY_NAME_9_6 = ''' + final static def INDEXES_QUERY_BY_NAME_9_6 = ''' select ct.relname as table_name, ci.relname as index_name, i.indisunique as uniqueness, am.amname as index_type, pg_get_indexdef(ci.oid, (i.keys).n, false) as column_name, case when pg_index_column_has_property(ci.oid,1, 'asc') then 'asc' else 'desc' end as descend @@ -249,130 +261,131 @@ class PostgresDatabaseReader implements DatabaseReader { where ct.relname !~ '^(pg_|sql_)' and ct.relname = ? order by table_name, index_name, column_name; - ''' - - private def fillIndexes(tables, objectName, databaseVersion) { - def rows = queryIndexesInInformationSchema(objectName, databaseVersion) - rows.each({ - def tableName = it.table_name.toLowerCase() - def table = tables[tableName] - def indexName = it.index_name.toLowerCase() - def indexAlreadyExists = table.indexes[indexName] ? true : false - def index = null; - if (indexAlreadyExists) { - index = table.indexes[indexName] - } else { - index = new Index() - index.name = indexName - index.type = getIndexType(it.index_type) - index.unique = it.uniqueness - table.indexes[index.name] = index - } - def indexColumn = new IndexColumn() - indexColumn.name = it.column_name.toLowerCase() - indexColumn.descend = it.descend.toLowerCase() == 'desc' ? 
true : false - index.columns << indexColumn - }) - } - - private queryIndexesInInformationSchema(objectName, databaseVersion){ - def rows = null - if (databaseVersion != null) { - if (objectName) { - if (VersionHelper.isNewerThan9_6(databaseVersion)) { - rows = sql.rows(INDEXES_QUERY_BY_NAME_9_6, [objectName]) - } else { - rows = sql.rows(INDEXES_QUERY_BY_NAME, [objectName]) - } - } else { - if (VersionHelper.isNewerThan9_6(databaseVersion)) { - rows = sql.rows(INDEXES_QUERY_9_6) - } else { - rows = sql.rows(INDEXES_QUERY) - } - } - } else { - if (objectName) { - try { - rows = sql.rows(INDEXES_QUERY_BY_NAME, [objectName]) - } catch (Exception e) { - rows = sql.rows(INDEXES_QUERY_BY_NAME_9_6, [objectName]) - } - } else { - try { - rows = sql.rows(INDEXES_QUERY) - } catch (Exception e) { - rows = sql.rows(INDEXES_QUERY_9_6) - } - } - } - } - - private def getIndexType(String indexType){ - switch (indexType) { - case "btree": - return "n" - break - default: - return indexType - } - } - - final static def CONSTRAINTS_QUERY = ''' + ''' + + private def fillIndexes(tables, objectName, databaseVersion) { + def rows = queryIndexesInInformationSchema(objectName, databaseVersion) + rows.each({ + def tableName = it.table_name.toLowerCase() + def table = tables[tableName] + def indexName = it.index_name.toLowerCase() + def indexAlreadyExists = table.indexes[indexName] ? true : false + def index = null; + if (indexAlreadyExists) { + index = table.indexes[indexName] + } else { + index = new Index() + index.name = indexName + index.type = getIndexType(it.index_type) + index.unique = it.uniqueness + table.indexes[index.name] = index + } + def indexColumn = new IndexColumn() + indexColumn.name = it.column_name.toLowerCase() + indexColumn.descend = it.descend.toLowerCase() == 'desc' ? 
true : false + index.columns << indexColumn + }) + } + + private queryIndexesInInformationSchema(objectName, databaseVersion) { + def rows = null + if (databaseVersion != null) { + if (objectName) { + if (VersionHelper.isNewerThan9_6(databaseVersion)) { + rows = sql.rows(INDEXES_QUERY_BY_NAME_9_6, [objectName]) + } else { + rows = sql.rows(INDEXES_QUERY_BY_NAME, [objectName]) + } + } else { + if (VersionHelper.isNewerThan9_6(databaseVersion)) { + rows = sql.rows(INDEXES_QUERY_9_6) + } else { + rows = sql.rows(INDEXES_QUERY) + } + } + } else { + if (objectName) { + try { + rows = sql.rows(INDEXES_QUERY_BY_NAME, [objectName]) + } catch (Exception e) { + rows = sql.rows(INDEXES_QUERY_BY_NAME_9_6, [objectName]) + } + } else { + try { + rows = sql.rows(INDEXES_QUERY) + } catch (Exception e) { + rows = sql.rows(INDEXES_QUERY_9_6) + } + } + } + } + + private def getIndexType(String indexType) { + switch (indexType) { + case "btree": + return "n" + break + default: + return indexType + } + } + + final static def CONSTRAINTS_QUERY = ''' select tc.table_name, tc.constraint_name, ccu.table_name as ref_table, tc.constraint_type, rc.delete_rule as delete_rule, 'enabled' as status from information_schema.table_constraints tc left join information_schema.referential_constraints rc on tc.constraint_catalog = rc.constraint_catalog and tc.constraint_schema = rc.constraint_schema and tc.constraint_name = rc.constraint_name left join information_schema.constraint_column_usage ccu on rc.unique_constraint_catalog = ccu.constraint_catalog and rc.unique_constraint_schema = ccu.constraint_schema and rc.unique_constraint_name = ccu.constraint_name where lower(tc.constraint_type) in ('primary key','unique', 'foreign key') ''' - final static def CONSTRAINTS_QUERY_BY_NAME = ''' + final static def CONSTRAINTS_QUERY_BY_NAME = ''' select tc.table_name, tc.constraint_name, ccu.table_name as ref_table, tc.constraint_type, rc.delete_rule as delete_rule, 'enabled' as status from information_schema.table_constraints tc left join information_schema.referential_constraints rc on tc.constraint_catalog = rc.constraint_catalog and tc.constraint_schema = rc.constraint_schema and tc.constraint_name = rc.constraint_name left join information_schema.constraint_column_usage ccu on rc.unique_constraint_catalog = ccu.constraint_catalog and rc.unique_constraint_schema = ccu.constraint_schema and rc.unique_constraint_name = ccu.constraint_name where lower(tc.constraint_type) in ('primary key','unique', 'foreign key') and tc.table_name = ? ''' - private def fillCostraints(tables, objectName) { - def rows - if(objectName) { - rows = sql.rows(CONSTRAINTS_QUERY_BY_NAME, [objectName]) - } else { - rows = sql.rows(CONSTRAINTS_QUERY) - } - - rows.each({ - def tableName = it.table_name.toLowerCase() - def table = tables[tableName] - - def constraint = new Constraint() - constraint.name = it.constraint_name.toLowerCase() - constraint.refTable = it.ref_table?.toLowerCase() - constraint.type = getConstraintType(it.constraint_type.toLowerCase()) - def onDelete = it.delete_rule?.toLowerCase() - constraint.onDelete = onDelete == 'no action' ? 
null : onDelete - def status = it.status?.toLowerCase() - constraint.status = status - table.constraints[constraint.name] = constraint - }) - } - - private getConstraintType(constraint_type) { - switch (constraint_type) { - case "primary key": - return "P" - break - case "unique": - return "U" - break - case "foreign key": - return "R" - break - default: - return constraint_type - } - } - - final static def CONSTRAINTS_COLUMNS_QUERY = ''' + + private def fillCostraints(tables, objectName) { + def rows + if (objectName) { + rows = sql.rows(CONSTRAINTS_QUERY_BY_NAME, [objectName]) + } else { + rows = sql.rows(CONSTRAINTS_QUERY) + } + + rows.each({ + def tableName = it.table_name.toLowerCase() + def table = tables[tableName] + + def constraint = new Constraint() + constraint.name = it.constraint_name.toLowerCase() + constraint.refTable = it.ref_table?.toLowerCase() + constraint.type = getConstraintType(it.constraint_type.toLowerCase()) + def onDelete = it.delete_rule?.toLowerCase() + constraint.onDelete = onDelete == 'no action' ? null : onDelete + def status = it.status?.toLowerCase() + constraint.status = status + table.constraints[constraint.name] = constraint + }) + } + + private getConstraintType(constraint_type) { + switch (constraint_type) { + case "primary key": + return "P" + break + case "unique": + return "U" + break + case "foreign key": + return "R" + break + default: + return constraint_type + } + } + + final static def CONSTRAINTS_COLUMNS_QUERY = ''' select tc.table_name, tc.constraint_name, kcu.column_name, ccu.table_name as ref_table, ccu.column_name as ref_field from information_schema.table_constraints tc left join information_schema.key_column_usage kcu on tc.constraint_catalog = kcu.constraint_catalog and tc.constraint_schema = kcu.constraint_schema and tc.constraint_name = kcu.constraint_name @@ -381,7 +394,7 @@ class PostgresDatabaseReader implements DatabaseReader { where lower(tc.constraint_type) in ('primary key','unique', 'foreign key') order by kcu.ordinal_position, kcu.position_in_unique_constraint ''' - final static def CONSTRAINTS_COLUMNS_QUERY_BY_NAME = ''' + final static def CONSTRAINTS_COLUMNS_QUERY_BY_NAME = ''' select tc.table_name, tc.constraint_name, kcu.column_name, ccu.table_name as ref_table, ccu.column_name as ref_field from information_schema.table_constraints tc left join information_schema.key_column_usage kcu on tc.constraint_catalog = kcu.constraint_catalog and tc.constraint_schema = kcu.constraint_schema and tc.constraint_name = kcu.constraint_name @@ -390,81 +403,117 @@ class PostgresDatabaseReader implements DatabaseReader { where lower(tc.constraint_type) in ('primary key','unique', 'foreign key') and tc.table_name = ? 
order by kcu.ordinal_position, kcu.position_in_unique_constraint
	'''
+
+    private def fillCostraintsColumns(tables, objectName) {
+        def rows
+        if (objectName) {
+            rows = sql.rows(CONSTRAINTS_COLUMNS_QUERY_BY_NAME, [objectName])
+        } else {
+            rows = sql.rows(CONSTRAINTS_COLUMNS_QUERY)
+        }
+        rows.each({
+            def tableName = it.table_name.toLowerCase()
+            def table = tables[tableName]
+            def constraint = table.constraints[it.constraint_name.toLowerCase()]
+            constraint.columns << it.column_name.toLowerCase()
+        })
+    }
+
+    private def fillPolicies(tables, objectName) {
+        def rows
+
+        if (objectName) {
+            rows = sql.rows("""
+                select p.tablename, p.policyname, p.permissive, p.cmd, p.qual, p.with_check, p.roles::varchar
+                from pg_catalog.pg_policies p
+                where p.tablename = ?
+            """, [objectName])
+        } else {
+            rows = sql.rows("""
+                select p.tablename, p.policyname, p.permissive, p.cmd, p.qual, p.with_check, p.roles::varchar
+                from pg_catalog.pg_policies p
+            """)
+        }
+
+        rows.each({
+            def tableName = it.tablename.toLowerCase()
+            def policyName = it.policyname
+            def table = tables[tableName]
+
+            def policy = new PostgresPolicy()
+            policy.name = it.policyname
+            policy.permissive = it.permissive == 'PERMISSIVE'
+            policy.cmd = it.cmd
+            policy.usingExpression = it.qual
+            policy.checkExpression = it.with_check
+            // roles is returned as a postgres array literal, e.g. {role_a,role_b}: strip the braces and split
+            policy.roles = it.roles.substring(1, it.roles.size() - 1).tokenize(',')
+
+            table.policies[policyName] = policy
+        })
+    }
+
+    final static def SEQUENCES_QUERY = '''
		select c.relname as sequence_name, '1' as min_value
		from pg_class c
		where c.relkind = 'S'
		order by c.relname
	'''
-	final static def SEQUENCES_QUERY_BY_NAME = '''
+    final static def SEQUENCES_QUERY_BY_NAME = '''
		select c.relname as sequence_name, '1' as min_value
		from pg_class c
		where c.relkind = 'S'
		and c.relname = upper(?)
		order by c.relname
	'''
-	def getSequences(objectName){
-		def sequences = [:]
-		def rows
-		if(objectName) {
-			rows = sql.rows(SEQUENCES_QUERY_BY_NAME, [objectName])
-		} else {
-			rows = sql.rows(SEQUENCES_QUERY)
-		}
-		rows.each({
-			def sequence = new Sequence()
-			sequence.name = it.sequence_name.toLowerCase()
-			sequence.minValue = it.min_value
-			sequences[sequence.name] = sequence
-		})
-		return sequences
-	}
-
-	final static def VIEWS_QUERY = '''
+
+    def getSequences(objectName) {
+        def sequences = [:]
+        def rows
+        if (objectName) {
+            rows = sql.rows(SEQUENCES_QUERY_BY_NAME, [objectName])
+        } else {
+            rows = sql.rows(SEQUENCES_QUERY)
+        }
+        rows.each({
+            def sequence = new Sequence()
+            sequence.name = it.sequence_name.toLowerCase()
+            sequence.minValue = it.min_value
+            sequences[sequence.name] = sequence
+        })
+        return sequences
+    }
+
+    final static def VIEWS_QUERY = '''
		select table_name as view_name, view_definition as text
		from information_schema.views
		where table_schema = 'public'
		order by view_name
	'''
-	final static def VIEWS_QUERY_BY_NAME = '''
+    final static def VIEWS_QUERY_BY_NAME = '''
		select table_name as view_name, view_definition as text
		from information_schema.views
		where table_schema = 'public'
		and table_name = upper(?)
order by view_name ''' - def getViews(objectName){ - def views = [:] - def rows - if(objectName) { - rows = sql.rows(VIEWS_QUERY_BY_NAME, [objectName]) - } else { - rows = sql.rows(VIEWS_QUERY) - } - - rows.each({ - def view = new View() - view.name = it.view_name.toLowerCase() - view.text = it.text - views[view.name] = view - }) - return views - } - - final static def PROCEDURES_NAME_QUERY = ''' + + def getViews(objectName) { + def views = [:] + def rows + if (objectName) { + rows = sql.rows(VIEWS_QUERY_BY_NAME, [objectName]) + } else { + rows = sql.rows(VIEWS_QUERY) + } + + rows.each({ + def view = new View() + view.name = it.view_name.toLowerCase() + view.text = it.text + views[view.name] = view + }) + return views + } + + final static def PROCEDURES_NAME_QUERY = ''' select p.proname as name from pg_namespace n inner join pg_proc p on pronamespace = n.oid @@ -473,7 +522,7 @@ class PostgresDatabaseReader implements DatabaseReader { and n.nspname not in ('information_schema','pg_catalog','pg_toast') order by p.proname ''' - final static def PROCEDURES_NAME_QUERY_BY_NAME = ''' + final static def PROCEDURES_NAME_QUERY_BY_NAME = ''' select p.proname as name from pg_namespace n join pg_proc p on pronamespace = n.oid @@ -483,12 +532,13 @@ class PostgresDatabaseReader implements DatabaseReader { and p.proname = ? order by p.pronames ''' - def getProcedures(objectName) { - def procedures = getProceduresBody(objectName) - return procedures - } - final static def PROCEDURES_BODY_QUERY = ''' + def getProcedures(objectName) { + def procedures = getProceduresBody(objectName) + return procedures + } + + final static def PROCEDURES_BODY_QUERY = ''' select pp.proname as name, pg_get_functiondef(pp.oid) as text @@ -501,7 +551,7 @@ class PostgresDatabaseReader implements DatabaseReader { and pn.nspname <> 'information_schema' order by pp.proname ''' - final static def PROCEDURES_BODY_QUERY_BY_NAME = ''' + final static def PROCEDURES_BODY_QUERY_BY_NAME = ''' select pp.proname as name, pg_get_functiondef(pp.oid) as text @@ -515,24 +565,25 @@ class PostgresDatabaseReader implements DatabaseReader { and pp.proname = ? 
order by pp.proname ''' - def getProceduresBody(objectName) { - def procedures = [:] - def rows - - if(objectName) { - rows = sql.rows(PROCEDURES_BODY_QUERY_BY_NAME, [objectName]) - } else { - rows = sql.rows(PROCEDURES_BODY_QUERY) - } - - rows.each({ - def procedure = new Procedure(name: it.name.toLowerCase(), text: it.text) - procedures[procedure.name] = procedure - }) - return procedures - } - - final static def TRIGGERS_QUERY = ''' + + def getProceduresBody(objectName) { + def procedures = [:] + def rows + + if (objectName) { + rows = sql.rows(PROCEDURES_BODY_QUERY_BY_NAME, [objectName]) + } else { + rows = sql.rows(PROCEDURES_BODY_QUERY) + } + + rows.each({ + def procedure = new Procedure(name: it.name.toLowerCase(), text: it.text) + procedures[procedure.name] = procedure + }) + return procedures + } + + final static def TRIGGERS_QUERY = ''' select trigger_name as name, action_timing, @@ -544,7 +595,7 @@ class PostgresDatabaseReader implements DatabaseReader { group by trigger_name, action_timing, event_object_table, action_orientation, action_statement order by trigger_name, action_timing, event_object_table, action_orientation, action_statement ''' - final static def TRIGGERS_QUERY_BY_NAME = ''' + final static def TRIGGERS_QUERY_BY_NAME = ''' select trigger_name as name, action_timing, @@ -557,34 +608,35 @@ class PostgresDatabaseReader implements DatabaseReader { group by trigger_name, action_timing, event_object_table, action_orientation, action_statement order by trigger_name, action_timing, event_object_table, action_orientation, action_statement ''' - def getTriggers(objectName) { - def triggers = [:] - def rows - - if(objectName) { - rows = sql.rows(TRIGGERS_QUERY_BY_NAME, [objectName]) - } else { - rows = sql.rows(TRIGGERS_QUERY) - } - - rows.each({ - def triggerName = it.name.toLowerCase() - def trigger = triggers[triggerName] ?: new Trigger() - trigger.name = triggerName - def text = "" - text += "CREATE TRIGGER ${it.name}\n" - text += "${it.action_timing} ${it.event_manipulation} ON ${it.event_object_table}\n" - text += "FOR EACH ${it.action_orientation}\n" - text += "${it.action_statement}\n" - trigger.text = text - triggers[triggerName] = trigger - }) - - return triggers - } - - def getPackages(objectName) { - def packages = [:] - return packages - } + + def getTriggers(objectName) { + def triggers = [:] + def rows + + if (objectName) { + rows = sql.rows(TRIGGERS_QUERY_BY_NAME, [objectName]) + } else { + rows = sql.rows(TRIGGERS_QUERY) + } + + rows.each({ + def triggerName = it.name.toLowerCase() + def trigger = triggers[triggerName] ?: new Trigger() + trigger.name = triggerName + def text = "" + text += "CREATE TRIGGER ${it.name}\n" + text += "${it.action_timing} ${it.event_manipulation} ON ${it.event_object_table}\n" + text += "FOR EACH ${it.action_orientation}\n" + text += "${it.action_statement}\n" + trigger.text = text + triggers[triggerName] = trigger + }) + + return triggers + } + + def getPackages(objectName) { + def packages = [:] + return packages + } } diff --git a/src/main/groovy/br/com/bluesoft/bee/importer/JsonImporter.groovy b/src/main/groovy/br/com/bluesoft/bee/importer/JsonImporter.groovy index dd8fb11..fea7263 100644 --- a/src/main/groovy/br/com/bluesoft/bee/importer/JsonImporter.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/importer/JsonImporter.groovy @@ -32,165 +32,182 @@ */ package br.com.bluesoft.bee.importer -import java.io.File; - -import org.codehaus.jackson.map.ObjectMapper - -import br.com.bluesoft.bee.model.* import 
br.com.bluesoft.bee.model.Package +import br.com.bluesoft.bee.model.Procedure +import br.com.bluesoft.bee.model.Schema +import br.com.bluesoft.bee.model.Sequence +import br.com.bluesoft.bee.model.Table +import br.com.bluesoft.bee.model.Trigger +import br.com.bluesoft.bee.model.UserType +import br.com.bluesoft.bee.model.View +import br.com.bluesoft.bee.model.postgres.PostgresTable import br.com.bluesoft.bee.util.JsonUtil - +import br.com.bluesoft.bee.util.RDBMS +import org.codehaus.jackson.map.ObjectMapper class JsonImporter implements Importer { - def path - ObjectMapper mapper - File mainFolder - File tablesFolder - File sequencesFolder - File viewsFolder - File proceduresFolder - File packagesFolder - File triggersFolder - File userTypesFolder - - JsonImporter() { - this(null) - } - - JsonImporter(def path) { - this.path = path ?: '/tmp/bee' - this.mainFolder = new File(this.path) - this.tablesFolder = new File(mainFolder, 'tables') - this.sequencesFolder = new File(mainFolder, 'sequences') - this.viewsFolder = new File(mainFolder, 'views') - this.proceduresFolder = new File(mainFolder, 'procedures') - this.packagesFolder = new File(mainFolder, 'packages') - this.triggersFolder = new File(mainFolder, 'triggers') - this.userTypesFolder = new File(mainFolder, 'usertypes') - - this.mapper = JsonUtil.createMapper() - } - - Schema importMetaData(){ - Schema schema = new Schema() - schema.tables = importTables() - schema.views = importViews() - schema.sequences = importSequences() - schema.procedures = importProcedures() - schema.packages = importPackages() - schema.triggers = importTriggers() - schema.userTypes = importUserTypes() - return schema - } - - private def importTables() { - checkIfFolderExists(tablesFolder) - def tables = [:] - tablesFolder.eachFile { - if (it.name.endsWith(".bee")) { - def table = mapper.readValue(it, Table.class) - tables[table.name] = table - } - } - return tables - } - - private def importViews() { - checkIfFolderExists(viewsFolder) - def views = [:] - def files = viewsFolder.listFiles().sort { it.name } - files.each { - if (it.name.endsWith(".bee")) { - def view = mapper.readValue(it, View.class) - views[view.name] = view - } - } - return views - } - - private def importSequences() { - def sequences = [:] - def sequencesFolderExists = checkIfFolderExists(sequencesFolder) - - if (sequencesFolderExists && sequencesFolder.listFiles().size() > 0) { - sequencesFolder.eachFile { - if (it.name.endsWith(".bee")) { - def sequence = mapper.readValue(it, Sequence.class) - sequences[sequence.name] = sequence - } - } - } else { - File sequenceFile = new File(mainFolder, 'sequences.bee') - if (sequenceFile.exists()) { - def sequencesJSON = mapper.readTree(sequenceFile.getText()) - sequencesJSON.getElements().each { - def sequence = mapper.readValue(it.toString(), Sequence.class) - sequences[sequence.name] = sequence - } - } - } - - return sequences - } - - private def importProcedures() { - checkIfFolderExists(proceduresFolder) - def procedures = [:] - proceduresFolder.eachFile { - if (it.name.endsWith(".bee")) { - def procedure = mapper.readValue(it, Procedure.class) - procedures[procedure.name] = procedure - } - } - - return procedures - } - - private def importPackages() { - checkIfFolderExists(packagesFolder) - def packages = [:] - packagesFolder.eachFile { - if (it.name.endsWith(".bee")) { - def pack = mapper.readValue(it, Package.class) - packages[pack.name] = pack - } - } - return packages - } - - private def importTriggers() { - checkIfFolderExists(triggersFolder) - 
def triggers = [:] - triggersFolder.eachFile { - if (it.name.endsWith(".bee")) { - def trigger = mapper.readValue(it, Trigger.class) - triggers[trigger.name] = trigger - } - } - return triggers - } - - private def importUserTypes() { - checkIfFolderExists(userTypesFolder) - def userTypes = [:] - userTypesFolder.eachFile { - if (it.name.endsWith(".bee")) { - def userType = mapper.readValue(it, UserType.class) - userTypes[userType.name] = userType - } - } - return userTypes - } - - private def checkIfFolderExists(def directory) { - def exists = false - if (!directory.isDirectory()) { - directory.mkdir() - } else { - exists = true - } - return exists - } + def path + ObjectMapper mapper + File mainFolder + File tablesFolder + File sequencesFolder + File viewsFolder + File proceduresFolder + File packagesFolder + File triggersFolder + File userTypesFolder + + JsonImporter() { + this(null) + } + + JsonImporter(def path) { + this.path = path ?: '/tmp/bee' + this.mainFolder = new File(this.path) + this.tablesFolder = new File(mainFolder, 'tables') + this.sequencesFolder = new File(mainFolder, 'sequences') + this.viewsFolder = new File(mainFolder, 'views') + this.proceduresFolder = new File(mainFolder, 'procedures') + this.packagesFolder = new File(mainFolder, 'packages') + this.triggersFolder = new File(mainFolder, 'triggers') + this.userTypesFolder = new File(mainFolder, 'usertypes') + + this.mapper = JsonUtil.createMapper() + } + + Schema importMetaData() { + importMetaData(Table.class) + } + + Schema importMetaData(RDBMS rdbms) { + switch (rdbms) { + case RDBMS.POSTGRES: + return importMetaData(PostgresTable.class) + default: + return importMetaData() + } + } + + Schema importMetaData(Class tableClass) { + Schema schema = new Schema() + schema.tables = importTables(tableClass) + schema.views = importViews() + schema.sequences = importSequences() + schema.procedures = importProcedures() + schema.packages = importPackages() + schema.triggers = importTriggers() + schema.userTypes = importUserTypes() + return schema + } + + private def importTables(Class tableClass) { + checkIfFolderExists(tablesFolder) + def tables = [:] + tablesFolder.eachFile { + if (it.name.endsWith(".bee")) { + def table = mapper.readValue(it, tableClass) + tables[table.name] = table + } + } + return tables + } + + private def importViews() { + checkIfFolderExists(viewsFolder) + def views = [:] + def files = viewsFolder.listFiles().sort { it.name } + files.each { + if (it.name.endsWith(".bee")) { + def view = mapper.readValue(it, View.class) + views[view.name] = view + } + } + return views + } + + private def importSequences() { + def sequences = [:] + def sequencesFolderExists = checkIfFolderExists(sequencesFolder) + + if (sequencesFolderExists && sequencesFolder.listFiles().size() > 0) { + sequencesFolder.eachFile { + if (it.name.endsWith(".bee")) { + def sequence = mapper.readValue(it, Sequence.class) + sequences[sequence.name] = sequence + } + } + } else { + File sequenceFile = new File(mainFolder, 'sequences.bee') + if (sequenceFile.exists()) { + def sequencesJSON = mapper.readTree(sequenceFile.getText()) + sequencesJSON.getElements().each { + def sequence = mapper.readValue(it.toString(), Sequence.class) + sequences[sequence.name] = sequence + } + } + } + + return sequences + } + + private def importProcedures() { + checkIfFolderExists(proceduresFolder) + def procedures = [:] + proceduresFolder.eachFile { + if (it.name.endsWith(".bee")) { + def procedure = mapper.readValue(it, Procedure.class) + 
procedures[procedure.name] = procedure + } + } + + return procedures + } + + private def importPackages() { + checkIfFolderExists(packagesFolder) + def packages = [:] + packagesFolder.eachFile { + if (it.name.endsWith(".bee")) { + def pack = mapper.readValue(it, Package.class) + packages[pack.name] = pack + } + } + return packages + } + + private def importTriggers() { + checkIfFolderExists(triggersFolder) + def triggers = [:] + triggersFolder.eachFile { + if (it.name.endsWith(".bee")) { + def trigger = mapper.readValue(it, Trigger.class) + triggers[trigger.name] = trigger + } + } + return triggers + } + + private def importUserTypes() { + checkIfFolderExists(userTypesFolder) + def userTypes = [:] + userTypesFolder.eachFile { + if (it.name.endsWith(".bee")) { + def userType = mapper.readValue(it, UserType.class) + userTypes[userType.name] = userType + } + } + return userTypes + } + + private def checkIfFolderExists(def directory) { + def exists = false + if (!directory.isDirectory()) { + directory.mkdir() + } else { + exists = true + } + return exists + } } diff --git a/src/main/groovy/br/com/bluesoft/bee/model/ObjectType.groovy b/src/main/groovy/br/com/bluesoft/bee/model/ObjectType.groovy index 8ab9406..12c898f 100644 --- a/src/main/groovy/br/com/bluesoft/bee/model/ObjectType.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/model/ObjectType.groovy @@ -34,39 +34,49 @@ package br.com.bluesoft.bee.model; public enum ObjectType { - TABLE("table"), VIEW("view"), SEQUENCE("sequence"), TABLE_COLUMN("table column"), INDEX('index'), CONSTRAINT('constraint'), PROCEDURE('procedure'), PACKAGE('package'), TRIGGER('trigger'), USER_TYPE('user type') + TABLE("table"), + VIEW("view"), + SEQUENCE("sequence"), + TABLE_COLUMN("table column"), + INDEX('index'), + CONSTRAINT('constraint'), + PROCEDURE('procedure'), + PACKAGE('package'), + TRIGGER('trigger'), + USER_TYPE('user type'), + POLICY('policy') - def description + def description - private ObjectType(def description) { - this.description = description - } + private ObjectType(def description) { + this.description = description + } - static def getType(object) { + static def getType(object) { - switch(object.class) { - case Table.class: - return TABLE - case View.class: - return VIEW - case Sequence.class: - return SEQUENCE - case TableColumn.class: - return TABLE_COLUMN - case Index.class: - return INDEX - case Constraint.class: - return CONSTRAINT - case Procedure.class: - return PROCEDURE - case Package.class: - return PACKAGE - case Trigger.class: - return TRIGGER - case UserType.class: - return USER_TYPE - default: - return null - } - } + switch (object.class) { + case Table.class: + return TABLE + case View.class: + return VIEW + case Sequence.class: + return SEQUENCE + case TableColumn.class: + return TABLE_COLUMN + case Index.class: + return INDEX + case Constraint.class: + return CONSTRAINT + case Procedure.class: + return PROCEDURE + case Package.class: + return PACKAGE + case Trigger.class: + return TRIGGER + case UserType.class: + return USER_TYPE + default: + return null + } + } } diff --git a/src/main/groovy/br/com/bluesoft/bee/model/Schema.groovy b/src/main/groovy/br/com/bluesoft/bee/model/Schema.groovy index b80c3fd..7ef7f05 100644 --- a/src/main/groovy/br/com/bluesoft/bee/model/Schema.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/model/Schema.groovy @@ -36,101 +36,102 @@ import br.com.bluesoft.bee.model.message.Message import br.com.bluesoft.bee.model.message.MessageLevel import br.com.bluesoft.bee.model.message.MessageType - class 
Schema { - String databaseVersion - Map tables = [:] - Map views = [:] - Map sequences = [:] - Map procedures = [:] - Map packages = [:] - Map triggers = [:] - Map userTypes = [:] - boolean filtered - def validateWithMetadata(Schema metadataSchema) { + String databaseVersion + Map tables = [:] + Map views = [:] + Map sequences = [:] + Map procedures = [:] + Map packages = [:] + Map triggers = [:] + Map userTypes = [:] + boolean filtered + + def validateWithMetadata(Schema metadataSchema) { + + def databaseObjects = this.getAllObjects() + def metadataObjects = metadataSchema.getAllObjects() - def databaseObjects = this.getAllObjects() - def metadataObjects = metadataSchema.getAllObjects() + def messages = [] + messages.addAll getMissingObjectsMessages(databaseObjects, metadataObjects) + messages.addAll getAdditionalObjectsMessages(databaseObjects, metadataObjects) + messages.addAll getWrongTypesObjectMessages(databaseObjects, metadataObjects) + databaseObjects.each { objName, obj -> + if (obj instanceof Validator) { + def target = metadataSchema.getAllObjects()[objName] + if (target) { + messages.addAll obj.validateWithMetadata(target) + } + } + } - def messages = [] - messages.addAll getMissingObjectsMessages(databaseObjects, metadataObjects) - messages.addAll getAdditionalObjectsMessages(databaseObjects, metadataObjects) - messages.addAll getWrongTypesObjectMessages(databaseObjects, metadataObjects) - databaseObjects.each { objName, obj -> - if (obj instanceof Validator) { - def target = metadataSchema.getAllObjects()[objName] - if (target) - messages.addAll obj.validateWithMetadata(target) - } - } + return messages + } - return messages - } - - private def getWrongTypesObjectMessages(databaseObjects, metadataObjects) { - def messages = [] - databaseObjects.each { objName, obj -> - def objectsWithWrongType = metadataObjects.find{it.key == objName && it.value.class != obj.class} - objectsWithWrongType.each { - def messageText = "The database contain one ${obj.class.simpleName} with name ${objName}, but reference metadata too contain a ${it.value.class.simpleName} with same name."; - def message = new Message(objectName:objName, level:MessageLevel.ERROR, objectType:ObjectType.TABLE, messageType:MessageType.PRESENCE, message:messageText) - messages << message - } - } - return messages - } + private def getWrongTypesObjectMessages(databaseObjects, metadataObjects) { + def messages = [] + databaseObjects.each { objName, obj -> + def objectsWithWrongType = metadataObjects.find { it.key == objName && it.value.class != obj.class } + objectsWithWrongType.each { + def messageText = "The database contain one ${obj.class.simpleName} with name ${objName}, but reference metadata too contain a ${it.value.class.simpleName} with same name."; + def message = new Message(objectName: objName, level: MessageLevel.ERROR, objectType: ObjectType.TABLE, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + } + return messages + } - private def getMissingObjectsMessages(databaseObjects, metadataObjects) { - def messages = [] - def databaseMissingObjects = metadataObjects.keySet() - databaseObjects.keySet() - databaseMissingObjects.each { - def object = metadataObjects[it] - def objectType = ObjectType.getType(object) - def messageText = "The database is missing the ${objectType.description} ${it}."; - def message = new Message(objectName:it, level:MessageLevel.ERROR, objectType:objectType, messageType:MessageType.PRESENCE, message:messageText) - messages << message - } - return 
messages - } + private def getMissingObjectsMessages(databaseObjects, metadataObjects) { + def messages = [] + def databaseMissingObjects = metadataObjects.keySet() - databaseObjects.keySet() + databaseMissingObjects.each { + def object = metadataObjects[it] + def objectType = ObjectType.getType(object) + def messageText = "The database is missing the ${objectType.description} ${it}."; + def message = new Message(objectName: it, level: MessageLevel.ERROR, objectType: objectType, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + return messages + } - private def getAdditionalObjectsMessages(databaseObjects, metadataObjects) { - def messages = [] - def databaseMissingObjects = databaseObjects.keySet() - metadataObjects.keySet() - databaseMissingObjects.each { - def object = databaseObjects[it] - def objectType = ObjectType.getType(object) - def messageText = "The ${objectType.description} ${it} exists in the database but does not exist in the reference metadata."; - def message = new Message(objectName:it, level:MessageLevel.WARNING, objectType:objectType, messageType:MessageType.PRESENCE, message:messageText) - messages << message - } - return messages - } + private def getAdditionalObjectsMessages(databaseObjects, metadataObjects) { + def messages = [] + def databaseMissingObjects = databaseObjects.keySet() - metadataObjects.keySet() + databaseMissingObjects.each { + def object = databaseObjects[it] + def objectType = ObjectType.getType(object) + def messageText = "The ${objectType.description} ${it} exists in the database but does not exist in the reference metadata."; + def message = new Message(objectName: it, level: MessageLevel.WARNING, objectType: objectType, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + return messages + } - def getAllObjects() { - def allObjects = [:] - allObjects.putAll tables - allObjects.putAll views - allObjects.putAll sequences - allObjects.putAll procedures - allObjects.putAll packages - allObjects.putAll triggers - allObjects.putAll userTypes - return allObjects - } + def getAllObjects() { + def allObjects = [:] + allObjects.putAll tables + allObjects.putAll views + allObjects.putAll sequences + allObjects.putAll procedures + allObjects.putAll packages + allObjects.putAll triggers + allObjects.putAll userTypes + return allObjects + } - def filter(String objectName) { - def Schema schema = new Schema() - schema.tables.putAll tables.findAll { it.key == objectName } - schema.views.putAll views.findAll { it.key == objectName } - schema.sequences.putAll sequences.findAll { it.key == objectName } - schema.procedures.putAll procedures.findAll { it.key == objectName } - schema.packages.putAll packages.findAll { it.key == objectName } - schema.triggers.putAll triggers.findAll { it.key == objectName } - schema.userTypes.putAll userTypes.findAll {it.key == objectName } - schema.filtered = true - return schema - } + def filter(String objectName) { + def Schema schema = new Schema() + schema.tables.putAll tables.findAll { it.key == objectName } + schema.views.putAll views.findAll { it.key == objectName } + schema.sequences.putAll sequences.findAll { it.key == objectName } + schema.procedures.putAll procedures.findAll { it.key == objectName } + schema.packages.putAll packages.findAll { it.key == objectName } + schema.triggers.putAll triggers.findAll { it.key == objectName } + schema.userTypes.putAll userTypes.findAll { it.key == objectName } + schema.filtered = true + return schema + } } diff --git 
a/src/main/groovy/br/com/bluesoft/bee/model/Table.groovy b/src/main/groovy/br/com/bluesoft/bee/model/Table.groovy index 575af38..10725b2 100644 --- a/src/main/groovy/br/com/bluesoft/bee/model/Table.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/model/Table.groovy @@ -38,122 +38,124 @@ import br.com.bluesoft.bee.model.message.MessageType class Table implements Validator { - String name - Boolean temporary = false - String comment - - Map columns = [:] as LinkedHashMap - Map indexes = [:] - Map constraints = [:] - - Boolean shouldImportTheData() { - this.comment ? this.comment?.toUpperCase()?.contains('#CORE') : false - } - - List validateWithMetadata(metadataTable) { - if (!metadataTable instanceof Table) - return [] - - def messages = [] - if (metadataTable) { - messages.addAll validatePresenceOfColumns(metadataTable) - messages.addAll validatePresenceOfIndexes(metadataTable) - messages.addAll validatePresenceOfConstraints(metadataTable) - messages.addAll validateTemporary(metadataTable) - messages.addAll validateColumns(metadataTable) - messages.addAll validateIndexes(metadataTable) - messages.addAll validateConstraints(metadataTable) - } - return messages - } - - private def validatePresenceOfColumns(Table metadataTable) { - def messages = [] - def databaseMissingColumns = metadataTable.columns.keySet() - this.columns.keySet() - def aditionalDatabaseColumns = this.columns.keySet() - metadataTable.columns.keySet() - databaseMissingColumns.each { - def messageText = "The table ${name} is missing the column ${it}."; - def message = new Message(objectName:"${name}.${it}", level:MessageLevel.ERROR, objectType:ObjectType.TABLE_COLUMN, messageType:MessageType.PRESENCE, message:messageText) - messages << message - } - aditionalDatabaseColumns.each { - def messageText = "The table ${name} has the additional column ${it}."; - def message = new Message(objectName:"${name}.${it}", level:MessageLevel.ERROR, objectType:ObjectType.TABLE_COLUMN, messageType:MessageType.PRESENCE, message:messageText) - messages << message - } - return messages - } - - private def validatePresenceOfIndexes(Table metadataTable) { - def messages = [] - def databaseMissingIndexes = metadataTable.indexes.keySet() - this.indexes.keySet() - def aditionalDatabaseIndexes = this.indexes.keySet() - metadataTable.indexes.keySet() - databaseMissingIndexes.each { - def messageText = "The table ${name} is missing the index ${it}."; - def message = new Message(objectName:"${it}", level:MessageLevel.ERROR, objectType:ObjectType.INDEX, messageType:MessageType.PRESENCE, message:messageText) - messages << message - } - aditionalDatabaseIndexes.each { - def messageText = "The table ${name} has the additional index ${it}."; - def message = new Message(objectName:"${it}", level:MessageLevel.ERROR, objectType:ObjectType.INDEX, messageType:MessageType.PRESENCE, message:messageText) - messages << message - } - return messages - } - - private def validatePresenceOfConstraints(Table metadataTable) { - def messages = [] - def databaseMissingConstraints = metadataTable.constraints.keySet() - this.constraints.keySet() - def aditionalDatabaseConstraints = this.constraints.keySet() - metadataTable.constraints.keySet() - databaseMissingConstraints.each { - def messageText = "The table ${name} is missing the constraint ${it}."; - def message = new Message(objectName:"${it}", level:MessageLevel.ERROR, objectType:ObjectType.CONSTRAINT, messageType:MessageType.PRESENCE, message:messageText) - messages << message - } - aditionalDatabaseConstraints.each { - 
def messageText = "The table ${name} has the additional constraint ${it}."; - def message = new Message(objectName:"${it}", level:MessageLevel.ERROR, objectType:ObjectType.CONSTRAINT, messageType:MessageType.PRESENCE, message:messageText) - messages << message - } - return messages - } - - private def validateTemporary(Table metadataTable) { - def messages = [] - if (metadataTable.temporary != this.temporary) { - def message = new Message(objectName:name, level:MessageLevel.ERROR, objectType:ObjectType.TABLE, messageType:MessageType.TEMPORARY) - if (metadataTable.temporary) { - message.message = "The table ${this.name} should be temporary."; - } else { - message.message = "The table ${this.name} should not be temporary."; - } - messages << message - } - return messages - } - - private def validateColumns(Table metadataTable) { - return validateElements('columns', metadataTable) - } - - private def validateIndexes(Table metadataTable) { - return validateElements('indexes', metadataTable) - } - - private def validateConstraints(Table metadataTable) { - return validateElements('constraints', metadataTable) - } - - private def validateElements(elements, metadataTable) { - def messages = [] - def metadataElementsMap = metadataTable[elements] - def databaseElementsMap = this[elements] - - metadataElementsMap.each { elementName, element -> - if (databaseElementsMap[elementName]) { - messages.addAll(databaseElementsMap[elementName].validateWithMetadata(metadataTable, element)) - } - } - return messages - } + String name + Boolean temporary = false + String comment + + Map columns = [:] as LinkedHashMap + Map indexes = [:] + Map constraints = [:] + + Boolean shouldImportTheData() { + this.comment ? this.comment?.toUpperCase()?.contains('#CORE') : false + } + + List validateWithMetadata(metadataTable) { + if (!metadataTable instanceof Table) { + return [] + } + + def messages = [] + + if (metadataTable) { + messages.addAll validatePresenceOfColumns(metadataTable) + messages.addAll validatePresenceOfIndexes(metadataTable) + messages.addAll validatePresenceOfConstraints(metadataTable) + messages.addAll validateTemporary(metadataTable) + messages.addAll validateColumns(metadataTable) + messages.addAll validateIndexes(metadataTable) + messages.addAll validateConstraints(metadataTable) + } + return messages + } + + private def validatePresenceOfColumns(Table metadataTable) { + def messages = [] + def databaseMissingColumns = metadataTable.columns.keySet() - this.columns.keySet() + def aditionalDatabaseColumns = this.columns.keySet() - metadataTable.columns.keySet() + databaseMissingColumns.each { + def messageText = "The table ${name} is missing the column ${it}."; + def message = new Message(objectName: "${name}.${it}", level: MessageLevel.ERROR, objectType: ObjectType.TABLE_COLUMN, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + aditionalDatabaseColumns.each { + def messageText = "The table ${name} has the additional column ${it}."; + def message = new Message(objectName: "${name}.${it}", level: MessageLevel.ERROR, objectType: ObjectType.TABLE_COLUMN, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + return messages + } + + private def validatePresenceOfIndexes(Table metadataTable) { + def messages = [] + def databaseMissingIndexes = metadataTable.indexes.keySet() - this.indexes.keySet() + def aditionalDatabaseIndexes = this.indexes.keySet() - metadataTable.indexes.keySet() + databaseMissingIndexes.each { + def messageText = "The 
table ${name} is missing the index ${it}."; + def message = new Message(objectName: "${it}", level: MessageLevel.ERROR, objectType: ObjectType.INDEX, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + aditionalDatabaseIndexes.each { + def messageText = "The table ${name} has the additional index ${it}."; + def message = new Message(objectName: "${it}", level: MessageLevel.ERROR, objectType: ObjectType.INDEX, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + return messages + } + + private def validatePresenceOfConstraints(Table metadataTable) { + def messages = [] + def databaseMissingConstraints = metadataTable.constraints.keySet() - this.constraints.keySet() + def aditionalDatabaseConstraints = this.constraints.keySet() - metadataTable.constraints.keySet() + databaseMissingConstraints.each { + def messageText = "The table ${name} is missing the constraint ${it}."; + def message = new Message(objectName: "${it}", level: MessageLevel.ERROR, objectType: ObjectType.CONSTRAINT, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + aditionalDatabaseConstraints.each { + def messageText = "The table ${name} has the additional constraint ${it}."; + def message = new Message(objectName: "${it}", level: MessageLevel.ERROR, objectType: ObjectType.CONSTRAINT, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + return messages + } + + private def validateTemporary(Table metadataTable) { + def messages = [] + if (metadataTable.temporary != this.temporary) { + def message = new Message(objectName: name, level: MessageLevel.ERROR, objectType: ObjectType.TABLE, messageType: MessageType.TEMPORARY) + if (metadataTable.temporary) { + message.message = "The table ${this.name} should be temporary."; + } else { + message.message = "The table ${this.name} should not be temporary."; + } + messages << message + } + return messages + } + + private def validateColumns(Table metadataTable) { + return validateElements('columns', metadataTable) + } + + private def validateIndexes(Table metadataTable) { + return validateElements('indexes', metadataTable) + } + + private def validateConstraints(Table metadataTable) { + return validateElements('constraints', metadataTable) + } + + protected def validateElements(elements, Table metadataTable) { + def messages = [] + def metadataElementsMap = metadataTable[elements] + def databaseElementsMap = this[elements] + + metadataElementsMap.each { elementName, element -> + if (databaseElementsMap[elementName]) { + messages.addAll(databaseElementsMap[elementName].validateWithMetadata(metadataTable, element)) + } + } + return messages + } } diff --git a/src/main/groovy/br/com/bluesoft/bee/model/message/MessageType.groovy b/src/main/groovy/br/com/bluesoft/bee/model/message/MessageType.groovy index e028ed6..70bafbd 100644 --- a/src/main/groovy/br/com/bluesoft/bee/model/message/MessageType.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/model/message/MessageType.groovy @@ -34,39 +34,39 @@ package br.com.bluesoft.bee.model.message; enum MessageType { - PRESENCE, - DATA_TYPE, - DATA_SIZE, - DATA_SCALE, - NULLABILITY, - DATA_DEFAULT, - AUTO_INCREMENT, - ON_UPDATE_CURRENT_TIMESTAMP, - SEQUENCE_MIN_VALUE, - STRUCTURE, - TEMPORARY, + PRESENCE, + DATA_TYPE, + DATA_SIZE, + DATA_SCALE, + NULLABILITY, + DATA_DEFAULT, + AUTO_INCREMENT, + ON_UPDATE_CURRENT_TIMESTAMP, + SEQUENCE_MIN_VALUE, + STRUCTURE, + TEMPORARY, - INDEX_TYPE, - INDEX_UNIQUENESS, - INDEX_COLUMNS, + 
INDEX_TYPE, + INDEX_UNIQUENESS, + INDEX_COLUMNS, - CONSTRAINT_TYPE, - CONSTRAINT_REF_TABLE, - CONSTRAINT_COLUMNS, - CONSTRAINT_REF_COLUMNS, - CONSTRAINT_DELETE_RULE, - CONSTRAINT_STATUS, - CONSTRAINT_SEARCH_CONDITION, + CONSTRAINT_TYPE, + CONSTRAINT_REF_TABLE, + CONSTRAINT_COLUMNS, + CONSTRAINT_REF_COLUMNS, + CONSTRAINT_DELETE_RULE, + CONSTRAINT_STATUS, + CONSTRAINT_SEARCH_CONDITION, - VIEW_BODY, + VIEW_BODY, - PROCEDURE_BODY, + PROCEDURE_BODY, - PACKAGE_HEADER, - PACKAGE_BODY, + PACKAGE_HEADER, + PACKAGE_BODY, - TRIGGER_BODY, + TRIGGER_BODY, - DATA_MISMATCH, - USER_TYPE_BODY + DATA_MISMATCH, + USER_TYPE_BODY } diff --git a/src/main/groovy/br/com/bluesoft/bee/model/postgres/PostgresPolicy.groovy b/src/main/groovy/br/com/bluesoft/bee/model/postgres/PostgresPolicy.groovy new file mode 100644 index 0000000..6530711 --- /dev/null +++ b/src/main/groovy/br/com/bluesoft/bee/model/postgres/PostgresPolicy.groovy @@ -0,0 +1,71 @@ +package br.com.bluesoft.bee.model.postgres + +import br.com.bluesoft.bee.model.ObjectType +import br.com.bluesoft.bee.model.Table +import br.com.bluesoft.bee.model.message.Message +import br.com.bluesoft.bee.model.message.MessageLevel +import br.com.bluesoft.bee.model.message.MessageType + +class PostgresPolicy { + + String name + String cmd + String usingExpression + String checkExpression + String[] roles + boolean permissive + + def validateWithMetadata(Table table, PostgresPolicy metadataPolicy) { + def messages = [] + + if (metadataPolicy.cmd != this.cmd) { + def messageText = "The cmd of the policy ${this.name} of the table ${table.name} should be ${metadataPolicy.cmd} but it is ${this.cmd}" + messages << new Message(objectName: this.name, level: MessageLevel.ERROR, objectType: ObjectType.POLICY, messageType: MessageType.DATA_MISMATCH, message: messageText) + } + + if (metadataPolicy.usingExpression != this.usingExpression) { + def messageText = "The using expression of the policy ${this.name} of the table ${table.name} should be ${metadataPolicy.usingExpression} but it is ${this.usingExpression}" + messages << new Message(objectName: this.name, level: MessageLevel.ERROR, objectType: ObjectType.POLICY, messageType: MessageType.DATA_MISMATCH, message: messageText) + } + + if (metadataPolicy.checkExpression != this.checkExpression) { + def messageText = "The check expression of the policy ${this.name} of the table ${table.name} should be ${metadataPolicy.checkExpression} but it is ${this.checkExpression}" + messages << new Message(objectName: this.name, level: MessageLevel.ERROR, objectType: ObjectType.POLICY, messageType: MessageType.DATA_MISMATCH, message: messageText) + } + + if (metadataPolicy.permissive != this.permissive) { + def messageText = "The policy ${this.name} of the table ${table.name} should ${metadataPolicy.permissive ? 
'be' : 'not be'} permissive" + messages << new Message(objectName: this.name, level: MessageLevel.ERROR, objectType: ObjectType.POLICY, messageType: MessageType.DATA_MISMATCH, message: messageText) + } + + messages.addAll validatePresenceOfRoles(table, metadataPolicy) + + return messages + } + + private def validatePresenceOfRoles(Table table, PostgresPolicy metadataPolicy) { + def messages = [] + + def missingRoles = metadataPolicy.roles - this.roles + def aditionalRoles = this.roles - metadataPolicy.roles + + missingRoles.each { + def messageText = "The policy ${name} of the table ${table.name} is missing the role ${it}."; + def message = new Message(objectName: "${table.name}.${name}", level: MessageLevel.ERROR, objectType: ObjectType.POLICY, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + + aditionalRoles.each { + def messageText = "The policy ${name} of the table ${table.name} has the additional role ${it}."; + def message = new Message(objectName: "${table.name}.${name}", level: MessageLevel.ERROR, objectType: ObjectType.POLICY, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + + return messages + } + + // forcing getter to avoid the creation of conflicting getters (is + get) + boolean isPermissive() { + return permissive + } +} diff --git a/src/main/groovy/br/com/bluesoft/bee/model/postgres/PostgresTable.groovy b/src/main/groovy/br/com/bluesoft/bee/model/postgres/PostgresTable.groovy new file mode 100644 index 0000000..7cdb468 --- /dev/null +++ b/src/main/groovy/br/com/bluesoft/bee/model/postgres/PostgresTable.groovy @@ -0,0 +1,82 @@ +/* + * The contents of this file are subject to the Mozilla Public License Version + * 1.1 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * http://www.mozilla.org/MPL/ + * + * Software distributed under the License is distributed on an "AS IS" basis, + * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License + * for the specific language governing rights and limitations under the + * License. + * + * The Original Code is mozilla.org code. + * + * The Initial Developer of the Original Code is + * Bluesoft Consultoria em Informatica Ltda. + * Portions created by the Initial Developer are Copyright (C) 2011 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * + * Alternatively, the contents of this file may be used under the terms of + * either of the GNU General Public License Version 2 or later (the "GPL"), + * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your + * decision by deleting the provisions above and replace them with the notice + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. 
+ */ +package br.com.bluesoft.bee.model.postgres + +import br.com.bluesoft.bee.model.ObjectType +import br.com.bluesoft.bee.model.Table +import br.com.bluesoft.bee.model.message.Message +import br.com.bluesoft.bee.model.message.MessageLevel +import br.com.bluesoft.bee.model.message.MessageType + +class PostgresTable extends Table { + + Boolean rowSecurity + Map policies = [:] + + List validateWithMetadata(metadataTable) { + def messages = super.validateWithMetadata(metadataTable) + + if (metadataTable instanceof PostgresTable) { + if (metadataTable.rowSecurity != this.rowSecurity) { + def messageText = "The row security of the table ${metadataTable.name} should be ${metadataTable.rowSecurity} but it is ${this.rowSecurity}" + messages << new Message(objectName: this.name, level: MessageLevel.ERROR, objectType: ObjectType.TABLE, messageType: MessageType.PRESENCE, message: messageText) + } + + messages.addAll validatePresenceOfPolicies(metadataTable) + messages.addAll validateElements('policies', metadataTable) + } + + return messages + } + + private def validatePresenceOfPolicies(PostgresTable metadataTable) { + def messages = [] + + def missingPolicies = metadataTable.policies.keySet() - this.policies.keySet() + def additionalPolicies = this.policies.keySet() - metadataTable.policies.keySet() + + missingPolicies.each { + def messageText = "The table ${name} is missing the policy ${it}."; + def message = new Message(objectName: "${name}.${it}", level: MessageLevel.ERROR, objectType: ObjectType.POLICY, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + + additionalPolicies.each { + def messageText = "The table ${name} has the additional policy ${it}."; + def message = new Message(objectName: "${name}.${it}", level: MessageLevel.ERROR, objectType: ObjectType.POLICY, messageType: MessageType.PRESENCE, message: messageText) + messages << message + } + + return messages + } +} diff --git a/src/main/groovy/br/com/bluesoft/bee/schema/BeePostgresSchemaCreator.groovy b/src/main/groovy/br/com/bluesoft/bee/schema/BeePostgresSchemaCreator.groovy index 26674c2..08d856c 100644 --- a/src/main/groovy/br/com/bluesoft/bee/schema/BeePostgresSchemaCreator.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/schema/BeePostgresSchemaCreator.groovy @@ -1,88 +1,124 @@ package br.com.bluesoft.bee.schema - class BeePostgresSchemaCreator extends BeeSchemaCreator { - - def createColumn(def column) { - def result = " ${column.name} ${column.type}" - if (column.type in ['character', 'character varying', 'text']) { - if (column.size != null) { - result += "(${column.size})" - } - } - - if (column.type in ['decimal','numeric','serial', 'bigserial']) { - if (column.scale > 0) { - result += "(${column.size}, ${column.scale})" - } else if (column.size != null) { - result += "(${column.size})" - } - } - - if(column.defaultValue) - result += " default ${column.defaultValue}" - - if (!column.nullable) { - result += ' not null' - } - - return result - } - - def createIndex(tableName, index) { - def result = "create" - def indexType = getIndexType(index.type) - if (index.unique) - result += ' unique' - if (index.columns.size() == 1 && index.columns[0].name.contains('(')) { - result += " index ${index.name} on ${tableName} USING ${indexType} (" + index.columns[0].name + ");\n" - } else { - result += " index ${index.name} on ${tableName} USING ${indexType} (" + index.columns.join(',') + ");\n" - } - - return result - } - - def getIndexType(indexType){ - if (indexType == 'b') { - return ' 
btree' - } else { - return ' btree' - } - } - - void createIndexes(def file, def schema) { - def tables = schema.tables.sort() - tables.each { - def table = it.value - def indexes = table.indexes*.value.findAll { it.type == 'n' } - def primaryKeys = table.constraints*.value.findAll {it.type == 'P'} - def uniqueKeys = table.constraints*.value.findAll {it.type == 'U'} - indexes.each { - def indexName = it.name - def existPrimaryKeyWithThisName = primaryKeys.findAll {it.name.equals(indexName)}.size() == 1 - def existUniqueKeyWithThisName = uniqueKeys.findAll {it.name.equals(indexName)}.size() == 1 - if (!existPrimaryKeyWithThisName && !existUniqueKeyWithThisName) { - file << createIndex(table.name, it) - } - } - } - - file << "\n" - } - - - void createProcedures(def file, def schema) { - schema.procedures*.value.sort().each { - def procedure = "${it.text};\n\n" - file.append(procedure.toString(), 'utf-8') - } - } - - void createTriggers(def file, def schema) { - schema.triggers*.value.sort().each { - def trigger = "${it.text};\n\n" - file.append(trigger.toString(), 'utf-8') - } - } + + def createColumn(def column) { + def result = " ${column.name} ${column.type}" + if (column.type in ['character', 'character varying', 'text']) { + if (column.size != null) { + result += "(${column.size})" + } + } + + if (column.type in ['decimal', 'numeric', 'serial', 'bigserial']) { + if (column.scale > 0) { + result += "(${column.size}, ${column.scale})" + } else if (column.size != null) { + result += "(${column.size})" + } + } + + if (column.defaultValue) { + result += " default ${column.defaultValue}" + } + + if (!column.nullable) { + result += ' not null' + } + + return result + } + + def createIndex(tableName, index) { + def result = "create" + def indexType = getIndexType(index.type) + if (index.unique) { + result += ' unique' + } + if (index.columns.size() == 1 && index.columns[0].name.contains('(')) { + result += " index ${index.name} on ${tableName} USING ${indexType} (" + index.columns[0].name + ");\n" + } else { + result += " index ${index.name} on ${tableName} USING ${indexType} (" + index.columns.join(',') + ");\n" + } + + return result + } + + def getIndexType(indexType) { + if (indexType == 'b') { + return ' btree' + } else { + return ' btree' + } + } + + void createIndexes(def file, def schema) { + def tables = schema.tables.sort() + tables.each { + def table = it.value + def indexes = table.indexes*.value.findAll { it.type == 'n' } + def primaryKeys = table.constraints*.value.findAll { it.type == 'P' } + def uniqueKeys = table.constraints*.value.findAll { it.type == 'U' } + indexes.each { + def indexName = it.name + def existPrimaryKeyWithThisName = primaryKeys.findAll { it.name.equals(indexName) }.size() == 1 + def existUniqueKeyWithThisName = uniqueKeys.findAll { it.name.equals(indexName) }.size() == 1 + if (!existPrimaryKeyWithThisName && !existUniqueKeyWithThisName) { + file << createIndex(table.name, it) + } + } + } + + file << "\n" + } + + + void createProcedures(def file, def schema) { + schema.procedures*.value.sort().each { + def procedure = "${it.text};\n\n" + file.append(procedure.toString(), 'utf-8') + } + } + + void createTriggers(def file, def schema) { + schema.triggers*.value.sort().each { + def trigger = "${it.text};\n\n" + file.append(trigger.toString(), 'utf-8') + } + } + + void createTables(def file, def schema) { + def tables = schema.tables.sort() + tables.each({ + file << createTable(it.value) + file << createRowSecurity(it.value) + file << createPolicies(it.value) + 
file << "\n" + }) + } + + def createRowSecurity(def table) { + return table.rowSecurity ? "alter table ${table.name} enable row level security;\n" : "" + } + + def createPolicies(def table) { + def result = "" + table.policies.each({ + result += createPolicy(table, it.value) + }) + return result; + } + + def createPolicy(def table, def policy) { + def result = "create policy ${policy.name} on ${table.name}" + result += " as ${policy.permissive ? 'permissive' : 'restrictive'}" + result += " for ${policy.cmd}" + result += " to ${policy.roles.join(", ")}" + result += " using (${policy.usingExpression})" + if (policy.checkExpression) { + result += " with check (${policy.checkExpression})" + } + result += ";\n" + return result + } } diff --git a/src/main/groovy/br/com/bluesoft/bee/schema/BeePostgresSchemaCreatorAction.groovy b/src/main/groovy/br/com/bluesoft/bee/schema/BeePostgresSchemaCreatorAction.groovy index 48512e8..f98dba2 100644 --- a/src/main/groovy/br/com/bluesoft/bee/schema/BeePostgresSchemaCreatorAction.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/schema/BeePostgresSchemaCreatorAction.groovy @@ -2,81 +2,83 @@ package br.com.bluesoft.bee.schema import br.com.bluesoft.bee.importer.JsonImporter import br.com.bluesoft.bee.model.Options +import br.com.bluesoft.bee.model.postgres.PostgresTable import br.com.bluesoft.bee.runner.ActionRunner import br.com.bluesoft.bee.service.BeeWriter +public class BeePostgresSchemaCreatorAction implements ActionRunner { -public class BeePostgresSchemaCreatorAction implements ActionRunner { + Options options + BeeWriter out + BeeSchemaCreator beeSchemaCreator - Options options - BeeWriter out - BeeSchemaCreator beeSchemaCreator + def importer - def importer + //TODO types, outputfile - //TODO types, outputfile + public boolean validateParameters() { + return true + } - public boolean validateParameters() { - return true - } + public boolean run() { - public boolean run() { + def objectName = options.arguments[0] - def objectName = options.arguments[0] + out.log('importing schema metadata from the reference files') + def schema = getImporter().importMetaData(PostgresTable.class) - out.log('importing schema metadata from the reference files') - def schema = getImporter().importMetaData() + if (objectName) { + schema = schema.filter(objectName) + } - if(objectName) - schema = schema.filter(objectName) + def file = new File('bee.sql') + if (file.exists()) { + file.delete() + } - def file = new File('bee.sql') - if(file.exists()) - file.delete() + beeSchemaCreator = new BeePostgresSchemaCreator() - beeSchemaCreator = new BeePostgresSchemaCreator() + out.println("generating sequences...") + beeSchemaCreator.createSequences(file, schema) - out.println("generating sequences...") - beeSchemaCreator.createSequences(file, schema) + out.println("generating tables...") + beeSchemaCreator.createTables(file, schema) - out.println("generating tables...") - beeSchemaCreator.createTables(file, schema) - - out.println("generating core data...") - beeSchemaCreator.createCoreData(file, schema, options.dataDir) + out.println("generating core data...") + beeSchemaCreator.createCoreData(file, schema, options.dataDir) - out.println("generating constraints...") - beeSchemaCreator.createPrimaryKeys(file, schema) - beeSchemaCreator.createUniqueKeys(file, schema) - beeSchemaCreator.createForeignKeys(file, schema) + out.println("generating constraints...") + beeSchemaCreator.createPrimaryKeys(file, schema) + beeSchemaCreator.createUniqueKeys(file, schema) + 
beeSchemaCreator.createForeignKeys(file, schema) - out.println("generating indexes...") - beeSchemaCreator.createIndexes(file, schema) + out.println("generating indexes...") + beeSchemaCreator.createIndexes(file, schema) - out.println("generating views...") - beeSchemaCreator.createViews(file, schema) + out.println("generating views...") + beeSchemaCreator.createViews(file, schema) - out.println("generating functions...") - beeSchemaCreator.createProcedures(file, schema) + out.println("generating functions...") + beeSchemaCreator.createProcedures(file, schema) - out.println("generating triggers...") - beeSchemaCreator.createTriggers(file, schema) + out.println("generating triggers...") + beeSchemaCreator.createTriggers(file, schema) - - def env = System.getenv() - if(env['EDITOR']) { - println "Opening editor ${env['EDITOR']}" - def cmd = [env['EDITOR'], file.path] - new ProcessBuilder(env['EDITOR'], file.path).start() - } + def env = System.getenv() + if (env['EDITOR']) { + println "Opening editor ${env['EDITOR']}" + def cmd = [env['EDITOR'], file.path] + new ProcessBuilder(env['EDITOR'], file.path).start() + } - return true - } + return true + } - private def getImporter() { - if(importer == null) - return new JsonImporter(options.dataDir.canonicalPath) - return importer - } + private def getImporter() { + if (importer == null) { + return new JsonImporter(options.dataDir.canonicalPath) + } + return importer + } } diff --git a/src/main/groovy/br/com/bluesoft/bee/schema/BeeSchemaCreator.groovy b/src/main/groovy/br/com/bluesoft/bee/schema/BeeSchemaCreator.groovy index a75f054..da9c33a 100644 --- a/src/main/groovy/br/com/bluesoft/bee/schema/BeeSchemaCreator.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/schema/BeeSchemaCreator.groovy @@ -1,333 +1,343 @@ package br.com.bluesoft.bee.schema -import java.text.SimpleDateFormat; - -import br.com.bluesoft.bee.service.BeeWriter import br.com.bluesoft.bee.util.CsvUtil -abstract class BeeSchemaCreator { - - void createSequences(def file, def schema) { - schema.sequences*.value.each { file << "create sequence ${it.name};\n" } - file << "\n" - } - - def createColumn(def column) { - println 'BeeSchemaCreator' - - def result = " ${column.name} ${column.type}" - if(column.type in ['char', 'varchar']) - if(column.sizeType != null) - result += "(${column.size} ${column.sizeType})" - else - result += "(${column.size})" +import java.text.SimpleDateFormat - if(column.type == 'number') - if(column.scale > 0) - result += "(${column.size}, ${column.scale})" - else - result += "(${column.size})" - - if(column.defaultValue) - result += " default ${column.defaultValue}" - - if(!column.nullable) - result += ' not null' - return result - } - - def createTable(def table) { - def columns = [] - table.columns.each({ - columns << createColumn(it.value) - }) - def temp = table.temporary ? 
" global temporary" : "" - def result = "create${temp} table ${table.name} (\n" + columns.join(",\n") + "\n);\n\n" - } +abstract class BeeSchemaCreator { - void createTables(def file, def schema) { - def tables = schema.tables.sort() - tables.each( { file << createTable(it.value) }) - } - - def createPrimaryKey(table) { - def constraint = table.constraints.find ({ it.value.type == 'P' }) - - if(constraint == null) - return "" - - constraint = constraint.value - - return "alter table ${table.name} add constraint ${constraint.name} primary key (" + constraint.columns.join(',') + ");\n" - } - - void createPrimaryKeys(def file, def schema) { - def tables = schema.tables.sort() - tables.each { - file << createPrimaryKey(it.value) - } - - file << "\n" - } - - def createUniqueKey(table) { - def constraints = table.constraints.findAll { it.value.type == 'U' }*.value - - def result = "" - - constraints.each { - result += "alter table ${table.name} add constraint ${it.name} unique(" + it.columns.join(',') + ");\n" - } - - return result - } - - void createUniqueKeys(def file, def schema) { - def tables = schema.tables.sort() - tables.each { - file << createUniqueKey(it.value) - } - - file << "\n" - } - - def createForeignKey(table) { - def constraints = table.constraints.findAll { it.value.type == 'R' }*.value - - def result = "" - - constraints.each { - def onDelete = it.onDelete ? "on delete ${it.onDelete}" : "" - def refColumns = it.refColumns ? "(" + it.refColumns.join(',') + ")" : "" - result += "alter table ${table.name} add constraint ${it.name} foreign key (" + it.columns.join(',') + ") references ${it.refTable} ${refColumns} ${onDelete};\n" - } - - return result - } - - void createForeignKeys(def file, def schema) { - def tables = schema.tables.sort() - tables.each { - file << createForeignKey(it.value) - } - - file << "\n" - } - - def createCheckConstraint(table) { - def constraints = table.constraints.findAll { it.value.type == 'C' }*.value - - def result = "" - - constraints.each { - result += "alter table ${table.name} add constraint ${it.name} check (" + it.searchCondition + ");\n" - } - - return result - } - - void createCheckConstraint(def file, def schema) { - def tables = schema.tables.sort() - tables.each { - file << createCheckConstraint(it.value) - } - - file << "\n" - } - - def createIndex(tableName, index) { - def result = "create" - if(index.type == 'b') - result += ' bitmap' - if(index.unique) - result += ' unique' - result += " index ${index.name} on ${tableName}(" + index.columns.join(',') + ");\n" - - return result - } - - void createIndexes(def file, def schema) { - def tables = schema.tables.sort() - tables.each { - def table = it.value - def indexes = table.indexes*.value.findAll { it.type == 'n' } - indexes.each { file << createIndex(table.name, it) } - } - - file << "\n" - } - - void createFunctionalIndexes(def file, def schema) { - def tables = schema.tables.sort() - tables.each { - def table = it.value - def indexes = table.indexes*.value.findAll { it.type == 'f' } - indexes.each { file << createIndex(table.name, it) } - } - - file << "\n" - } - - void createBitmapIndexes(def file, def schema) { - def tables = schema.tables.sort() - tables.each { - def table = it.value - def indexes = table.indexes*.value.findAll { it.type == 'b' } - indexes.each { file << createIndex(table.name, it) } - } - - file << "\n" - } - - void createViews(def file, def schema) { - schema.views*.value.each { - def view = "create or replace view ${it.name} as ${it.text};\n\n" - 
file.append(view.toString(), 'utf-8') - } - } - - void createPackages(def file, def schema) { - schema.packages*.value.sort().each { - def text = "create or replace ${it.text}\n/\n\n" - def body = "create or replace ${it.body}\n/\n\n" - file.append(text.toString(), 'utf-8') - file.append(body.toString(), 'utf-8') - } - } - - void createProcedures(def file, def schema) { - schema.procedures*.value.sort().each { - def text = [] - it.text.eachLine { text << it } - def text2 = text[1..text.size()-1].join("\n") - def procedure = "create or replace ${text2}\n/\n\n" - file.append(procedure.toString(), 'utf-8') - } - } - - void createTriggers(def file, def schema) { - schema.triggers*.value.sort().each { - def trigger = "create or replace ${it.text}\n/\n\n" - file.append(trigger.toString(), 'utf-8') - } - } - - void createCsvData(def file, def csvFile, def schema) { - def tableName = csvFile.name.split('\\.')[0] - def fileData = CsvUtil.read(csvFile) - def table = schema.tables[tableName] - def columnNames = [] - def columns = [:] - def columnTypes = [:] - def isVirtualColumn =[:] - def numberOfVirtualColumns = 0 - - if (table != null) { - table.columns.findAll { !it.value.ignore }.each{ - columns[it.value.name] = it.value.type - columnNames << it.value.name - isVirtualColumn[it.value.name] = it.value.virtual - if (it.value.virtual) { - numberOfVirtualColumns++ - } - } - - def counterColumnNames = 1 - def counterValue = 1 - - - def query = new StringBuilder() - for (int i = 0; i < fileData.size; i++) { - query << "insert into ${tableName} (" - columnNames.eachWithIndex {columName, index -> - def isVirtual = isVirtualColumn[columName] - if (!isVirtual) { - query << columName - } - columnTypes[index] = columns[columName] - if ( (counterColumnNames + numberOfVirtualColumns) < (columnNames.size()) ) { - query << ", " - } - counterColumnNames++ - } - query << ") " - query << "values (" - def params = [] - fileData[i].eachWithIndex { columnValue, index2 -> - def fieldValue = columnValue.toString() - params.add(fieldValue) - def columnType = columnTypes[index2] - def columnName = columnNames[index2] - def isVirtual = isVirtualColumn[columnName] - def isString = columnType == 'varchar' || columnType == 'varchar2' || columnType == 'character' || columnType == 'character varying' || columnType == 'text' - def isDate = columnType == 'date' - def isNotNumber = !fieldValue?.isNumber() - if (!isVirtual) { - if (isNotNumber && !isDate || isString) { - fieldValue = fieldValue.replaceAll("\'", "\''") - if (fieldValue != 'null') { - fieldValue = "\'" + fieldValue + "\'" - } - } - if (isDate && fieldValue != 'null') { - fieldValue = fieldValue.replaceAll("\'", "") - SimpleDateFormat inputSdf = new SimpleDateFormat('yyyy-MM-dd') - SimpleDateFormat outputSdf = new SimpleDateFormat('yyyy-MM-dd') - def date = inputSdf.parse(fieldValue); - fieldValue = outputSdf.format(date) - fieldValue = "\'" + fieldValue + "\'" - } - query << fieldValue - } - if ( (counterValue + numberOfVirtualColumns) < (columnNames.size()) ) { - query << ", " - } - counterValue++ - } - query << ");\n" - counterColumnNames = 1 - counterValue = 1 - } - query << "commit;\n" - file.append(query.toString(), 'utf-8') - } - } - - void createScriptData(def file, def csvFile, def schema) { - def lines = csvFile.readLines() - lines.each { - file.append(it, "utf-8") - file.append("\n", "utf-8") - } - } - - void createCoreData(def file, def schema, def dataFolderPath) { - def dataFolder = new File(dataFolderPath, 'data') - def dataFolderFiles = 
dataFolder.listFiles() - def seedsFolder = new File(dataFolderPath, 'dbseeds') - def seedsFolderFiles = seedsFolder.listFiles() - - dataFolderFiles.each { - if(it.name.endsWith(".csv")) { - createCsvData(file, it, schema) - } - } - seedsFolderFiles.each { - if(it.name.endsWith(".csv")) { - createCsvData(file, it, schema) - } - } - seedsFolderFiles.each { - if(it.name.endsWith(".script")) { - createScriptData(file, it, schema) - } - } - } - - def createUserTypes(def file, def schema) { - schema.userTypes*.value.sort().each { - def userType = createUserType(it) - file.append(userType.toString(), 'utf-8') - } - } + void createSequences(def file, def schema) { + schema.sequences*.value.each { file << "create sequence ${it.name};\n" } + file << "\n" + } + + def createColumn(def column) { + println 'BeeSchemaCreator' + + def result = " ${column.name} ${column.type}" + if (column.type in ['char', 'varchar']) { + if (column.sizeType != null) { + result += "(${column.size} ${column.sizeType})" + } else { + result += "(${column.size})" + } + } + + if (column.type == 'number') { + if (column.scale > 0) { + result += "(${column.size}, ${column.scale})" + } else { + result += "(${column.size})" + } + } + + if (column.defaultValue) { + result += " default ${column.defaultValue}" + } + + if (!column.nullable) { + result += ' not null' + } + return result + } + + def createTable(def table) { + def columns = [] + table.columns.each({ + columns << createColumn(it.value) + }) + def temp = table.temporary ? " global temporary" : "" + return "create${temp} table ${table.name} (\n" + columns.join(",\n") + "\n);\n" + } + + void createTables(def file, def schema) { + def tables = schema.tables.sort() + tables.each({ + file << createTable(it.value) + file << "\n" + }) + } + + def createPrimaryKey(table) { + def constraint = table.constraints.find({ it.value.type == 'P' }) + + if (constraint == null) { + return "" + } + + constraint = constraint.value + + return "alter table ${table.name} add constraint ${constraint.name} primary key (" + constraint.columns.join(',') + ");\n" + } + + void createPrimaryKeys(def file, def schema) { + def tables = schema.tables.sort() + tables.each { + file << createPrimaryKey(it.value) + } + + file << "\n" + } + + def createUniqueKey(table) { + def constraints = table.constraints.findAll { it.value.type == 'U' }*.value + + def result = "" + + constraints.each { + result += "alter table ${table.name} add constraint ${it.name} unique(" + it.columns.join(',') + ");\n" + } + + return result + } + + void createUniqueKeys(def file, def schema) { + def tables = schema.tables.sort() + tables.each { + file << createUniqueKey(it.value) + } + + file << "\n" + } + + def createForeignKey(table) { + def constraints = table.constraints.findAll { it.value.type == 'R' }*.value + + def result = "" + + constraints.each { + def onDelete = it.onDelete ? "on delete ${it.onDelete}" : "" + def refColumns = it.refColumns ? 
"(" + it.refColumns.join(',') + ")" : "" + result += "alter table ${table.name} add constraint ${it.name} foreign key (" + it.columns.join(',') + ") references ${it.refTable} ${refColumns} ${onDelete};\n" + } + + return result + } + + void createForeignKeys(def file, def schema) { + def tables = schema.tables.sort() + tables.each { + file << createForeignKey(it.value) + } + + file << "\n" + } + + def createCheckConstraint(table) { + def constraints = table.constraints.findAll { it.value.type == 'C' }*.value + + def result = "" + + constraints.each { + result += "alter table ${table.name} add constraint ${it.name} check (" + it.searchCondition + ");\n" + } + + return result + } + + void createCheckConstraint(def file, def schema) { + def tables = schema.tables.sort() + tables.each { + file << createCheckConstraint(it.value) + } + + file << "\n" + } + + def createIndex(tableName, index) { + def result = "create" + if (index.type == 'b') { + result += ' bitmap' + } + if (index.unique) { + result += ' unique' + } + result += " index ${index.name} on ${tableName}(" + index.columns.join(',') + ");\n" + + return result + } + + void createIndexes(def file, def schema) { + def tables = schema.tables.sort() + tables.each { + def table = it.value + def indexes = table.indexes*.value.findAll { it.type == 'n' } + indexes.each { file << createIndex(table.name, it) } + } + + file << "\n" + } + + void createFunctionalIndexes(def file, def schema) { + def tables = schema.tables.sort() + tables.each { + def table = it.value + def indexes = table.indexes*.value.findAll { it.type == 'f' } + indexes.each { file << createIndex(table.name, it) } + } + + file << "\n" + } + + void createBitmapIndexes(def file, def schema) { + def tables = schema.tables.sort() + tables.each { + def table = it.value + def indexes = table.indexes*.value.findAll { it.type == 'b' } + indexes.each { file << createIndex(table.name, it) } + } + + file << "\n" + } + + void createViews(def file, def schema) { + schema.views*.value.each { + def view = "create or replace view ${it.name} as ${it.text};\n\n" + file.append(view.toString(), 'utf-8') + } + } + + void createPackages(def file, def schema) { + schema.packages*.value.sort().each { + def text = "create or replace ${it.text}\n/\n\n" + def body = "create or replace ${it.body}\n/\n\n" + file.append(text.toString(), 'utf-8') + file.append(body.toString(), 'utf-8') + } + } + + void createProcedures(def file, def schema) { + schema.procedures*.value.sort().each { + def text = [] + it.text.eachLine { text << it } + def text2 = text[1..text.size() - 1].join("\n") + def procedure = "create or replace ${text2}\n/\n\n" + file.append(procedure.toString(), 'utf-8') + } + } + + void createTriggers(def file, def schema) { + schema.triggers*.value.sort().each { + def trigger = "create or replace ${it.text}\n/\n\n" + file.append(trigger.toString(), 'utf-8') + } + } + + void createCsvData(def file, def csvFile, def schema) { + def tableName = csvFile.name.split('\\.')[0] + def fileData = CsvUtil.read(csvFile) + def table = schema.tables[tableName] + def columnNames = [] + def columns = [:] + def columnTypes = [:] + def isVirtualColumn = [:] + def numberOfVirtualColumns = 0 + + if (table != null) { + table.columns.findAll { !it.value.ignore }.each { + columns[it.value.name] = it.value.type + columnNames << it.value.name + isVirtualColumn[it.value.name] = it.value.virtual + if (it.value.virtual) { + numberOfVirtualColumns++ + } + } + + def counterColumnNames = 1 + def counterValue = 1 + + def query = new 
StringBuilder() + for (int i = 0; i < fileData.size; i++) { + query << "insert into ${tableName} (" + columnNames.eachWithIndex { columName, index -> + def isVirtual = isVirtualColumn[columName] + if (!isVirtual) { + query << columName + } + columnTypes[index] = columns[columName] + if ((counterColumnNames + numberOfVirtualColumns) < (columnNames.size())) { + query << ", " + } + counterColumnNames++ + } + query << ") " + query << "values (" + def params = [] + fileData[i].eachWithIndex { columnValue, index2 -> + def fieldValue = columnValue.toString() + params.add(fieldValue) + def columnType = columnTypes[index2] + def columnName = columnNames[index2] + def isVirtual = isVirtualColumn[columnName] + def isString = columnType == 'varchar' || columnType == 'varchar2' || columnType == 'character' || columnType == 'character varying' || columnType == 'text' + def isDate = columnType == 'date' + def isNotNumber = !fieldValue?.isNumber() + if (!isVirtual) { + if (isNotNumber && !isDate || isString) { + fieldValue = fieldValue.replaceAll("\'", "\''") + if (fieldValue != 'null') { + fieldValue = "\'" + fieldValue + "\'" + } + } + if (isDate && fieldValue != 'null') { + fieldValue = fieldValue.replaceAll("\'", "") + SimpleDateFormat inputSdf = new SimpleDateFormat('yyyy-MM-dd') + SimpleDateFormat outputSdf = new SimpleDateFormat('yyyy-MM-dd') + def date = inputSdf.parse(fieldValue); + fieldValue = outputSdf.format(date) + fieldValue = "\'" + fieldValue + "\'" + } + query << fieldValue + } + if ((counterValue + numberOfVirtualColumns) < (columnNames.size())) { + query << ", " + } + counterValue++ + } + query << ");\n" + counterColumnNames = 1 + counterValue = 1 + } + query << "commit;\n" + file.append(query.toString(), 'utf-8') + } + } + + void createScriptData(def file, def csvFile, def schema) { + def lines = csvFile.readLines() + lines.each { + file.append(it, "utf-8") + file.append("\n", "utf-8") + } + } + + void createCoreData(def file, def schema, def dataFolderPath) { + def dataFolder = new File(dataFolderPath, 'data') + def dataFolderFiles = dataFolder.listFiles() + def seedsFolder = new File(dataFolderPath, 'dbseeds') + def seedsFolderFiles = seedsFolder.listFiles() + + dataFolderFiles.each { + if (it.name.endsWith(".csv")) { + createCsvData(file, it, schema) + } + } + seedsFolderFiles.each { + if (it.name.endsWith(".csv")) { + createCsvData(file, it, schema) + } + } + seedsFolderFiles.each { + if (it.name.endsWith(".script")) { + createScriptData(file, it, schema) + } + } + } + + def createUserTypes(def file, def schema) { + schema.userTypes*.value.sort().each { + def userType = createUserType(it) + file.append(userType.toString(), 'utf-8') + } + } } diff --git a/src/main/groovy/br/com/bluesoft/bee/schema/BeeSchemaGeneratorAction.groovy b/src/main/groovy/br/com/bluesoft/bee/schema/BeeSchemaGeneratorAction.groovy index e1af004..f36dd5d 100644 --- a/src/main/groovy/br/com/bluesoft/bee/schema/BeeSchemaGeneratorAction.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/schema/BeeSchemaGeneratorAction.groovy @@ -33,85 +33,91 @@ package br.com.bluesoft.bee.schema import br.com.bluesoft.bee.database.ConnectionInfo -import br.com.bluesoft.bee.database.reader.DatabaseReaderChanger; -import br.com.bluesoft.bee.database.reader.OracleDatabaseReader +import br.com.bluesoft.bee.database.reader.DatabaseReaderChanger import br.com.bluesoft.bee.exporter.JsonExporter import br.com.bluesoft.bee.importer.JsonImporter import br.com.bluesoft.bee.model.Options import br.com.bluesoft.bee.model.Schema import 
br.com.bluesoft.bee.runner.ActionRunner import br.com.bluesoft.bee.service.BeeWriter +import br.com.bluesoft.bee.util.RDBMSUtil import groovy.sql.Sql class BeeSchemaGeneratorAction implements ActionRunner { - Options options - BeeWriter out - - def sql - def importer - - public boolean validateParameters() { - return options.arguments.size() >= 1 - } - - public boolean run(){ - - def clientName = options.arguments[0] - def objectName = options.arguments[1] - - try { - out.log "Connecting to the database..." - sql = getDatabaseConnection(clientName) - } catch (e){ - throw new Exception("It was not possible to connect to the database.",e) - } - - try { - out.log "Extracting the metadata..." - def databaseReader = DatabaseReaderChanger.getDatabaseReader(options, sql) - Schema schemaNew = databaseReader.getSchema(objectName) - if(objectName) - schemaNew = schemaNew.filter(objectName) - - Schema schemaOld = getImporter().importMetaData() - if(objectName) - schemaOld = schemaOld.filter(objectName) - - applyIgnore(schemaOld, schemaNew) - - def exporter = new JsonExporter(schemaNew, options.dataDir.canonicalPath) - exporter.export(); - return true - } catch(e) { - e.printStackTrace() - throw new Exception("Error importing database metadata.",e) - } - } - - void applyIgnore(Schema schemaOld, Schema schemaNew) { - def tableNames = schemaOld.tables.findAll { it.key in schemaOld.tables } - - tableNames.each { etable -> - def ignoredColumns = schemaOld.tables[etable.key].columns.findAll { it.value.ignore } - ignoredColumns.each { - if(schemaNew.tables[etable.key].columns[it.key]) { - schemaNew.tables[etable.key].columns[it.key].ignore = true - } - } - } - } - - Sql getDatabaseConnection(clientName) { - if(sql != null) { - return sql - } - return ConnectionInfo.createDatabaseConnection(options.configFile, clientName) - } - - private def getImporter() { - if(importer == null) - return new JsonImporter(options.dataDir.canonicalPath) - return importer - } + Options options + BeeWriter out + + def sql + def importer + + public boolean validateParameters() { + return options.arguments.size() >= 1 + } + + public boolean run() { + + def clientName = options.arguments[0] + def objectName = options.arguments[1] + + try { + out.log "Connecting to the database..." + sql = getDatabaseConnection(clientName) + } catch (e) { + throw new Exception("It was not possible to connect to the database.", e) + } + + try { + out.log "Extracting the metadata..." 
+ + def rdbms = RDBMSUtil.getRDBMS(options) + def databaseReader = DatabaseReaderChanger.getDatabaseReader(options, sql) + + Schema schemaNew = databaseReader.getSchema(objectName) + if (objectName) { + schemaNew = schemaNew.filter(objectName) + } + + Schema schemaOld = getImporter().importMetaData(rdbms) + if (objectName) { + schemaOld = schemaOld.filter(objectName) + } + + applyIgnore(schemaOld, schemaNew) + + def exporter = new JsonExporter(schemaNew, options.dataDir.canonicalPath) + exporter.export(); + return true + } catch (e) { + e.printStackTrace() + throw new Exception("Error importing database metadata.", e) + } + } + + void applyIgnore(Schema schemaOld, Schema schemaNew) { + def tableNames = schemaOld.tables.findAll { it.key in schemaOld.tables } + + tableNames.each { etable -> + def ignoredColumns = schemaOld.tables[etable.key].columns.findAll { it.value.ignore } + ignoredColumns.each { + if (schemaNew.tables[etable.key].columns[it.key]) { + schemaNew.tables[etable.key].columns[it.key].ignore = true + } + } + } + } + + Sql getDatabaseConnection(clientName) { + if (sql != null) { + return sql + } + return ConnectionInfo.createDatabaseConnection(options.configFile, clientName) + } + + private def getImporter() { + if (importer == null) { + return new JsonImporter(options.dataDir.canonicalPath) + } + return importer + } } diff --git a/src/main/groovy/br/com/bluesoft/bee/schema/BeeSchemaValidatorAction.groovy b/src/main/groovy/br/com/bluesoft/bee/schema/BeeSchemaValidatorAction.groovy index a7d908d..da68fde 100644 --- a/src/main/groovy/br/com/bluesoft/bee/schema/BeeSchemaValidatorAction.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/schema/BeeSchemaValidatorAction.groovy @@ -33,8 +33,7 @@ package br.com.bluesoft.bee.schema import br.com.bluesoft.bee.database.ConnectionInfo -import br.com.bluesoft.bee.database.reader.DatabaseReaderChanger; -import br.com.bluesoft.bee.database.reader.OracleDatabaseReader +import br.com.bluesoft.bee.database.reader.DatabaseReaderChanger import br.com.bluesoft.bee.importer.JsonImporter import br.com.bluesoft.bee.model.Options import br.com.bluesoft.bee.model.Schema @@ -42,67 +41,71 @@ import br.com.bluesoft.bee.model.message.MessageLevel import br.com.bluesoft.bee.runner.ActionRunner import br.com.bluesoft.bee.service.BeeWriter import br.com.bluesoft.bee.service.MessagePrinter +import br.com.bluesoft.bee.util.RDBMSUtil -class BeeSchemaValidatorAction implements ActionRunner{ +class BeeSchemaValidatorAction implements ActionRunner { - Options options - BeeWriter out - def importer - def sql + Options options + BeeWriter out + def importer + def sql - public boolean validateParameters() { - return options.arguments.size() >= 1 - } + public boolean validateParameters() { + return options.arguments.size() >= 1 + } - boolean run() { + boolean run() { - def clientName = options.arguments[0] - def objectName = options.arguments[1] - - MessagePrinter messagePrinter = new MessagePrinter() + def clientName = options.arguments[0] + def objectName = options.arguments[1] - def importer = getImporter() - out.log("connecting to " + clientName); - def sql = getDatabaseConnection(clientName) - def databaseReader = DatabaseReaderChanger.getDatabaseReader(options, sql) + MessagePrinter messagePrinter = new MessagePrinter() - out.log('importing schema metadata from the reference files') - Schema metadataSchema = importer.importMetaData() + def importer = getImporter() + out.log("connecting to " + clientName); + def sql = getDatabaseConnection(clientName) + def 
databaseReader = DatabaseReaderChanger.getDatabaseReader(options, sql) - if(objectName) - metadataSchema = metadataSchema.filter(objectName) + out.log('importing schema metadata from the reference files') + Schema metadataSchema = importer.importMetaData(RDBMSUtil.getRDBMS(options)) - out.log('importing schema metadata from the database') - Schema databaseSchema = databaseReader.getSchema(objectName) + if (objectName) { + metadataSchema = metadataSchema.filter(objectName) + } - if(objectName) - databaseSchema = databaseSchema.filter(objectName) + out.log('importing schema metadata from the database') + Schema databaseSchema = databaseReader.getSchema(objectName) - out.log('validating') - def messages = databaseSchema.validateWithMetadata(metadataSchema) - def warnings = messages.findAll { it.level == MessageLevel.WARNING } - def errors = messages.findAll { it.level == MessageLevel.ERROR } + if (objectName) { + databaseSchema = databaseSchema.filter(objectName) + } - out.log("--- bee found ${warnings.size()} warning(s)" ) - messagePrinter.print(out, warnings) + out.log('validating') + def messages = databaseSchema.validateWithMetadata(metadataSchema) + def warnings = messages.findAll { it.level == MessageLevel.WARNING } + def errors = messages.findAll { it.level == MessageLevel.ERROR } - out.log("--- bee found ${errors.size()} error(s)" ) - messagePrinter.print(out, errors) + out.log("--- bee found ${warnings.size()} warning(s)") + messagePrinter.print(out, warnings) - return errors.size() == 0 - } + out.log("--- bee found ${errors.size()} error(s)") + messagePrinter.print(out, errors) - private def getImporter() { - if(importer == null) - return new JsonImporter(options.dataDir.canonicalPath) - return importer - } + return errors.size() == 0 + } + private def getImporter() { + if (importer == null) { + return new JsonImporter(options.dataDir.canonicalPath) + } + return importer + } - def getDatabaseConnection(clientName) { - if(sql != null) { - return sql - } - return ConnectionInfo.createDatabaseConnection(options.configFile, clientName) - } + + def getDatabaseConnection(clientName) { + if (sql != null) { + return sql + } + return ConnectionInfo.createDatabaseConnection(options.configFile, clientName) + } } diff --git a/src/main/groovy/br/com/bluesoft/bee/util/VersionHelper.groovy b/src/main/groovy/br/com/bluesoft/bee/util/VersionHelper.groovy index 5040eb7..1181b07 100644 --- a/src/main/groovy/br/com/bluesoft/bee/util/VersionHelper.groovy +++ b/src/main/groovy/br/com/bluesoft/bee/util/VersionHelper.groovy @@ -2,16 +2,17 @@ package br.com.bluesoft.bee.util public class VersionHelper { - def static isNewerThan9_6 (String version) { - def is_newer = false - def major_version = version.tokenize('.')[0].toInteger() - def minor_version = version.tokenize('.')[1].toInteger() - if (major_version == 9 && minor_version >=6) { - is_newer = true - } else if (major_version > 9) { - is_newer = true - } - return is_newer - } + def static isNewerThan9_6(String version) { + def is_newer = false -} \ No newline at end of file + def major_version = version.tokenize('.')[0].toInteger() + def minor_version = version.tokenize('.')[1].tokenize(' ')[0].toInteger() + + if (major_version == 9 && minor_version >= 6) { + is_newer = true + } else if (major_version > 9) { + is_newer = true + } + return is_newer + } +} diff --git a/src/test/groovy/br/com/bluesoft/bee/util/VersionHelperTest.groovy b/src/test/groovy/br/com/bluesoft/bee/util/VersionHelperTest.groovy index e524af5..ad2c909 100644 --- 
a/src/test/groovy/br/com/bluesoft/bee/util/VersionHelperTest.groovy +++ b/src/test/groovy/br/com/bluesoft/bee/util/VersionHelperTest.groovy @@ -4,28 +4,38 @@ import org.junit.Test class VersionHelperTest { - @Test - void 'versao 8_5 deve retornar false'() { - assert VersionHelper.isNewerThan9_6('8.5') == false - } - - @Test - void 'versao 9_5 deve retornar false'() { - assert VersionHelper.isNewerThan9_6('9.5') == false - } - - @Test - void 'versao 9_6 deve retornar true'() { - assert VersionHelper.isNewerThan9_6('9.6') == true - } - - @Test - void 'versao 9_7 deve retornar true'() { - assert VersionHelper.isNewerThan9_6('9.7') == true - } - - @Test - void 'versao 10_3 deve retornar true'() { - assert VersionHelper.isNewerThan9_6('10.3') == true - } + @Test + void 'versao 8_5 deve retornar false'() { + assert !VersionHelper.isNewerThan9_6('8.5') + } + + @Test + void 'versao 9_5 deve retornar false'() { + assert !VersionHelper.isNewerThan9_6('9.5') + } + + @Test + void 'versao 9_6 deve retornar true'() { + assert VersionHelper.isNewerThan9_6('9.6') + } + + @Test + void 'versao 9_7 deve retornar true'() { + assert VersionHelper.isNewerThan9_6('9.7') + } + + @Test + void 'versao 10_3 deve retornar true'() { + assert VersionHelper.isNewerThan9_6('10.3') + } + + @Test + void 'versao 9_5 com string do SO deve retornar false'() { + assert !VersionHelper.isNewerThan9_6('9.5 (Debian 12.2-2.pgdg100+1)') + } + + @Test + void 'versao 9_6 com string do SO deve retornar true'() { + assert VersionHelper.isNewerThan9_6('9.6 (Debian 12.2-2.pgdg100+1)') + } }
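
For reviewers who want to see what the new row level security support emits end to end, here is a minimal, hypothetical Groovy sketch. The table, policy, role, and expressions below are invented for illustration; only the classes and methods come from this patch.

    import br.com.bluesoft.bee.model.postgres.PostgresPolicy
    import br.com.bluesoft.bee.model.postgres.PostgresTable
    import br.com.bluesoft.bee.schema.BeePostgresSchemaCreator

    // Hypothetical metadata: one table with RLS enabled and a single permissive policy.
    def policy = new PostgresPolicy(name: 'tenant_isolation', cmd: 'ALL', permissive: true,
            roles: ['app_user'] as String[],
            usingExpression: "tenant_id = current_setting('app.tenant_id')::int")
    def table = new PostgresTable(name: 'orders', rowSecurity: true)
    table.policies[policy.name] = policy

    def creator = new BeePostgresSchemaCreator()
    print creator.createRowSecurity(table)
    // alter table orders enable row level security;
    print creator.createPolicy(table, policy)
    // create policy tenant_isolation on orders as permissive for ALL to app_user using (tenant_id = current_setting('app.tenant_id')::int);

Note that createPolicy only appends a with check clause when checkExpression is set, mirroring PostgreSQL's DDL, where USING filters visible rows and WITH CHECK constrains written rows.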
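
The validation side can be exercised the same way. This sketch, again with invented names, compares a database PostgresTable against reference metadata and prints one message per divergence (permissive flag, missing role, additional role):

    import br.com.bluesoft.bee.model.postgres.PostgresPolicy
    import br.com.bluesoft.bee.model.postgres.PostgresTable

    // Reference metadata expects a permissive policy granted to 'reporting'.
    def metadataTable = new PostgresTable(name: 'orders', rowSecurity: true)
    metadataTable.policies.p_read = new PostgresPolicy(name: 'p_read', cmd: 'SELECT',
            permissive: true, roles: ['reporting'] as String[], usingExpression: 'true')

    // The live database has the same policy, but restrictive and granted to 'ops'.
    def databaseTable = new PostgresTable(name: 'orders', rowSecurity: true)
    databaseTable.policies.p_read = new PostgresPolicy(name: 'p_read', cmd: 'SELECT',
            permissive: false, roles: ['ops'] as String[], usingExpression: 'true')

    databaseTable.validateWithMetadata(metadataTable).each { println it.message }
    // The policy p_read of the table orders should be permissive
    // The policy p_read of the table orders is missing the role reporting.
    // The policy p_read of the table orders has the additional role ops.

Because PostgresTable extends Table, the existing column, index, and constraint validations still run through super.validateWithMetadata before the policy checks.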