forked from faisaltheparttimecoder/mock-data
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: postgres.go
More file actions
349 lines (285 loc) · 9.63 KB
/
postgres.go
File metadata and controls
349 lines (285 loc) · 9.63 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
package main
import (
"database/sql"
"fmt"
"strings"
"github.com/lib/pq"
"github.com/pivotal/mock-data/core"
"github.com/pivotal/mock-data/db/postgres"
)
// Global Variables shared across the postgres mocking routines.
var (
	// skippedTab collects the names of tables skipped because they
	// contain unsupported datatypes; reported to the user at the end.
	skippedTab []string
	// db is the shared database connection handle, set by dbConn().
	db *sql.DB
	// stmt holds a prepared statement handle used while loading data.
	stmt *sql.Stmt
)
// dbConn establishes the database connection using the global Connector
// settings and stores the handle in the package-level "db" variable.
// Returns an error if the DSN is rejected or the server is unreachable.
func dbConn() error {
	dbconn, err := sql.Open(DBEngine, fmt.Sprintf("user=%v password=%v host=%v port=%v dbname=%v sslmode=disable", Connector.Username, Connector.Password, Connector.Host, Connector.Port, Connector.Db))
	if err != nil {
		return fmt.Errorf("cannot establish a database connection: %v", err)
	}
	// sql.Open is lazy and does not actually contact the server; Ping
	// forces a round trip so connection failures surface here rather
	// than on the first query.
	if err = dbconn.Ping(); err != nil {
		return fmt.Errorf("cannot establish a database connection: %v", err)
	}
	db = dbconn
	return nil
}
// dbVersion verifies that queries can be run against the database by
// fetching and logging the server version of the configured DB engine.
// Returns an error if the version query fails.
func dbVersion() error {
	log.Infof("Obtaining the version of the DB Engine: \"%s\"", Connector.Engine)
	var version string
	// Exactly one row is expected, so QueryRow avoids the leaked
	// *sql.Rows handle the previous Query/Next loop never closed.
	if err := db.QueryRow(postgres.PGVersion()).Scan(&version); err != nil {
		return fmt.Errorf("cannot extract version, error from the database: %v", err)
	}
	// Print the version of the database on the logs
	log.Infof("Version of the DB Engine \"%s\": %v", Connector.Engine, version)
	return nil
}
// dbExtractTables returns the names of all the tables in the connected
// database, using the engine-appropriate catalog query.
func dbExtractTables() ([]string, error) {
	log.Infof("Extracting all the tables in the database: \"%s\"", Connector.Db)
	var tableString []string
	var rows *sql.Rows
	var err error
	// Obtain all the tables in the database
	if Connector.Engine == "postgres" { // Use postgres specific query
		rows, err = db.Query(postgres.PGAllTablesQry1())
	} else { // Use greenplum, hdb query to extract the columns
		rows, err = db.Query(postgres.PGAllTablesQry2())
	}
	if err != nil {
		return tableString, fmt.Errorf("cannot extract all the tables, error from the database: %v", err)
	}
	// Release the rows handle even on early return (was previously leaked).
	defer rows.Close()
	// Loop through the rows and store the table names.
	for rows.Next() {
		var table string
		if err = rows.Scan(&table); err != nil {
			return tableString, fmt.Errorf("error extracting the rows of the list of tables: %v", err)
		}
		tableString = append(tableString, table)
	}
	// Surface any error encountered during row iteration.
	if err = rows.Err(); err != nil {
		return tableString, fmt.Errorf("error iterating the list of tables: %v", err)
	}
	return tableString, nil
}
// dbColDataType checks that each table named in Connector.Table exists
// and collects its column names and datatypes, skipping sequence-backed
// columns. Tables that end up with no mockable columns are omitted.
func dbColDataType() ([]Table, error) {
	log.Info("Checking for the existence of the table provided to the application, if exist extract all the column and datatype information")
	var table []Table
	// Loop through the table list provided and collect the columns and datatypes
	for _, v := range strings.Split(Connector.Table, ",") {
		tab, err := dbTableColumns(v)
		if err != nil {
			return table, err
		}
		// If there is no columns, then ignore that table
		if len(tab.columns) > 0 {
			table = append(table, tab)
		}
	}
	return table, nil
}

// dbTableColumns fetches the column name/datatype pairs for one table,
// ignoring sequence-driven columns (they are auto-populated by the DB).
// Extracted as a helper so the rows handle is closed per table — the
// previous inline loop leaked one *sql.Rows per table.
func dbTableColumns(tabname string) (Table, error) {
	var tab Table
	var rows *sql.Rows
	var err error
	if DBEngine == "postgres" { // Use postgres specific query
		rows, err = db.Query(postgres.PGColumnQry1(tabname))
	} else { // Use greenplum, hdb query to extract the columns
		rows, err = db.Query(postgres.PGColumnQry2(tabname))
	}
	if err != nil {
		return tab, fmt.Errorf("cannot extract the column info, error from the database: %v", err)
	}
	defer rows.Close()
	for rows.Next() {
		var col, datatype, seqCol string
		// Scan and store the rows
		if err = rows.Scan(&col, &datatype, &seqCol); err != nil {
			return tab, fmt.Errorf("error extracting the rows of the list of columns: %v", err)
		}
		// Ignore columns with sequence, since its auto loaded no need to randomize
		if !strings.HasPrefix(seqCol, "nextval") {
			tab.tabname = tabname
			if tab.columns == nil {
				tab.columns = make(map[string]string)
			}
			tab.columns[col] = datatype
		}
	}
	if err = rows.Err(); err != nil {
		return tab, fmt.Errorf("error iterating the list of columns: %v", err)
	}
	return tab, nil
}
// extractor walks the extracted table metadata and drives the mock-data
// load for each table.
//
// Before anything is loaded — unless the user opted out — every PK, UK,
// FK and CK constraint is backed up as DDL: dropping constraints with
// CASCADE later makes it unclear exactly what was removed, so replaying
// the backup script at the end is the safe recovery path ("already
// exists" failures are harmless; missing constraints get recreated).
func extractor(tableInfo []Table) error {
	if !Connector.IgnoreConstraints {
		log.Infof("Backup up all the constraint in the database: \"%s\"", Connector.Db)
		if err := postgres.BackupDDL(db, ExecutionTimestamp); err != nil {
			return err
		}
	}
	// Mock data table by table, driven by each table's column datatypes.
	log.Info("Separating the input to tables, columns & datatypes and attempting to mock data to the table")
	for _, t := range tableInfo {
		if err := splitter(t.columns, t.tabname); err != nil {
			return err
		}
	}
	return nil
}
// splitter prepares one table for loading: it flattens the column map,
// drops the table's constraints (recording them for later recreation),
// resolves the schema, and hands off to commitData under a progress bar.
func splitter(columns map[string]string, tabname string) error {
	// Flatten the column map into parallel name/datatype slices.
	colNames := make([]string, 0, len(columns))
	colTypes := make([]string, 0, len(columns))
	for name, dtype := range columns {
		colNames = append(colNames, name)
		colTypes = append(colTypes, dtype)
	}
	// Strip the table's constraints (they are stored so the conditions
	// can be re-validated when they are re-enabled afterwards).
	if err := postgres.RemoveConstraints(db, tabname); err != nil {
		return err
	}
	// Resolve "schema.table"; an unqualified name falls back to the
	// default postgres schema "public".
	schema := "public"
	if parts := strings.Split(tabname, "."); len(parts) > 1 {
		schema = parts[0]
		tabname = parts[1]
	}
	// Show progress while rows are generated and committed.
	core.ProgressBar(Connector.RowCount, "(Mocking Table: "+schema+"."+tabname+")")
	if err := commitData(schema, tabname, colNames, colTypes); err != nil {
		return err
	}
	core.CloseProgressBar()
	return nil
}
// commitData opens a transaction and streams Connector.RowCount mocked
// rows into schema.tabname via COPY. colkey lists the column names and
// dtkeys the matching datatypes used to generate each column's value.
// If an unsupported datatype is found, the table is skipped (recorded in
// skippedTab) and whatever was generated so far is committed.
func commitData(schema, tabname string, colkey, dtkeys []string) error {
	// Start a transaction
	txn, err := db.Begin()
	if err != nil {
		return fmt.Errorf("Error in starting a transaction: %v", err)
	}
	// Roll back on any error path; after a successful Commit this is a
	// harmless no-op (it returns sql.ErrTxDone). Previously error
	// returns left the transaction open.
	defer txn.Rollback()
	// Prepare the copy statement. A local handle is used instead of the
	// shared package-level stmt so nested/concurrent loads cannot clash.
	copyStmt, err := txn.Prepare(pq.CopyInSchema(schema, tabname, colkey...))
	if err != nil {
		return fmt.Errorf("Error in preparing the transaction statement: %v", err)
	}
	// Iterate through connector row count and build data for each datatype
DataTypePickerLoop: // Label the loop to break, if there is a datatype that we don't support
	for i := 0; i < Connector.RowCount; i++ {
		// One generated value per column, in colkey order.
		var data []interface{}
		for _, v := range dtkeys {
			dataoutput, err := core.BuildData(v)
			if err != nil {
				if strings.HasPrefix(fmt.Sprint(err), "Unsupported datatypes found") {
					log.Errorf("Skipping table \"%s\" due to error \"%v\"", tabname, err)
					skippedTab = append(skippedTab, tabname)
					break DataTypePickerLoop // break the loop
				}
				return err
			}
			data = append(data, dataoutput)
		}
		// Execute the statement
		if _, err = copyStmt.Exec(data...); err != nil {
			return err
		}
		// Increment progress bar
		core.IncrementBar()
	}
	// Close the statement
	if err = copyStmt.Close(); err != nil {
		return fmt.Errorf("Error in closing the transaction statement: %v", err)
	}
	// Commit the transaction
	if err = txn.Commit(); err != nil {
		return fmt.Errorf("Error in committing the transaction statement: %v", err)
	}
	return nil
}
// MockPostgres is the main postgres data mocker entry point: it connects
// to the database, discovers the target tables and their columns,
// generates and loads mock data, and finally restores constraints and
// reports any tables that had to be skipped.
func MockPostgres() error {
	var table []Table
	log.Infof("Attempting to establish a connection to the %s database", DBEngine)
	// Establishing a connection to the database
	err := dbConn()
	if err != nil {
		return err
	}
	// Close the connection when this function returns. Registered right
	// after the connection is made so early error returns no longer leak
	// it (previously the defer sat at the very bottom of the function).
	defer db.Close()
	// Check if we can query the database and get the version of the database in the meantime
	err = dbVersion()
	if err != nil {
		return err
	}
	// If the request is to load all table then, extract all tables
	// and pass to the connector table argument.
	if Connector.AllTables {
		tableList, err := dbExtractTables()
		if err != nil {
			return err
		}
		Connector.Table = strings.Join(tableList, ",")
	}
	// Extract the columns and datatypes from the table defined on the connector table.
	if Connector.Table != "" { // if there are only tables in the connector table variables
		table, err = dbColDataType()
		if err != nil {
			return err
		}
	}
	// Build data for all the column and datatypes & then commit data
	if len(table) > 0 { // if there are tables found, then proceed
		err = extractor(table)
		if err != nil {
			// TODO: need to fix constraints here as well.
			log.Error("Unexpected error encountered by MockD..")
			return err
		}
		// Recreate all the constraints of the table unless user wants to ignore it
		if !Connector.IgnoreConstraints {
			err = postgres.FixConstraints(db, ExecutionTimestamp, Connector.Debug)
			if err != nil {
				backupFiles, _ := core.ListFile(".", "*_"+ExecutionTimestamp+".sql")
				log.Errorf("Some constraints creation failed (highlighted above), Will need your intervention to fix those constraints")
				log.Errorf("All the DDL are saved in the files: \n%v", strings.Join(backupFiles, "\n"))
				return err
			}
		}
	} else { // We didn't obtain any table from the database ( eg.s fresh DB's or User gave a view name etc )
		log.Warning("No table's available to load the mock data, closing the program")
	}
	// If there is tables that are skipped, report to the user.
	if len(skippedTab) > 0 {
		log.Warning("These tables (below) are skipped, since it contain unsupported datatypes")
		log.Warningf("%s", strings.Join(skippedTab, ","))
	}
	return nil
}