diff --git a/cli/commands/migrate_create.go b/cli/commands/migrate_create.go
index 1ffa533a0de0b..0b16952f6a66e 100644
--- a/cli/commands/migrate_create.go
+++ b/cli/commands/migrate_create.go
@@ -37,7 +37,9 @@ type migrateCreateOptions struct {
 func (o *migrateCreateOptions) run() error {
 	timestamp := getTime()
-	err := mig.CreateCmd(o.EC.MigrationDir, timestamp, o.name)
+	createOptions := mig.New(timestamp, o.name, o.EC.MigrationDir)
+	createOptions.IsCMD = true
+	err := createOptions.Create()
 	if err != nil {
 		return errors.Wrap(err, "error creating migration files")
 	}
diff --git a/cli/migrate/api/migrate.go b/cli/migrate/api/migrate.go
index 2e06c1328c8fe..82ea5dff1fc29 100644
--- a/cli/migrate/api/migrate.go
+++ b/cli/migrate/api/migrate.go
@@ -27,6 +27,7 @@ type Response struct {
 type Request struct {
 	Name string        `json:"name"`
 	Up   []interface{} `json:"up"`
+	Down []interface{} `json:"down"`
 }
 
 func MigrateAPI(c *gin.Context) {
@@ -79,7 +80,19 @@ func MigrateAPI(c *gin.Context) {
 	// Convert to Millisecond
 	timestamp := startTime.UnixNano() / int64(time.Millisecond)
 
-	err = cmd.CreateCmd(sourceURL.Path, timestamp, request.Name, request.Up)
+	createOptions := cmd.New(timestamp, request.Name, sourceURL.Path)
+	err = createOptions.SetMetaUp(request.Up)
+	if err != nil {
+		c.JSON(http.StatusInternalServerError, &Response{Code: "create_file_error", Message: err.Error()})
+		return
+	}
+	err = createOptions.SetMetaDown(request.Down)
+	if err != nil {
+		c.JSON(http.StatusInternalServerError, &Response{Code: "create_file_error", Message: err.Error()})
+		return
+	}
+
+	err = createOptions.Create()
 	if err != nil {
 		c.JSON(http.StatusInternalServerError, &Response{Code: "create_file_error", Message: err.Error()})
 		return
@@ -88,7 +101,7 @@ func MigrateAPI(c *gin.Context) {
 	// Rescan file system
 	err = t.ReScan()
 	if err != nil {
-		deleteErr := cmd.DeleteCmd(sourceURL.Path, timestamp)
+		deleteErr := createOptions.Delete()
 		if deleteErr != nil {
 			c.JSON(http.StatusInternalServerError, &Response{Code: "delete_file_error", Message: deleteErr.Error()})
 			return
@@ -98,7 +111,7 @@ func MigrateAPI(c *gin.Context) {
 	}
 
 	if err = t.Migrate(uint64(timestamp), "up"); err != nil {
-		deleteErr := cmd.DeleteCmd(sourceURL.Path, timestamp)
+		deleteErr := createOptions.Delete()
 		if deleteErr != nil {
 			c.JSON(http.StatusInternalServerError, &Response{Code: "delete_file_error", Message: deleteErr.Error()})
 			return
diff --git a/cli/migrate/cmd/commands.go b/cli/migrate/cmd/commands.go
index 90fcc669d4906..08df60cbe6992 100644
--- a/cli/migrate/cmd/commands.go
+++ b/cli/migrate/cmd/commands.go
@@ -21,73 +21,92 @@ const (
 
 var ext = []string{sqlFile, yamlFile}
 
-func DeleteCmd(dir string, timestamp int64) error {
-	count := 0
+type CreateOptions struct {
+	Version   int64
+	Directory string
+	Name      string
+	IsCMD     bool
+	MetaUp    []byte
+	MetaDown  []byte
+	SQLUp     []byte
+	SQLDown   []byte
+}
+
+func New(version int64, name, directory string) *CreateOptions {
 	if runtime.GOOS == "windows" {
-		dir = strings.TrimPrefix(dir, "/")
+		directory = strings.TrimPrefix(directory, "/")
 	}
-	fileName := fmt.Sprintf("%v_", timestamp)
-	// scan directory
-	files, err := ioutil.ReadDir(dir)
+	return &CreateOptions{
+		Version:   version,
+		Directory: directory,
+		Name:      name,
+		MetaUp:    []byte(`[]`),
+		MetaDown:  []byte(`[]`),
+		SQLUp:     []byte{},
+		SQLDown:   []byte{},
+	}
+}
+
+func (c *CreateOptions) SetMetaUp(data interface{}) error {
+	t, err := json.Marshal(data)
+	if err != nil {
+		return err
+	}
+	yamlData, err := yaml.JSONToYAML(t)
 	if err != nil {
 		return err
 	}
+	c.MetaUp = yamlData
+	return nil
+}
 
-	for _, fi := range files {
-		if !fi.IsDir() {
-			if strings.HasPrefix(fi.Name(), fileName) {
-				base := filepath.Join(dir, fi.Name())
-				err = deleteFile(base)
-				if err != nil {
-					return err
-				}
-				count = count + 1
-			}
-		}
+func (c *CreateOptions) SetMetaDown(data interface{}) error {
+	t, err := json.Marshal(data)
+	if err != nil {
+		return err
 	}
-	if count == 0 {
-		return errors.New("Cannot find any migration file")
+	yamlData, err := yaml.JSONToYAML(t)
+	if err != nil {
+		return err
 	}
+	c.MetaDown = yamlData
 	return nil
 }
 
-func CreateCmd(dir string, timestamp int64, name string, options ...interface{}) error {
-	if runtime.GOOS == "windows" {
-		dir = strings.TrimPrefix(dir, "/")
+func (c *CreateOptions) SetSQLUp(data string) error {
+	c.SQLUp = []byte(data)
+	return nil
+}
+
+func (c *CreateOptions) SetSQLDown(data string) error {
+	c.SQLDown = []byte(data)
+	return nil
+}
+
+func (c *CreateOptions) Create() error {
+	fileName := fmt.Sprintf("%v_%v.", c.Version, c.Name)
+	base := filepath.Join(c.Directory, fileName)
+	err := os.MkdirAll(c.Directory, os.ModePerm)
+	if err != nil {
+		return err
 	}
-	fileName := fmt.Sprintf("%v_%v.", timestamp, name)
-	base := filepath.Join(dir, fileName)
-	err := os.MkdirAll(dir, os.ModePerm)
+	// Create MetaUp
+	err = createFile(base+"up.yaml", c.MetaUp)
 	if err != nil {
 		return err
 	}
-
-	// If len(options) == 0, cmd else, api
-	if len(options) == 0 {
-		return createForCMD(base)
+	// Create MetaDown
+	err = createFile(base+"down.yaml", c.MetaDown)
+	if err != nil {
+		return err
 	}
-	return createForAPI(base, options[0])
-}
 
-func createForCMD(base string) error {
-	var data []byte
-	var err error
-	for _, v := range ext {
-		switch v {
-		case sqlFile:
-			data = []byte{}
-		case yamlFile:
-			bytes := []byte(`[]`)
-			data, err = yaml.JSONToYAML(bytes)
-			if err != nil {
-				return err
-			}
-		}
-		err = createFile(base+"up"+v, data)
+	if c.IsCMD {
+		err = createFile(base+"up.sql", c.SQLUp)
 		if err != nil {
 			return err
 		}
-		err = createFile(base+"down"+v, data)
+		err = createFile(base+"down.sql", c.SQLDown)
 		if err != nil {
 			return err
 		}
@@ -95,29 +114,30 @@ func createForCMD(base string) error {
 	return nil
 }
 
-func createForAPI(base string, options interface{}) error {
-	var data []byte
-	for _, v := range ext {
-		switch v {
-		// Only yaml file for api-console
-		case yamlFile:
-			// Up file
-			t, err := json.Marshal(options)
-			if err != nil {
-				return err
-			}
-
-			data, err = yaml.JSONToYAML(t)
-			if err != nil {
-				return err
-			}
+func (c *CreateOptions) Delete() error {
+	count := 0
+	fileName := fmt.Sprintf("%v_", c.Version)
+	// scan directory
+	files, err := ioutil.ReadDir(c.Directory)
+	if err != nil {
+		return err
+	}
 
-			err = createFile(base+"up"+v, data)
-			if err != nil {
-				return err
+	for _, fi := range files {
+		if !fi.IsDir() {
+			if strings.HasPrefix(fi.Name(), fileName) {
+				base := filepath.Join(c.Directory, fi.Name())
+				err = deleteFile(base)
+				if err != nil {
+					return err
+				}
+				count = count + 1
 			}
 		}
 	}
+	if count == 0 {
+		return errors.New("Cannot find any migration file")
+	}
 	return nil
 }
diff --git a/cli/migrate/migrate.go b/cli/migrate/migrate.go
index a343e7632b8c3..f306b977ce6b8 100644
--- a/cli/migrate/migrate.go
+++ b/cli/migrate/migrate.go
@@ -859,7 +859,7 @@ func (m *Migrate) versionUpExists(version uint64) error {
 	return os.ErrNotExist
 }
 
-// versionUpExists checks the source if either the up or down migration for
+// versionDownExists checks the source if either the up or down migration for
 // the specified migration version exists.
 func (m *Migrate) versionDownExists(version uint64) error {
 	// try up migration first
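For reference, a minimal sketch of how a caller drives the new CreateOptions builder from cli/migrate/cmd/commands.go. It combines the CLI and API call sites shown above; the helper name, the package alias cmd (as used in cli/migrate/api/migrate.go), and the time-derived version are illustrative only, and imports are omitted.

// Hypothetical helper, not part of the diff; mirrors migrate_create.go and api/migrate.go.
func createMigration(dir, name string, metaUp, metaDown []interface{}) error {
	// The millisecond timestamp doubles as the migration version, as in MigrateAPI.
	version := time.Now().UnixNano() / int64(time.Millisecond)

	createOptions := cmd.New(version, name, dir)
	createOptions.IsCMD = true // CLI path: also write empty <version>_<name>.up.sql / .down.sql stubs

	// API path: marshal the metadata queries into <version>_<name>.up.yaml / .down.yaml.
	if err := createOptions.SetMetaUp(metaUp); err != nil {
		return err
	}
	if err := createOptions.SetMetaDown(metaDown); err != nil {
		return err
	}
	if err := createOptions.Create(); err != nil {
		return err
	}
	// If a later step fails, the same value can remove everything it wrote:
	// return createOptions.Delete()
	return nil
}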
diff --git a/cli/migrate/source/file/file.go b/cli/migrate/source/file/file.go
index 0faee0bd162a3..a5082bba22427 100644
--- a/cli/migrate/source/file/file.go
+++ b/cli/migrate/source/file/file.go
@@ -74,7 +74,7 @@ func (f *File) Open(url string, logger *log.Logger) (source.Driver, error) {
 
 	for _, fi := range files {
 		if !fi.IsDir() {
-			m, err := source.DefaultParse(fi.Name())
+			m, err := source.DefaultParse(fi.Name(), p)
 			if err != nil {
 				continue // ignore files that we can't parse
 			}
diff --git a/cli/migrate/source/file/file_test.go b/cli/migrate/source/file/file_test.go
index cc3f8bb24c745..ddf1d328df099 100644
--- a/cli/migrate/source/file/file_test.go
+++ b/cli/migrate/source/file/file_test.go
@@ -22,16 +22,28 @@ func Test(t *testing.T) {
 	// write files that meet driver test requirements
 	mustWriteFile(t, tmpDir, "1_foobar.up.sql", "1 up")
 	mustWriteFile(t, tmpDir, "1_foobar.down.sql", "1 down")
-	mustWriteFile(t, tmpDir, "1_foobar.up.yaml", "1 metaup")
-	mustWriteFile(t, tmpDir, "1_foobar.down.yaml", "1 metadown")
+	mustWriteFile(t, tmpDir, "1_foobar.up.yaml", `- args:
+    name: test
+  type: add_existing_table_or_view
+`)
+	mustWriteFile(t, tmpDir, "1_foobar.down.yaml", `- args:
+    name: test
+  type: add_existing_table_or_view
+`)
 
 	mustWriteFile(t, tmpDir, "3_foobar.up.sql", "3 up")
 
-	mustWriteFile(t, tmpDir, "4_foobar.up.yaml", "4 metaup")
+	mustWriteFile(t, tmpDir, "4_foobar.up.yaml", `- args:
+    name: test
+  type: add_existing_table_or_view
+`)
 
 	mustWriteFile(t, tmpDir, "5_foobar.down.sql", "5 down")
 
-	mustWriteFile(t, tmpDir, "6_foobar.down.yaml", "6 metadown")
+	mustWriteFile(t, tmpDir, "6_foobar.down.yaml", `- args:
+    name: test
+  type: add_existing_table_or_view
+`)
 
 	mustWriteFile(t, tmpDir, "8_foobar.up.sql", "7 up")
 	mustWriteFile(t, tmpDir, "8_foobar.down.sql", "7 down")
@@ -89,7 +101,7 @@ func TestOpenWithRelativePath(t *testing.T) {
 		t.Fatal(err)
 	}
 
-	mustWriteFile(t, filepath.Join(tmpDir, "foo"), "1_foobar.up.sql", "")
+	mustWriteFile(t, filepath.Join(tmpDir, "foo"), "1_foobar.up.sql", "test")
 
 	logger, _ := test.NewNullLogger()
 	f := &File{}
@@ -140,8 +152,8 @@ func TestOpenWithDuplicateVersion(t *testing.T) {
 	}
 	defer os.RemoveAll(tmpDir)
 
-	mustWriteFile(t, tmpDir, "1_foo.up.sql", "") // 1 up
-	mustWriteFile(t, tmpDir, "1_bar.up.sql", "") // 1 up
+	mustWriteFile(t, tmpDir, "1_foo.up.sql", "test") // 1 up
+	mustWriteFile(t, tmpDir, "1_bar.up.sql", "test") // 1 up
 
 	logger, _ := test.NewNullLogger()
 	f := &File{}
diff --git a/cli/migrate/source/parse.go b/cli/migrate/source/parse.go
index 1b52a5da50fd5..df31556c6f9b7 100644
--- a/cli/migrate/source/parse.go
+++ b/cli/migrate/source/parse.go
@@ -3,8 +3,12 @@ package source
 import (
 	"errors"
 	"fmt"
+	"io/ioutil"
+	"path/filepath"
 	"regexp"
 	"strconv"
+
+	yaml "github.com/ghodss/yaml"
 )
 
 var (
@@ -22,7 +26,7 @@ var (
 var Regex = regexp.MustCompile(`^([0-9]+)_(.*)\.(` + string(Down) + `|` + string(Up) + `)\.(.*)$`)
 
 // Parse returns Migration for matching Regex pattern.
-func Parse(raw string) (*Migration, error) {
+func Parse(raw string, directory string) (*Migration, error) {
 	var direction Direction
 	m := Regex.FindStringSubmatch(raw)
 	if len(m) == 5 {
@@ -40,6 +44,18 @@ func Parse(raw string) (*Migration, error) {
 		} else {
 			return nil, errors.New("Invalid Direction type")
 		}
+		data, err := ioutil.ReadFile(filepath.Join(directory, raw))
+		if err != nil {
+			return nil, err
+		}
+		var t []interface{}
+		err = yaml.Unmarshal(data, &t)
+		if err != nil {
+			return nil, err
+		}
+		if len(t) == 0 {
+			return nil, errors.New("Empty metadata file")
+		}
 	} else if m[4] == "sql" {
 		if m[3] == "up" {
 			direction = Up
@@ -48,6 +64,13 @@ func Parse(raw string) (*Migration, error) {
 		} else {
 			return nil, errors.New("Invalid Direction type")
 		}
+		data, err := ioutil.ReadFile(filepath.Join(directory, raw))
+		if err != nil {
+			return nil, err
+		}
+		if string(data[:]) == "" {
+			return nil, errors.New("Empty SQL file")
+		}
 	}
 
 	return &Migration{
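The stricter source.Parse above now also inspects the migration file on disk. A rough sketch of the resulting behaviour, assuming the updated signature; the file names, directory path, and logging are only illustrative and imports are omitted.

// Hypothetical calls, not part of the diff.
// An empty .sql migration is rejected instead of being registered:
if _, err := source.Parse("1548058197356_add_users.up.sql", "/project/migrations"); err != nil {
	log.Println(err) // "Empty SQL file" when the file exists but has no content
}
// A metadata migration whose YAML unmarshals to an empty list ("[]") is rejected too:
if _, err := source.Parse("1548058197356_add_users.up.yaml", "/project/migrations"); err != nil {
	log.Println(err) // "Empty metadata file"
}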
+ '"' + state.tableName.trim() + '"'; const downQuery = { type: 'bulk', args: [ { type: 'run_sql', - args: { 'sql': sqlDropTable } - } - ] - }; - */ - const schemaMigration = { - name: migrationName, - up: upQuery.args, - // down: downQuery.args, - down: [], + args: { sql: sqlDropTable }, + }, + ], }; - let finalReqBody = schemaMigration.up; - if (globals.consoleMode === 'hasuradb') { - finalReqBody = schemaMigration.up; - } const requestMsg = 'Creating table...'; const successMsg = 'Table Created'; const errorMsg = 'Create table failed'; @@ -213,8 +201,8 @@ const createTableSql = () => { makeMigrationCall( dispatch, getState, - finalReqBody, - [], + upQuery.args, + downQuery.args, migrationName, customOnSuccess, customOnError, diff --git a/console/src/components/Services/Data/Add/AddExistingTableViewActions.js b/console/src/components/Services/Data/Add/AddExistingTableViewActions.js index d9034c53ba11e..5e69126d5b909 100644 --- a/console/src/components/Services/Data/Add/AddExistingTableViewActions.js +++ b/console/src/components/Services/Data/Add/AddExistingTableViewActions.js @@ -8,8 +8,6 @@ import { import { showSuccessNotification } from '../Notification'; import { getAllUnTrackedRelations } from '../TableRelationships/Actions'; -import globals from '../../../../Globals'; - const SET_DEFAULTS = 'AddExistingTable/SET_DEFAULTS'; const SET_TABLENAME = 'AddExistingTable/SET_TABLENAME'; const MAKING_REQUEST = 'AddExistingTable/MAKING_REQUEST'; @@ -26,13 +24,22 @@ const addExistingTableSql = () => { const state = getState().addTable.existingTableView; const currentSchema = getState().tables.currentSchema; - const requestBody = { + const requestBodyUp = { type: 'add_existing_table_or_view', args: { name: state.tableName.trim(), schema: currentSchema, }, }; + const requestBodyDown = { + type: 'untrack_table', + args: { + table: { + name: state.tableName.trim(), + schema: currentSchema, + }, + }, + }; const migrationName = 'add_existing_table_or_view_' + currentSchema + @@ -40,18 +47,13 @@ const addExistingTableSql = () => { state.tableName.trim(); const upQuery = { type: 'bulk', - args: [requestBody], + args: [requestBodyUp], }; - - const schemaMigration = { - name: migrationName, - up: upQuery.args, - down: [], + const downQuery = { + type: 'bulk', + args: [requestBodyDown], }; - let finalReqBody = schemaMigration.up; - if (globals.consoleMode === 'hasuradb') { - finalReqBody = schemaMigration.up; - } + const requestMsg = 'Adding existing table/view...'; const successMsg = 'Existing table/view added'; const errorMsg = 'Adding existing table/view failed'; @@ -93,8 +95,8 @@ const addExistingTableSql = () => { makeMigrationCall( dispatch, getState, - finalReqBody, - [], + upQuery.args, + downQuery.args, migrationName, customOnSuccess, customOnError, @@ -111,33 +113,38 @@ const addAllUntrackedTablesSql = tableList => { dispatch({ type: MAKING_REQUEST }); dispatch(showSuccessNotification('Existing table/view added!')); - const bulkQuery = []; + const bulkQueryUp = []; + const bulkQueryDown = []; for (let i = 0; i < tableList.length; i++) { if (tableList[i].table_name !== 'schema_migrations') { - bulkQuery.push({ + bulkQueryUp.push({ type: 'add_existing_table_or_view', args: { name: tableList[i].table_name, schema: currentSchema, }, }); + bulkQueryDown.push({ + type: 'untrack_table', + args: { + table: { + name: tableList[i].table_name, + schema: currentSchema, + }, + }, + }); } } const migrationName = 'add_all_existing_table_or_view_' + currentSchema; const upQuery = { type: 'bulk', - args: 
bulkQuery, + args: bulkQueryUp, }; - - const schemaMigration = { - name: migrationName, - up: upQuery.args, - down: [], + const downQuery = { + type: 'bulk', + args: bulkQueryDown, }; - let finalReqBody = schemaMigration.up; - if (globals.consoleMode === 'hasuradb') { - finalReqBody = schemaMigration.up; - } + const requestMsg = 'Adding existing table/view...'; const successMsg = 'Existing table/view added'; const errorMsg = 'Adding existing table/view failed'; @@ -146,7 +153,10 @@ const addAllUntrackedTablesSql = tableList => { dispatch({ type: REQUEST_SUCCESS }); dispatch(loadSchema()).then(() => { const allSchemas = getState().tables.allSchemas; - const untrackedRelations = getAllUnTrackedRelations(allSchemas); + const untrackedRelations = getAllUnTrackedRelations( + allSchemas, + currentSchema + ).bulkRelTrack; dispatch({ type: LOAD_UNTRACKED_RELATIONS, untrackedRelations: untrackedRelations, @@ -162,8 +172,8 @@ const addAllUntrackedTablesSql = tableList => { makeMigrationCall( dispatch, getState, - finalReqBody, - [], + upQuery.args, + downQuery.args, migrationName, customOnSuccess, customOnError, diff --git a/console/src/components/Services/Data/DataActions.js b/console/src/components/Services/Data/DataActions.js index 48c1a06ac683d..3c50fed3a240b 100644 --- a/console/src/components/Services/Data/DataActions.js +++ b/console/src/components/Services/Data/DataActions.js @@ -130,7 +130,7 @@ const loadUntrackedRelations = () => (dispatch, getState) => { const untrackedRelations = getAllUnTrackedRelations( getState().tables.allSchemas, getState().tables.currentSchema - ); + ).bulkRelTrack; dispatch({ type: LOAD_UNTRACKED_RELATIONS, untrackedRelations, diff --git a/console/src/components/Services/Data/Schema/Schema.js b/console/src/components/Services/Data/Schema/Schema.js index 84d17723fb305..76842d4374799 100644 --- a/console/src/components/Services/Data/Schema/Schema.js +++ b/console/src/components/Services/Data/Schema/Schema.js @@ -17,6 +17,7 @@ import { import { loadSchema, loadUntrackedSchema, + loadUntrackedRelations, fetchSchemaList, LOAD_UNTRACKED_RELATIONS, UPDATE_CURRENT_SCHEMA, @@ -37,7 +38,7 @@ class Schema extends Component { const untrackedRelations = getAllUnTrackedRelations( this.props.schema, this.props.currentSchema - ); + ).bulkRelTrack; this.props.dispatch({ type: LOAD_UNTRACKED_RELATIONS, untrackedRelations, @@ -48,7 +49,7 @@ class Schema extends Component { const untrackedRelations = getAllUnTrackedRelations( this.props.schema, this.props.currentSchema - ); + ).bulkRelTrack; this.props.dispatch({ type: LOAD_UNTRACKED_RELATIONS, untrackedRelations, @@ -73,6 +74,7 @@ class Schema extends Component { dispatch({ type: UPDATE_CURRENT_SCHEMA, currentSchema: updatedSchema }), dispatch(loadSchema()), dispatch(loadUntrackedSchema()), + dispatch(loadUntrackedRelations()), ]); }; diff --git a/console/src/components/Services/Data/TableModify/ModifyActions.js b/console/src/components/Services/Data/TableModify/ModifyActions.js index 17ac145403380..314d363a74b1e 100644 --- a/console/src/components/Services/Data/TableModify/ModifyActions.js +++ b/console/src/components/Services/Data/TableModify/ModifyActions.js @@ -83,17 +83,26 @@ const deleteTableSql = tableName => { const untrackTableSql = tableName => { return (dispatch, getState) => { const currentSchema = getState().tables.currentSchema; - const sqlUpQueries = [ + const upQueries = [ { type: 'untrack_table', args: { table: { - name: tableName, + name: tableName.trim(), schema: currentSchema, }, }, }, ]; + const downQueries = [ 
+ { + type: 'add_existing_table_or_view', + args: { + name: tableName.trim(), + schema: currentSchema, + }, + }, + ]; // apply migrations const migrationName = 'untrack_table_' + currentSchema + '_' + tableName; @@ -104,7 +113,10 @@ const untrackTableSql = tableName => { const customOnSuccess = () => { const allSchemas = getState().tables.allSchemas; - const untrackedRelations = getAllUnTrackedRelations(allSchemas); + const untrackedRelations = getAllUnTrackedRelations( + allSchemas, + currentSchema + ).bulkRelTrack; dispatch({ type: LOAD_UNTRACKED_RELATIONS, untrackedRelations: untrackedRelations, @@ -118,8 +130,8 @@ const untrackTableSql = tableName => { makeMigrationCall( dispatch, getState, - sqlUpQueries, - [], + upQueries, + downQueries, migrationName, customOnSuccess, customOnError, @@ -363,15 +375,25 @@ const addColSql = ( sql: runSqlQueryUp, }, }); - /* - const runSqlQueryDown = 'ALTER TABLE ' + '"' + tableName + '"' + ' DROP COLUMN ' + '"' + colName + '"'; - const schemaChangesDown = [{ - type: 'run_sql', - args: { - 'sql': runSqlQueryDown - } - }]; - */ + const runSqlQueryDown = + 'ALTER TABLE ' + + currentSchema + + '.' + + '"' + + tableName + + '"' + + ' DROP COLUMN ' + + '"' + + colName + + '"'; + const schemaChangesDown = [ + { + type: 'run_sql', + args: { + sql: runSqlQueryDown, + }, + }, + ]; // Apply migrations const migrationName = @@ -395,7 +417,7 @@ const addColSql = ( dispatch, getState, schemaChangesUp, - [], + schemaChangesDown, migrationName, customOnSuccess, customOnError, @@ -437,19 +459,12 @@ const deleteConstraintSql = (tableName, cName) => { }, ]; - /* // pending - const schemaChangesDown = [{ - type: 'run_sql', - args: { - 'sql': dropContraintQuery - } - }]; - */ + const schemaChangesDown = []; // Apply migrations const migrationName = - 'alter_table_' + currentSchema + '_' + tableName + '_add_foreign_key'; + 'alter_table_' + currentSchema + '_' + tableName + '_drop_foreign_key'; const requestMsg = 'Deleting Constraint...'; const successMsg = 'Constraint deleted'; @@ -462,7 +477,7 @@ const deleteConstraintSql = (tableName, cName) => { dispatch, getState, schemaChangesUp, - [], + schemaChangesDown, migrationName, customOnSuccess, customOnError, @@ -515,10 +530,10 @@ const addFkSql = (tableName, isInsideEdit) => { } // ALTER TABLE