diff --git a/src/deploy/NVA_build/noobaa_init.sh b/src/deploy/NVA_build/noobaa_init.sh
index 6614327334..6cf95290ea 100755
--- a/src/deploy/NVA_build/noobaa_init.sh
+++ b/src/deploy/NVA_build/noobaa_init.sh
@@ -180,22 +180,12 @@ init_noobaa_agent() {
     run_internal_process node --unhandled-rejections=warn ./src/agent/agent_cli
 }
 
-migrate_dbs() {
-    fix_non_root_user
-
-    cd /root/node_modules/noobaa-core/
-    /usr/local/bin/node --unhandled-rejections=warn src/upgrade/migration_to_postgres.js
-}
-
 if [ "${RUN_INIT}" == "agent" ]
 then
     init_noobaa_agent
 elif [ "${RUN_INIT}" == "init_mongo" ]
 then
     prepare_mongo_pv
-elif [ "${RUN_INIT}" == "db_migrate" ]
-then
-    migrate_dbs
 elif [ "${RUN_INIT}" == "init_endpoint" ]
 then
     init_endpoint
diff --git a/src/test/unit_tests/test_migration_to_postgres.js b/src/test/unit_tests/test_migration_to_postgres.js
deleted file mode 100644
index 44a5bcf51d..0000000000
--- a/src/test/unit_tests/test_migration_to_postgres.js
+++ /dev/null
@@ -1,85 +0,0 @@
-/* Copyright (C) 2016 NooBaa */
-'use strict';
-
-// TODO: Should implement the test
-
-// const { PostgresClient } = require('../../util/postgres_client');
-const mongo_client = require('../../util/mongo_client');
-const { Pool } = require('pg');
-const _ = require('lodash');
-
-
-const POSTGRES_PARAMS = {
-    host: 'postgres',
-    port: 5432,
-    user: 'postgres',
-    password: 'postgres',
-    database: 'postgres'
-};
-
-const pool = new Pool(POSTGRES_PARAMS);
-
-
-
-// const _objects = PostgresClient.instance().define_table({
-//     name: 'objectmds',
-// });
-// const _multiparts = PostgresClient.instance().define_table({
-//     name: 'objectmultiparts',
-// });
-// const _parts = PostgresClient.instance().define_table({
-//     name: 'objectparts',
-// });
-// const _chunks = PostgresClient.instance().define_table({
-//     name: 'datachunks',
-// });
-// const _blocks = PostgresClient.instance().define_table({
-//     name: 'datablocks',
-// });
-// const _sequences = PostgresClient.instance().define_table({
-//     name: 'mdsequences',
-// });
-
-const batch_size = 100;
-
-
-async function migrate_collection(col_name) {
-    console.log(`DZDZ: migrating ${col_name}`);
-    const client = await pool.connect();
-    const col = mongo_client.instance().collection(col_name);
-    await client.query(`CREATE TABLE IF NOT EXISTS ${col_name} (_id char(24) PRIMARY KEY, data jsonb )`);
-    let done = false;
-    let marker;
-    let total = 0;
-    while (!done) {
-        const docs = await col.find({ _id: marker ? { $gt: marker } : undefined }, { limit: batch_size });
-        if (docs.length > 0) {
-            const rows = docs.map(doc => ({ _id: doc._id.toString(), data: doc }));
-            const values_str = _.times(docs.length, n => `($${(n * 2) + 1}, $${(n * 2) + 2})`).join(', ');
-            const values = _.flatMap(rows, row => _.map(row, k => k));
-            const text = `INSERT INTO ${col_name} (_id, data) VALUES ${values_str}`;
-            await client.query({ text, values });
-            total += docs.length;
-            console.log(`DZDZ: migrated ${total} documents to table ${col_name}`);
-            marker = docs[docs.length - 1]._id;
-        } else {
-            done = true;
-        }
-    }
-    console.log(`DZDZ: completed migration of ${total} documents to table ${col_name}`);
-}
-
-
-async function main() {
-    await mongo_client.instance().connect();
-    await migrate_collection('objectmds');
-    await migrate_collection('objectmultiparts');
-    await migrate_collection('objectparts');
-    await migrate_collection('datachunks');
-    await migrate_collection('datablocks');
-    await migrate_collection('mdsequences');
-    process.exit(0);
-}
-
-
-main();
diff --git a/src/test/unit_tests/test_postgres_upgrade.js b/src/test/unit_tests/test_postgres_upgrade.js
deleted file mode 100644
index 2f0eaa5b3d..0000000000
--- a/src/test/unit_tests/test_postgres_upgrade.js
+++ /dev/null
@@ -1,278 +0,0 @@
-/* Copyright (C) 2016 NooBaa */
-'use strict';
-
-// setup coretest first to prepare the env
-// const coretest = require('./coretest');
-const mocha = require('mocha');
-const { Migrator } = require('../../upgrade/migrator');
-const assert = require('assert');
-const EventEmitter = require('events').EventEmitter;
-const _ = require('lodash');
-const postgres_client = require('../../util/postgres_client');
-const wtf = require('wtfnode');
-
-
-// coretest.setup({ pools_to_create: [], db_name: 'upgradetest' });
-process.env.CORETEST = 'upgrade-test';
-
-const default_schema = {
-    id: 'default_schema',
-    type: 'object',
-    required: [
-        '_id',
-        'key1',
-        'key2',
-        'key3',
-    ],
-    properties: {
-        _id: { objectid: true },
-        key1: { type: 'string' },
-        key2: { type: 'boolean' },
-        key3: {
-            type: 'array',
-            items: {
-                type: 'string',
-                enum: ['val1', 'val2', 'val3', 'val4']
-            }
-        },
-        key4: {
-            type: 'object',
-            properties: {
-                key1: {
-                    type: 'number',
-                },
-                key2: {
-                    type: 'string',
-                },
-                when: {
-                    date: true
-                }
-            }
-        },
-        epoch: {
-            idate: true
-        }
-    }
-};
-
-class TestCollection {
-
-    /**
-     * @param {TestCollection} col
-     * @returns {nb.DBCollection}
-     */
-    static implements_interface(col) { return col; }
-
-    constructor(col, size) {
-        TestCollection.implements_interface(this);
-        this.schema = col.schema;
-        this.name = col.name;
-        this.db_indexes = col.db_indexes;
-        this.size = size;
-        this.data = _.times(this.size, i => ({
-            _id: i + 1,
-            key1: 'value1-' + i,
-            key2: Boolean(i % 2),
-            key3: _.times(i % 10, j => 'val' + j),
-            key4: {
-                key1: i,
-                key2: 'val2-' + i,
-                when: new Date(),
-            },
-            epoch: Date.now(),
-        }));
-    }
-
-    async find(query, options = {}) {
-        const start = (query && query._id && query._id.$gt) || 0;
-        const end = options.limit ? start + options.limit : this.size;
-        return this.data.slice(start, end);
-    }
-
-    async countDocuments(query) {
-        if (_.isEmpty(query)) return this.size;
-    }
-}
-
-class TestClient extends EventEmitter {
-    /**
-     * @param {TestClient} client
-     * @returns {nb.DBClient}
-     */
-    static implements_interface(client) { return client; }
-
-    constructor(size) {
-        super();
-        TestClient.implements_interface(this);
-        this.collections = {};
-        this.size = size;
-    }
-
-    async connect(skip_init_db) {
-        console.log('connected!');
-    }
-
-    async disconnect(skip_init_db) {
-        console.log('bye!');
-    }
-
-    /**
-     *
-     * @returns {nb.DBCollection}
-     */
-    define_collection(col) {
-        if (this.collections[col.name]) {
-            throw new Error('define_collection: collection already defined ' + col.name);
-        }
-        const test_collection = new TestCollection(col, this.size, this);
-        this.collections[col.name] = test_collection;
-        return test_collection;
-    }
-
-    /**
-     *
-     * @returns {nb.DBCollection}
-     */
-    collection(col_name) {
-        return this.collections[col_name];
-    }
-}
-
-mocha.describe('upgrade_mongo_postgress', function() {
-    let to_client;
-
-    async function announce(msg) {
-        if (process.env.SUPPRESS_LOGS) return;
-        const l = Math.max(80, msg.length + 4);
-        console.log('='.repeat(l));
-        console.log('=' + ' '.repeat(l - 2) + '=');
-        console.log('= ' + msg + ' '.repeat(l - msg.length - 3) + '=');
-        console.log('=' + ' '.repeat(l - 2) + '=');
-        console.log('='.repeat(l));
-    }
-
-    mocha.after('upgradetest-after-all', async function() {
-        // to_client.disconnect();
-        let tries_left = 3;
-        setInterval(function check_dangling_handles() {
-            tries_left -= 1;
-            console.info(`Waiting for dangling handles to release, re-sample in 30s (tries left: ${tries_left})`);
-            wtf.dump();
-
-            if (tries_left === 0) {
-                console.error('Tests cannot complete successfully, running tests resulted in dangling handles');
-
-                // Force the test suite to fail (ignoring the exist handle that mocha sets up in order to return a fail
-                // exit code)
-                process.removeAllListeners('exit');
-                process.exit(1);
-            }
-        }, 30000).unref();
-
-    });
-
-    mocha.beforeEach('upgradetest-before', async function() {
-        this.timeout(60000); // eslint-disable-line no-invalid-this
-        to_client = new postgres_client.PostgresClient();
-        to_client.set_db_name('upgrade_test');
-        await to_client.connect();
-        await announce('db_client dropDatabase()');
-        await to_client.dropDatabase();
-        await announce('db_client createDatabase()');
-        await to_client.createDatabase();
-        await to_client.disconnect();
-    });
-
-    mocha.afterEach('clean-test', async function() {
-        await to_client.disconnect();
-    });
-
-    mocha.it('verify moving one collection with many rows', async function() {
-        this.timeout(60000); // eslint-disable-line no-invalid-this
-        const collection_size = 3000;
-        const from_client = new TestClient(collection_size);
-        const collections = _.times(1, i => ({
-            name: 'test1_collection' + i,
-            schema: default_schema,
-        }));
-        const migrator = new Migrator(from_client, to_client, collections);
-        await migrator.migrate_db();
-        await to_client.connect();
-        const new_table = to_client.collection(collections[0].name);
-        assert.strictEqual(await new_table.countDocuments(), collection_size);
-        await to_client.disconnect();
-    });
-
-    mocha.it('verify moving many collection with few rows', async function() {
-        this.timeout(60000); // eslint-disable-line no-invalid-this
-        const collection_number = 100;
-        const collection_size = 30;
-        const from_client = new TestClient(collection_size);
-        const collections = _.times(collection_number, i => ({
-            name: 'test2_collection' + i,
-            schema: default_schema,
-        }));
-        const migrator = new Migrator(from_client, to_client, collections);
-        await migrator.migrate_db();
-        await to_client.connect();
-        for (let i = 0; i < collection_number; ++i) {
-            const table = to_client.collection(collections[i].name);
-            assert.strictEqual(await table.countDocuments(), collection_size);
-        }
-        await to_client.disconnect();
-    });
-
-    mocha.it('verify a collection from after spesific marker', async function() {
-        this.timeout(60000); // eslint-disable-line no-invalid-this
-        const collection_number = 4;
-        const collection_size = 350;
-        const from_client = new TestClient(collection_size);
-        const collections = _.times(collection_number, i => ({
-            name: 'test3_collection' + i,
-            schema: default_schema,
-        }));
-        to_client.define_collection({ name: 'migrate_status' });
-        await to_client.connect();
-        await to_client.collection('migrate_status').updateOne({}, {
-            collection_index: collection_number / 2,
-            marker: collection_size / 2,
-        });
-        to_client.tables = [];
-        await to_client.disconnect();
-        const migrator = new Migrator(from_client, to_client, collections);
-        await migrator.migrate_db();
-        await to_client.connect();
-        for (let i = (collection_number / 2); i < collection_number; ++i) {
-            const table_i = to_client.collection(collections[i].name);
-            assert.strictEqual(await table_i.countDocuments(), collection_size);
-        }
-        await to_client.disconnect();
-    });
-
-    mocha.it('verify a collection moved success with wrong marker', async function() {
-        this.timeout(60000); // eslint-disable-line no-invalid-this
-        const collection_number = 4;
-        const collection_size = 350;
-        let from_client = new TestClient(collection_size);
-        const collections = _.times(collection_number, i => ({
-            name: 'test4_collection' + i,
-            schema: default_schema,
-        }));
-        let migrator = new Migrator(from_client, to_client, collections.slice(0, collection_number / 2));
-        await migrator.migrate_db();
-        await to_client.connect();
-        await to_client.collection('migrate_status').updateOne({}, { $set: { collection_index: (collection_number / 2) - 1 } });
-        await to_client.disconnect();
-        from_client = new TestClient(collection_size);
-        to_client = new postgres_client.PostgresClient();
-        to_client.set_db_name('upgrade_test');
-        migrator = new Migrator(from_client, to_client, collections);
-        await migrator.migrate_db();
-        await to_client.connect();
-        for (let i = 0; i < collection_number; ++i) {
-            const table = to_client.collection(collections[i].name);
-            assert.strictEqual(await table.countDocuments(), collection_size);
-        }
-        await to_client.disconnect();
-    });
-
-});
diff --git a/src/upgrade/migration_to_postgres.js b/src/upgrade/migration_to_postgres.js
deleted file mode 100644
index 6a22ea572b..0000000000
--- a/src/upgrade/migration_to_postgres.js
+++ /dev/null
@@ -1,250 +0,0 @@
-/* Copyright (C) 2016 NooBaa */
-'use strict';
-
-const system_schema = require('../server/system_services/schemas/system_schema');
-const cluster_schema = require('../server/system_services/schemas/cluster_schema');
-const namespace_resource_schema = require('../server/system_services/schemas/namespace_resource_schema');
-const role_schema = require('../server/system_services/schemas/role_schema');
-const account_schema = require('../server/system_services/schemas/account_schema');
-const bucket_schema = require('../server/system_services/schemas/bucket_schema');
-const tiering_policy_schema = require('../server/system_services/schemas/tiering_policy_schema');
-const tier_schema = require('../server/system_services/schemas/tier_schema');
-const pool_schema = require('../server/system_services/schemas/pool_schema');
-const agent_config_schema = require('../server/system_services/schemas/agent_config_schema');
-const chunk_config_schema = require('../server/system_services/schemas/chunk_config_schema');
-const master_key_schema = require('../server/system_services/schemas/master_key_schema');
-const object_md_schema = require('../server/object_services/schemas/object_md_schema');
-const object_multipart_schema = require('../server/object_services/schemas/object_multipart_schema');
-const object_part_schema = require('../server/object_services/schemas/object_part_schema');
-const data_chunk_schema = require('../server/object_services/schemas/data_chunk_schema');
-const data_block_schema = require('../server/object_services/schemas/data_block_schema');
-const object_md_indexes = require('../server/object_services/schemas/object_md_indexes');
-const object_multipart_indexes = require('../server/object_services/schemas/object_multipart_indexes');
-const object_part_indexes = require('../server/object_services/schemas/object_part_indexes');
-const data_chunk_indexes = require('../server/object_services/schemas/data_chunk_indexes');
-const data_block_indexes = require('../server/object_services/schemas/data_block_indexes');
-const node_schema = require('../server/node_services/node_schema');
-const bucket_stats_schema = require('../server/analytic_services/bucket_stats_schema');
-const endpoint_group_report_schema = require('../server/analytic_services/endpoint_group_report_schema');
-const s3_usage_schema = require('../server/analytic_services/s3_usage_schema');
-const usage_report_schema = require('../server/analytic_services/usage_report_schema');
-const io_stats_schema = require('../server/analytic_services/io_stats_schema');
-const system_history_schema = require('../server/analytic_services/system_history_schema');
-const activity_log_schema = require('../server/analytic_services/activity_log_schema');
-const alerts_log_schema = require('../server/notifications/alerts_log_schema');
-
-
-const postgres_client = require('../util/postgres_client');
-const mongo_client = require('../util/mongo_client');
-
-const { Migrator } = require('./migrator');
-
-const name_deleted_indexes = [{
-    fields: {
-        name: 1,
-    },
-    options: {
-        unique: true,
-        partialFilterExpression: {
-            deleted: null,
-        }
-    }
-}];
-
-const system_name_deleted_indexes = [{
-    fields: {
-        system: 1,
-        name: 1,
-    },
-    options: {
-        unique: true,
-        partialFilterExpression: {
-            deleted: null,
-        }
-    }
-}];
-
-const COLLECTIONS = [{
-    name: 'clusters', // system_store
-    schema: cluster_schema,
-    db_indexes: [{
-        fields: {
-            owner_secret: 1,
-        },
-        options: {
-            unique: true,
-        }
-    }],
-}, {
-    name: 'namespace_resources',
-    schema: namespace_resource_schema,
-    db_indexes: name_deleted_indexes,
-}, {
-    name: 'systems',
-    schema: system_schema,
-    db_indexes: name_deleted_indexes,
-}, {
-    name: 'roles',
-    schema: role_schema,
-    db_indexes: [],
-}, {
-    name: 'accounts',
-    schema: account_schema,
-    db_indexes: [{
-        fields: {
-            email: 1,
-        },
-        options: {
-            unique: true,
-            partialFilterExpression: {
-                deleted: null,
-            }
-        }
-    }],
-}, {
-    name: 'buckets',
-    schema: bucket_schema,
-    db_indexes: system_name_deleted_indexes,
-}, {
-    name: 'tieringpolicies',
-    schema: tiering_policy_schema,
-    db_indexes: system_name_deleted_indexes,
-}, {
-    name: 'tiers',
-    schema: tier_schema,
-    db_indexes: system_name_deleted_indexes,
-}, {
-    name: 'pools',
-    schema: pool_schema,
-    db_indexes: system_name_deleted_indexes,
-}, {
-    name: 'agent_configs',
-    schema: agent_config_schema,
-    db_indexes: [{
-        fields: {
-            system: 1,
-            name: 1
-        },
-        options: {
-            unique: true,
-        }
-    }],
-}, {
-    name: 'chunk_configs',
-    schema: chunk_config_schema,
-}, {
-    name: 'master_keys',
-    schema: master_key_schema,
-}, {
-    name: 'objectmds', // md_store
-    schema: object_md_schema,
-    db_indexes: object_md_indexes,
-}, {
-    name: 'objectmultiparts',
-    schema: object_multipart_schema,
-    db_indexes: object_multipart_indexes,
-}, {
-    name: 'objectparts',
-    schema: object_part_schema,
-    db_indexes: object_part_indexes,
-}, {
-    name: 'datachunks',
-    schema: data_chunk_schema,
-    db_indexes: data_chunk_indexes,
-}, {
-    name: 'datablocks',
-    schema: data_block_schema,
-    db_indexes: data_block_indexes,
-}, {
-    name: 'mdsequences',
-}, { // nodes_store
-    name: 'nodes',
-    schema: node_schema,
-}, { // bucket_stats_store
-    name: 'bucketstats',
-    schema: bucket_stats_schema,
-    db_indexes: [{
-        fields: {
-            bucket: 1
-        }
-    }],
-}, { // endpoint_stats_store
-    name: 'objectstats',
-    schema: s3_usage_schema,
-    db_indexes: [{
-        fields: {
-            system: 1,
-        },
-        options: {
-            unique: true,
-        }
-    }],
-}, {
-    name: 'usagereports',
-    schema: usage_report_schema,
-    db_indexes: [{
-        fields: {
-            start_time: 1,
-            aggregated_time: -1,
-            aggregated_time_range: 1,
-        }
-    }],
-}, {
-    name: 'endpointgroupreports',
-    schema: endpoint_group_report_schema,
-    db_indexes: [{
-        fields: {
-            start_time: 1,
-            aggregated_time: -1,
-            aggregated_time_range: 1,
-        }
-    }]
-}, { // io_stats_store
-    name: 'iostats',
-    schema: io_stats_schema,
-    db_indexes: [{
-        fields: {
-            system: 1,
-            resource_id: 1,
-            resource_type: 1,
-            start_time: 1,
-            end_time: 1
-        }
-    }],
-}, { // activity_log_store
-    name: 'activitylogs',
-    schema: activity_log_schema,
-    db_indexes: [{
-        fields: {
-            time: 1,
-        },
-        options: {
-            unique: false,
-        }
-    }]
-}, { // alerts_log_store
-    name: 'alertslogs',
-    schema: alerts_log_schema,
-    db_indexes: []
-}, { // history_data_store
-    name: 'system_history',
-    schema: system_history_schema,
-}];
-
-async function main() {
-    try {
-        const from_client = mongo_client.instance();
-        const to_client = postgres_client.instance();
-        // await to_client.dropDatabase();
-        // await to_client.createDatabase();
-        const migrator = new Migrator(from_client, to_client, COLLECTIONS);
-        await migrator.migrate_db();
-    } catch (err) {
-        console.log('ERROR in merging', err);
-        process.exit(1);
-    }
-    process.exit(0);
-}
-
-
-main();
diff --git a/src/upgrade/migrator.js b/src/upgrade/migrator.js
deleted file mode 100644
index def8589966..0000000000
--- a/src/upgrade/migrator.js
+++ /dev/null
@@ -1,135 +0,0 @@
-/* Copyright (C) 2016 NooBaa */
-'use strict';
-
-const _ = require('lodash');
-const P = require('../util/promise');
-
-const migrate_schema = {
-    $id: 'migrate_schema',
-    type: 'object',
-    required: ['_id', 'collection_index'],
-    properties: {
-        _id: { objectid: true },
-        collection_index: { type: 'number' },
-        collection_name: { type: 'string' },
-        last_move_size: { type: 'number' },
-        total_move_size: { type: 'number' },
-        marker: { objectid: true },
-        last_update: { idate: true }
-    }
-};
-
-/**
- *
- * Migrator
- *
- * taking two db's and move all the given collections between them
- *
- */
-class Migrator {
-
-    /**
-     * @param {Object[]} collection_list
-     * @param {nb.DBClient} from_client
-     * @param {nb.DBClient} to_client
-     * @param {number} [batch_size]
-     */
-    constructor(from_client, to_client, collection_list, batch_size) {
-        this.collection_list = collection_list;
-        this.from_client = from_client;
-        this.to_client = to_client;
-        this.batch_size = batch_size || 100;
-    }
-
-    async migrate_db() {
-        this.to_client.define_collection({ name: 'migrate_status', schema: migrate_schema });
-        for (const collection of this.collection_list) {
-            await this.from_client.define_collection(_.omit(collection, 'db_indexes'));
-            await this.to_client.define_collection(collection);
-        }
-        await this.from_client.connect(true);
-        await this.to_client.connect();
-        this.upgrade_table = this.to_client.collection('migrate_status');
-        this.migrate_status = await this.upgrade_table.findOne({});
-        if (!this.migrate_status) {
-            this.migrate_status = {
-                _id: this.to_client.new_object_id(),
-                collection_index: 0,
-                collection_name: this.collection_list[0].name,
-                last_move_size: 0,
-                total_move_size: 0,
-                marker: undefined,
-                last_update: Date.now(),
-            };
-            await this.upgrade_table.insertOne(this.migrate_status);
-        }
-        const start_index = this.migrate_status.collection_index;
-        for (let i = start_index; i < this.collection_list.length; ++i) {
-            const collection_name = this.collection_list[i].name;
-            if (collection_name === 'system_history') {
-                console.log(`skipping system_history collection migration due to possible heavy queries...`);
-                continue;
-            }
-            this.migrate_status.collection_index = i;
-            this.migrate_status.collection_name = collection_name;
-            await P.retry({ attempts: 3, delay_ms: 10000, func: async () => this._migrate_collection(collection_name) });
-            await this._verify_collection(collection_name);
-            this.migrate_status.marker = undefined;
-            await this.upgrade_table.updateOne({}, { $set: { collection_index: i + 1, last_update: Date.now() }, $unset: { marker: 1 } });
-        }
-        await this.from_client.disconnect();
-        await this.to_client.disconnect();
-    }
-
-    async _migrate_collection(collection_name) {
-        console.log(`migrating ${collection_name}`);
-        const from_col = this.from_client.collection(collection_name);
-        const to_col = this.to_client.collection(collection_name);
-        let done = false;
-        let marker = this.migrate_status.marker;
-        let total = 0;
-        while (!done) {
-            console.log(`_migrate_collection: start searching docs in ${collection_name}`);
-            const docs = await from_col.find({ _id: marker ? { $gt: marker } : undefined }, { limit: this.batch_size, sort: { _id: 1 } });
-            console.log(`_migrate_collection: found ${docs.length} docs in ${collection_name}`);
-            if (docs.length > 0) {
-                try {
-                    console.log(`_migrate_collection: insertMany started`);
-                    await to_col.insertManyUnordered(docs);
-                } catch (err) { // if duplicate key - continue
-                    console.log('_migrate_collection: failed with error: ', err);
-                    if (err.code !== '23505') throw err;
-                }
-                total += docs.length;
-                console.log(`migrated ${total} documents to table ${collection_name}`);
-                marker = docs[docs.length - 1]._id;
-                await this.upgrade_table.updateOne({}, {
-                    $set: {
-                        marker,
-                        last_move_size: docs.length,
-                        total_move_size: total,
-                        last_update: Date.now()
-                    }
-                });
-            } else {
-                done = true;
-            }
-        }
-        console.log(`completed migration of ${total} documents to table ${collection_name}`);
-    }
-
-    async _verify_collection(collection_name) {
-        console.log(`verifying ${collection_name}`);
-        const from_col = this.from_client.collection(collection_name);
-        const to_col = this.to_client.collection(collection_name);
-        const from_documents_number = await from_col.countDocuments({});
-        const to_documents_number = await to_col.countDocuments({});
-        if (from_documents_number !== to_documents_number) {
-            throw new Error(`Last migrate failed! collection ${collection_name} sizes don't match.
-                FROM: ${from_documents_number},
-                TO: ${to_documents_number}`);
-        }
-    }
-}
-
-exports.Migrator = Migrator;
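
For reference, the heart of the removed code is a resumable, marker-based copy loop: page through the source collection in `_id` order, use the last `_id` of each batch as the resume marker, persist the marker after every batch, and tolerate duplicate-key errors (Postgres `23505`) when a batch is replayed after a crash. Below is a minimal sketch of that pattern, assuming `source`/`target` collection objects and a `status_table` with the same `find`/`insertManyUnordered`/`findOne`/`updateOne` shape the deleted `Migrator` used; `copy_collection` itself is a hypothetical name, not part of the codebase.

```js
// Sketch only: resumable batch copy, modeled on the deleted Migrator loop.
async function copy_collection(source, target, status_table, batch_size = 100) {
    // Resume from a previously persisted marker, if any.
    const status = await status_table.findOne({});
    let marker = status && status.marker;
    let total = 0;
    for (;;) {
        // Page in _id order; marker is the last _id of the previous batch.
        const docs = await source.find(
            { _id: marker ? { $gt: marker } : undefined },
            { limit: batch_size, sort: { _id: 1 } }
        );
        if (docs.length === 0) break;
        try {
            await target.insertManyUnordered(docs);
        } catch (err) {
            // A replayed batch may hit duplicate keys (Postgres 23505) - skip those.
            if (err.code !== '23505') throw err;
        }
        total += docs.length;
        marker = docs[docs.length - 1]._id;
        // Persist progress so an interrupted run restarts from the marker, not from zero.
        await status_table.updateOne({}, { $set: { marker, last_update: Date.now() } });
    }
    return total;
}
```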
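The deleted `test_migration_to_postgres.js` script shows the companion trick on the write side: rather than issuing one `INSERT` per document, it builds a single multi-row statement per batch by generating positional placeholders `($1, $2), ($3, $4), ...` and flattening the row values into one array. Here is a standalone sketch of that construction with node-postgres; the `(_id, data)` table layout mirrors the deleted script's schema, and `insert_batch` is an illustrative helper, not an existing API.

```js
const { Pool } = require('pg');

// Sketch only: batch-insert { _id, data } rows with one parameterized query.
async function insert_batch(pool, table, rows) {
    // Two placeholders per row: ($1, $2), ($3, $4), ...
    const values_str = rows
        .map((row, n) => `($${(n * 2) + 1}, $${(n * 2) + 2})`)
        .join(', ');
    // Flatten row values in the same order as the placeholders.
    const values = rows.flatMap(row => [row._id, row.data]);
    await pool.query(`INSERT INTO ${table} (_id, data) VALUES ${values_str}`, values);
}

// Usage (illustrative): one round trip per batch instead of one per document.
// await insert_batch(new Pool(), 'objectmds',
//     docs.map(d => ({ _id: String(d._id), data: d })));
```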