diff --git a/backup.js b/backup.js
index 79e2e9a..ceaae87 100644
--- a/backup.js
+++ b/backup.js
@@ -2,6 +2,7 @@ var AWS = require('aws-sdk');
 var Dyno = require('dyno');
 var stream = require('stream');
 var zlib = require('zlib');
+var joinPath = require('path').posix.join;
 
 module.exports = function(config, done) {
   var primary = Dyno(config);
@@ -16,7 +17,7 @@ module.exports = function(config, done) {
     return done(new Error('Must provide a bucket, prefix and jobid for backups'));
 
   var index = !isNaN(parseInt(config.segment)) ? config.segment.toString() : 0;
-  var key = [config.backup.prefix, config.backup.jobid, index].join('/');
+  var key = joinPath(config.backup.prefix, config.backup.jobid, String(index));
 
   var count = 0;
   var size = 0;
diff --git a/bin/incremental-diff-record.js b/bin/incremental-diff-record.js
index afc1bee..ece6282 100755
--- a/bin/incremental-diff-record.js
+++ b/bin/incremental-diff-record.js
@@ -7,6 +7,7 @@ var crypto = require('crypto');
 var AWS = require('aws-sdk');
 var s3 = new AWS.S3();
 var assert = require('assert');
+var joinPath = require('path').posix.join;
 
 var args = minimist(process.argv.slice(2));
 
@@ -69,13 +70,13 @@ try {
 catch (err) {
   key = JSON.parse(key);
 }
-s3url.Key = [
+s3url.Key = joinPath(
   s3url.Key,
   table,
   crypto.createHash('md5')
     .update(Dyno.serialize(key))
     .digest('hex')
-].join('/');
+);
 
 var dyno = Dyno({
   region: region,
diff --git a/bin/incremental-record-history.js b/bin/incremental-record-history.js
index c4e6b6d..e3c28e2 100755
--- a/bin/incremental-record-history.js
+++ b/bin/incremental-record-history.js
@@ -7,6 +7,7 @@ var AWS = require('aws-sdk');
 var s3 = new AWS.S3();
 var queue = require('queue-async');
 var Dyno = require('dyno');
+var joinPath = require('path').posix.join;
 
 var args = minimist(process.argv.slice(2));
 
@@ -67,13 +68,13 @@ try {
 }
 catch (err) {
   key = JSON.parse(key);
 }
-s3url.Key = [
+s3url.Key = joinPath(
   s3url.Key,
   table,
   crypto.createHash('md5')
     .update(Dyno.serialize(key))
     .digest('hex')
-].join('/');
+);
 
 var q = queue(100);
 q.awaitAll(function(err, results) {
diff --git a/index.js b/index.js
index 4c3b298..6cf1bf8 100644
--- a/index.js
+++ b/index.js
@@ -4,6 +4,7 @@ var queue = require('queue-async');
 var crypto = require('crypto');
 var https = require('https');
 var streambot = require('streambot');
+var joinPath = require('path').posix.join;
 
 module.exports.replicate = replicate;
 module.exports.streambotReplicate = streambot(function(event, callback) {
@@ -189,7 +190,7 @@ function incrementalBackup(event, context, callback) {
 
     var params = {
       Bucket: process.env.BackupBucket,
-      Key: [process.env.BackupPrefix, table, id].join('/')
+      Key: joinPath(process.env.BackupPrefix, table, id)
     };
 
     var req = change.eventName === 'REMOVE' ? 'deleteObject' : 'putObject';
diff --git a/s3-backfill.js b/s3-backfill.js
index b70e0af..b63b227 100644
--- a/s3-backfill.js
+++ b/s3-backfill.js
@@ -4,6 +4,7 @@ var stream = require('stream');
 var queue = require('queue-async');
 var crypto = require('crypto');
 var https = require('https');
+var joinPath = require('path').posix.join;
 
 module.exports = backfill;
 
@@ -59,7 +60,7 @@ function backfill(config, done) {
 
     var params = {
       Bucket: config.backup.bucket,
-      Key: [config.backup.prefix, config.table, id].join('/'),
+      Key: joinPath(config.backup.prefix, config.table, id),
       Body: Dyno.serialize(record)
     };
 
diff --git a/s3-snapshot.js b/s3-snapshot.js
index fe3b3d0..cd4b6ad 100644
--- a/s3-snapshot.js
+++ b/s3-snapshot.js
@@ -3,6 +3,7 @@ var s3scan = require('s3scan');
 var zlib = require('zlib');
 var stream = require('stream');
 var AgentKeepAlive = require('agentkeepalive');
+var joinPath = require('path').posix.join;
 
 module.exports = function(config, done) {
   var log = config.log || console.log;
@@ -29,7 +30,7 @@ module.exports = function(config, done) {
   var s3 = new AWS.S3(s3Options);
   var size = 0;
 
-  var uri = ['s3:/', config.source.bucket, config.source.prefix].join('/');
+  var uri = 's3://' + joinPath(config.source.bucket, config.source.prefix);
   var partsLoaded = -1;
   var objStream = s3scan.Scan(uri, { s3: s3 })