first commit
test/integration/batch/job-queue.test.js (normal file, 179 lines)
@@ -0,0 +1,179 @@
'use strict';

require('../../helper');
var assert = require('../../support/assert');
var redisUtils = require('../../support/redis_utils');

var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
var JobPublisher = require('../../../batch/pubsub/job-publisher');
var JobQueue = require('../../../batch/job_queue');

var JobBackend = require('../../../batch/job_backend');
var JobService = require('../../../batch/job_service');
var JobCanceller = require('../../../batch/job_canceller');
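
// Integration tests for JobQueue: enqueueing work, scanning Redis for
// per-user queues, and keeping the queue index in sync.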

describe('job queue', function () {
    var pool = redisUtils.getPool();
    var jobPublisher = new JobPublisher(pool);
    var jobQueue = new JobQueue(metadataBackend, jobPublisher);
    var jobBackend = new JobBackend(metadataBackend, jobQueue);
    var jobCanceller = new JobCanceller();
    var jobService = new JobService(jobBackend, jobCanceller);

    var userA = 'userA';
    var userB = 'userB';

    beforeEach(function () {
        this.jobQueue = new JobQueue(metadataBackend, jobPublisher);
    });

    afterEach(function (done) {
        redisUtils.clean('batch:*', done);
    });

    it('should find queues for one user', function (done) {
        var self = this;

        this.jobQueue.enqueue(userA, 'wadus-wadus-wadus-wadus', function (err) {
            if (err) {
                return done(err);
            }

            self.jobQueue.scanQueues(function (err, queues) {
                assert.ifError(err);
                assert.equal(queues.length, 1);
                assert.equal(queues[0], userA);
                return done();
            });
        });
    });

    it('should find queues for more than one user', function (done) {
        var self = this;

        this.jobQueue.enqueue(userA, 'wadus-wadus-wadus-wadus', function (err) {
            if (err) {
                return done(err);
            }
            self.jobQueue.enqueue(userB, 'wadus-wadus-wadus-wadus', function (err) {
                if (err) {
                    return done(err);
                }

                self.jobQueue.scanQueues(function (err, queues) {
                    assert.ifError(err);
                    assert.equal(queues.length, 2);
                    assert.ok(queues[0] === userA || queues[0] === userB);
                    assert.ok(queues[1] === userA || queues[1] === userB);

                    return done();
                });
            });
        });
    });
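
    // Jobs enqueued before the per-user index SET existed live only as plain
    // Redis LISTs; scanQueues() must still discover those queues.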
    it('should find queues from jobs not using new Redis SETs for users', function (done) {
        var self = this;
        var redisArgs = [JobQueue.QUEUE.PREFIX + userA, 'wadus-id'];
        metadataBackend.redisCmd(JobQueue.QUEUE.DB, 'LPUSH', redisArgs, function (err) {
            assert.ok(!err, err);
            self.jobQueue.scanQueues(function (err, queues) {
                assert.ok(!err, err);

                assert.equal(queues.length, 1);
                assert.equal(queues[0], userA);

                return done();
            });
        });
    });

    it('.scanQueues() should feed queue index', function (done) {
        var self = this;

        var data = {
            user: 'vizzuality',
            query: 'select 1 as cartodb_id',
            host: 'localhost'
        };

        jobService.create(data, function (err) {
            if (err) {
                return done(err);
            }

            self.jobQueue.scanQueues(function (err, queuesFromScan) {
                if (err) {
                    return done(err);
                }

                assert.equal(queuesFromScan.length, 1);
                assert.ok(queuesFromScan.indexOf(data.user) >= 0);

                self.jobQueue.getQueues(function (err, queuesFromIndex) {
                    if (err) {
                        return done(err);
                    }

                    assert.equal(queuesFromIndex.length, 1);
                    assert.ok(queuesFromIndex.indexOf(data.user) >= 0);

                    redisUtils.clean('batch:*', done);
                });
            });
        });
    });

    it('.scanQueues() should feed queue index with two users', function (done) {
        var self = this;

        var jobVizzuality = {
            user: 'vizzuality',
            query: 'select 1 as cartodb_id',
            host: 'localhost'
        };

        var jobWadus = {
            user: 'wadus',
            query: 'select 1 as cartodb_id',
            host: 'localhost'
        };

        jobService.create(jobVizzuality, function (err) {
            if (err) {
                return done(err);
            }

            jobService.create(jobWadus, function (err) {
                if (err) {
                    return done(err);
                }

                self.jobQueue.scanQueues(function (err, queuesFromScan) {
                    if (err) {
                        return done(err);
                    }

                    assert.equal(queuesFromScan.length, 2);
                    assert.ok(queuesFromScan.indexOf(jobVizzuality.user) >= 0);
                    assert.ok(queuesFromScan.indexOf(jobWadus.user) >= 0);

                    self.jobQueue.getQueues(function (err, queuesFromIndex) {
                        if (err) {
                            return done(err);
                        }

                        assert.equal(queuesFromIndex.length, 2);
                        assert.ok(queuesFromIndex.indexOf(jobVizzuality.user) >= 0);
                        assert.ok(queuesFromIndex.indexOf(jobWadus.user) >= 0);

                        redisUtils.clean('batch:*', done);
                    });
                });
            });
        });
    });
});

test/integration/batch/job_backend.test.js (normal file, 219 lines)
@@ -0,0 +1,219 @@
'use strict';

require('../../helper');

var BATCH_SOURCE = '../../../batch/';

var assert = require('../../support/assert');
var redisUtils = require('../../support/redis_utils');

var JobQueue = require(BATCH_SOURCE + 'job_queue');
var JobBackend = require(BATCH_SOURCE + 'job_backend');
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');
var JobFactory = require(BATCH_SOURCE + 'models/job_factory');
var jobStatus = require(BATCH_SOURCE + 'job_status');

var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
var jobPublisher = new JobPublisher(redisUtils.getPool());
var jobQueue = new JobQueue(metadataBackend, jobPublisher);

var queue = require('queue-async');

var USER = 'vizzuality';
var QUERY = 'select pg_sleep(0)';
var HOST = 'localhost';
var JOB = {
    user: USER,
    query: QUERY,
    host: HOST
};

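// Builds a fresh Job model from a deep copy of JOB so each test can mutate
// its own payload safely.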
function createWadusJob() {
    return JobFactory.create(JSON.parse(JSON.stringify(JOB)));
}

describe('job backend', function () {
    var jobBackend = new JobBackend(metadataBackend, jobQueue);

    after(function (done) {
        redisUtils.clean('batch:*', done);
    });

    it('.create() should persist a job', function (done) {
        var job = createWadusJob();

        jobBackend.create(job.data, function (err, jobCreated) {
            if (err) {
                return done(err);
            }

            assert.ok(jobCreated.job_id);
            assert.equal(jobCreated.status, jobStatus.PENDING);
            done();
        });
    });

    it('.create() should return an error when job_id is missing', function (done) {
        var job = createWadusJob();

        delete job.data.job_id;

        jobBackend.create(job.data, function (err) {
            assert.ok(err);
            assert.equal(err.name, 'NotFoundError');
            assert.equal(err.message, 'Job with id undefined not found');
            done();
        });
    });

    it('.get() should return a job with the given id', function (done) {
        var jobData = createWadusJob();

        jobBackend.create(jobData.data, function (err, jobCreated) {
            if (err) {
                return done(err);
            }

            assert.ok(jobCreated.job_id);

            jobBackend.get(jobCreated.job_id, function (err, job) {
                if (err) {
                    return done(err);
                }

                assert.equal(job.job_id, jobCreated.job_id);
                assert.equal(job.user, jobData.data.user);
                assert.equal(job.query, jobData.data.query);
                assert.equal(job.host, jobData.data.host);
                assert.equal(job.status, jobStatus.PENDING);
                done();
            });
        });
    });

    it('.update() should update an existing job', function (done) {
        var job = createWadusJob();

        jobBackend.create(job.data, function (err, jobCreated) {
            if (err) {
                return done(err);
            }

            jobCreated.query = 'select pg_sleep(1)';

            var updatedJob = JobFactory.create(jobCreated);

            jobBackend.update(updatedJob.data, function (err, jobUpdated) {
                if (err) {
                    return done(err);
                }

                assert.equal(jobUpdated.query, 'select pg_sleep(1)');
                done();
            });
        });
    });

    it('.update() should return an error when updating a nonexistent job', function (done) {
        var job = createWadusJob();

        jobBackend.update(job.data, function (err) {
            assert.ok(err, err);
            assert.equal(err.name, 'NotFoundError');
            assert.equal(err.message, 'Job with id ' + job.data.job_id + ' not found');
            done();
        });
    });

    it('.save() should save a job', function (done) {
        var job = createWadusJob();

        jobBackend.save(job.data, function (err, jobSaved) {
            if (err) {
                return done(err);
            }

            assert.ok(jobSaved.job_id);

            assert.equal(jobSaved.user, job.data.user);
            assert.equal(jobSaved.query, job.data.query);
            assert.equal(jobSaved.host, job.data.host);
            assert.equal(jobSaved.status, jobStatus.PENDING);
            done();
        });
    });

    it('.addWorkInProgressJob() should add current job to user and host lists', function (done) {
        var job = createWadusJob();

        jobBackend.addWorkInProgressJob(job.data.user, job.data.job_id, function (err) {
            if (err) {
                return done(err);
            }
            done();
        });
    });
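
    // queue(1) from queue-async runs the deferred steps one at a time, and
    // awaitAll() collects their results in defer order.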
    it('.listWorkInProgressJobByUser() should retrieve WIP jobs of given user', function (done) {
        var testStepsQueue = queue(1);

        testStepsQueue.defer(redisUtils.clean, 'batch:wip:user:*');
        testStepsQueue.defer(jobBackend.addWorkInProgressJob.bind(jobBackend), 'vizzuality', 'wadus');
        testStepsQueue.defer(jobBackend.listWorkInProgressJobByUser.bind(jobBackend), 'vizzuality');

        testStepsQueue.awaitAll(function (err, results) {
            if (err) {
                return done(err);
            }
            assert.deepEqual(results[2], ['wadus']);
            done();
        });
    });

    it('.listWorkInProgressJobs() should retrieve WIP users', function (done) {
        var jobs = [{ user: 'userA', id: 'jobId1' }, { user: 'userA', id: 'jobId2' }, { user: 'userB', id: 'jobId3' }];

        var testStepsQueue = queue(1);

        jobs.forEach(function (job) {
            testStepsQueue.defer(jobBackend.addWorkInProgressJob.bind(jobBackend), job.user, job.id);
        });

        testStepsQueue.awaitAll(function (err) {
            if (err) {
                return done(err);
            }

            jobBackend.listWorkInProgressJobs(function (err, users) {
                if (err) {
                    return done(err);
                }

                assert.ok(users.userA);
                assert.deepEqual(users.userA, ['jobId1', 'jobId2']);
                assert.ok(users.userB);
                assert.deepEqual(users.userB, ['jobId3']);
                done();
            });
        });
    });

    it('.clearWorkInProgressJob() should remove job from work in progress list', function (done) {
        var job = createWadusJob();

        jobBackend.addWorkInProgressJob(job.data.user, job.data.job_id, function (err) {
            if (err) {
                return done(err);
            }

            jobBackend.clearWorkInProgressJob(job.data.user, job.data.job_id, function (err) {
                if (err) {
                    return done(err);
                }

                done();
            });
        });
    });
});

test/integration/batch/job_canceller.test.js (normal file, 119 lines)
@@ -0,0 +1,119 @@
'use strict';

require('../../helper');

var BATCH_SOURCE = '../../../batch/';

var assert = require('../../support/assert');
var redisUtils = require('../../support/redis_utils');

var JobQueue = require(BATCH_SOURCE + 'job_queue');
var JobBackend = require(BATCH_SOURCE + 'job_backend');
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');
var jobStatus = require(BATCH_SOURCE + 'job_status');
var JobCanceller = require(BATCH_SOURCE + 'job_canceller');
var PSQL = require('cartodb-psql');

var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
var jobPublisher = new JobPublisher(redisUtils.getPool());
var jobQueue = new JobQueue(metadataBackend, jobPublisher);
var jobBackend = new JobBackend(metadataBackend, jobQueue);
var JobFactory = require(BATCH_SOURCE + 'models/job_factory');

var USER = 'vizzuality';
var QUERY = 'select pg_sleep(0)';
var HOST = 'localhost';

// Sets the job to RUNNING and starts its query, returning immediately
// (without waiting for the query to finish) so that query
// cancellation/draining can be tested.
function runQueryHelper(job, callback) {
    var job_id = job.job_id;
    var sql = job.query;

    job.status = jobStatus.RUNNING;

    jobBackend.update(job, function (err) {
        if (err) {
            return callback(err);
        }

        const dbConfiguration = {
            host: job.host,
            port: job.port,
            dbname: job.dbname,
            user: job.dbuser,
            pass: job.pass
        };

        const pg = new PSQL(dbConfiguration);

        sql = '/* ' + job_id + ' */ ' + sql;

        pg.eventedQuery(sql, function (err, query) {
            if (err) {
                return callback(err);
            }

            callback(null, query);
        });
    });
}
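
// Builds a Job model against the test database; the query defaults to QUERY
// unless one is passed in.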
function createWadusJob(query) {
    query = query || QUERY;
    return JobFactory.create(JSON.parse(JSON.stringify({
        user: USER,
        query: query,
        host: HOST,
        dbname: 'cartodb_test_user_1_db',
        dbuser: 'test_cartodb_user_1',
        port: 5432,
        pass: 'test_cartodb_user_1_pass'
    })));
}

describe('job canceller', function () {
    var jobCanceller = new JobCanceller();

    after(function (done) {
        redisUtils.clean('batch:*', done);
    });

    it('.cancel() should cancel a job', function (done) {
        var job = createWadusJob('select pg_sleep(1)');

        jobBackend.create(job.data, function (err, jobCreated) {
            if (err) {
                return done(err);
            }

            assert.equal(job.data.job_id, jobCreated.job_id);

            runQueryHelper(job.data, function (err) {
                if (err) {
                    return done(err);
                }

                jobCanceller.cancel(job, function (err) {
                    if (err) {
                        return done(err);
                    }

                    done();
                });
            });
        });
    });

    it('.cancel() a non-running job should not return an error', function (done) {
        var job = createWadusJob();

        jobCanceller.cancel(job, function (err) {
            if (err) {
                return done(err);
            }

            done();
        });
    });
});

test/integration/batch/job_publisher.test.js (normal file, 39 lines)
@@ -0,0 +1,39 @@
'use strict';

require('../../helper');

var BATCH_SOURCE = '../../../batch/';

var assert = require('../../support/assert');

var redisUtils = require('../../support/redis_utils');

var Channel = require(BATCH_SOURCE + 'pubsub/channel');
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');

var HOST = 'wadus';

describe('job publisher', function () {
    var jobPublisher = new JobPublisher(redisUtils.getPool());

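    // Subscribe on a raw Redis client first, then publish; the message
    // handler performs the assertions and finishes the test.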
    it('.publish() should publish in job channel', function (done) {
        redisUtils.getPool().acquire(Channel.DB, function (err, client) {
            if (err) {
                return done(err);
            }

            client.subscribe(Channel.NAME);

            client.on('message', function (channel, host) {
                assert.equal(host, HOST);
                assert.equal(channel, Channel.NAME);
                client.unsubscribe(Channel.NAME);
                done();
            });

            jobPublisher.publish(HOST);
        });
    });
});

test/integration/batch/job_runner.test.js (normal file, 80 lines)
@@ -0,0 +1,80 @@
'use strict';

require('../../helper');

var BATCH_SOURCE = '../../../batch/';

var assert = require('../../support/assert');
var redisUtils = require('../../support/redis_utils');

var JobQueue = require(BATCH_SOURCE + 'job_queue');
var JobBackend = require(BATCH_SOURCE + 'job_backend');
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');
var jobStatus = require(BATCH_SOURCE + 'job_status');
var UserDatabaseMetadataService = require(BATCH_SOURCE + 'user_database_metadata_service');
var JobCanceller = require(BATCH_SOURCE + 'job_canceller');
var JobService = require(BATCH_SOURCE + 'job_service');
var JobRunner = require(BATCH_SOURCE + 'job_runner');
var QueryRunner = require(BATCH_SOURCE + 'query_runner');

var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
var jobPublisher = new JobPublisher(redisUtils.getPool());
var jobQueue = new JobQueue(metadataBackend, jobPublisher);
var jobBackend = new JobBackend(metadataBackend, jobQueue);
var userDatabaseMetadataService = new UserDatabaseMetadataService(metadataBackend);
var jobCanceller = new JobCanceller();
var jobService = new JobService(jobBackend, jobCanceller);
var queryRunner = new QueryRunner(userDatabaseMetadataService);
var StatsD = require('node-statsd').StatsD;
var statsdClient = new StatsD(global.settings.statsd);

var USER = 'vizzuality';
var QUERY = 'select pg_sleep(0)';
var HOST = 'localhost';
var JOB = {
    user: USER,
    query: QUERY,
    host: HOST,
    dbname: 'cartodb_test_user_1_db',
    dbuser: 'test_cartodb_user_1',
    port: 5432,
    pass: 'test_cartodb_user_1_pass'
};

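// The runner is wired with the full stack (service, queue, query runner,
// metadata backend and StatsD client) and executes jobs against the test
// database.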
describe('job runner', function () {
    var jobRunner = new JobRunner(jobService, jobQueue, queryRunner, metadataBackend, statsdClient);

    after(function (done) {
        redisUtils.clean('batch:*', function () {
            redisUtils.clean('limits:batch:*', done);
        });
    });

    it('.run() should run a job', function (done) {
        jobService.create(JOB, function (err, job) {
            if (err) {
                return done(err);
            }

            jobRunner.run(job.data.job_id, function (err, job) {
                if (err) {
                    return done(err);
                }

                assert.equal(job.data.status, jobStatus.DONE);
                done();
            });
        });
    });

    it('.run() should return a job not found error', function (done) {
        jobRunner.run('wadus_job_id', function (err) {
            assert.ok(err, err);
            assert.equal(err.name, 'NotFoundError');
            assert.equal(err.message, 'Job with id wadus_job_id not found');
            done();
        });
    });
});

test/integration/batch/job_service.test.js (normal file, 205 lines)
@@ -0,0 +1,205 @@
'use strict';

require('../../helper');

var BATCH_SOURCE = '../../../batch/';

var assert = require('../../support/assert');
var redisUtils = require('../../support/redis_utils');

var JobQueue = require(BATCH_SOURCE + 'job_queue');
var JobBackend = require(BATCH_SOURCE + 'job_backend');
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');
var jobStatus = require(BATCH_SOURCE + 'job_status');
var JobCanceller = require(BATCH_SOURCE + 'job_canceller');
var JobService = require(BATCH_SOURCE + 'job_service');
var PSQL = require('cartodb-psql');

var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
var jobPublisher = new JobPublisher(redisUtils.getPool());
var jobQueue = new JobQueue(metadataBackend, jobPublisher);
var jobBackend = new JobBackend(metadataBackend, jobQueue);
var jobCanceller = new JobCanceller();

var USER = 'vizzuality';
var QUERY = 'select pg_sleep(0)';
var HOST = 'localhost';
var JOB = {
    user: USER,
    query: QUERY,
    host: HOST,
    dbname: 'cartodb_test_user_1_db',
    dbuser: 'test_cartodb_user_1',
    port: 5432,
    pass: 'test_cartodb_user_1_pass'
};

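// Returns a plain deep copy of the JOB data (raw data, not a Job model).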
function createWadusDataJob() {
    return JSON.parse(JSON.stringify(JOB));
}

// Sets the job to RUNNING and starts its query, returning immediately
// (without waiting for the query to finish) so that query
// cancellation/draining can be tested.
function runQueryHelper(job, callback) {
    var job_id = job.job_id;
    var sql = job.query;

    job.status = jobStatus.RUNNING;

    jobBackend.update(job, function (err) {
        if (err) {
            return callback(err);
        }

        const dbConfiguration = {
            host: job.host,
            port: job.port,
            dbname: job.dbname,
            user: job.dbuser,
            pass: job.pass
        };

        var pg = new PSQL(dbConfiguration);

        sql = '/* ' + job_id + ' */ ' + sql;

        pg.eventedQuery(sql, function (err, query) {
            if (err) {
                return callback(err);
            }

            callback(null, query);
        });
    });
}

describe('job service', function () {
    var jobService = new JobService(jobBackend, jobCanceller);

    after(function (done) {
        redisUtils.clean('batch:*', done);
    });

    it('.get() should return a job', function (done) {
        jobService.create(createWadusDataJob(), function (err, jobCreated) {
            if (err) {
                return done(err);
            }

            jobService.get(jobCreated.data.job_id, function (err, job) {
                if (err) {
                    return done(err);
                }

                assert.equal(job.data.job_id, jobCreated.data.job_id);
                done();
            });
        });
    });

    it('.get() should return a not found error', function (done) {
        jobService.get('wadus_job_id', function (err) {
            assert.ok(err);
            assert.equal(err.message, 'Job with id wadus_job_id not found');
            done();
        });
    });

    it('.create() should persist a job', function (done) {
        jobService.create(createWadusDataJob(), function (err, jobCreated) {
            if (err) {
                return done(err);
            }

            assert.ok(jobCreated.data.job_id);
            assert.equal(jobCreated.data.status, jobStatus.PENDING);
            done();
        });
    });

    it('.create() should return error with invalid job data', function (done) {
        var job = createWadusDataJob();

        delete job.query;

        jobService.create(job, function (err) {
            assert.ok(err);
            assert.equal(err.message, 'You must indicate a valid SQL');
            done();
        });
    });
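
    // Cancelling and draining require a RUNNING job, so runQueryHelper()
    // starts the query before each call.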
    it('.cancel() should cancel a running job', function (done) {
        var job = createWadusDataJob();
        job.query = 'select pg_sleep(3)';

        jobService.create(job, function (err, job) {
            if (err) {
                return done(err);
            }

            runQueryHelper(job.data, function (err) {
                if (err) {
                    return done(err);
                }

                jobService.cancel(job.data.job_id, function (err, jobCancelled) {
                    if (err) {
                        return done(err);
                    }

                    assert.equal(jobCancelled.data.job_id, job.data.job_id);
                    assert.equal(jobCancelled.data.status, jobStatus.CANCELLED);
                    done();
                });
            });
        });
    });

    it('.cancel() should return a job not found error', function (done) {
        jobService.cancel('wadus_job_id', function (err) {
            assert.ok(err, err);
            assert.equal(err.name, 'NotFoundError');
            assert.equal(err.message, 'Job with id wadus_job_id not found');
            done();
        });
    });

    it('.drain() should drain a running job', function (done) {
        var job = createWadusDataJob();
        job.query = 'select pg_sleep(3)';

        jobService.create(job, function (err, job) {
            if (err) {
                return done(err);
            }

            runQueryHelper(job.data, function (err) {
                if (err) {
                    return done(err);
                }

                jobService.drain(job.data.job_id, function (err, jobDrained) {
                    if (err) {
                        return done(err);
                    }

                    assert.equal(jobDrained.job_id, job.data.job_id);
                    assert.equal(jobDrained.status, jobStatus.PENDING);
                    done();
                });
            });
        });
    });

    it('.drain() should return a job not found error', function (done) {
        jobService.drain('wadus_job_id', function (err) {
            assert.ok(err, err);
            assert.equal(err.name, 'NotFoundError');
            assert.equal(err.message, 'Job with id wadus_job_id not found');
            done();
        });
    });
});

test/integration/batch/locker.js (normal file, 60 lines)
@@ -0,0 +1,60 @@
'use strict';

require('../../helper');

var assert = require('../../support/assert');
var redisUtils = require('../../support/redis_utils');
var Locker = require('../../../batch/leader/locker');

describe('locker', function () {
    var host = 'localhost';

    var TTL = 500;

    var config = { ttl: TTL, pool: redisUtils.getPool() };

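    // TTL is 500ms; the second test waits 2 * TTL before probing to verify
    // that a held lock does not expire until it is explicitly unlocked.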
    it('should lock and unlock', function (done) {
        var lockerA = Locker.create('redis-distlock', config);
        var lockerB = Locker.create('redis-distlock', config);
        lockerA.lock(host, function (err, lock) {
            if (err) {
                return done(err);
            }
            assert.ok(lock);

            // others can't lock on same host
            lockerB.lock(host, function (err) {
                assert.ok(err);
                assert.equal(err.name, 'LockError');

                lockerA.unlock(host, function (err) {
                    assert.ok(!err);
                    // others can lock after unlock
                    lockerB.lock(host, function (err, lock2) {
                        assert.ok(!err);
                        assert.ok(lock2);
                        lockerB.unlock(host, done);
                    });
                });
            });
        });
    });

    it('should lock and keep locking until unlock', function (done) {
        var lockerA = Locker.create('redis-distlock', config);
        var lockerB = Locker.create('redis-distlock', config);
        lockerA.lock(host, function (err, lock) {
            if (err) {
                return done(err);
            }
            setTimeout(function () {
                lockerB.lock(host, function (err) {
                    assert.ok(err);

                    assert.ok(lock);
                    lockerA.unlock(host, done);
                });
            }, 2 * TTL);
        });
    });
});

test/integration/batch/scheduler.js (normal file, 204 lines)
@@ -0,0 +1,204 @@
'use strict';

require('../../helper');
var debug = require('../../../batch/util/debug')('scheduler-test');
var assert = require('../../support/assert');
var Scheduler = require('../../../batch/scheduler/scheduler');
var FixedCapacity = require('../../../batch/scheduler/capacity/fixed');

describe('scheduler', function () {

    var USER_FINISHED = true;

    var USER_A = 'userA';
    var USER_B = 'userB';
    var USER_C = 'userC';

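    // TaskRunner simulates work: each run() decrements the user's pending
    // task count and reports the user as finished once it reaches zero.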
    function TaskRunner(userTasks) {
        this.results = [];
        this.userTasks = userTasks;
    }

    TaskRunner.prototype.run = function (user, callback) {
        this.results.push(user);
        this.userTasks[user]--;
        setTimeout(function () {
            return callback(null, this.userTasks[user] === 0);
        }.bind(this), 50);
    };
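
    // ManualTaskRunner stores each run() callback and lets the test decide,
    // via dispatch(), when a user's task completes.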
    function ManualTaskRunner() {
        this.userTasks = {};
    }

    ManualTaskRunner.prototype.run = function (user, callback) {
        if (!this.userTasks.hasOwnProperty(user)) {
            this.userTasks[user] = [];
        }
        this.userTasks[user].push(callback);
    };

    ManualTaskRunner.prototype.dispatch = function (user, isDone) {
        if (this.userTasks.hasOwnProperty(user)) {
            var cb = this.userTasks[user].shift();
            if (cb) {
                return cb(null, isDone);
            }
        }
    };

    // simulate serial (1), limited (2) and unbounded (Infinity) capacity
    var capacities = [new FixedCapacity(1), new FixedCapacity(2), new FixedCapacity(Infinity)];

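    // Every test below runs once per capacity value.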
    capacities.forEach(function (capacity) {

        it('regression #1', function (done) {
            var taskRunner = new TaskRunner({
                userA: 2,
                userB: 2
            });
            var scheduler = new Scheduler(capacity, taskRunner);
            scheduler.add(USER_A);
            scheduler.add(USER_B);

            scheduler.on('done', function () {
                var results = taskRunner.results;

                assert.equal(results.length, 4);

                assert.equal(results[0], USER_A);
                assert.equal(results[1], USER_B);
                assert.equal(results[2], USER_A);
                assert.equal(results[3], USER_B);

                return done();
            });

            scheduler.schedule();
        });

        it('regression #2: should restart task after it was done but got re-scheduled', function (done) {
            var taskRunner = new ManualTaskRunner();
            var scheduler = new Scheduler(capacity, taskRunner);
            debug('Adding users A and B');
            scheduler.add(USER_A);
            scheduler.add(USER_B);

            var acquiredUsers = [];

            scheduler.on('done', function () {
                debug('Users %j', acquiredUsers);
                assert.equal(acquiredUsers[0], USER_A);
                assert.equal(acquiredUsers[1], USER_B);
                assert.equal(acquiredUsers[2], USER_A);
                assert.equal(acquiredUsers[3], USER_B);

                assert.equal(acquiredUsers.length, 4);

                return done();
            });

            scheduler.on('acquired', function (user) {
                debug('Acquired user %s', user);
                acquiredUsers.push(user);
            });

            scheduler.schedule();

            debug('User A will be marked as DONE');
            taskRunner.dispatch(USER_A, USER_FINISHED);

            debug('User B should be running');
            debug('User A submits a new task');
            scheduler.add(USER_A);

            debug('User B will get another task to run');
            taskRunner.dispatch(USER_B);

            debug('User A should start working on this new task');
            taskRunner.dispatch(USER_A, USER_FINISHED);
            taskRunner.dispatch(USER_B, USER_FINISHED);
        });

        it('should run tasks', function (done) {
            var taskRunner = new TaskRunner({
                userA: 1
            });
            var scheduler = new Scheduler(capacity, taskRunner);
            scheduler.add(USER_A);

            scheduler.on('done', function () {
                var results = taskRunner.results;

                assert.equal(results.length, 1);

                assert.equal(results[0], USER_A);

                return done();
            });

            scheduler.schedule();
        });

        it('should run tasks for different users', function (done) {
            var taskRunner = new TaskRunner({
                userA: 1,
                userB: 1,
                userC: 1
            });
            var scheduler = new Scheduler(capacity, taskRunner);
            scheduler.add(USER_A);
            scheduler.add(USER_B);
            scheduler.add(USER_C);

            scheduler.on('done', function () {
                var results = taskRunner.results;

                assert.equal(results.length, 3);

                assert.equal(results[0], USER_A);
                assert.equal(results[1], USER_B);
                assert.equal(results[2], USER_C);

                return done();
            });

            scheduler.schedule();
        });
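
        // Fairness: with pending tasks A:3, B:2, C:1 the scheduler is expected
        // to interleave users (A B C A B A) rather than drain one user first.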
        it('should be fair when scheduling tasks', function (done) {
            var taskRunner = new TaskRunner({
                userA: 3,
                userB: 2,
                userC: 1
            });

            var scheduler = new Scheduler(capacity, taskRunner);
            scheduler.add(USER_A);
            scheduler.add(USER_A);
            scheduler.add(USER_A);
            scheduler.add(USER_B);
            scheduler.add(USER_B);
            scheduler.add(USER_C);

            scheduler.on('done', function () {
                var results = taskRunner.results;

                assert.equal(results.length, 6);

                assert.equal(results[0], USER_A);
                assert.equal(results[1], USER_B);
                assert.equal(results[2], USER_C);
                assert.equal(results[3], USER_A);
                assert.equal(results[4], USER_B);
                assert.equal(results[5], USER_A);

                return done();
            });

            scheduler.schedule();
        });
    });
});