diff --git a/lib/bin/process-held-submissions.js b/lib/bin/process-backlog.js
similarity index 89%
rename from lib/bin/process-held-submissions.js
rename to lib/bin/process-backlog.js
index 3f5b4336b..be84f9f91 100644
--- a/lib/bin/process-held-submissions.js
+++ b/lib/bin/process-backlog.js
@@ -11,7 +11,7 @@
 // were previously held in a backlog due to submissions coming in out of order.
 
 const { run } = require('../task/task');
-const { processHeldSubmissions } = require('../task/process-held-submissions');
+const { processBacklog } = require('../task/process-backlog');
 const { program } = require('commander');
 
 program.option('-f, --force', 'Force all submissions in the backlog to be processed immediately.');
@@ -19,5 +19,5 @@ program.parse();
 
 const options = program.opts();
 
-run(processHeldSubmissions(options.force)
+run(processBacklog(options.force)
   .then((count) => `Submissions processed: ${count}`));
diff --git a/lib/model/query/entities.js b/lib/model/query/entities.js
index 1b55e4928..8a4667231 100644
--- a/lib/model/query/entities.js
+++ b/lib/model/query/entities.js
@@ -520,7 +520,7 @@ const _getHeldSubmissionsAsEvents = (force) => ({ all }) => all(sql`
 ORDER BY "branchId", "branchBaseVersion"`)
   .then(map(construct(Audit)));
 
-const processHeldSubmissions = (force = false) => async (container) => {
+const processBacklog = (force = false) => async (container) => {
   const events = await container.Entities._getHeldSubmissionsAsEvents(force);
 
   return runSequentially(events.map(event => async () => {
@@ -714,7 +714,7 @@
 module.exports = {
   _computeBaseVersion, _holdSubmission, _checkHeldSubmission,
   _getNextHeldSubmissionInBranch, _deleteHeldSubmissionByEventId,
-  _getHeldSubmissionsAsEvents, processHeldSubmissions,
+  _getHeldSubmissionsAsEvents, processBacklog,
   processSubmissionEvent, streamForExport,
   getDefBySubmissionId,
   createVersion,
diff --git a/lib/task/process-held-submissions.js b/lib/task/process-backlog.js
similarity index 82%
rename from lib/task/process-held-submissions.js
rename to lib/task/process-backlog.js
index 152075cbf..f73701970 100644
--- a/lib/task/process-held-submissions.js
+++ b/lib/task/process-backlog.js
@@ -11,6 +11,6 @@
 // overladen and bogged down over time.
 
 const { task } = require('./task');
 
-const processHeldSubmissions = task.withContainer(({ Entities }) => Entities.processHeldSubmissions);
-module.exports = { processHeldSubmissions };
+const processBacklog = task.withContainer(({ Entities }) => Entities.processBacklog);
+module.exports = { processBacklog };
diff --git a/test/integration/api/offline-entities.js b/test/integration/api/offline-entities.js
index f554b93a8..56c853add 100644
--- a/test/integration/api/offline-entities.js
+++ b/test/integration/api/offline-entities.js
@@ -800,7 +800,7 @@ describe('Offline Entities', () => {
       let backlogCount = await container.oneFirst(sql`select count(*) from entity_submission_backlog`);
       backlogCount.should.equal(1);
 
-      await container.Entities.processHeldSubmissions(true);
+      await container.Entities.processBacklog(true);
 
       await asAlice.get('/v1/projects/1/datasets/people/entities/12345678-1234-4123-8234-123456789abc')
         .expect(200)
@@ -846,7 +846,7 @@ describe('Offline Entities', () => {
       let backlogCount = await container.oneFirst(sql`select count(*) from entity_submission_backlog`);
       backlogCount.should.equal(2);
 
-      await container.Entities.processHeldSubmissions(true);
+      await container.Entities.processBacklog(true);
 
       await asAlice.get('/v1/projects/1/datasets/people/entities/12345678-1234-4123-8234-123456789abc')
         .expect(200)
@@ -885,7 +885,7 @@ describe('Offline Entities', () => {
       let backlogCount = await container.oneFirst(sql`select count(*) from entity_submission_backlog`);
       backlogCount.should.equal(1);
 
-      await container.Entities.processHeldSubmissions(true);
+      await container.Entities.processBacklog(true);
 
       await asAlice.get(`/v1/projects/1/datasets/people/entities/${newUuid}`)
         .expect(200)
@@ -940,7 +940,7 @@ describe('Offline Entities', () => {
       let backlogCount = await container.oneFirst(sql`select count(*) from entity_submission_backlog`);
       backlogCount.should.equal(2);
 
-      await container.Entities.processHeldSubmissions(true);
+      await container.Entities.processBacklog(true);
 
      await asAlice.get(`/v1/projects/1/datasets/people/entities/${newUuid}`)
         .expect(200)
@@ -976,7 +976,7 @@ describe('Offline Entities', () => {
       backlogCount = await container.oneFirst(sql`select count(*) from entity_submission_backlog`);
       backlogCount.should.equal(1);
 
-      await container.Entities.processHeldSubmissions(true);
+      await container.Entities.processBacklog(true);
 
       await asAlice.get(`/v1/projects/1/datasets/people/entities/${newUuid}`)
         .expect(200)
@@ -1020,7 +1020,7 @@ describe('Offline Entities', () => {
       backlogCount.should.equal(1);
 
       // Force the update submission to be processed as a create
-      await container.Entities.processHeldSubmissions(true);
+      await container.Entities.processBacklog(true);
 
       await asAlice.get(`/v1/projects/1/datasets/people/entities/12345678-1234-4123-8234-123456789ddd`)
         .expect(200)
@@ -1107,7 +1107,7 @@ describe('Offline Entities', () => {
 
       // Process submissions that have been in the backlog for a long time
       // (only 1 of 2 should be processed)
-      const count = await container.Entities.processHeldSubmissions();
+      const count = await container.Entities.processBacklog();
       count.should.equal(1);
 
       await asAlice.get('/v1/projects/1/datasets/people/entities/12345678-1234-4123-8234-123456789abc')