From 75176bca0ce8051a6dd4ba56f5613bce75901f09 Mon Sep 17 00:00:00 2001
From: Jane Sandberg
Date: Thu, 30 Jan 2020 11:40:13 -0800
Subject: [PATCH] LP1821094: Add an AngularJS module that runs promises in
 batches

This service helps to reduce server load for repetitive OpenSRF
calls by dividing a large array of promises into batches. It
maintains the original order of the array when returning results.

Within each batch, calls are sent simultaneously. The batches
themselves are run sequentially.

This represents a middle ground between running a ton of OpenSRF
calls sequentially -- which leads to a long wait for the user --
and running them all simultaneously, which can put serious strain
on the server.

One use case is when you need to get several rows from pcrud, but
the order of results is important and can't simply be expressed
with orderBy.

To use it, just replace $q.all with egBatchPromises.all.

This also changes the item status refresh code to use
egBatchPromises.all instead of $q.all, in response to the
pcrud-monopolizing behavior noted in
https://bugs.launchpad.net/evergreen/+bug/1821094/comments/14

Signed-off-by: Jane Sandberg
---
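A minimal usage sketch: a caller that already builds an array of
promises can hand that array to the new service in place of $q.all.
The copy_ids array and the fetch_one_copy() helper below are
hypothetical stand-ins for whatever produces each promise; the only
pieces taken from this patch are egBatchPromises.all and its default
chunk size of 10, with egBatchPromises injected and egBatchPromisesMod
listed as a module dependency, as the item status changes below do.

    // Build the array of promises as before...
    var fetch_list = copy_ids.map(function(id) {
        return fetch_one_copy(id);  // hypothetical helper returning a promise
    });

    // ...then batch them instead of handing the whole array to $q.all.
    // The default chunk size is 10; the chain moves on to the next
    // batch only after the previous batch has resolved.
    egBatchPromises.all(fetch_list).then(function() {
        console.log('all batches have completed');
    });
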
 Open-ILS/src/templates/staff/cat/item/index.tt2      |  1 +
 Open-ILS/web/js/ui/default/staff/cat/item/app.js     |  8 ++--
 .../js/ui/default/staff/services/batch_promises.js   | 54 ++++++++++++++++++++++
 .../web/js/ui/default/staff/test/karma.conf.js       |  1 +
 .../ui/default/staff/test/unit/egBatchPromises.js    | 33 +++++++++++++
 5 files changed, 93 insertions(+), 4 deletions(-)
 create mode 100644 Open-ILS/web/js/ui/default/staff/services/batch_promises.js
 create mode 100644 Open-ILS/web/js/ui/default/staff/test/unit/egBatchPromises.js

diff --git a/Open-ILS/src/templates/staff/cat/item/index.tt2 b/Open-ILS/src/templates/staff/cat/item/index.tt2
index 9cb8be84b7..238ece8fd2 100644
--- a/Open-ILS/src/templates/staff/cat/item/index.tt2
+++ b/Open-ILS/src/templates/staff/cat/item/index.tt2
@@ -10,6 +10,7 @@
+<script src="[% ctx.media_prefix %]/js/ui/default/staff/services/batch_promises.js"></script>
diff --git a/Open-ILS/web/js/ui/default/staff/cat/item/app.js b/Open-ILS/web/js/ui/default/staff/cat/item/app.js
index fd82c77d8b..b1b4ced25f 100644
--- a/Open-ILS/web/js/ui/default/staff/cat/item/app.js
+++ b/Open-ILS/web/js/ui/default/staff/cat/item/app.js
@@ -3,7 +3,7 @@
  */
 
 angular.module('egItemStatus',
-    ['ngRoute', 'ui.bootstrap', 'egCoreMod', 'egUiMod', 'egGridMod', 'egUserMod'])
+    ['ngRoute', 'ui.bootstrap', 'egCoreMod', 'egUiMod', 'egGridMod', 'egUserMod', 'egBatchPromisesMod'])
 
 .filter('boolText', function(){
     return function (v) {
@@ -316,12 +316,12 @@ function($scope , $q , $window , $location , $timeout , egCore , egNet , egGridD
 .controller('ListCtrl', ['$scope','$q','$routeParams','$location','$timeout','$window','egCore',
         'egGridDataProvider','egItem','egUser','$uibModal','egCirc','egConfirmDialog',
-        'egProgressDialog', 'ngToast',
+        'egProgressDialog', 'ngToast', 'egBatchPromises',
 // function($scope , $q , $routeParams , $location , $timeout , $window , egCore ,
 //          egGridDataProvider , itemSvc , egUser , $uibModal , egCirc , egConfirmDialog,
 //          egProgressDialog, ngToast) {
 function($scope , $q , $routeParams , $location , $timeout , $window , egCore ,
          egGridDataProvider , itemSvc , egUser , $uibModal , egCirc , egConfirmDialog,
-         egProgressDialog, ngToast) {
+         egProgressDialog, ngToast, egBatchPromises) {
     var copyId = [];
     var cp_list = $routeParams.idList;
     if (cp_list) {
@@ -493,7 +493,7 @@ function($scope , $q , $window , $location , $timeout , egCore , egNet , egGridD
             progress_bar = $timeout(egProgressDialog.open, 5000, true,
                 {value: 0, max: fetch_list.length});

-        $q.all(fetch_list)
+        egBatchPromises.all(fetch_list)
         .then( function() {
             copyGrid.refresh();
             if (progress_bar) $timeout.cancel(progress_bar);
diff --git a/Open-ILS/web/js/ui/default/staff/services/batch_promises.js b/Open-ILS/web/js/ui/default/staff/services/batch_promises.js
new file mode 100644
index 0000000000..83518fc04a
--- /dev/null
+++ b/Open-ILS/web/js/ui/default/staff/services/batch_promises.js
@@ -0,0 +1,54 @@
+/**
+ * Module for batching promises
+ *
+ * This service helps to reduce server load for repetitive OpenSRF
+ * calls by dividing a large array of promises into batches. It
+ * maintains the original order of the array when returning results.
+ *
+ * Within each batch, calls are sent simultaneously. The batches
+ * themselves are run sequentially.
+ *
+ * This represents a middle ground between running a ton of OpenSRF
+ * calls sequentially -- which leads to a long wait for the user --
+ * and running them all simultaneously, which can put serious
+ * strain on the server.
+ *
+ * One use case is when you need to get several rows from pcrud,
+ * but the order of results is important and can't simply be
+ * expressed with orderBy.
+ *
+ * To use it, just replace $q.all with egBatchPromises.all.
+ */
+
+angular.module('egBatchPromisesMod', [])
+
+.factory('egBatchPromises', ['$q', function($q) {
+
+    var service = {};
+
+    // Helper method to break an array into chunks of a specified size (note: splice empties the passed-in array)
+    service.createChunks = function(array_to_be_chunked, chunk_size = 10) {
+        var results = [];
+
+        while (array_to_be_chunked.length) {
+            results.push(array_to_be_chunked.splice(0, chunk_size));
+        }
+
+        return results;
+    };
+
+    // Helper method that adds a batch of simultaneous promises to a sequential
+    // chain
+    service.addBatchToChain = function(chain, batch) {
+        return chain.then(() => $q.all(batch));
+    };
+
+    // Returns a chain of chunked promises; it resolves once the final batch has completed
+    service.all = function(array_of_promises, chunk_size = 10) {
+        var chunked_array = this.createChunks(array_of_promises, chunk_size);
+        return chunked_array.reduce(this.addBatchToChain, $q.when());
+    };
+
+    return service;
+}]);
+
diff --git a/Open-ILS/web/js/ui/default/staff/test/karma.conf.js b/Open-ILS/web/js/ui/default/staff/test/karma.conf.js
index 09dcffff15..88a6e7e900 100644
--- a/Open-ILS/web/js/ui/default/staff/test/karma.conf.js
+++ b/Open-ILS/web/js/ui/default/staff/test/karma.conf.js
@@ -26,6 +26,7 @@ module.exports = function(config){
       'services/grid.js',
       'services/patron_search.js',
       'services/user-bucket.js',
+      'services/batch_promises.js',

       // load app scripts
       'app.js',
diff --git a/Open-ILS/web/js/ui/default/staff/test/unit/egBatchPromises.js b/Open-ILS/web/js/ui/default/staff/test/unit/egBatchPromises.js
new file mode 100644
index 0000000000..0234f3432f
--- /dev/null
+++ b/Open-ILS/web/js/ui/default/staff/test/unit/egBatchPromises.js
@@ -0,0 +1,33 @@
+'use strict';
+
+describe('egBatchPromises', function(){
+    beforeEach(module('egBatchPromisesMod'));
+
+    it('should chunk an array properly', inject(function(egBatchPromises) {
+        var original_array = [1, 2, 3, 4, 5, 6, 7, 8];
+        var expected_array = [[1, 2, 3], [4, 5, 6], [7, 8]];
+        expect(egBatchPromises.createChunks(original_array, 3)).toEqual(expected_array);
+    }));
+
+    it('should add a batch to a promise chain properly', inject(function(egBatchPromises, $q, $rootScope, $timeout) {
+        var resolved_value;
+        var promise_that_shares_its_value = function (value) {
+            return $q.when(value)
+            .then((val) => { resolved_value = val; });
+        };
+
+        var promise = promise_that_shares_its_value(1);
+        var batch_to_add = [
+            promise_that_shares_its_value(2),
+            promise_that_shares_its_value(3),
+            promise_that_shares_its_value(4),
+        ];
+
+        var chain = egBatchPromises.addBatchToChain(promise, batch_to_add);
+
+        chain.then();
+        $rootScope.$apply();
+        expect(resolved_value).toEqual(4);
+    }));
+
+});
-- 
2.11.0
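
For readers skimming the new service: with eight promises and a chunk
size of three, the reduce in service.all builds the equivalent of the
hand-written chain below. This is a conceptual sketch, not code from
the patch; p1 through p8 stand for the caller's already-created
promises, and $q is AngularJS's promise service.

    // Conceptually, egBatchPromises.all([p1, p2, p3, p4, p5, p6, p7, p8], 3)
    // builds the same chain as:
    var chain = $q.when()
        .then(function() { return $q.all([p1, p2, p3]); })  // batch 1
        .then(function() { return $q.all([p4, p5, p6]); })  // waited on after batch 1 resolves
        .then(function() { return $q.all([p7, p8]); });     // waited on after batch 2 resolves

    chain.then(function() {
        // runs once every batch has resolved, like the copyGrid.refresh()
        // callback in the item status change above
    });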