command: add a helper for the parallel execution boilerplate

Now that we have a bunch of subcommands doing parallel execution, a
common pattern arises that we can factor out for most of them.  We
leave forall alone as it's a bit too complicated at the moment to cut over.

Change-Id: I3617a4f7c66142bcd1ab030cb4cca698a65010ac
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/301942
Tested-by: Mike Frysinger <vapier@google.com>
Reviewed-by: Chris Mcdonald <cjmcdonald@google.com>
This commit is contained in:
Mike Frysinger
2021-03-01 00:56:38 -05:00
parent b8bf291ddb
commit b5d075d04f
10 changed files with 145 additions and 143 deletions

View File

@@ -13,10 +13,9 @@
# limitations under the License.
import functools
import multiprocessing
import sys
from command import Command, DEFAULT_LOCAL_JOBS, WORKER_BATCH_SIZE
from command import Command, DEFAULT_LOCAL_JOBS
from progress import Progress
@@ -50,7 +49,7 @@ The command is equivalent to:
success = []
all_projects = self.GetProjects(args[1:])
def _ProcessResults(results):
def _ProcessResults(_pool, pm, results):
for status, project in results:
if status is not None:
if status:
@@ -59,17 +58,12 @@ The command is equivalent to:
err.append(project)
pm.update()
pm = Progress('Checkout %s' % nb, len(all_projects), quiet=opt.quiet)
# NB: Multiprocessing is heavy, so don't spin it up for one job.
if len(all_projects) == 1 or opt.jobs == 1:
_ProcessResults(self._ExecuteOne(nb, x) for x in all_projects)
else:
with multiprocessing.Pool(opt.jobs) as pool:
results = pool.imap_unordered(
functools.partial(self._ExecuteOne, nb), all_projects,
chunksize=WORKER_BATCH_SIZE)
_ProcessResults(results)
pm.end()
self.ExecuteInParallel(
opt.jobs,
functools.partial(self._ExecuteOne, nb),
all_projects,
callback=_ProcessResults,
output=Progress('Checkout %s' % (nb,), len(all_projects), quiet=opt.quiet))
if err:
for p in err: