subcmds: reduce multiprocessing serialization overhead
Follow the same approach as 39ffd9977e
to reduce serialization overhead.
The benchmarks below were run with 2.7k projects on my workstation
(warm cache); git tracing was disabled for the benchmark.
(seconds)               | v2.48 | v2.48 -j32 | this CL | this CL -j32
------------------------+-------+------------+---------+-------------
with clean tree state:
branches (none)         |   5.6 |        5.9 |     1.0 |          0.9
status (clean)          |  21.3 |        9.4 |    19.4 |          4.7
diff (none)             |   7.6 |        7.2 |     5.7 |          2.2
prune (none)            |   5.7 |        6.1 |     1.3 |          1.2
abandon (none)          |  19.4 |       18.6 |     0.9 |          0.8
upload (none)           |  19.7 |       18.7 |     0.9 |          0.8
forall -c true          |   7.5 |        7.6 |     0.6 |          0.6
forall -c "git log -1"  |  11.3 |       11.1 |     0.6 |          0.6
with branches:
start BRANCH --all      |  21.9 |       20.3 |    13.6 |          2.6
checkout BRANCH         |  29.1 |       27.8 |     1.1 |          1.0
branches (2)            |  28.0 |       28.6 |     1.5 |          1.3
abandon BRANCH          |  29.2 |       27.5 |     9.7 |          2.2
Bug: b/371638995
Change-Id: I53989a3d1e43063587b3f52f852b1c2c56b49412
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/440221
Reviewed-by: Josip Sokcevic <sokcevic@google.com>
Tested-by: Kuang-che Wu <kcwu@google.com>
Commit-Queue: Kuang-che Wu <kcwu@google.com>
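For context on where the speedup comes from: before this change the affected subcommands handed whole Project objects to ExecuteInParallel as work items, so every task paid to pickle a heavyweight Project in the parent and unpickle it in the worker. Following 39ffd9977e, the project list is now published once through the parallel context and each task only carries an integer index. A minimal sketch of the same idea using plain multiprocessing instead of repo's ExecuteInParallel/ParallelContext helpers (Project, _init_worker, and _work below are illustrative stand-ins, not repo's API):

    import multiprocessing


    class Project:
        """Stand-in for a heavyweight object that is expensive to pickle."""

        def __init__(self, name):
            self.name = name
            self.state = b"x" * (1 << 16)  # pretend this is large per-project state


    _projects = None  # set once per worker process by the pool initializer


    def _init_worker(projects):
        global _projects
        _projects = projects


    def _work(idx):
        # Only the small integer `idx` is pickled per task; the project list
        # reached this worker at most once, when the pool started.
        return _projects[idx].name


    if __name__ == "__main__":
        projects = [Project(f"project{i}") for i in range(100)]
        with multiprocessing.Pool(
            processes=4, initializer=_init_worker, initargs=(projects,)
        ) as pool:
            names = pool.map(_work, range(len(projects)))
        print(f"processed {len(names)} projects")

The per-task payload drops from a full Project to a single int, which is why the commands that do little git work per project (abandon, upload, forall) collapse from ~20s to under a second in the table above.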
commit 8da4861b38 (parent 39ffd9977e)
11 changed files with 230 additions and 174 deletions
@@ -20,7 +20,6 @@ from command import DEFAULT_LOCAL_JOBS
 from error import GitError
 from error import RepoExitError
 from progress import Progress
-from project import Project
 from repo_logging import RepoLogger
 
 
@@ -30,7 +29,7 @@ logger = RepoLogger(__file__)
 class CheckoutBranchResult(NamedTuple):
     # Whether the Project is on the branch (i.e. branch exists and no errors)
     result: bool
-    project: Project
+    project_idx: int
     error: Exception
 
 
@@ -62,15 +61,17 @@ The command is equivalent to:
         if not args:
             self.Usage()
 
-    def _ExecuteOne(self, nb, project):
+    @classmethod
+    def _ExecuteOne(cls, nb, project_idx):
         """Checkout one project."""
         error = None
         result = None
+        project = cls.get_parallel_context()["projects"][project_idx]
        try:
             result = project.CheckoutBranch(nb)
         except GitError as e:
             error = e
-        return CheckoutBranchResult(result, project, error)
+        return CheckoutBranchResult(result, project_idx, error)
 
     def Execute(self, opt, args):
         nb = args[0]
@@ -83,22 +84,25 @@ The command is equivalent to:
 
         def _ProcessResults(_pool, pm, results):
             for result in results:
+                project = all_projects[result.project_idx]
                 if result.error is not None:
                     err.append(result.error)
-                    err_projects.append(result.project)
+                    err_projects.append(project)
                 elif result.result:
-                    success.append(result.project)
+                    success.append(project)
                 pm.update(msg="")
 
-        self.ExecuteInParallel(
-            opt.jobs,
-            functools.partial(self._ExecuteOne, nb),
-            all_projects,
-            callback=_ProcessResults,
-            output=Progress(
-                f"Checkout {nb}", len(all_projects), quiet=opt.quiet
-            ),
-        )
+        with self.ParallelContext():
+            self.get_parallel_context()["projects"] = all_projects
+            self.ExecuteInParallel(
+                opt.jobs,
+                functools.partial(self._ExecuteOne, nb),
+                range(len(all_projects)),
+                callback=_ProcessResults,
+                output=Progress(
+                    f"Checkout {nb}", len(all_projects), quiet=opt.quiet
+                ),
+            )
 
         if err_projects:
             for p in err_projects:
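The return path in this checkout diff uses the same trick in reverse: the worker's CheckoutBranchResult carries only project_idx, and the parent maps the index back to the full Project before appending it to the success or error lists, so results stay cheap to pickle too. A rough sketch of that result-side handling, written as a standalone function rather than the nested _ProcessResults callback above (process_results and the Optional typing are illustrative, not repo's exact code):

    from typing import NamedTuple, Optional


    class CheckoutBranchResult(NamedTuple):
        result: bool                 # whether the project is now on the branch
        project_idx: int             # index into the parent's all_projects list
        error: Optional[Exception]   # None on success; small when set


    def process_results(all_projects, results):
        success, err_projects, errors = [], [], []
        for r in results:
            project = all_projects[r.project_idx]  # map index back to the Project
            if r.error is not None:
                errors.append(r.error)
                err_projects.append(project)
            elif r.result:
                success.append(project)
        return success, err_projects, errors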