 import os
 import subprocess
 import sys
+from datetime import datetime
 
 from dotenv import load_dotenv
 
-from ..core.analyzer import CommitAnalyzer
+from ..core.analyzer import CommitAnalysis, CommitAnalyzer
 from ..core.git import GitError, GitFile, GitOperations
 from ..services.ai_service import AIService
 from ..services.metrics import metrics_manager  # noqa
@@ -53,6 +54,18 @@ def __init__(self, test_mode: bool = False, api_key: str | None = None):
         self.combine_commits = False
         self.console = console
 
+    def _maybe_create_branch(self, analysis: CommitAnalysis) -> None:
+        """Offer to create a new branch if the commit is complex."""
+        if not analysis.is_complex:
+            return
+        branch_name = f"loom-large-{datetime.now().strftime('%Y%m%d_%H%M%S')}"
+        if console.confirm_branch_creation(branch_name):
+            try:
+                self.git.create_and_checkout_branch(branch_name)
+                console.print_info(f"Switched to new branch {branch_name}")
+            except GitError as e:
+                console.print_error(str(e))
+
     def _process_single_commit(self, files: list[GitFile]) -> None:
         """Process files as a single commit."""
         try:
@@ -69,6 +82,7 @@ def _process_single_commit(self, files: list[GitFile]) -> None:
 
         # Print analysis
         console.print_warnings(analysis)
+        self._maybe_create_branch(analysis)
 
         try:
             # Generate commit message
@@ -236,12 +251,18 @@ def _create_batches(self, changed_files: list[GitFile]) -> list[list[GitFile]]:
             console.print_warning("No valid files to process.")
             return []
 
-        # Create batches from valid files
+        # Group files by top-level directory for smarter batching
+        grouped: dict[str, list[GitFile]] = {}
+        for f in valid_files:
+            parts = f.path.split(os.sep)
+            top_dir = parts[0] if len(parts) > 1 else "root"
+            grouped.setdefault(top_dir, []).append(f)
+
         batches = []
         batch_size = BATCH_THRESHOLD
-        for i in range(0, len(valid_files), batch_size):
-            batch = valid_files[i : i + batch_size]
-            batches.append(batch)
+        for group_files in grouped.values():
+            for i in range(0, len(group_files), batch_size):
+                batches.append(group_files[i : i + batch_size])
 
         return batches
 