1515import tempfile
1616import concurrent .futures
1717import os
18- from typing import Dict , List , Optional
19- import click
18+ from typing import Dict , List , Optional , Tuple
2019
2120from .common import UPLOAD_CHUNK_SIZE , ClientError
2221from .merginproject import MerginProject
2726class UploadJob :
2827 """Keeps all the important data about a pending upload job"""
2928
30- def __init__ (self , project_path , changes , transaction_id , mp , mc , tmp_dir ):
29+ def __init__ (self , project_path , changes , transaction_id , mp , mc , tmp_dir , exclusive : bool ):
3130 self .project_path = project_path # full project name ("username/projectname")
3231 self .changes = changes # dictionary of local changes to the project
3332 self .transaction_id = transaction_id # ID of the transaction assigned by the server
@@ -37,6 +36,7 @@ def __init__(self, project_path, changes, transaction_id, mp, mc, tmp_dir):
3736 self .mp = mp # MerginProject instance
3837 self .mc = mc # MerginClient instance
3938 self .tmp_dir = tmp_dir # TemporaryDirectory instance for any temp file we need
39+ self .exclusive = exclusive # flag whether this upload blocks other uploads
4040 self .is_cancelled = False # whether upload has been cancelled
4141 self .executor = None # ThreadPoolExecutor that manages background upload tasks
4242 self .futures = [] # list of futures submitted to the executor
@@ -160,7 +160,7 @@ def split(self) -> List[Dict[str, List[dict]]]:
160160 return changes_list
161161
162162
163- def push_next_change (mc , directory ) -> Optional [UploadJob ]:
163+ def push_project_async (mc , directory , change_batch = None ) -> Optional [UploadJob ]:
164164 """Starts push of a change of a project and returns pending upload job"""
165165
166166 mp = MerginProject (directory )
@@ -198,14 +198,8 @@ def push_next_change(mc, directory) -> Optional[UploadJob]:
198198 + f"\n \n Local version: { local_version } \n Server version: { server_version } "
199199 )
200200
201- all_changes = mp .get_push_changes ()
202- changes_list = ChangesHandler (mc , project_info , all_changes ).split ()
203- if not changes_list :
204- return None
205-
206- # take only the first change
207- change = changes_list [0 ]
208- mp .log .debug ("push change:\n " + pprint .pformat (change ))
201+ changes = change_batch or mp .get_push_changes ()
202+ mp .log .debug ("push change:\n " + pprint .pformat (changes ))
209203
210204 tmp_dir = tempfile .TemporaryDirectory (prefix = "python-api-client-" )
211205
@@ -214,22 +208,22 @@ def push_next_change(mc, directory) -> Optional[UploadJob]:
214208 # That's because if there are pending transactions, checkpointing or switching from WAL mode
215209 # won't work, and we would end up with some changes left in -wal file which do not get
216210 # uploaded. The temporary copy using geodiff uses sqlite backup API and should copy everything.
217- for f in change ["updated" ]:
211+ for f in changes ["updated" ]:
218212 if mp .is_versioned_file (f ["path" ]) and "diff" not in f :
219213 mp .copy_versioned_file_for_upload (f , tmp_dir .name )
220214
221- for f in change ["added" ]:
215+ for f in changes ["added" ]:
222216 if mp .is_versioned_file (f ["path" ]):
223217 mp .copy_versioned_file_for_upload (f , tmp_dir .name )
224218
225- if not any (len (v ) for v in change .values ()):
219+ if not any (len (v ) for v in changes .values ()):
226220 mp .log .info (f"--- push { project_path } - nothing to do" )
227221 return
228222
229223 # drop internal info from being sent to server
230- for item in change ["updated" ]:
224+ for item in changes ["updated" ]:
231225 item .pop ("origin_checksum" , None )
232- data = {"version" : local_version , "changes" : change }
226+ data = {"version" : local_version , "changes" : changes }
233227
234228 try :
235229 resp = mc .post (
@@ -246,15 +240,16 @@ def push_next_change(mc, directory) -> Optional[UploadJob]:
246240 upload_files = data ["changes" ]["added" ] + data ["changes" ]["updated" ]
247241
248242 transaction_id = server_resp ["transaction" ] if upload_files else None
249- job = UploadJob (project_path , change , transaction_id , mp , mc , tmp_dir )
243+ exclusive = server_resp .get ("exclusive" , True )
244+ job = UploadJob (project_path , changes , transaction_id , mp , mc , tmp_dir , exclusive )
250245
251246 if not upload_files :
252247 mp .log .info ("not uploading any files" )
253248 job .server_resp = server_resp
254249 push_project_finalize (job )
255250 return None # all done - no pending job
256251
257- mp .log .info (f"got transaction ID { transaction_id } " )
252+ mp .log .info (f"got transaction ID { transaction_id } , { 'exclusive' if exclusive else 'non-exclusive' } upload " )
258253
259254 upload_queue_items = []
260255 total_size = 0
@@ -347,7 +342,7 @@ def push_project_finalize(job):
347342
348343 if with_upload_of_files :
349344 try :
350- job .mp .log .info (f"Finishing transaction { job .transaction_id } " )
345+ job .mp .log .info (f"Finishing { 'exclusive' if job . exclusive else 'non-exclusive' } transaction { job .transaction_id } " )
351346 resp = job .mc .post ("/v1/project/push/finish/%s" % job .transaction_id )
352347 job .server_resp = json .load (resp )
353348 except ClientError as err :
@@ -417,3 +412,10 @@ def remove_diff_files(job) -> None:
417412 diff_file = job .mp .fpath_meta (change ["diff" ]["path" ])
418413 if os .path .exists (diff_file ):
419414 os .remove (diff_file )
415+
416+ def get_next_batch (project_dir ) -> Tuple [Dict [str , List [dict ]], bool ]:
417+ """
418+ Return the next dictionary with changes, similar to changes[0] in push_project_async.
419+ """
420+ # TODO
421+ return {"added" : [], "updated" : [], "removed" : []}, True
0 commit comments