@@ -1,9 +1,10 @@
 import logging
 import os
 from asyncio import Future
+from collections.abc import Callable
 from datetime import datetime, timedelta, timezone
 from io import BytesIO
-from typing import Any, Callable, Optional, Tuple, TypeVar, Union
+from typing import Any, Optional, Tuple, TypeVar, Union
 
 from kiota_abstractions.headers_collection import HeadersCollection
 from kiota_abstractions.method import Method
@@ -37,13 +38,11 @@ def __init__(
         self.max_chunk_size = max_chunk_size
         self.factory = parsable_factory
         cleaned_value = self.check_value_exists(
-            upload_session, 'get_next_expected_range', [
-                'next_expected_range', 'NextExpectedRange']
+            upload_session, 'get_next_expected_range', ['next_expected_range', 'NextExpectedRange']
         )
         self.next_range = cleaned_value[0]
         self._chunks = int((self.file_size / max_chunk_size) + 0.5)
-        self.on_chunk_upload_complete: Optional[Callable[[
-            list[int]], None]] = None
+        self.on_chunk_upload_complete: Optional[Callable[[list[int]], None]] = None
 
     @property
     def upload_session(self):
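
A side note on the `_chunks` context line above: `int((self.file_size / max_chunk_size) + 0.5)` rounds to the nearest whole number rather than up, so a file only slightly larger than a multiple of the chunk size counts one chunk fewer than a ceiling would (the `finally: self.chunks -= 1` loop further down may be written to absorb that). A quick illustration with hypothetical sizes:

```python
import math

file_size, max_chunk_size = 9, 4                 # hypothetical sizes
print(int((file_size / max_chunk_size) + 0.5))   # 2: 2.25 + 0.5 = 2.75, truncated
print(math.ceil(file_size / max_chunk_size))     # 3: one chunk per partial block
```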
@@ -69,8 +68,7 @@ def upload_session_expired(self, upload_session: Optional[Parsable] = None) -> b
         now = datetime.now(timezone.utc)
         upload_session = upload_session or self.upload_session
         if not hasattr(upload_session, "expiration_date_time"):
-            raise ValueError(
-                "Upload session does not have an expiration date time")
+            raise ValueError("Upload session does not have an expiration date time")
         expiry = getattr(upload_session, 'expiration_date_time')
         if expiry is None:
             raise ValueError("Expiry is None")
@@ -95,16 +93,13 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
 
         self.on_chunk_upload_complete = after_chunk_upload or self.on_chunk_upload_complete
         session: LargeFileUploadSession = await self.next_chunk(
-            self.stream, 0, max(
-                0, min(self.max_chunk_size - 1, self.file_size - 1))
+            self.stream, 0, max(0, min(self.max_chunk_size - 1, self.file_size - 1))
         )
         process_next = session
         # determine the range to be uploaded
         # even when resuming existing upload sessions.
-        range_parts = self.next_range[0].split(
-            "-") if self.next_range else ['0', '0']
-        end = min(int(range_parts[0]) +
-                  self.max_chunk_size - 1, self.file_size)
+        range_parts = self.next_range[0].split("-") if self.next_range else ['0', '0']
+        end = min(int(range_parts[0]) + self.max_chunk_size - 1, self.file_size)
         uploaded_range = [range_parts[0], end]
         response = None
 
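
The reflowed range arithmetic is easier to sanity-check in isolation. A minimal sketch of the same computation, assuming `next_range` holds server-supplied strings like `'26624-'` (start offset, open-ended), which is what the split on `'-'` implies; the helper name `first_range` is made up for illustration:

```python
def first_range(next_range, max_chunk_size, file_size):
    # Mirrors the reflowed lines: fall back to ['0', '0'] when no range is known.
    range_parts = next_range[0].split("-") if next_range else ['0', '0']
    # Inclusive end offset, capped at the file size.
    end = min(int(range_parts[0]) + max_chunk_size - 1, file_size)
    # As in the original, the start stays a str while end is an int.
    return [range_parts[0], end]

print(first_range(['26624-'], 320 * 1024, 1_000_000))  # ['26624', 354303]
```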
@@ -129,13 +124,12 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
                 if not next_range:
                     continue
                 range_parts = str(next_range[0]).split("-")
-                end = min(int(range_parts[0]) +
-                          self.max_chunk_size, self.file_size)
+                end = min(int(range_parts[0]) + self.max_chunk_size, self.file_size)
                 uploaded_range = [range_parts[0], end]
                 self.next_range = next_range[0] + "-"
                 process_next = await self.next_chunk(self.stream)
 
-            except Exception as error:  # pylint: disable=broad-except
+            except Exception as error:  #pylint: disable=broad-except
                 logging.error("Error uploading chunk %s", error)
             finally:
                 self.chunks -= 1
@@ -182,8 +176,7 @@ async def next_chunk(
         chunk_data = file.read(end - start + 1)
         info.headers = HeadersCollection()
 
-        info.headers.try_add(
-            'Content-Range', f'bytes {start}-{end}/{self.file_size}')
+        info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
         info.headers.try_add('Content-Length', str(len(chunk_data)))
         info.headers.try_add("Content-Type", "application/octet-stream")
         info.set_stream_content(bytes(chunk_data))
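
For context on the headers built just above: `Content-Range` follows the `bytes <start>-<end>/<total>` form with an inclusive `end`, so `Content-Length` must equal `end - start + 1`. A small consistency check with assumed values:

```python
start, end, file_size = 0, 327679, 1_000_000   # hypothetical first 320 KiB chunk
print(f'bytes {start}-{end}/{file_size}')      # bytes 0-327679/1000000
assert end - start + 1 == 327680               # what Content-Length should carry
```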
@@ -223,8 +216,7 @@ async def last_chunk(
         chunk_data = file.read(end - start + 1)
         info.headers = HeadersCollection()
 
-        info.headers.try_add(
-            'Content-Range', f'bytes {start}-{end}/{self.file_size}')
+        info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
         info.headers.try_add('Content-Length', str(len(chunk_data)))
         info.headers.try_add("Content-Type", "application/octet-stream")
         info.set_stream_content(bytes(chunk_data))
@@ -239,8 +231,7 @@ def get_file(self) -> BytesIO:
 
     async def cancel(self) -> Parsable:
         upload_url = self.get_validated_upload_url(self.upload_session)
-        request_information = RequestInformation(
-            method=Method.DELETE, url_template=upload_url)
+        request_information = RequestInformation(method=Method.DELETE, url_template=upload_url)
 
         await self.request_adapter.send_no_response_content_async(request_information)
 
@@ -263,8 +254,7 @@ def additional_data_contains(self, parsable: Parsable,
                 'AdditionalDataHolder'
             )
         if not hasattr(parsable, 'additional_data'):
-            raise ValueError(
-                'The object passed does not contain an additional_data property')
+            raise ValueError('The object passed does not contain an additional_data property')
         additional_data = parsable.additional_data
         for property_candidate in property_candidates:
             if property_candidate in additional_data:
@@ -308,8 +298,7 @@ async def resume(self) -> Future:
 
     def get_validated_upload_url(self, upload_session: Parsable) -> str:
         if not hasattr(upload_session, 'upload_url'):
-            raise RuntimeError(
-                'The upload session does not contain a valid upload url')
+            raise RuntimeError('The upload session does not contain a valid upload url')
         result = upload_session.upload_url
 
         if result is None or result.strip() == '':
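
To see the reformatted task in context, here is a minimal usage sketch rather than anything this change adds: the constructor arguments are inferred from this diff, `upload_session` is assumed to come from a Graph createUploadSession call, `request_adapter` from the Graph client, and `progress_callback` is a hypothetical name for the `after_chunk_upload` hook visible in `upload()`; the import path may differ by package version.

```python
# Class patched in this diff; path assumed for msgraph-core.
from msgraph_core.tasks.large_file_upload import LargeFileUploadTask


async def upload_large_file(upload_session, request_adapter, path: str):
    # Hypothetical callback matching on_chunk_upload_complete's
    # Callable[[list[int]], None] shape from this diff.
    def progress_callback(uploaded_range: list[int]) -> None:
        print(f"Uploaded bytes {uploaded_range[0]}-{uploaded_range[1]}")

    with open(path, "rb") as stream:
        task = LargeFileUploadTask(
            upload_session,             # session from a createUploadSession request
            request_adapter,            # the Graph client's request adapter
            stream,                     # seekable binary stream with the content
            max_chunk_size=320 * 1024,  # OneDrive expects multiples of 320 KiB
        )
        # upload() takes the optional per-chunk callback, as shown above.
        return await task.upload(progress_callback)
```

From synchronous code this would be driven with `asyncio.run(upload_large_file(...))`.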