Implemented recursive resource segmentation for large transfers
parent 73a3516db8
commit e4dfd052e6
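This commit lets a single resource transfer carry payloads larger than Resource.MAX_EFFICIENT_SIZE by splitting them into consecutive segments: the sender records the full payload size in a new total_size field, advertises it to the receiver, and each following segment is announced once the previous one has been received (see the "waiting for next segment to be announced" log further down). The first two hunks touch the file transfer example client; the remaining hunks touch the Resource and ResourceAdvertisement classes.

A rough sketch of the segmentation arithmetic implied by the hunks below. The constants are placeholders chosen for illustration, and deriving the segment count as ceil(data_size / MAX_EFFICIENT_SIZE) is an assumption, not code from this commit:

    import math

    MAX_EFFICIENT_SIZE = 1_000_000   # placeholder, not Reticulum's real constant
    SDU                = 400         # placeholder per-part payload size

    def sketch_segmentation(data_size):
        # Segments a payload of data_size bytes would span, and the total number
        # of SDU-sized parts across the whole transfer (the latter matches the
        # grand_total_parts calculation visible in the hunks below).
        total_segments    = max(1, math.ceil(data_size / MAX_EFFICIENT_SIZE))
        grand_total_parts = math.ceil(data_size / SDU)
        return total_segments, grand_total_parts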
@@ -251,8 +251,10 @@ def client(destination_hexhash, configpath):
 
 # Requests the specified file from the server
 def download(filename):
-    global server_link, menu_mode, current_filename
+    global server_link, menu_mode, current_filename, transfer_size, download_started
     current_filename = filename
+    download_started = 0
+    transfer_size = 0
 
     # We just create a packet containing the
     # requested filename, and send it down the
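In the example client, download() now zeroes download_started and transfer_size before each request, because both values are accumulated across the per-segment callbacks changed in the next hunk.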
@@ -454,14 +456,16 @@ def link_closed(link):
 # so the user can be shown a progress of
 # the download.
 def download_began(resource):
     global menu_mode, current_download, download_started, transfer_size, file_size
     current_download = resource
 
-    download_started = time.time()
-    transfer_size = resource.size
-    file_size = resource.uncompressed_size
+    if download_started == 0:
+        download_started = time.time()
+
+    transfer_size += resource.size
+    file_size = resource.total_size
 
     menu_mode = "downloading"
 
 # When the download concludes, successfully
 # or not, we'll update our menu state and
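download_began() now fires once per announced segment rather than once per transfer: the start time is only recorded on the first call, transfer_size accumulates the on-wire size of each segment, and file_size comes from the new total_size field. A minimal sketch of how a caller could report progress under this scheme; treating get_progress() as returning a 0.0-1.0 fraction for the whole transfer is an assumption, as is the output format:

    import time

    def print_download_status(current_download, download_started, transfer_size, file_size):
        # current_download is the resource object of the most recently announced segment.
        elapsed  = max(time.time() - download_started, 1e-6)
        fraction = current_download.get_progress()   # assumed to span the whole transfer, 0.0-1.0
        print("Downloaded {:.1f}% of {} bytes ({} transfer bytes announced, {:.0f}s elapsed)".format(
            fraction * 100, file_size, transfer_size, elapsed))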
@@ -62,6 +62,7 @@ class Resource:
 
        resource.flags = adv.f
        resource.size = adv.t
+       resource.total_size = adv.d
        resource.uncompressed_size = adv.d
        resource.hash = adv.h
        resource.original_hash = adv.o
@@ -122,6 +123,7 @@ class Resource:
        resource_data = None
        if hasattr(data, "read"):
            data_size = os.stat(data.name).st_size
+           self.total_size = data_size
            self.grand_total_parts = math.ceil(data_size/Resource.SDU)
 
            if data_size <= Resource.MAX_EFFICIENT_SIZE:
@@ -144,6 +146,7 @@ class Resource:
        elif isinstance(data, bytes):
            data_size = len(data)
            self.grand_total_parts = math.ceil(data_size/Resource.SDU)
+           self.total_size = data_size
 
            resource_data = data
            self.total_segments = 1
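The three hunks above add the total_size bookkeeping itself: on the receiving side it is unpacked from the advertisement's d field alongside the existing uncompressed_size, and on the sending side it is recorded for both file-like and bytes inputs, next to the existing grand_total_parts calculation.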
@@ -467,7 +470,7 @@ class Resource:
                RNS.log("Error while cleaning up resource files, the contained exception was:", RNS.LOG_ERROR)
                RNS.log(str(e))
            else:
-               RNS.log("Resource segment "+str(self.segment_index)+" of "+str(self.total_segments)+" received, waiting for next segment to be announced", RNS.LOG_VERBOSE)
+               RNS.log("Resource segment "+str(self.segment_index)+" of "+str(self.total_segments)+" received, waiting for next segment to be announced", RNS.LOG_DEBUG)
 
 
    def prove(self):
@@ -728,7 +731,7 @@ class Resource:
            self.processed_parts = (self.segment_index-1)*math.ceil(Resource.MAX_EFFICIENT_SIZE/Resource.SDU)
            self.processed_parts += self.received_count
            if self.split:
-               self.progress_total_parts = float((self.total_segments-1)*math.ceil(Resource.MAX_EFFICIENT_SIZE/Resource.SDU)+self.total_parts)
+               self.progress_total_parts = float(math.ceil(self.total_size/Resource.SDU))
            else:
                self.progress_total_parts = float(self.total_parts)
 
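For split transfers, the progress denominator is now derived from total_size directly: the old expression combined the current segment's total_parts with a full-size estimate for every other segment, whereas the new one simply counts the parts the whole payload divides into. Worked numbers, with constants made up so the arithmetic is easy to follow:

    import math

    SDU                = 400        # made-up part size
    MAX_EFFICIENT_SIZE = 400_000    # made-up threshold: 1000 parts per full segment
    total_size         = 1_000_000  # a payload that splits into three segments

    segment_index  = 2              # currently receiving the second segment
    received_count = 250            # parts of that segment received so far

    processed_parts      = (segment_index - 1) * math.ceil(MAX_EFFICIENT_SIZE / SDU) + received_count
    progress_total_parts = math.ceil(total_size / SDU)   # new denominator: parts in the whole payload

    print(processed_parts / progress_total_parts)        # 1250 / 2500 = 0.5, i.e. 50% of the file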
@@ -746,15 +749,15 @@ class ResourceAdvertisement:
 
    def __init__(self, resource=None):
        if resource != None:
            self.t = resource.size # Transfer size
-           self.d = resource.uncompressed_size # Data size
+           self.d = resource.total_size # Total uncompressed data size
            self.n = len(resource.parts) # Number of parts
            self.h = resource.hash # Resource hash
            self.r = resource.random_hash # Resource random hash
            self.o = resource.original_hash # First-segment hash
            self.m = resource.hashmap # Resource hashmap
            self.c = resource.compressed # Compression flag
            self.e = resource.encrypted # Encryption flag
            self.s = resource.split # Split flag
            self.i = resource.segment_index # Segment index
            self.l = resource.total_segments # Total segments
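Finally, the advertisement's d field changes meaning from the uncompressed size of the current resource to the total uncompressed size of the whole, possibly segmented, transfer. This is the value the receiving side unpacks into resource.total_size in the class Resource hunk above, and what the example client now displays as file_size.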