2017-07-19 21:21:41 +00:00
import ClientData
import ClientImporting
2018-04-18 22:10:15 +00:00
import ClientImportOptions
2016-02-17 22:06:47 +00:00
import ClientThreading
2015-03-04 22:44:32 +00:00
import HydrusConstants as HC
2015-08-19 21:48:21 +00:00
import HydrusData
2015-03-04 22:44:32 +00:00
import HydrusExceptions
2017-05-17 21:53:02 +00:00
import HydrusGlobals as HG
2015-03-04 22:44:32 +00:00
import HydrusNATPunch
2015-11-04 22:30:28 +00:00
import HydrusPaths
2015-06-03 21:05:13 +00:00
import HydrusSerialisable
2015-03-04 22:44:32 +00:00
import HydrusThreading
import ClientConstants as CC
import random
2018-04-11 22:30:40 +00:00
import threading
2015-03-04 22:44:32 +00:00
import time
import wx
2015-11-04 22:30:28 +00:00
def DAEMONCheckExportFolders( controller ):
    """Run DoWork on every export folder, unless export folder syncing is paused.
    
    Sets HG.export_folders_running for the duration of the run and always
    clears it again, even if a folder's work raises.
    """
    
    if controller.options[ 'pause_export_folders_sync' ]:
        
        return
        
    
    HG.export_folders_running = True
    
    try:
        
        export_folder_names = controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_EXPORT_FOLDER )
        
        for name in export_folder_names:
            
            export_folder = controller.Read( 'serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_EXPORT_FOLDER, name )
            
            # stop mid-run if the user pauses export folders or the client is shutting down
            if controller.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
                
                break
                
            
            export_folder.DoWork()
            
        
    finally:
        
        HG.export_folders_running = False
        
    
2015-03-04 22:44:32 +00:00
2015-11-04 22:30:28 +00:00
def DAEMONCheckImportFolders( controller ):
    """Run DoWork on every import folder, unless import folder syncing is paused.
    
    Sets HG.import_folders_running for the duration of the run and always
    clears it again, even if a folder's work raises.
    """
    
    if controller.options[ 'pause_import_folders_sync' ]:
        
        return
        
    
    HG.import_folders_running = True
    
    try:
        
        import_folder_names = controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_IMPORT_FOLDER )
        
        for name in import_folder_names:
            
            import_folder = controller.Read( 'serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_IMPORT_FOLDER, name )
            
            # stop mid-run if the user pauses import folders or the client is shutting down
            if controller.options[ 'pause_import_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
                
                break
                
            
            import_folder.DoWork()
            
        
    finally:
        
        HG.import_folders_running = False
        
    
2015-03-04 22:44:32 +00:00
2015-11-04 22:30:28 +00:00
def DAEMONDownloadFiles( controller ):
    """Fetch any pending file downloads from remote services.
    
    Reads the pending download hashes from the db and, for each, tries the
    services that currently have the file (in random order, to spread load).
    File repositories are fetched over HTTP into a temp file and imported;
    IPFS files are handed to the service's own ImportFile. Progress is shown
    via a popup job key.
    """
    
    hashes = controller.Read( 'downloads' )
    
    num_downloads = len( hashes )
    
    if num_downloads > 0:
        
        client_files_manager = controller.client_files_manager
        
        successful_hashes = set()
        
        job_key = ClientThreading.JobKey()
        
        job_key.SetVariable( 'popup_text_1', 'initialising downloader' )
        
        controller.pub( 'message', job_key )
        
        for hash in hashes:
            
            job_key.SetVariable( 'popup_text_1', 'downloading ' + HydrusData.ConvertIntToPrettyString( num_downloads - len( successful_hashes ) ) + ' files from repositories' )
            
            ( media_result, ) = controller.Read( 'media_results', ( hash, ) )
            
            service_keys = list( media_result.GetLocationsManager().GetCurrent() )
            
            # shuffle so repeated runs don't always hammer the same service first
            random.shuffle( service_keys )
            
            for service_key in service_keys:
                
                # already local: nothing to download for this hash
                if service_key == CC.LOCAL_FILE_SERVICE_KEY: break
                elif service_key == CC.TRASH_SERVICE_KEY: continue
                
                try:
                    
                    service = controller.services_manager.GetService( service_key )
                    
                except:
                    
                    # service no longer exists (or lookup failed) -- try the next one
                    continue
                    
                
                if service.GetServiceType() == HC.FILE_REPOSITORY:
                    
                    file_repository = service
                    
                    if file_repository.IsFunctional():
                        
                        try:
                            
                            ( os_file_handle, temp_path ) = HydrusPaths.GetTempPath()
                            
                            try:
                                
                                file_repository.Request( HC.GET, 'file', { 'hash' : hash }, temp_path = temp_path )
                                
                                controller.WaitUntilModelFree()
                                
                                exclude_deleted = False # this is the important part here
                                allow_decompression_bombs = True
                                min_size = None
                                max_size = None
                                max_gif_size = None
                                min_resolution = None
                                max_resolution = None
                                automatic_archive = False
                                
                                file_import_options = ClientImportOptions.FileImportOptions()
                                
                                file_import_options.SetPreImportOptions( exclude_deleted, allow_decompression_bombs, min_size, max_size, max_gif_size, min_resolution, max_resolution )
                                file_import_options.SetPostImportOptions( automatic_archive )
                                
                                file_import_job = ClientImporting.FileImportJob( temp_path, file_import_options )
                                
                                client_files_manager.ImportFile( file_import_job )
                                
                                successful_hashes.add( hash )
                                
                                # got the file -- stop trying other services for this hash
                                break
                                
                            finally:
                                
                                HydrusPaths.CleanUpTempPath( os_file_handle, temp_path )
                                
                            
                        except HydrusExceptions.ServerBusyException:
                            
                            # back off and bail out of the whole run; re-publish so a later run retries
                            job_key.SetVariable( 'popup_text_1', file_repository.GetName() + ' was busy. waiting 30s before trying again' )
                            
                            time.sleep( 30 )
                            
                            job_key.Delete()
                            
                            controller.pub( 'notify_new_downloads' )
                            
                            return
                            
                        except Exception as e:
                            
                            HydrusData.ShowText( 'Error downloading file!' )
                            HydrusData.ShowException( e )
                            
                        
                    
                elif service.GetServiceType() == HC.IPFS:
                    
                    multihashes = HG.client_controller.Read( 'service_filenames', service_key, { hash } )
                    
                    if len( multihashes ) > 0:
                        
                        multihash = multihashes[0]
                        
                        # this actually calls to a thread that can launch gui 'select from tree' stuff, so let's just break at this point
                        service.ImportFile( multihash )
                        
                        break
                        
                    
                
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
                
            
        
        if len( successful_hashes ) > 0:
            
            job_key.SetVariable( 'popup_text_1', HydrusData.ConvertIntToPrettyString( len( successful_hashes ) ) + ' files downloaded' )
            
        
        job_key.Delete()
        
    
2015-03-04 22:44:32 +00:00
2015-11-04 22:30:28 +00:00
def DAEMONMaintainTrash( controller ):
    """Prune the trash to the configured maximum total size and maximum age.
    
    Deletes in small batches (10 hashes at a time), yielding to the model and
    sleeping briefly between batches, and bails out promptly on shutdown.
    """
    
    def delete_batch( hashes ):
        
        # issue a synchronous physical-delete content update for these trash files
        content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes )
        
        service_keys_to_content_updates = { CC.TRASH_SERVICE_KEY : [ content_update ] }
        
        controller.WaitUntilModelFree()
        
        controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
        
    
    if HC.options[ 'trash_max_size' ] is not None:
        
        max_size = HC.options[ 'trash_max_size' ] * 1048576
        
        service_info = controller.Read( 'service_info', CC.TRASH_SERVICE_KEY )
        
        while service_info[ HC.SERVICE_INFO_TOTAL_SIZE ] > max_size:
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
                
            
            hashes = controller.Read( 'trash_hashes', limit = 10 )
            
            if len( hashes ) == 0:
                
                return
                
            
            delete_batch( hashes )
            
            service_info = controller.Read( 'service_info', CC.TRASH_SERVICE_KEY )
            
            time.sleep( 2 )
            
        
    
    if HC.options[ 'trash_max_age' ] is not None:
        
        max_age = HC.options[ 'trash_max_age' ] * 3600
        
        hashes = controller.Read( 'trash_hashes', limit = 10, minimum_age = max_age )
        
        while len( hashes ) > 0:
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
                
            
            delete_batch( hashes )
            
            hashes = controller.Read( 'trash_hashes', limit = 10, minimum_age = max_age )
            
            time.sleep( 2 )
            
        
    
2015-07-15 20:28:26 +00:00
2017-03-02 02:14:56 +00:00
def DAEMONSaveDirtyObjects( controller ):
    """Ask the controller to persist any dirty (modified, unsaved) objects it is tracking."""
    
    controller.SaveDirtyObjects()
    
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
def DAEMONSynchroniseAccounts( controller ):
    """Refresh account state for every restricted (login-requiring) service."""
    
    for restricted_service in controller.services_manager.GetServices( HC.RESTRICTED_SERVICES ):
        
        # check before every service so a shutdown isn't blocked by a long sync
        if HydrusThreading.IsThreadShuttingDown():
            
            return
            
        
        restricted_service.SyncAccount()
        
    
2015-10-21 21:53:10 +00:00
2015-03-04 22:44:32 +00:00
2015-11-04 22:30:28 +00:00
def DAEMONSynchroniseRepositories( controller ):
    """Sync every repository service, respecting the repo-sync pause option and shutdown.
    
    The pause option is re-read between services so the user can stop a long
    run mid-way. A short sleep between services spaces the work out.
    """
    
    if controller.options[ 'pause_repo_sync' ]:
        
        return
        
    
    for repository in controller.services_manager.GetServices( HC.REPOSITORIES ):
        
        if HydrusThreading.IsThreadShuttingDown():
            
            return
            
        
        # user may have paused repo sync since this run started
        if controller.options[ 'pause_repo_sync' ]:
            
            return
            
        
        repository.Sync( only_process_when_idle = True )
        
        if HydrusThreading.IsThreadShuttingDown():
            
            return
            
        
        time.sleep( 3 )
        
    
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
2018-04-11 22:30:40 +00:00
class SubscriptionJob( object ):
    """Wraps one subscription sync so it can run on its own worker thread.
    
    The daemon polls IsDone() to manage a pool of these; Work() is the thread
    target and guarantees the done-flag is set even if the sync raises.
    """
    
    def __init__( self, controller, name ):
        
        self._controller = controller
        self._name = name
        
        # set once Work() completes (successfully or not)
        self._job_done = threading.Event()
        
    
    def _DoWork( self ):
        
        if HG.subscription_report_mode:
            
            HydrusData.ShowText( 'Subscription "' + self._name + '" about to start.' )
            
        
        subscription = self._controller.Read( 'serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_SUBSCRIPTION, self._name )
        
        subscription.Sync()
        
    
    def IsDone( self ):
        
        return self._job_done.is_set()
        
    
    def Work( self ):
        
        try:
            
            self._DoWork()
            
        finally:
            
            # always flag completion so the daemon's pool bookkeeping never hangs
            self._job_done.set()
            
        
    
2015-11-04 22:30:28 +00:00
def DAEMONSynchroniseSubscriptions( controller ):
    """Sync all subscriptions, running up to a configured number of them in parallel.
    
    Each subscription runs as a SubscriptionJob on its own thread. The daemon
    waits for a free slot before launching each job, re-reading the
    max-simultaneous option every time so the user can change it mid-run.
    Cancellation (sub pause or thread shutdown) is signalled from
    wait_for_free_slot via CancelledException. HG.subscriptions_running is
    held True for the whole run.
    """
    
    def filter_finished_jobs( subs_jobs ):
        
        # prune (thread, job) pairs whose job has flagged done; delete by
        # index in reverse so earlier deletions don't shift later indices
        done_indices = [ i for ( i, ( thread, job ) ) in enumerate( subs_jobs ) if job.IsDone() ]
        
        done_indices.reverse()
        
        for i in done_indices:
            
            del subs_jobs[ i ]
            
        
    
    def wait_for_free_slot( controller, subs_jobs, max_simultaneous_subscriptions ):
        
        # brief pause so a just-started job has a moment to get going before we count slots
        time.sleep( 0.1 )
        
        while True:
            
            p1 = controller.options[ 'pause_subs_sync' ]
            p2 = HydrusThreading.IsThreadShuttingDown()
            
            if p1 or p2:
                
                if HG.subscription_report_mode:
                    
                    HydrusData.ShowText( 'Subscriptions cancelling. Global sub pause is ' + str( p1 ) + ' and sub daemon thread shutdown status is ' + str( p2 ) + '.' )
                    
                
                if p2:
                    
                    # client is going down: tell the in-flight sub threads to stop too
                    for ( thread, job ) in subs_jobs:
                        
                        HydrusThreading.ShutdownThread( thread )
                        
                    
                
                raise HydrusExceptions.CancelledException( 'subs cancelling or thread shutting down' )
                
            
            filter_finished_jobs( subs_jobs )
            
            if len( subs_jobs ) < max_simultaneous_subscriptions:
                
                return
                
            
            time.sleep( 1.0 )
            
        
    
    def wait_for_all_finished( subs_jobs ):
        
        # block until every launched job has flagged done
        while True:
            
            filter_finished_jobs( subs_jobs )
            
            if len( subs_jobs ) == 0:
                
                return
                
            
            time.sleep( 1.0 )
            
        
    
    if HG.subscription_report_mode:
        
        HydrusData.ShowText( 'Subscription daemon started a run.' )
        
    
    subscription_names = list( controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_SUBSCRIPTION ) )
    
    if controller.new_options.GetBoolean( 'process_subs_in_random_order' ):
        
        random.shuffle( subscription_names )
        
    else:
        
        subscription_names.sort()
        
    
    HG.subscriptions_running = True
    
    subs_jobs = []
    
    try:
        
        for name in subscription_names:
            
            # re-read each time so the user can raise/lower it during the run
            max_simultaneous_subscriptions = controller.new_options.GetInteger( 'max_simultaneous_subscriptions' )
            
            try:
                
                wait_for_free_slot( controller, subs_jobs, max_simultaneous_subscriptions )
                
            except HydrusExceptions.CancelledException:
                
                # paused or shutting down: stop launching, fall through to cleanup
                break
                
            
            job = SubscriptionJob( controller, name )
            
            thread = threading.Thread( target = job.Work, name = 'subscription thread' )
            
            thread.start()
            
            subs_jobs.append( ( thread, job ) )
            
        
        wait_for_all_finished( subs_jobs )
        
    finally:
        
        HG.subscriptions_running = False
        
    
2015-03-04 22:44:32 +00:00
2015-11-04 22:30:28 +00:00
def DAEMONUPnP( controller ):
    """Keep the router's UPnP port mappings in sync with local booru settings.
    
    First removes any existing mapping for our (ip, internal port) whose
    external port no longer matches the configured one, then adds mappings
    for services that want one but don't have it.
    """
    
    try:
        
        local_ip = HydrusNATPunch.GetLocalIP()
        
        current_mappings = HydrusNATPunch.GetUPnPMappings()
        
        our_mappings = { ( internal_client, internal_port ) : external_port for ( description, internal_client, internal_port, external_ip_address, external_port, protocol, enabled ) in current_mappings }
        
    except:
        
        return # This IGD probably doesn't support UPnP, so don't spam the user with errors they can't fix!
        
    
    services = controller.services_manager.GetServices( ( HC.LOCAL_BOORU, ) )
    
    # pass 1: remove stale mappings that point at us but use the wrong external port
    for service in services:
        
        internal_port = service.GetPort()
        
        if ( local_ip, internal_port ) in our_mappings:
            
            current_external_port = our_mappings[ ( local_ip, internal_port ) ]
            
            upnp_port = service.GetUPnPPort()
            
            if upnp_port is None or current_external_port != upnp_port:
                
                HydrusNATPunch.RemoveUPnPMapping( current_external_port, 'TCP' )
                
            
        
    
    # pass 2: add any mapping the service wants that isn't present
    for service in services:
        
        internal_port = service.GetPort()
        upnp_port = service.GetUPnPPort()
        
        if upnp_port is not None:
            
            if ( local_ip, internal_port ) not in our_mappings:
                
                service_type = service.GetServiceType()
                
                protocol = 'TCP'
                
                description = HC.service_string_lookup[ service_type ] + ' at ' + local_ip + ':' + str( internal_port )
                
                duration = 3600
                
                try:
                    
                    HydrusNATPunch.AddUPnPMapping( local_ip, internal_port, upnp_port, protocol, description, duration = duration )
                    
                except HydrusExceptions.FirewallException:
                    
                    # BUGFIX: the ports are ints, so the original message concatenation
                    # raised TypeError instead of printing -- wrap them in str()
                    HydrusData.Print( 'The UPnP Daemon tried to add ' + local_ip + ':' + str( internal_port ) + '->external:' + str( upnp_port ) + ' but it failed due to router error. Please try it manually to get a full log of what happened.' )
                    
                    return
                    
                
            
        
    
2015-03-04 22:44:32 +00:00
2016-12-14 21:19:07 +00:00