# hydrus/include/ClientDaemons.py

import ClientData
import ClientImporting
import ClientThreading
import HydrusConstants as HC
import HydrusData
import HydrusExceptions
import HydrusGlobals as HG
import HydrusNATPunch
import HydrusPaths
import HydrusSerialisable
import HydrusThreading
import ClientConstants as CC
import random
import time
import wx

def DAEMONCheckExportFolders( controller ):
    
    if not controller.options[ 'pause_export_folders_sync' ]:
        
        export_folders = controller.Read( 'serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_EXPORT_FOLDER )
        
        for export_folder in export_folders:
            
            # the user can pause export folder syncing mid-run, so re-check before every folder
            if controller.options[ 'pause_export_folders_sync' ]:
                
                break
            
            export_folder.DoWork()

def DAEMONCheckImportFolders( controller ):
    
    if not controller.options[ 'pause_import_folders_sync' ]:
        
        import_folder_names = controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_IMPORT_FOLDER )
        
        for name in import_folder_names:
            
            import_folder = controller.Read( 'serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_IMPORT_FOLDER, name )
            
            # the user can pause import folder syncing mid-run, so re-check before every folder
            if controller.options[ 'pause_import_folders_sync' ]:
                
                break
            
            import_folder.DoWork()

def DAEMONCheckMouseIdle( controller ):
    
    # gui state must be inspected on the wx main thread, so schedule it there
    wx.CallAfter( controller.CheckMouseIdle )

def DAEMONDownloadFiles( controller ):
    
    # fetch any outstanding downloads from whichever remote services currently hold the files (file repositories or ipfs)
    hashes = controller.Read( 'downloads' )
    
    num_downloads = len( hashes )
    
    if num_downloads > 0:
        
        client_files_manager = controller.client_files_manager
        
        successful_hashes = set()
        
        job_key = ClientThreading.JobKey()
        
        job_key.SetVariable( 'popup_text_1', 'initialising downloader' )
        
        controller.pub( 'message', job_key )
        
        for hash in hashes:
            
            job_key.SetVariable( 'popup_text_1', 'downloading ' + HydrusData.ConvertIntToPrettyString( num_downloads - len( successful_hashes ) ) + ' files from repositories' )
            
            ( media_result, ) = controller.Read( 'media_results', ( hash, ) )
            
            service_keys = list( media_result.GetLocationsManager().GetCurrent() )
            
            # try the file's current locations in a random order so no single service is hammered
            random.shuffle( service_keys )
            
            for service_key in service_keys:
                
                if service_key == CC.LOCAL_FILE_SERVICE_KEY: break # the file is already local
                elif service_key == CC.TRASH_SERVICE_KEY: continue
                
                try:
                    
                    service = controller.services_manager.GetService( service_key )
                    
                except:
                    
                    continue
                
                if service.GetServiceType() == HC.FILE_REPOSITORY:
                    
                    file_repository = service
                    
                    if file_repository.IsFunctional():
                        
                        try:
                            
                            ( os_file_handle, temp_path ) = HydrusPaths.GetTempPath()
                            
                            try:
                                
                                file_repository.Request( HC.GET, 'file', { 'hash' : hash }, temp_path = temp_path )
                                
                                controller.WaitUntilModelFree()
                                
                                automatic_archive = False
                                exclude_deleted = False # this is the important part here
                                min_size = None
                                min_resolution = None
                                
                                file_import_options = ClientImporting.FileImportOptions( automatic_archive = automatic_archive, exclude_deleted = exclude_deleted, min_size = min_size, min_resolution = min_resolution )
                                
                                file_import_job = ClientImporting.FileImportJob( temp_path, file_import_options )
                                
                                client_files_manager.ImportFile( file_import_job )
                                
                                successful_hashes.add( hash )
                                
                                break
                                
                            finally:
                                
                                HydrusPaths.CleanUpTempPath( os_file_handle, temp_path )
                            
                        except HydrusExceptions.ServerBusyException:
                            
                            job_key.SetVariable( 'popup_text_1', file_repository.GetName() + ' was busy. waiting 30s before trying again' )
                            
                            time.sleep( 30 )
                            
                            job_key.Delete()
                            
                            controller.pub( 'notify_new_downloads' )
                            
                            return
                            
                        except Exception as e:
                            
                            HydrusData.ShowText( 'Error downloading file!' )
                            HydrusData.ShowException( e )
                        
                    
                elif service.GetServiceType() == HC.IPFS:
                    
                    multihashes = HG.client_controller.Read( 'service_filenames', service_key, { hash } )
                    
                    if len( multihashes ) > 0:
                        
                        multihash = multihashes[0]
                        
                        # this actually calls to a thread that can launch gui 'select from tree' stuff, so let's just break at this point
                        service.ImportFile( multihash )
                        
                        break
                    
                
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
            
        
        if len( successful_hashes ) > 0:
            
            job_key.SetVariable( 'popup_text_1', HydrusData.ConvertIntToPrettyString( len( successful_hashes ) ) + ' files downloaded' )
            
        
        job_key.Delete()

def DAEMONMaintainTrash( controller ):
    
    # first, prune the trash down to the user's configured maximum size
    if HC.options[ 'trash_max_size' ] is not None:
        
        max_size = HC.options[ 'trash_max_size' ] * 1048576 # the option is stored in MB
        
        service_info = controller.Read( 'service_info', CC.TRASH_SERVICE_KEY )
        
        while service_info[ HC.SERVICE_INFO_TOTAL_SIZE ] > max_size:
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
            
            hashes = controller.Read( 'trash_hashes', limit = 10 )
            
            if len( hashes ) == 0:
                
                return
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes )
            
            service_keys_to_content_updates = { CC.TRASH_SERVICE_KEY : [ content_update ] }
            
            controller.WaitUntilModelFree()
            
            controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
            
            service_info = controller.Read( 'service_info', CC.TRASH_SERVICE_KEY )
            
            time.sleep( 2 )
        
    
    # then clear out anything that has sat in the trash longer than the configured maximum age
    if HC.options[ 'trash_max_age' ] is not None:
        
        max_age = HC.options[ 'trash_max_age' ] * 3600 # the option is stored in hours
        
        hashes = controller.Read( 'trash_hashes', limit = 10, minimum_age = max_age )
        
        while len( hashes ) > 0:
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes )
            
            service_keys_to_content_updates = { CC.TRASH_SERVICE_KEY : [ content_update ] }
            
            controller.WaitUntilModelFree()
            
            controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
            
            hashes = controller.Read( 'trash_hashes', limit = 10, minimum_age = max_age )
            
            time.sleep( 2 )

def DAEMONSaveDirtyObjects( controller ):
    
    controller.SaveDirtyObjects()

def DAEMONSynchroniseAccounts( controller ):
    
    services = controller.services_manager.GetServices( HC.RESTRICTED_SERVICES )
    
    for service in services:
        
        if HydrusThreading.IsThreadShuttingDown():
            
            return
        
        service.SyncAccount()

def DAEMONSynchroniseRepositories( controller ):
    
    if not controller.options[ 'pause_repo_sync' ]:
        
        services = controller.services_manager.GetServices( HC.REPOSITORIES )
        
        for service in services:
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
            
            # the user can pause repository syncing mid-run, so re-check before every service
            if controller.options[ 'pause_repo_sync' ]:
                
                return
            
            service.Sync( only_process_when_idle = True )
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
            
            time.sleep( 3 )

def DAEMONSynchroniseSubscriptions( controller ):
    
    subscription_names = list( controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_SUBSCRIPTION ) )
    
    if controller.new_options.GetBoolean( 'process_subs_in_random_order' ):
        
        random.shuffle( subscription_names )
        
    else:
        
        subscription_names.sort()
    
    HG.subscriptions_running = True
    
    try:
        
        for name in subscription_names:
            
            p1 = controller.options[ 'pause_subs_sync' ]
            p2 = controller.ViewIsShutdown()
            
            if p1 or p2:
                
                return
            
            subscription = controller.Read( 'serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_SUBSCRIPTION, name )
            
            subscription.Sync()
        
    finally:
        
        # make sure the global flag is cleared even if a sync raises
        HG.subscriptions_running = False

def DAEMONUPnP( controller ):
    
    try:
        
        local_ip = HydrusNATPunch.GetLocalIP()
        
        current_mappings = HydrusNATPunch.GetUPnPMappings()
        
        our_mappings = { ( internal_client, internal_port ) : external_port for ( description, internal_client, internal_port, external_ip_address, external_port, protocol, enabled ) in current_mappings }
        
    except:
        
        return # This IGD probably doesn't support UPnP, so don't spam the user with errors they can't fix!
    
    services = controller.services_manager.GetServices( ( HC.LOCAL_BOORU, ) )
    
    # first, remove any existing mappings whose external port no longer matches what the service wants
    for service in services:
        
        internal_port = service.GetPort()
        
        if ( local_ip, internal_port ) in our_mappings:
            
            current_external_port = our_mappings[ ( local_ip, internal_port ) ]
            
            upnp_port = service.GetUPnPPort()
            
            if upnp_port is None or current_external_port != upnp_port:
                
                HydrusNATPunch.RemoveUPnPMapping( current_external_port, 'TCP' )
            
        
    
    # then add any mappings that are wanted but missing
    for service in services:
        
        internal_port = service.GetPort()
        upnp_port = service.GetUPnPPort()
        
        if upnp_port is not None:
            
            if ( local_ip, internal_port ) not in our_mappings:
                
                service_type = service.GetServiceType()
                
                protocol = 'TCP'
                
                description = HC.service_string_lookup[ service_type ] + ' at ' + local_ip + ':' + str( internal_port )
                
                duration = 3600
                
                try:
                    
                    HydrusNATPunch.AddUPnPMapping( local_ip, internal_port, upnp_port, protocol, description, duration = duration )
                    
                except HydrusExceptions.FirewallException:
                    
                    HydrusData.Print( 'The UPnP Daemon tried to add ' + local_ip + ':' + str( internal_port ) + '->external:' + str( upnp_port ) + ' but it failed due to router error. Please try it manually to get a full log of what happened.' )
                    
                    return
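
# --------------------------------------------------------------------------
# Hypothetical usage sketch, not part of the hydrus codebase: a minimal stub
# that exercises DAEMONCheckExportFolders in isolation, assuming the hydrus
# imports at the top of this module resolve. The stub implements only the
# attributes this module itself touches ( controller.options and
# controller.Read ); the class names and the __main__ guard below are
# illustrative assumptions, not hydrus API.

class _StubExportFolder( object ):
    
    def DoWork( self ):
        
        print( 'stub export folder did its work' )
    

class _StubController( object ):
    
    def __init__( self ):
        
        self.options = { 'pause_export_folders_sync' : False }
    
    def Read( self, action, *args, **kwargs ):
        
        # the real controller would return every stored export folder object here
        return [ _StubExportFolder() ]
    

if __name__ == '__main__':
    
    DAEMONCheckExportFolders( _StubController() )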