hydrus/include/ClientDaemons.py

333 lines
9.5 KiB
Python
Raw Normal View History

2019-01-09 22:59:03 +00:00
from . import ClientImporting
from . import ClientImportOptions
from . import ClientImportFileSeeds
from . import ClientPaths
from . import ClientThreading
from . import HydrusConstants as HC
from . import HydrusData
from . import HydrusExceptions
from . import HydrusGlobals as HG
from . import HydrusNATPunch
from . import HydrusPaths
from . import HydrusSerialisable
from . import HydrusThreading
from . import ClientConstants as CC
2015-03-04 22:44:32 +00:00
import random
2018-04-11 22:30:40 +00:00
import threading
2015-03-04 22:44:32 +00:00
import time
2019-02-13 22:26:43 +00:00
def DAEMONCheckExportFolders():
    """Run every export folder's periodic work, unless export folder syncing is paused.
    
    Sets HG.export_folders_running for the duration of the pass so the rest of
    the client can tell export work is in progress.
    """
    
    controller = HG.client_controller
    
    if controller.options[ 'pause_export_folders_sync' ]:
        
        return
        
    
    HG.export_folders_running = True
    
    try:
        
        names = controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_EXPORT_FOLDER )
        
        for export_folder_name in names:
            
            export_folder = controller.Read( 'serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_EXPORT_FOLDER, export_folder_name )
            
            # bail out mid-run if the user pauses or the client starts shutting down
            user_paused = controller.options[ 'pause_export_folders_sync' ]
            
            if user_paused or HydrusThreading.IsThreadShuttingDown():
                
                break
                
            
            export_folder.DoWork()
            
        
    finally:
        
        HG.export_folders_running = False
        
    
2015-03-04 22:44:32 +00:00
2019-02-13 22:26:43 +00:00
def DAEMONCheckImportFolders():
    """Run every import folder's periodic work, unless import folder syncing is paused.
    
    Sets HG.import_folders_running for the duration of the pass so the rest of
    the client can tell import work is in progress.
    """
    
    controller = HG.client_controller
    
    if controller.options[ 'pause_import_folders_sync' ]:
        
        return
        
    
    HG.import_folders_running = True
    
    try:
        
        names = controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_IMPORT_FOLDER )
        
        for import_folder_name in names:
            
            import_folder = controller.Read( 'serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_IMPORT_FOLDER, import_folder_name )
            
            # bail out mid-run if the user pauses or the client starts shutting down
            user_paused = controller.options[ 'pause_import_folders_sync' ]
            
            if user_paused or HydrusThreading.IsThreadShuttingDown():
                
                break
                
            
            import_folder.DoWork()
            
        
    finally:
        
        HG.import_folders_running = False
        
    
2015-03-04 22:44:32 +00:00
2015-11-04 22:30:28 +00:00
def DAEMONMaintainTrash( controller ):
    """Prune the trash service down to the user's configured limits.
    
    Two independent, optional limits from HC.options are enforced:
    
      * 'trash_max_size' (megabytes): delete trashed files until the service's
        total size fits under the cap.
      * 'trash_max_age' (hours): delete trashed files older than the cap.
    
    Files are deleted in batches of 10 with a short sleep between batches so
    the daemon does not monopolise the database. Returns early if the thread
    is shutting down or there is nothing left to delete.
    """
    
    def delete_hashes( hashes ):
        
        # physically delete this batch of files from the trash service
        content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes )
        
        service_keys_to_content_updates = { CC.TRASH_SERVICE_KEY : [ content_update ] }
        
        controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
        
    
    if HC.options[ 'trash_max_size' ] is not None:
        
        max_size = HC.options[ 'trash_max_size' ] * 1048576 # MB -> bytes
        
        service_info = controller.Read( 'service_info', CC.TRASH_SERVICE_KEY )
        
        while service_info[ HC.SERVICE_INFO_TOTAL_SIZE ] > max_size:
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
                
            
            hashes = controller.Read( 'trash_hashes', limit = 10 )
            
            if len( hashes ) == 0:
                
                return
                
            
            delete_hashes( hashes )
            
            service_info = controller.Read( 'service_info', CC.TRASH_SERVICE_KEY )
            
            time.sleep( 2 )
            
        
    
    if HC.options[ 'trash_max_age' ] is not None:
        
        max_age = HC.options[ 'trash_max_age' ] * 3600 # hours -> seconds
        
        hashes = controller.Read( 'trash_hashes', limit = 10, minimum_age = max_age )
        
        while len( hashes ) > 0:
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
                
            
            delete_hashes( hashes )
            
            hashes = controller.Read( 'trash_hashes', limit = 10, minimum_age = max_age )
            
            time.sleep( 2 )
            
        
    
2015-07-15 20:28:26 +00:00
2015-11-04 22:30:28 +00:00
def DAEMONSynchroniseRepositories( controller ):
    """Sync every repository service: pull remote updates, then process them.
    
    Skips the whole pass when repo sync is paused; between services it
    re-checks the pause option and thread shutdown so a long run can be
    interrupted promptly.
    """
    
    if controller.options[ 'pause_repo_sync' ]:
        
        return
        
    
    for service in controller.services_manager.GetServices( HC.REPOSITORIES ):
        
        if HydrusThreading.IsThreadShuttingDown():
            
            return
            
        
        # the user may have paused mid-run
        if controller.options[ 'pause_repo_sync' ]:
            
            return
            
        
        service.SyncRemote()
        
        service.SyncProcessUpdates( maintenance_mode = HC.MAINTENANCE_IDLE )
        
        if HydrusThreading.IsThreadShuttingDown():
            
            return
            
        
        # breathe between services
        time.sleep( 3 )
        
    
2015-03-04 22:44:32 +00:00
2017-03-02 02:14:56 +00:00
2018-04-11 22:30:40 +00:00
class SubscriptionJob( object ):
    """Runs one named subscription's sync and flags when it has finished.
    
    Work() is intended to be the target of a worker thread; IsDone() lets the
    daemon poll for completion from outside.
    """
    
    def __init__( self, controller, name ):
        
        self._controller = controller
        self._name = name
        
        # set once Work() has completed, whether it succeeded or raised
        self._job_done = threading.Event()
        
    
    def _DoWork( self ):
        
        if HG.subscription_report_mode:
            
            HydrusData.ShowText( 'Subscription "' + self._name + '" about to start.' )
            
        
        subscription = self._controller.Read( 'serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_SUBSCRIPTION, self._name )
        
        subscription.Sync()
        
    
    def IsDone( self ):
        
        return self._job_done.is_set()
        
    
    def Work( self ):
        
        try:
            
            self._DoWork()
            
        finally:
            
            # always signal completion so the daemon never waits forever
            self._job_done.set()
            
        
    
2015-11-04 22:30:28 +00:00
def DAEMONSynchroniseSubscriptions( controller ):
    """Sync all subscriptions, running up to 'max_simultaneous_subscriptions'
    of them at once on worker threads.
    
    Respects the global subscription pause, the global network pause, and
    thread shutdown. Sets HG.subscriptions_running for the duration of the
    run.
    """
    
    def prune_finished( running_jobs ):
        
        # drop (thread, job) pairs whose job has completed; delete from the
        # back so earlier indices stay valid
        for i in reversed( [ i for ( i, ( t, j ) ) in enumerate( running_jobs ) if j.IsDone() ] ):
            
            del running_jobs[ i ]
            
        
    
    def wait_for_free_slot( controller, running_jobs, max_simultaneous_subscriptions ):
        
        # block until a worker slot opens up; raises CancelledException if the
        # run should stop instead
        time.sleep( 0.1 )
        
        while True:
            
            p1 = controller.options[ 'pause_subs_sync' ]
            p2 = HydrusThreading.IsThreadShuttingDown()
            p3 = controller.new_options.GetBoolean( 'pause_all_new_network_traffic' )
            
            if p1 or p2 or p3:
                
                if HG.subscription_report_mode:
                    
                    HydrusData.ShowText( 'Subscriptions cancelling. Global sub pause is {}, sub daemon thread shutdown status is {}, and global network pause is {}.'.format( p1, p2, p3 ) )
                    
                
                if p2:
                    
                    # the whole client is going down--tell the workers too
                    for ( t, j ) in running_jobs:
                        
                        HydrusThreading.ShutdownThread( t )
                        
                    
                
                raise HydrusExceptions.CancelledException( 'subs cancelling or thread shutting down' )
                
            
            prune_finished( running_jobs )
            
            if len( running_jobs ) < max_simultaneous_subscriptions:
                
                return
                
            
            time.sleep( 1.0 )
            
        
    
    def wait_for_all_finished( running_jobs ):
        
        prune_finished( running_jobs )
        
        while len( running_jobs ) > 0:
            
            time.sleep( 1.0 )
            
            prune_finished( running_jobs )
            
        
    
    if HG.subscription_report_mode:
        
        HydrusData.ShowText( 'Subscription daemon started a run.' )
        
    
    subscription_names = list( controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_SUBSCRIPTION ) )
    
    if controller.new_options.GetBoolean( 'process_subs_in_random_order' ):
        
        random.shuffle( subscription_names )
        
    else:
        
        subscription_names.sort()
        
    
    HG.subscriptions_running = True
    
    running_jobs = []
    
    try:
        
        for name in subscription_names:
            
            # re-read every iteration so the user can change it mid-run
            max_simultaneous_subscriptions = controller.new_options.GetInteger( 'max_simultaneous_subscriptions' )
            
            try:
                
                wait_for_free_slot( controller, running_jobs, max_simultaneous_subscriptions )
                
            except HydrusExceptions.CancelledException:
                
                break
                
            
            job = SubscriptionJob( controller, name )
            
            thread = threading.Thread( target = job.Work, name = 'subscription thread' )
            
            thread.start()
            
            running_jobs.append( ( thread, job ) )
            
            # while we initialise the queue, don't hammer the cpu
            if len( running_jobs ) < max_simultaneous_subscriptions:
                
                time.sleep( 1.0 )
                
            
        
        wait_for_all_finished( running_jobs )
        
    finally:
        
        HG.subscriptions_running = False
        
    
2015-03-04 22:44:32 +00:00