# hydrus/include/ClientDaemons.py


import ClientData
import ClientFiles
import collections
import dircache
import hashlib
import httplib
import itertools
import HydrusConstants as HC
import ClientDownloading
import HydrusEncryption
import HydrusExceptions
import HydrusFileHandling
import HydrusImageHandling
import HydrusNATPunch
import HydrusServer
import HydrusSerialisable
import HydrusTagArchive
import HydrusTags
import HydrusThreading
import ClientConstants as CC
import os
import Queue
import random
import shutil
import sqlite3
import stat
import sys
import threading
import time
import traceback
import wx
import yaml
import HydrusData
import HydrusNetworking
import HydrusGlobals
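
# These DAEMON* callables are the hydrus client's periodic background jobs:
# folder import/export, pending downloads, thumbnail resizing, account/repo/
# subscription synchronisation and UPnP port mapping. They run off the wx
# thread, talk to the db through wx.GetApp().Read/WriteSynchronous, and check
# HydrusGlobals.shutdown and the various 'pause_*' options so they can bail
# out promptly. A minimal registration sketch; the DAEMONWorker name,
# signature and period here are an assumption about HydrusThreading, not
# confirmed by this file:
#
#   HydrusThreading.DAEMONWorker( 'CheckImportFolders', DAEMONCheckImportFolders, period = 180 )
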
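# Runs each configured export folder's own DoWork, unless export folder sync
# is paused.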
def DAEMONCheckExportFolders():
    
    options = wx.GetApp().GetOptions()
    
    if not options[ 'pause_export_folders_sync' ]:
        
        export_folders = wx.GetApp().Read( 'export_folders' )
        
        for export_folder in export_folders:
            
            export_folder.DoWork()

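# Walks every configured import folder whose check_period has elapsed: lists
# the folder, filters out paths already imported or failed, copies each file
# to a temp path (first opening it 'ab' to check nothing is still writing to
# it), imports it, then handles the folder type: SYNCHRONISE caches
# successful paths, DELETE removes the originals. Finishes with a popup
# message and saves the updated details back to the db.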
def DAEMONCheckImportFolders():
    
    options = wx.GetApp().GetOptions()
    
    if not options[ 'pause_import_folders_sync' ]:
        
        import_folders = wx.GetApp().Read( 'import_folders' )
        
        for ( folder_path, details ) in import_folders.items():
            
            if HydrusData.TimeHasPassed( details[ 'last_checked' ] + details[ 'check_period' ] ):
                
                if os.path.exists( folder_path ) and os.path.isdir( folder_path ):
                    
                    filenames = dircache.listdir( folder_path )
                    
                    raw_paths = [ folder_path + os.path.sep + filename for filename in filenames ]
                    
                    all_paths = ClientFiles.GetAllPaths( raw_paths )
                    
                    if details[ 'type' ] == HC.IMPORT_FOLDER_TYPE_SYNCHRONISE:
                        
                        all_paths = [ path for path in all_paths if path not in details[ 'cached_imported_paths' ] ]
                    
                    all_paths = [ path for path in all_paths if path not in details[ 'failed_imported_paths' ] ]
                    
                    successful_hashes = set()
                    
                    for ( i, path ) in enumerate( all_paths ):
                        
                        if options[ 'pause_import_folders_sync' ]: return
                        
                        info = os.lstat( path )
                        
                        size = info[6]
                        
                        if size == 0: continue
                        
                        ( os_file_handle, temp_path ) = HydrusFileHandling.GetTempPath()
                        
                        try:
                            
                            try:
                                
                                # try to get a write lock just to check it isn't being written to right now
                                
                                with open( path, 'ab' ) as f:
                                    
                                    pass
                                
                                with open( path, 'rb' ) as f_source:
                                    
                                    with open( temp_path, 'wb' ) as f_dest:
                                        
                                        HydrusFileHandling.CopyFileLikeToFileLike( f_source, f_dest )
                                
                            except:
                                
                                # could not lock, so try again later
                                
                                continue
                            
                            try:
                                
                                if details[ 'local_tag' ] is not None: service_keys_to_tags = { CC.LOCAL_TAG_SERVICE_KEY : { details[ 'local_tag' ] } }
                                else: service_keys_to_tags = {}
                                
                                ( result, hash ) = wx.GetApp().WriteSynchronous( 'import_file', temp_path, service_keys_to_tags = service_keys_to_tags )
                                
                                if result in ( CC.STATUS_SUCCESSFUL, CC.STATUS_REDUNDANT ):
                                    
                                    successful_hashes.add( hash )
                                    
                                    if details[ 'type' ] == HC.IMPORT_FOLDER_TYPE_SYNCHRONISE: details[ 'cached_imported_paths' ].add( path )
                                    
                                elif result == CC.STATUS_DELETED:
                                    
                                    details[ 'failed_imported_paths' ].add( path )
                                
                                if details[ 'type' ] == HC.IMPORT_FOLDER_TYPE_DELETE:
                                    
                                    try: os.remove( path )
                                    except: details[ 'failed_imported_paths' ].add( path )
                                
                            except:
                                
                                details[ 'failed_imported_paths' ].add( path )
                                
                                HydrusData.ShowText( 'Import folder failed to import ' + path + ':' + os.linesep * 2 + traceback.format_exc() )
                            
                        finally:
                            
                            HydrusFileHandling.CleanUpTempPath( os_file_handle, temp_path )
                    
                    if len( successful_hashes ) > 0:
                        
                        text = HydrusData.ToString( len( successful_hashes ) ) + ' files imported from ' + folder_path
                        
                        job_key = HydrusData.JobKey()
                        
                        job_key.SetVariable( 'popup_title', 'import folder' )
                        job_key.SetVariable( 'popup_text_1', text )
                        job_key.SetVariable( 'popup_files', successful_hashes )
                        
                        HydrusGlobals.pubsub.pub( 'message', job_key )
                
                details[ 'last_checked' ] = HydrusData.GetNow()
                
                wx.GetApp().WriteSynchronous( 'import_folder', folder_path, details )

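# Works through the client's pending download queue: for each hash, fetches
# the file from a random file repository that currently has it (skipping the
# trash, and stopping early if the file is already local), imports it via a
# temp path, and returns immediately if the client is shutting down.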
def DAEMONDownloadFiles():
    
    hashes = wx.GetApp().Read( 'downloads' )
    
    num_downloads = len( hashes )
    
    for hash in hashes:
        
        ( media_result, ) = wx.GetApp().Read( 'media_results', CC.COMBINED_FILE_SERVICE_KEY, ( hash, ) )
        
        service_keys = list( media_result.GetLocationsManager().GetCurrent() )
        
        random.shuffle( service_keys )
        
        for service_key in service_keys:
            
            if service_key == CC.LOCAL_FILE_SERVICE_KEY: break
            elif service_key == CC.TRASH_SERVICE_KEY: continue
            
            try: file_repository = wx.GetApp().GetServicesManager().GetService( service_key )
            except HydrusExceptions.NotFoundException: continue
            
            if file_repository.CanDownload():
                
                try:
                    
                    request_args = { 'hash' : hash.encode( 'hex' ) }
                    
                    ( os_file_handle, temp_path ) = HydrusFileHandling.GetTempPath()
                    
                    try:
                        
                        file_repository.Request( HC.GET, 'file', request_args = request_args, temp_path = temp_path )
                        
                        num_downloads -= 1
                        
                        wx.GetApp().WaitUntilWXThreadIdle()
                        
                        wx.GetApp().WriteSynchronous( 'import_file', temp_path )
                        
                    finally:
                        
                        HydrusFileHandling.CleanUpTempPath( os_file_handle, temp_path )
                    
                    break
                    
                except:
                    
                    HydrusData.ShowText( 'Error downloading file:' + os.linesep + traceback.format_exc() )
        
        if HydrusGlobals.shutdown: return

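# Coalesces a batch of queued { service_key : [ updates ] } dicts into one
# merged dict and writes it to the db in a single synchronous call. A
# hypothetical example of the merge:
#
#   DAEMONFlushServiceUpdates( [ { key : [ a ] }, { key : [ b ] } ] )
#   # -> WriteSynchronous( 'service_updates', { key : [ a, b ] } )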
def DAEMONFlushServiceUpdates( list_of_service_keys_to_service_updates ):
    
    service_keys_to_service_updates = HydrusData.MergeKeyToListDicts( list_of_service_keys_to_service_updates )
    
    wx.GetApp().WriteSynchronous( 'service_updates', service_keys_to_service_updates )

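# Idle-time job: for every full-size thumbnail without a '_resized'
# companion, renders one at the user's thumbnail dimensions. Work per run is
# capped at max( 100, a tenth of the backlog ), with short sleeps between
# items (and a longer one every tenth item) so the client stays responsive.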
def DAEMONResizeThumbnails():
    
    if not wx.GetApp().CurrentlyIdle(): return
    
    full_size_thumbnail_paths = { path for path in ClientFiles.IterateAllThumbnailPaths() if not path.endswith( '_resized' ) }
    
    resized_thumbnail_paths = { path[:-8] for path in ClientFiles.IterateAllThumbnailPaths() if path.endswith( '_resized' ) }
    
    thumbnail_paths_to_render = list( full_size_thumbnail_paths.difference( resized_thumbnail_paths ) )
    
    random.shuffle( thumbnail_paths_to_render )
    
    i = 0
    
    limit = max( 100, len( thumbnail_paths_to_render ) / 10 )
    
    options = wx.GetApp().GetOptions()
    
    for thumbnail_path in thumbnail_paths_to_render:
        
        try:
            
            thumbnail_resized = HydrusFileHandling.GenerateThumbnail( thumbnail_path, options[ 'thumbnail_dimensions' ] )
            
            thumbnail_resized_path = thumbnail_path + '_resized'
            
            with open( thumbnail_resized_path, 'wb' ) as f: f.write( thumbnail_resized )
            
        except IOError as e: HydrusData.ShowText( 'Thumbnail read error:' + os.linesep + traceback.format_exc() )
        except Exception as e: HydrusData.ShowText( 'Thumbnail rendering error:' + os.linesep + traceback.format_exc() )
        
        if i % 10 == 0: time.sleep( 2 )
        else:
            
            if limit > 10000: time.sleep( 0.05 )
            elif limit > 1000: time.sleep( 0.25 )
            else: time.sleep( 0.5 )
        
        i += 1
        
        if i > limit: break
        
        if HydrusGlobals.shutdown: break

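# Refreshes the stale account record for every restricted service we hold an
# access key for (skipping paused repositories and services with a recent
# error), then pubs 'notify_new_permissions' if anything was updated.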
def DAEMONSynchroniseAccounts():
    
    services = wx.GetApp().GetServicesManager().GetServices( HC.RESTRICTED_SERVICES )
    
    options = wx.GetApp().GetOptions()
    
    do_notify = False
    
    for service in services:
        
        service_key = service.GetServiceKey()
        service_type = service.GetServiceType()
        
        account = service.GetInfo( 'account' )
        credentials = service.GetCredentials()
        
        if service_type in HC.REPOSITORIES:
            
            if options[ 'pause_repo_sync' ]: continue
            
            info = service.GetInfo()
            
            if info[ 'paused' ]: continue
        
        if account.IsStale() and credentials.HasAccessKey() and not service.HasRecentError():
            
            try:
                
                response = service.Request( HC.GET, 'account' )
                
                account = response[ 'account' ]
                
                account.MakeFresh()
                
                wx.GetApp().WriteSynchronous( 'service_updates', { service_key : [ HydrusData.ServiceUpdate( HC.SERVICE_UPDATE_ACCOUNT, account ) ] } )
                
                do_notify = True
                
            except Exception as e:
                
                print( 'Failed to refresh account for ' + service.GetName() + ':' )
                
                print( traceback.format_exc() )
    
    if do_notify: HydrusGlobals.pubsub.pub( 'notify_new_permissions' )

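# Clears the repos_changed flag and asks every repository service to run its
# own Sync, unless repo sync is paused.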
def DAEMONSynchroniseRepositories():
    
    HydrusGlobals.repos_changed = False
    
    options = wx.GetApp().GetOptions()
    
    if not options[ 'pause_repo_sync' ]:
        
        services = wx.GetApp().GetServicesManager().GetServices( HC.REPOSITORIES )
        
        for service in services:
            
            service.Sync()
    
    time.sleep( 5 )

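# For each subscription due a check, builds the appropriate gallery parser(s)
# for the site type (booru, hentai foundry, pixiv, etc.), pages through the
# gallery collecting urls until it hits the url cache, the initial_limit (on
# a first-ever check) or an empty page, then processes the urls oldest-first:
# redundant files optionally get their tags re-fetched, new files are
# downloaded and imported. Progress is saved back to the db every 20 urls so
# an interrupted run loses little. Pausing is honoured via the job_key, and
# if subscriptions are edited mid-run the job abandons itself and asks the
# daemon to restart.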
def DAEMONSynchroniseSubscriptions():
    
    HydrusGlobals.subs_changed = False
    
    options = wx.GetApp().GetOptions()
    
    if not options[ 'pause_subs_sync' ]:
        
        subscription_names = wx.GetApp().Read( 'subscription_names' )
        
        for name in subscription_names:
            
            info = wx.GetApp().Read( 'subscription', name )
            
            site_type = info[ 'site_type' ]
            query_type = info[ 'query_type' ]
            query = info[ 'query' ]
            frequency_type = info[ 'frequency_type' ]
            frequency = info[ 'frequency' ]
            get_tags_if_redundant = info[ 'get_tags_if_redundant' ]
            initial_limit = info[ 'initial_limit' ]
            advanced_tag_options = info[ 'advanced_tag_options' ]
            advanced_import_options = info[ 'advanced_import_options' ]
            last_checked = info[ 'last_checked' ]
            url_cache = info[ 'url_cache' ]
            paused = info[ 'paused' ]
            
            if paused: continue
            
            now = HydrusData.GetNow()
            
            if last_checked is None: last_checked = 0
            
            if last_checked + ( frequency_type * frequency ) < now:
                
                try:
                    
                    job_key = HydrusData.JobKey( pausable = True, cancellable = True )
                    
                    job_key.SetVariable( 'popup_title', 'subscriptions - ' + name )
                    job_key.SetVariable( 'popup_text_1', 'checking' )
                    
                    HydrusGlobals.pubsub.pub( 'message', job_key )
                    
                    do_tags = len( advanced_tag_options ) > 0
                    
                    if site_type == HC.SITE_TYPE_BOORU:
                        
                        ( booru_name, booru_query_type ) = query_type
                        
                        try: booru = wx.GetApp().Read( 'remote_booru', booru_name )
                        except: raise Exception( 'While attempting to execute a subscription on booru ' + name + ', the client could not find that booru in the db.' )
                        
                        tags = query.split( ' ' )
                        
                        all_args = ( ( booru_name, tags ), )
                        
                    elif site_type == HC.SITE_TYPE_HENTAI_FOUNDRY:
                        
                        info = {}
                        
                        info[ 'rating_nudity' ] = 3
                        info[ 'rating_violence' ] = 3
                        info[ 'rating_profanity' ] = 3
                        info[ 'rating_racism' ] = 3
                        info[ 'rating_sex' ] = 3
                        info[ 'rating_spoilers' ] = 3
                        
                        info[ 'rating_yaoi' ] = 1
                        info[ 'rating_yuri' ] = 1
                        info[ 'rating_teen' ] = 1
                        info[ 'rating_guro' ] = 1
                        info[ 'rating_furry' ] = 1
                        info[ 'rating_beast' ] = 1
                        info[ 'rating_male' ] = 1
                        info[ 'rating_female' ] = 1
                        info[ 'rating_futa' ] = 1
                        info[ 'rating_other' ] = 1
                        
                        info[ 'filter_media' ] = 'A'
                        info[ 'filter_order' ] = 'date_new'
                        info[ 'filter_type' ] = 0
                        
                        advanced_hentai_foundry_options = info
                        
                        if query_type == 'artist': all_args = ( ( 'artist pictures', query, advanced_hentai_foundry_options ), ( 'artist scraps', query, advanced_hentai_foundry_options ) )
                        else:
                            
                            tags = query.split( ' ' )
                            
                            all_args = ( ( query_type, tags, advanced_hentai_foundry_options ), )
                        
                    elif site_type == HC.SITE_TYPE_PIXIV: all_args = ( ( query_type, query ), )
                    else: all_args = ( ( query, ), )
                    
                    gallery_parsers = [ ClientDownloading.GetGalleryParser( site_type, *args ) for args in all_args ]
                    
                    gallery_parsers[0].SetupGallerySearch() # for now this is cookie-based for hf, so only have to do it on one
                    
                    all_urls = []
                    
                    page_index = 0
                    
                    while True:
                        
                        while options[ 'pause_subs_sync' ]:
                            
                            ( i_paused, should_quit ) = job_key.WaitIfNeeded()
                            
                            if should_quit:
                                
                                break
                            
                            time.sleep( 0.1 )
                            
                            job_key.SetVariable( 'popup_text_1', 'subscriptions paused' )
                            
                            if HydrusGlobals.subs_changed:
                                
                                job_key.SetVariable( 'popup_text_1', 'subscriptions were changed during processing; this job was abandoned' )
                                
                                print( job_key.ToString() )
                                
                                job_key.Cancel()
                                
                                time.sleep( 5 )
                                
                                job_key.Delete()
                                
                                HydrusGlobals.pubsub.pub( 'notify_restart_subs_sync_daemon' )
                                
                                return
                        
                        ( i_paused, should_quit ) = job_key.WaitIfNeeded()
                        
                        if should_quit:
                            
                            break
                        
                        if last_checked == 0 and initial_limit is not None and len( all_urls ) >= initial_limit: break
                        
                        gallery_parsers_to_remove = []
                        
                        for gallery_parser in gallery_parsers:
                            
                            if last_checked == 0 and initial_limit is not None and len( all_urls ) >= initial_limit: break
                            
                            page_of_urls = gallery_parser.GetPage( page_index )
                            
                            if len( page_of_urls ) == 0: gallery_parsers_to_remove.append( gallery_parser )
                            else:
                                
                                fresh_urls = [ url for url in page_of_urls if url not in url_cache ]
                                
                                reached_url_cache = len( fresh_urls ) != len( page_of_urls )
                                
                                if reached_url_cache: gallery_parsers_to_remove.append( gallery_parser )
                                
                                if initial_limit is not None:
                                    
                                    while len( fresh_urls ) > 0:
                                        
                                        url = fresh_urls.pop( 0 )
                                        
                                        all_urls.append( url )
                                        
                                        if len( all_urls ) >= initial_limit:
                                            
                                            break
                                    
                                else:
                                    
                                    all_urls.extend( fresh_urls )
                            
                            job_key.SetVariable( 'popup_text_1', 'found ' + HydrusData.ConvertIntToPrettyString( len( all_urls ) ) + ' new files' )
                            
                            time.sleep( 5 )
                        
                        for gallery_parser in gallery_parsers_to_remove: gallery_parsers.remove( gallery_parser )
                        
                        if len( gallery_parsers ) == 0: break
                        
                        page_index += 1
                    
                    all_urls.reverse() # to do oldest first, which means we can save incrementally
                    
                    num_new = 0
                    
                    successful_hashes = set()
                    
                    for ( i, url ) in enumerate( all_urls ):
                        
                        while options[ 'pause_subs_sync' ]:
                            
                            ( i_paused, should_quit ) = job_key.WaitIfNeeded()
                            
                            if should_quit:
                                
                                break
                            
                            time.sleep( 0.1 )
                            
                            job_key.SetVariable( 'popup_text_1', 'subscriptions paused' )
                            
                            if HydrusGlobals.subs_changed:
                                
                                job_key.SetVariable( 'popup_text_1', 'subscriptions were changed during processing; this job was abandoned' )
                                
                                print( job_key.ToString() )
                                
                                job_key.Cancel()
                                
                                time.sleep( 5 )
                                
                                job_key.Delete()
                                
                                HydrusGlobals.pubsub.pub( 'notify_restart_subs_sync_daemon' )
                                
                                return
                        
                        ( i_paused, should_quit ) = job_key.WaitIfNeeded()
                        
                        if should_quit:
                            
                            break
                        
                        try:
                            
                            url_cache.add( url )
                            
                            x_out_of_y = 'file ' + HydrusData.ConvertValueRangeToPrettyString( i, len( all_urls ) ) + ': '
                            
                            job_key.SetVariable( 'popup_text_1', x_out_of_y + 'checking url status' )
                            job_key.SetVariable( 'popup_gauge_1', ( i, len( all_urls ) ) )
                            
                            if len( successful_hashes ) > 0:
                                
                                job_key_s_h = set( successful_hashes )
                                
                                job_key.SetVariable( 'popup_files', job_key_s_h )
                            
                            ( status, hash ) = wx.GetApp().Read( 'url_status', url )
                            
                            if status == CC.STATUS_DELETED and not advanced_import_options[ 'exclude_deleted_files' ]: status = CC.STATUS_NEW
                            
                            if status == CC.STATUS_REDUNDANT:
                                
                                if do_tags and get_tags_if_redundant:
                                    
                                    try:
                                        
                                        job_key.SetVariable( 'popup_text_1', x_out_of_y + 'found file in db, fetching tags' )
                                        
                                        tags = gallery_parser.GetTags( url )
                                        
                                        service_keys_to_tags = ClientDownloading.ConvertTagsToServiceKeysToTags( tags, advanced_tag_options )
                                        
                                        service_keys_to_content_updates = ClientDownloading.ConvertServiceKeysToTagsToServiceKeysToContentUpdates( hash, service_keys_to_tags )
                                        
                                        wx.GetApp().WriteSynchronous( 'content_updates', service_keys_to_content_updates )
                                        
                                    except: pass
                                
                            elif status == CC.STATUS_NEW:
                                
                                num_new += 1
                                
                                job_key.SetVariable( 'popup_text_1', x_out_of_y + 'downloading file' )
                                
                                ( os_file_handle, temp_path ) = HydrusFileHandling.GetTempPath()
                                
                                try:
                                    
                                    if do_tags: tags = gallery_parser.GetFileAndTags( temp_path, url )
                                    else:
                                        
                                        gallery_parser.GetFile( temp_path, url )
                                        
                                        tags = []
                                    
                                    service_keys_to_tags = ClientDownloading.ConvertTagsToServiceKeysToTags( tags, advanced_tag_options )
                                    
                                    job_key.SetVariable( 'popup_text_1', x_out_of_y + 'importing file' )
                                    
                                    ( status, hash ) = wx.GetApp().WriteSynchronous( 'import_file', temp_path, advanced_import_options = advanced_import_options, service_keys_to_tags = service_keys_to_tags, url = url )
                                    
                                finally:
                                    
                                    HydrusFileHandling.CleanUpTempPath( os_file_handle, temp_path )
                                
                                if status in ( CC.STATUS_SUCCESSFUL, CC.STATUS_REDUNDANT ): successful_hashes.add( hash )
                            
                        except Exception as e:
                            
                            HydrusData.ShowText( 'While trying to execute subscription ' + name + ', the url ' + url + ' caused this problem:' )
                            
                            HydrusData.ShowException( e )
                        
                        if i % 20 == 0:
                            
                            info[ 'site_type' ] = site_type
                            info[ 'query_type' ] = query_type
                            info[ 'query' ] = query
                            info[ 'frequency_type' ] = frequency_type
                            info[ 'frequency' ] = frequency
                            info[ 'get_tags_if_redundant' ] = get_tags_if_redundant
                            info[ 'initial_limit' ] = initial_limit
                            info[ 'advanced_tag_options' ] = advanced_tag_options
                            info[ 'advanced_import_options' ] = advanced_import_options
                            info[ 'last_checked' ] = last_checked
                            info[ 'url_cache' ] = url_cache
                            info[ 'paused' ] = paused
                            
                            wx.GetApp().WriteSynchronous( 'subscription', name, info )
                        
                        wx.GetApp().WaitUntilWXThreadIdle()
                        
                        time.sleep( 3 )
                    
                    job_key.DeleteVariable( 'popup_gauge_1' )
                    
                    if len( successful_hashes ) > 0:
                        
                        job_key.SetVariable( 'popup_text_1', HydrusData.ToString( len( successful_hashes ) ) + ' files imported' )
                        job_key.SetVariable( 'popup_files', successful_hashes )
                        
                    else: job_key.SetVariable( 'popup_text_1', 'no new files' )
                    
                    print( job_key.ToString() )
                    
                    job_key.DeleteVariable( 'popup_text_1' )
                    
                    if len( successful_hashes ) > 0: job_key.Finish()
                    else: job_key.Delete()
                    
                    last_checked = now
                    
                except Exception as e:
                    
                    job_key.Cancel()
                    
                    last_checked = now + HC.UPDATE_DURATION
                    
                    HydrusData.ShowText( 'Problem with ' + name + ':' )
                    
                    HydrusData.ShowException( e )
                    
                    time.sleep( 3 )
                
                info[ 'site_type' ] = site_type
                info[ 'query_type' ] = query_type
                info[ 'query' ] = query
                info[ 'frequency_type' ] = frequency_type
                info[ 'frequency' ] = frequency
                info[ 'get_tags_if_redundant' ] = get_tags_if_redundant
                info[ 'initial_limit' ] = initial_limit
                info[ 'advanced_tag_options' ] = advanced_tag_options
                info[ 'advanced_import_options' ] = advanced_import_options
                info[ 'last_checked' ] = last_checked
                info[ 'url_cache' ] = url_cache
                info[ 'paused' ] = paused
                
                wx.GetApp().WriteSynchronous( 'subscription', name, info )
                
                time.sleep( 3 )

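# Keeps UPnP port mappings on the gateway in step with the local booru
# services: first removes mappings whose external port no longer matches the
# service's configured upnp port, then adds any missing mappings with a
# 3600-second lease. Bails out silently if the gateway doesn't answer.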
def DAEMONUPnP():
    
    try:
        
        local_ip = HydrusNATPunch.GetLocalIP()
        
        current_mappings = HydrusNATPunch.GetUPnPMappings()
        
        our_mappings = { ( internal_client, internal_port ) : external_port for ( description, internal_client, internal_port, external_ip_address, external_port, protocol, enabled ) in current_mappings }
        
    except: return # This IGD probably doesn't support UPnP, so don't spam the user with errors they can't fix!
    
    services = wx.GetApp().GetServicesManager().GetServices( ( HC.LOCAL_BOORU, ) )
    
    for service in services:
        
        info = service.GetInfo()
        
        internal_port = info[ 'port' ]
        upnp = info[ 'upnp' ]
        
        if ( local_ip, internal_port ) in our_mappings:
            
            current_external_port = our_mappings[ ( local_ip, internal_port ) ]
            
            if upnp is None or current_external_port != upnp: HydrusNATPunch.RemoveUPnPMapping( current_external_port, 'TCP' )
    
    for service in services:
        
        info = service.GetInfo()
        
        internal_port = info[ 'port' ]
        upnp = info[ 'upnp' ]
        
        if upnp is not None:
            
            if ( local_ip, internal_port ) not in our_mappings:
                
                service_type = service.GetServiceType()
                
                external_port = upnp
                
                protocol = 'TCP'
                
                description = HC.service_string_lookup[ service_type ] + ' at ' + local_ip + ':' + str( internal_port )
                
                duration = 3600
                
                HydrusNATPunch.AddUPnPMapping( local_ip, internal_port, external_port, protocol, description, duration = duration )