Version 166
This commit is contained in:
parent
701fd3f2e4
commit
8ab2aa1a19
|
@ -8,6 +8,26 @@
|
|||
<div class="content">
|
||||
<h3>changelog</h3>
|
||||
<ul>
|
||||
<li><h3>version 166</h3></li>
|
||||
<ul>
|
||||
<li>created new object to hold hdd import information</li>
|
||||
<li>created new object to hold generalised import file status</li>
|
||||
<li>moved hdd management controller to the new system</li>
|
||||
<li>moved hdd management gui to the new system</li>
|
||||
<li>hdd imports will now remember their import and pause status through a session change</li>
|
||||
<li>misc import code improvements</li>
|
||||
<li>hydrus client is getting out of the zip business--zips are no longer parsed for import nor able to be created for export</li>
|
||||
<li>import and export code is simpler</li>
|
||||
<li>repository file downloading daemon will now throw up a small auto-dismissing popup when it downloads files</li>
|
||||
<li>repository file downloading daemon will respond to new downloads much quicker than previously</li>
|
||||
<li>if a repository is in the process of backing up when it receives a request, it will now return a 503 'server temporarily busy' error (rather than timing out)</li>
|
||||
<li>the client can now catch and handle these 503 errors gracefully</li>
|
||||
<li>backing up will no longer block the client's gui, and it will no longer timeout if the operation takes more than ten minutes</li>
|
||||
<li>some networking code is a little simpler</li>
|
||||
<li>added 'select local files' and 'select trash' to thumbnail right click menu</li>
|
||||
<li>'cancel download' added to file repository submenu of thumbnail right click menu</li>
|
||||
<li>some buggy listbox resize behaviour (scrollbars not disappearing and occasional layout and drawing update fail) has been fixed</li>
|
||||
</ul>
|
||||
<li><h3>version 165</h3></li>
|
||||
<ul>
|
||||
<li>added a db table to track when a file was sent to trash</li>
|
||||
|
|
|
@ -60,6 +60,7 @@ DISCRIMINANT_INBOX = 0
|
|||
DISCRIMINANT_LOCAL = 1
|
||||
DISCRIMINANT_NOT_LOCAL = 2
|
||||
DISCRIMINANT_ARCHIVE = 3
|
||||
DISCRIMINANT_DOWNLOADING = 3
|
||||
|
||||
DUMPER_NOT_DUMPED = 0
|
||||
DUMPER_DUMPED_OK = 1
|
||||
|
|
|
@ -517,7 +517,7 @@ class Controller( HydrusController.HydrusController ):
|
|||
|
||||
HydrusThreading.DAEMONWorker( 'CheckImportFolders', ClientDaemons.DAEMONCheckImportFolders, ( 'notify_restart_import_folders_daemon', 'notify_new_import_folders' ), period = 180 )
|
||||
HydrusThreading.DAEMONWorker( 'CheckExportFolders', ClientDaemons.DAEMONCheckExportFolders, ( 'notify_restart_export_folders_daemon', 'notify_new_export_folders' ), period = 180 )
|
||||
HydrusThreading.DAEMONWorker( 'DownloadFiles', ClientDaemons.DAEMONDownloadFiles, ( 'notify_new_downloads', 'notify_new_permissions' ) )
|
||||
HydrusThreading.DAEMONWorker( 'DownloadFiles', ClientDaemons.DAEMONDownloadFiles, ( 'notify_new_downloads', 'notify_new_permissions' ), pre_callable_wait = 0 )
|
||||
HydrusThreading.DAEMONWorker( 'MaintainTrash', ClientDaemons.DAEMONMaintainTrash, init_wait = 60 )
|
||||
HydrusThreading.DAEMONWorker( 'ResizeThumbnails', ClientDaemons.DAEMONResizeThumbnails, period = 3600 * 24, init_wait = 600 )
|
||||
HydrusThreading.DAEMONWorker( 'SynchroniseAccounts', ClientDaemons.DAEMONSynchroniseAccounts, ( 'permissions_are_stale', ) )
|
||||
|
|
|
@ -3890,18 +3890,29 @@ class DB( HydrusDB.HydrusDB ):
|
|||
return names
|
||||
|
||||
|
||||
def _ImportFile( self, path, advanced_import_options = None, service_keys_to_tags = None, generate_media_result = False, override_deleted = False, url = None ):
|
||||
def _ImportFile( self, path, import_file_options = None, service_keys_to_tags = None, generate_media_result = False, override_deleted = False, url = None ):
|
||||
|
||||
if advanced_import_options is None: advanced_import_options = ClientDefaults.GetDefaultAdvancedImportOptions()
|
||||
if import_file_options is None: import_file_options = ClientDefaults.GetDefaultImportFileOptions()
|
||||
if service_keys_to_tags is None: service_keys_to_tags = {}
|
||||
|
||||
result = CC.STATUS_SUCCESSFUL
|
||||
|
||||
can_add = True
|
||||
|
||||
archive = advanced_import_options[ 'auto_archive' ]
|
||||
|
||||
exclude_deleted_files = advanced_import_options[ 'exclude_deleted_files' ]
|
||||
if type( import_file_options ) == dict:
|
||||
|
||||
archive = import_file_options[ 'auto_archive' ]
|
||||
|
||||
exclude_deleted_files = import_file_options[ 'exclude_deleted_files' ]
|
||||
|
||||
min_size = import_file_options[ 'min_size' ]
|
||||
|
||||
min_resolution = import_file_options[ 'min_resolution' ]
|
||||
|
||||
else:
|
||||
|
||||
( archive, exclude_deleted_files, min_size, min_resolution ) = import_file_options.ToTuple()
|
||||
|
||||
|
||||
HydrusImageHandling.ConvertToPngIfBmp( path )
|
||||
|
||||
|
@ -3945,17 +3956,15 @@ class DB( HydrusDB.HydrusDB ):
|
|||
|
||||
if width is not None and height is not None:
|
||||
|
||||
if advanced_import_options[ 'min_resolution' ] is not None:
|
||||
if min_resolution is not None:
|
||||
|
||||
( min_x, min_y ) = advanced_import_options[ 'min_resolution' ]
|
||||
( min_x, min_y ) = min_resolution
|
||||
|
||||
if width < min_x or height < min_y: raise Exception( 'Resolution too small' )
|
||||
|
||||
|
||||
|
||||
if advanced_import_options[ 'min_size' ] is not None:
|
||||
|
||||
min_size = advanced_import_options[ 'min_size' ]
|
||||
if min_size is not None:
|
||||
|
||||
if size < min_size: raise Exception( 'File too small' )
|
||||
|
||||
|
@ -5064,34 +5073,6 @@ class DB( HydrusDB.HydrusDB ):
|
|||
|
||||
HydrusGlobals.pubsub.pub( 'splash_set_text', 'updating db to v' + HydrusData.ToString( version + 1 ) )
|
||||
|
||||
if version == 115:
|
||||
|
||||
for path in ClientFiles.IterateAllFilePaths():
|
||||
|
||||
try:
|
||||
|
||||
filename = os.path.basename( path )
|
||||
|
||||
( hash_encoded, ext ) = filename.split( '.', 1 )
|
||||
|
||||
hash = hash_encoded.decode( 'hex' )
|
||||
|
||||
if ext == 'webm':
|
||||
|
||||
thumbnail = HydrusFileHandling.GenerateThumbnail( path )
|
||||
|
||||
with open( ClientFiles.GetExpectedThumbnailPath( hash ), 'wb' ) as f: f.write( thumbnail )
|
||||
|
||||
|
||||
except: print( traceback.format_exc())
|
||||
|
||||
|
||||
|
||||
if version == 116:
|
||||
|
||||
self._c.execute( 'DELETE FROM service_info WHERE info_type = ?;', ( HC.SERVICE_INFO_NUM_THUMBNAILS, ) )
|
||||
|
||||
|
||||
if version == 117:
|
||||
|
||||
i = 0
|
||||
|
|
|
@ -179,56 +179,92 @@ def DAEMONDownloadFiles():
|
|||
|
||||
num_downloads = len( hashes )
|
||||
|
||||
for hash in hashes:
|
||||
if num_downloads > 0:
|
||||
|
||||
( media_result, ) = wx.GetApp().Read( 'media_results', CC.COMBINED_FILE_SERVICE_KEY, ( hash, ) )
|
||||
successful_hashes = set()
|
||||
|
||||
service_keys = list( media_result.GetLocationsManager().GetCurrent() )
|
||||
job_key = HydrusData.JobKey()
|
||||
|
||||
random.shuffle( service_keys )
|
||||
job_key.SetVariable( 'popup_text_1', 'initialising downloader' )
|
||||
|
||||
for service_key in service_keys:
|
||||
HydrusGlobals.pubsub.pub( 'message', job_key )
|
||||
|
||||
for hash in hashes:
|
||||
|
||||
if service_key == CC.LOCAL_FILE_SERVICE_KEY: break
|
||||
elif service_key == CC.TRASH_SERVICE_KEY: continue
|
||||
job_key.SetVariable( 'popup_text_1', 'downloading ' + HydrusData.ConvertIntToPrettyString( num_downloads - len( successful_hashes ) ) + ' files from repositories' )
|
||||
|
||||
try: file_repository = wx.GetApp().GetServicesManager().GetService( service_key )
|
||||
except HydrusExceptions.NotFoundException: continue
|
||||
( media_result, ) = wx.GetApp().Read( 'media_results', CC.COMBINED_FILE_SERVICE_KEY, ( hash, ) )
|
||||
|
||||
if file_repository.CanDownload():
|
||||
service_keys = list( media_result.GetLocationsManager().GetCurrent() )
|
||||
|
||||
random.shuffle( service_keys )
|
||||
|
||||
for service_key in service_keys:
|
||||
|
||||
try:
|
||||
|
||||
request_args = { 'hash' : hash.encode( 'hex' ) }
|
||||
|
||||
( os_file_handle, temp_path ) = HydrusFileHandling.GetTempPath()
|
||||
if service_key == CC.LOCAL_FILE_SERVICE_KEY: break
|
||||
elif service_key == CC.TRASH_SERVICE_KEY: continue
|
||||
|
||||
try: file_repository = wx.GetApp().GetServicesManager().GetService( service_key )
|
||||
except HydrusExceptions.NotFoundException: continue
|
||||
|
||||
if file_repository.CanDownload():
|
||||
|
||||
try:
|
||||
|
||||
file_repository.Request( HC.GET, 'file', request_args = request_args, temp_path = temp_path )
|
||||
request_args = { 'hash' : hash.encode( 'hex' ) }
|
||||
|
||||
num_downloads -= 1
|
||||
( os_file_handle, temp_path ) = HydrusFileHandling.GetTempPath()
|
||||
|
||||
wx.GetApp().WaitUntilWXThreadIdle()
|
||||
try:
|
||||
|
||||
file_repository.Request( HC.GET, 'file', request_args = request_args, temp_path = temp_path )
|
||||
|
||||
wx.GetApp().WaitUntilWXThreadIdle()
|
||||
|
||||
wx.GetApp().WriteSynchronous( 'import_file', temp_path, override_deleted = True )
|
||||
|
||||
successful_hashes.add( hash )
|
||||
|
||||
break
|
||||
|
||||
finally:
|
||||
|
||||
HydrusFileHandling.CleanUpTempPath( os_file_handle, temp_path )
|
||||
|
||||
|
||||
wx.GetApp().WriteSynchronous( 'import_file', temp_path, override_deleted = True )
|
||||
except HydrusExceptions.ServerBusyException:
|
||||
|
||||
finally:
|
||||
job_key.SetVariable( 'popup_text_1', file_repository.GetName() + ' was busy. waiting 30s before trying again' )
|
||||
|
||||
HydrusFileHandling.CleanUpTempPath( os_file_handle, temp_path )
|
||||
time.sleep( 30 )
|
||||
|
||||
job_key.Delete()
|
||||
|
||||
HydrusGlobals.pubsub.pub( 'notify_new_downloads' )
|
||||
|
||||
return
|
||||
|
||||
except Exception as e:
|
||||
|
||||
HydrusData.ShowText( 'Error downloading file!' )
|
||||
HydrusData.ShowException( e )
|
||||
|
||||
|
||||
break
|
||||
|
||||
except Exception as e:
|
||||
|
||||
HydrusData.ShowText( 'Error downloading file!' )
|
||||
HydrusData.ShowException( e )
|
||||
|
||||
|
||||
if HydrusGlobals.shutdown: return
|
||||
|
||||
|
||||
if HydrusGlobals.shutdown: return
|
||||
|
||||
if len( successful_hashes ) > 0:
|
||||
|
||||
job_key.SetVariable( 'popup_text_1', HydrusData.ConvertIntToPrettyString( len( successful_hashes ) ) + ' files downloaded' )
|
||||
|
||||
else:
|
||||
|
||||
job_key.SetVariable( 'popup_text_1', 'all files failed to download' )
|
||||
|
||||
|
||||
job_key.Delete()
|
||||
|
||||
|
||||
def DAEMONFlushServiceUpdates( list_of_service_keys_to_service_updates ):
|
||||
|
@ -435,7 +471,7 @@ def DAEMONSynchroniseSubscriptions():
|
|||
get_tags_if_redundant = info[ 'get_tags_if_redundant' ]
|
||||
initial_limit = info[ 'initial_limit' ]
|
||||
advanced_tag_options = info[ 'advanced_tag_options' ]
|
||||
advanced_import_options = info[ 'advanced_import_options' ]
|
||||
import_file_options = info[ 'advanced_import_options' ]
|
||||
last_checked = info[ 'last_checked' ]
|
||||
url_cache = info[ 'url_cache' ]
|
||||
paused = info[ 'paused' ]
|
||||
|
@ -672,7 +708,7 @@ def DAEMONSynchroniseSubscriptions():
|
|||
|
||||
( status, hash ) = wx.GetApp().Read( 'url_status', url )
|
||||
|
||||
if status == CC.STATUS_DELETED and not advanced_import_options[ 'exclude_deleted_files' ]: status = CC.STATUS_NEW
|
||||
if status == CC.STATUS_DELETED and not import_file_options[ 'exclude_deleted_files' ]: status = CC.STATUS_NEW
|
||||
|
||||
if status == CC.STATUS_REDUNDANT:
|
||||
|
||||
|
@ -715,7 +751,7 @@ def DAEMONSynchroniseSubscriptions():
|
|||
|
||||
job_key.SetVariable( 'popup_text_1', x_out_of_y + 'importing file' )
|
||||
|
||||
( status, hash ) = wx.GetApp().WriteSynchronous( 'import_file', temp_path, advanced_import_options = advanced_import_options, service_keys_to_tags = service_keys_to_tags, url = url )
|
||||
( status, hash ) = wx.GetApp().WriteSynchronous( 'import_file', temp_path, import_file_options = import_file_options, service_keys_to_tags = service_keys_to_tags, url = url )
|
||||
|
||||
finally:
|
||||
|
||||
|
@ -742,7 +778,7 @@ def DAEMONSynchroniseSubscriptions():
|
|||
info[ 'get_tags_if_redundant' ] = get_tags_if_redundant
|
||||
info[ 'initial_limit' ] = initial_limit
|
||||
info[ 'advanced_tag_options' ] = advanced_tag_options
|
||||
info[ 'advanced_import_options' ] = advanced_import_options
|
||||
info[ 'advanced_import_options' ] = import_file_options
|
||||
info[ 'last_checked' ] = last_checked
|
||||
info[ 'url_cache' ] = url_cache
|
||||
info[ 'paused' ] = paused
|
||||
|
@ -794,7 +830,7 @@ def DAEMONSynchroniseSubscriptions():
|
|||
info[ 'get_tags_if_redundant' ] = get_tags_if_redundant
|
||||
info[ 'initial_limit' ] = initial_limit
|
||||
info[ 'advanced_tag_options' ] = advanced_tag_options
|
||||
info[ 'advanced_import_options' ] = advanced_import_options
|
||||
info[ 'advanced_import_options' ] = import_file_options
|
||||
info[ 'last_checked' ] = last_checked
|
||||
info[ 'url_cache' ] = url_cache
|
||||
info[ 'paused' ] = paused
|
||||
|
|
|
@ -780,6 +780,38 @@ class ImportFileOptions( HydrusSerialisable.SerialisableBase ):
|
|||
( self._automatic_archive, self._exclude_deleted, self._min_size, self._min_resolution ) = serialisable_info
|
||||
|
||||
|
||||
def FileIsValid( self, size, resolution = None ):
|
||||
|
||||
if self._min_size is not None and size < self._min_size:
|
||||
|
||||
return False
|
||||
|
||||
|
||||
if resolution is not None and self._min_resolution is not None:
|
||||
|
||||
( x, y ) = resolution
|
||||
|
||||
( min_x, min_y ) = self._min_resolution
|
||||
|
||||
if x < min_x or y < min_y:
|
||||
|
||||
return False
|
||||
|
||||
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def GetAutomaticArchive( self ):
|
||||
|
||||
return self._automatic_archive
|
||||
|
||||
|
||||
def GetExcludeDeleted( self ):
|
||||
|
||||
return self._exclude_deleted
|
||||
|
||||
|
||||
def ToTuple( self ):
|
||||
|
||||
return ( self._automatic_archive, self._exclude_deleted, self._min_size, self._min_resolution )
|
||||
|
@ -1021,13 +1053,16 @@ class Service( HydrusData.HydrusYAMLBase ):
|
|||
wx.GetApp().Write( 'service_updates', service_keys_to_service_updates )
|
||||
|
||||
|
||||
def CanDownload( self ): return self._info[ 'account' ].HasPermission( HC.GET_DATA ) and not self.HasRecentError()
|
||||
def CanDownload( self ):
|
||||
|
||||
return self._info[ 'account' ].HasPermission( HC.GET_DATA ) and not self.HasRecentError()
|
||||
|
||||
|
||||
def CanDownloadUpdate( self ):
|
||||
|
||||
update_due = HydrusData.TimeHasPassed( self._info[ 'next_download_timestamp' ] + HC.UPDATE_DURATION + 1800 )
|
||||
|
||||
return not self.IsPaused() and self.CanDownload() and update_due
|
||||
return self.CanDownload() and update_due and not self.IsPaused()
|
||||
|
||||
|
||||
def CanProcessUpdate( self ):
|
||||
|
@ -1036,7 +1071,7 @@ class Service( HydrusData.HydrusYAMLBase ):
|
|||
|
||||
it_is_time = HydrusData.TimeHasPassed( self._info[ 'next_processing_timestamp' ] + HC.UPDATE_DURATION + HC.options[ 'processing_phase' ] )
|
||||
|
||||
return update_is_downloaded and it_is_time
|
||||
return update_is_downloaded and it_is_time and not self.IsPaused()
|
||||
|
||||
|
||||
def CanUpload( self ): return self._info[ 'account' ].HasPermission( HC.POST_DATA ) and not self.HasRecentError()
|
||||
|
@ -1098,7 +1133,7 @@ class Service( HydrusData.HydrusYAMLBase ):
|
|||
downloaded_text = 'downloaded ' + HydrusData.ConvertValueRangeToPrettyString( num_updates_downloaded, num_updates )
|
||||
processed_text = 'processed ' + HydrusData.ConvertValueRangeToPrettyString( num_updates_processed, num_updates )
|
||||
|
||||
if not self._info[ 'account' ].HasPermission( HC.GET_DATA ): status = 'updates on hold'
|
||||
if self.IsPaused() or not self._info[ 'account' ].HasPermission( HC.GET_DATA ): status = 'updates on hold'
|
||||
else:
|
||||
|
||||
if self.CanDownloadUpdate(): status = 'downloaded up to ' + HydrusData.ConvertTimestampToPrettySync( self._info[ 'next_download_timestamp' ] )
|
||||
|
@ -1212,9 +1247,6 @@ class Service( HydrusData.HydrusYAMLBase ):
|
|||
elif command in ( 'session_key', 'access_key_verification' ): HydrusNetworking.AddHydrusCredentialsToHeaders( credentials, request_headers )
|
||||
else: HydrusNetworking.AddHydrusSessionKeyToHeaders( self._service_key, request_headers )
|
||||
|
||||
if command == 'backup': long_timeout = True
|
||||
else: long_timeout = False
|
||||
|
||||
path = '/' + command
|
||||
|
||||
if method == HC.GET:
|
||||
|
@ -1252,7 +1284,7 @@ class Service( HydrusData.HydrusYAMLBase ):
|
|||
|
||||
url = 'http://' + host + ':' + HydrusData.ToString( port ) + path_and_query
|
||||
|
||||
( response, size_of_response, response_headers, cookies ) = wx.GetApp().DoHTTP( method, url, request_headers, body, report_hooks = report_hooks, temp_path = temp_path, return_everything = True, long_timeout = long_timeout )
|
||||
( response, size_of_response, response_headers, cookies ) = wx.GetApp().DoHTTP( method, url, request_headers, body, report_hooks = report_hooks, temp_path = temp_path, return_everything = True )
|
||||
|
||||
HydrusNetworking.CheckHydrusVersion( self._service_key, self._service_type, response_headers )
|
||||
|
||||
|
@ -1266,26 +1298,29 @@ class Service( HydrusData.HydrusYAMLBase ):
|
|||
|
||||
except Exception as e:
|
||||
|
||||
if isinstance( e, HydrusExceptions.SessionException ):
|
||||
if not isinstance( e, HydrusExceptions.ServerBusyException ):
|
||||
|
||||
session_manager = wx.GetApp().GetManager( 'hydrus_sessions' )
|
||||
|
||||
session_manager.DeleteSessionKey( self._service_key )
|
||||
|
||||
|
||||
wx.GetApp().Write( 'service_updates', { self._service_key : [ HydrusData.ServiceUpdate( HC.SERVICE_UPDATE_ERROR, HydrusData.ToString( e ) ) ] } )
|
||||
|
||||
if isinstance( e, HydrusExceptions.PermissionException ):
|
||||
|
||||
if 'account' in self._info:
|
||||
if isinstance( e, HydrusExceptions.SessionException ):
|
||||
|
||||
account_key = self._info[ 'account' ].GetAccountKey()
|
||||
session_manager = wx.GetApp().GetManager( 'hydrus_sessions' )
|
||||
|
||||
unknown_account = HydrusData.GetUnknownAccount( account_key )
|
||||
session_manager.DeleteSessionKey( self._service_key )
|
||||
|
||||
else: unknown_account = HydrusData.GetUnknownAccount()
|
||||
|
||||
wx.GetApp().Write( 'service_updates', { self._service_key : [ HydrusData.ServiceUpdate( HC.SERVICE_UPDATE_ACCOUNT, unknown_account ) ] } )
|
||||
wx.GetApp().Write( 'service_updates', { self._service_key : [ HydrusData.ServiceUpdate( HC.SERVICE_UPDATE_ERROR, HydrusData.ToString( e ) ) ] } )
|
||||
|
||||
if isinstance( e, HydrusExceptions.PermissionException ):
|
||||
|
||||
if 'account' in self._info:
|
||||
|
||||
account_key = self._info[ 'account' ].GetAccountKey()
|
||||
|
||||
unknown_account = HydrusData.GetUnknownAccount( account_key )
|
||||
|
||||
else: unknown_account = HydrusData.GetUnknownAccount()
|
||||
|
||||
wx.GetApp().Write( 'service_updates', { self._service_key : [ HydrusData.ServiceUpdate( HC.SERVICE_UPDATE_ACCOUNT, unknown_account ) ] } )
|
||||
|
||||
|
||||
|
||||
raise
|
||||
|
|
|
@ -171,18 +171,18 @@ def GetClientDefaultOptions():
|
|||
|
||||
return options
|
||||
|
||||
def GetDefaultAdvancedImportOptions():
|
||||
def GetDefaultImportFileOptions():
|
||||
|
||||
options = wx.GetApp().GetOptions()
|
||||
|
||||
a_i_o = {}
|
||||
result = {}
|
||||
|
||||
a_i_o[ 'auto_archive' ] = False
|
||||
a_i_o[ 'exclude_deleted_files' ] = options[ 'exclude_deleted_files' ]
|
||||
a_i_o[ 'min_size' ] = None
|
||||
a_i_o[ 'min_resolution' ] = None
|
||||
result[ 'auto_archive' ] = False
|
||||
result[ 'exclude_deleted_files' ] = options[ 'exclude_deleted_files' ]
|
||||
result[ 'min_size' ] = None
|
||||
result[ 'min_resolution' ] = None
|
||||
|
||||
return a_i_o
|
||||
return result
|
||||
|
||||
def GetDefaultBoorus():
|
||||
|
||||
|
|
|
@ -16,7 +16,6 @@ import traceback
|
|||
import urllib
|
||||
import urlparse
|
||||
import wx
|
||||
import zipfile
|
||||
import HydrusTags
|
||||
import HydrusData
|
||||
import HydrusFileHandling
|
||||
|
@ -1189,11 +1188,11 @@ class GalleryParserTumblr( GalleryParser ):
|
|||
|
||||
class ImportArgsGenerator( object ):
|
||||
|
||||
def __init__( self, job_key, item, advanced_import_options ):
|
||||
def __init__( self, job_key, item, import_file_options ):
|
||||
|
||||
self._job_key = job_key
|
||||
self._item = item
|
||||
self._advanced_import_options = advanced_import_options
|
||||
self._import_file_options = import_file_options
|
||||
|
||||
|
||||
def __call__( self ):
|
||||
|
@ -1212,7 +1211,7 @@ class ImportArgsGenerator( object ):
|
|||
|
||||
self._job_key.SetVariable( 'status', 'importing' )
|
||||
|
||||
( result, media_result ) = wx.GetApp().WriteSynchronous( 'import_file', temp_path, advanced_import_options = self._advanced_import_options, service_keys_to_tags = service_keys_to_tags, generate_media_result = True, url = url )
|
||||
( result, media_result ) = wx.GetApp().WriteSynchronous( 'import_file', temp_path, import_file_options = self._import_file_options, service_keys_to_tags = service_keys_to_tags, generate_media_result = True, url = url )
|
||||
|
||||
finally:
|
||||
|
||||
|
@ -1236,7 +1235,7 @@ class ImportArgsGenerator( object ):
|
|||
|
||||
self._job_key.Finish()
|
||||
|
||||
self._CleanUp() # e.g. possibly delete the file for hdd importargsgenerator
|
||||
self._CleanUp()
|
||||
|
||||
except Exception as e:
|
||||
|
||||
|
@ -1263,9 +1262,9 @@ class ImportArgsGenerator( object ):
|
|||
|
||||
class ImportArgsGeneratorGallery( ImportArgsGenerator ):
|
||||
|
||||
def __init__( self, job_key, item, advanced_import_options, advanced_tag_options, gallery_parsers_factory ):
|
||||
def __init__( self, job_key, item, import_file_options, advanced_tag_options, gallery_parsers_factory ):
|
||||
|
||||
ImportArgsGenerator.__init__( self, job_key, item, advanced_import_options )
|
||||
ImportArgsGenerator.__init__( self, job_key, item, import_file_options )
|
||||
|
||||
self._advanced_tag_options = advanced_tag_options
|
||||
self._gallery_parsers_factory = gallery_parsers_factory
|
||||
|
@ -1319,7 +1318,7 @@ class ImportArgsGeneratorGallery( ImportArgsGenerator ):
|
|||
|
||||
( status, hash ) = wx.GetApp().Read( 'url_status', url )
|
||||
|
||||
if status == CC.STATUS_DELETED and not self._advanced_import_options[ 'exclude_deleted_files' ]: status = CC.STATUS_NEW
|
||||
if status == CC.STATUS_DELETED and not self._import_file_options[ 'exclude_deleted_files' ]: status = CC.STATUS_NEW
|
||||
|
||||
if status == CC.STATUS_REDUNDANT:
|
||||
|
||||
|
@ -1345,80 +1344,11 @@ class ImportArgsGeneratorGallery( ImportArgsGenerator ):
|
|||
else: return ( status, None )
|
||||
|
||||
|
||||
class ImportArgsGeneratorHDD( ImportArgsGenerator ):
|
||||
|
||||
def __init__( self, job_key, item, advanced_import_options, paths_to_tags, delete_after_success ):
|
||||
|
||||
ImportArgsGenerator.__init__( self, job_key, item, advanced_import_options )
|
||||
|
||||
self._paths_to_tags = paths_to_tags
|
||||
self._delete_after_success = delete_after_success
|
||||
|
||||
|
||||
def _CleanUp( self ):
|
||||
|
||||
result = self._job_key.GetVariable( 'result' )
|
||||
|
||||
if self._delete_after_success and result in ( CC.STATUS_SUCCESSFUL, CC.STATUS_REDUNDANT ):
|
||||
|
||||
( path_type, path_info ) = self._item
|
||||
|
||||
if path_type == 'path':
|
||||
|
||||
path = path_info
|
||||
|
||||
try: os.remove( path )
|
||||
except: pass
|
||||
|
||||
|
||||
|
||||
|
||||
def _GetArgs( self, temp_path ):
|
||||
|
||||
self._job_key.SetVariable( 'status', 'reading from hdd' )
|
||||
|
||||
( path_type, path_info ) = self._item
|
||||
|
||||
service_keys_to_tags = {}
|
||||
|
||||
if path_type == 'path':
|
||||
|
||||
path = path_info
|
||||
|
||||
with open( path, 'rb' ) as f_source:
|
||||
|
||||
with open( temp_path, 'wb' ) as f_dest:
|
||||
|
||||
HydrusFileHandling.CopyFileLikeToFileLike( f_source, f_dest )
|
||||
|
||||
|
||||
|
||||
if path in self._paths_to_tags: service_keys_to_tags = self._paths_to_tags[ path ]
|
||||
|
||||
elif path_type == 'zip':
|
||||
|
||||
( zip_path, name ) = path_info
|
||||
|
||||
with open( temp_path, 'wb' ) as f:
|
||||
|
||||
with zipfile.ZipFile( zip_path, 'r' ) as z: f.write( z.read( name ) )
|
||||
|
||||
|
||||
pretty_path = zip_path + os.path.sep + name
|
||||
|
||||
if pretty_path in self._paths_to_tags: service_keys_to_tags = self._paths_to_tags[ pretty_path ]
|
||||
|
||||
path = pretty_path
|
||||
|
||||
|
||||
return ( path, service_keys_to_tags, None )
|
||||
|
||||
|
||||
class ImportArgsGeneratorThread( ImportArgsGenerator ):
|
||||
|
||||
def __init__( self, job_key, item, advanced_import_options, advanced_tag_options ):
|
||||
def __init__( self, job_key, item, import_file_options, advanced_tag_options ):
|
||||
|
||||
ImportArgsGenerator.__init__( self, job_key, item, advanced_import_options )
|
||||
ImportArgsGenerator.__init__( self, job_key, item, import_file_options )
|
||||
|
||||
self._advanced_tag_options = advanced_tag_options
|
||||
|
||||
|
@ -1454,7 +1384,7 @@ class ImportArgsGeneratorThread( ImportArgsGenerator ):
|
|||
|
||||
( status, hash ) = wx.GetApp().Read( 'md5_status', md5 )
|
||||
|
||||
if status == CC.STATUS_DELETED and not self._advanced_import_options[ 'exclude_deleted_files' ]: status = CC.STATUS_NEW
|
||||
if status == CC.STATUS_DELETED and not self._import_file_options[ 'exclude_deleted_files' ]: status = CC.STATUS_NEW
|
||||
|
||||
if status == CC.STATUS_REDUNDANT:
|
||||
|
||||
|
@ -1509,7 +1439,7 @@ class ImportArgsGeneratorURLs( ImportArgsGenerator ):
|
|||
|
||||
( status, hash ) = wx.GetApp().Read( 'url_status', url )
|
||||
|
||||
if status == CC.STATUS_DELETED and not self._advanced_import_options[ 'exclude_deleted_files' ]: status = CC.STATUS_NEW
|
||||
if status == CC.STATUS_DELETED and not self._import_file_options[ 'exclude_deleted_files' ]: status = CC.STATUS_NEW
|
||||
|
||||
if status == CC.STATUS_REDUNDANT:
|
||||
|
||||
|
|
|
@ -350,17 +350,40 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
|
|||
|
||||
def _BackupService( self, service_key ):
|
||||
|
||||
def do_it():
|
||||
|
||||
service = wx.GetApp().GetServicesManager().GetService( service_key )
|
||||
|
||||
service.Request( HC.POST, 'backup' )
|
||||
|
||||
HydrusData.ShowText( 'Server backup started!' )
|
||||
|
||||
time.sleep( 10 )
|
||||
|
||||
result = service.Request( HC.GET, 'busy' )
|
||||
|
||||
while result == '1':
|
||||
|
||||
if HydrusGlobals.shutdown:
|
||||
|
||||
return
|
||||
|
||||
|
||||
time.sleep( 10 )
|
||||
|
||||
result = service.Request( HC.GET, 'busy' )
|
||||
|
||||
|
||||
HydrusData.ShowText( 'Server backup done!' )
|
||||
|
||||
|
||||
message = 'This will tell the server to lock and copy its database files. It will probably take a few minutes to complete, during which time it will not be able to serve any requests. The client\'s GUI will lock up as well.'
|
||||
|
||||
with ClientGUIDialogs.DialogYesNo( self, message, yes_label = 'do it', no_label = 'forget it' ) as dlg:
|
||||
|
||||
if dlg.ShowModal() == wx.ID_YES:
|
||||
|
||||
service = wx.GetApp().GetServicesManager().GetService( service_key )
|
||||
|
||||
with wx.BusyCursor(): service.Request( HC.POST, 'backup' )
|
||||
|
||||
HydrusData.ShowText( 'Server backup done!' )
|
||||
HydrusThreading.CallToThread( do_it )
|
||||
|
||||
|
||||
|
||||
|
@ -1004,7 +1027,10 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
|
|||
|
||||
session = wx.GetApp().Read( 'gui_sessions', name )
|
||||
|
||||
except:
|
||||
except Exception as e:
|
||||
|
||||
HydrusData.ShowText( 'While trying to load session ' + name + ', this error happened:' )
|
||||
HydrusData.ShowException( e )
|
||||
|
||||
self._NewPageQuery( CC.LOCAL_FILE_SERVICE_KEY )
|
||||
|
||||
|
@ -1693,11 +1719,11 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
|
|||
|
||||
wx.GetApp().Write( 'content_updates', { service_key : content_updates } )
|
||||
|
||||
except Exception as e:
|
||||
except HydrusExceptions.ServerBusyException:
|
||||
|
||||
HydrusData.ShowException( e )
|
||||
job_key.SetVariable( 'popup_text_1', service.GetName() + ' was busy. please try again in a few minutes' )
|
||||
|
||||
time.sleep( 2 )
|
||||
return
|
||||
|
||||
|
||||
time.sleep( 0.1 )
|
||||
|
@ -1745,18 +1771,26 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
|
|||
job_key.SetVariable( 'popup_text_1', prefix + 'posting update: ' + HydrusData.ConvertValueRangeToPrettyString( i + 1, len( updates ) ) )
|
||||
job_key.SetVariable( 'popup_gauge_1', ( i, len( updates ) ) )
|
||||
|
||||
service.Request( HC.POST, 'content_update_package', { 'update' : update } )
|
||||
try:
|
||||
|
||||
service.Request( HC.POST, 'content_update_package', { 'update' : update } )
|
||||
|
||||
content_updates = update.GetContentUpdates( for_client = True )
|
||||
|
||||
wx.GetApp().Write( 'content_updates', { service_key : content_updates } )
|
||||
|
||||
except HydrusExceptions.ServerBusyException:
|
||||
|
||||
job_key.SetVariable( 'popup_text_1', service.GetName() + ' was busy. please try again in a few minutes' )
|
||||
|
||||
return
|
||||
|
||||
|
||||
content_updates = update.GetContentUpdates( for_client = True )
|
||||
|
||||
wx.GetApp().Write( 'content_updates', { service_key : content_updates } )
|
||||
|
||||
time.sleep( 0.5 )
|
||||
time.sleep( 0.1 )
|
||||
|
||||
wx.GetApp().WaitUntilWXThreadIdle()
|
||||
|
||||
|
||||
|
||||
|
||||
except Exception as e:
|
||||
|
||||
|
@ -2043,9 +2077,9 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
|
|||
self._NewPageImportGallery( site_type, gallery_type )
|
||||
|
||||
|
||||
def NewPageImportHDD( self, paths_info, advanced_import_options, paths_to_tags, delete_after_success ):
|
||||
def NewPageImportHDD( self, paths, import_file_options, paths_to_tags, delete_after_success ):
|
||||
|
||||
management_controller = ClientGUIManagement.CreateManagementControllerImportHDD( paths_info, advanced_import_options, paths_to_tags, delete_after_success )
|
||||
management_controller = ClientGUIManagement.CreateManagementControllerImportHDD( paths, import_file_options, paths_to_tags, delete_after_success )
|
||||
|
||||
self._NewPage( 'import', management_controller )
|
||||
|
||||
|
|
|
@ -25,6 +25,8 @@ class CollapsibleOptions( ClientGUICommon.StaticBox ):
|
|||
|
||||
def GetInfo( self ): return self._options_panel.GetInfo()
|
||||
|
||||
def GetOptions( self ): return self._options_panel.GetOptions()
|
||||
|
||||
def SetInfo( self, info ): self._options_panel.SetInfo( info )
|
||||
|
||||
class CollapsibleOptionsHentaiFoundry( CollapsibleOptions ):
|
||||
|
|
|
@ -1986,7 +1986,6 @@ class ListBox( wx.ScrolledWindow ):
|
|||
self._current_selected_index = None
|
||||
self._current_selected_term = None
|
||||
|
||||
self._last_virtual_size = None
|
||||
self._last_view_start = None
|
||||
self._dirty = True
|
||||
|
||||
|
@ -2302,6 +2301,15 @@ class ListBox( wx.ScrolledWindow ):
|
|||
|
||||
def EventPaint( self, event ):
|
||||
|
||||
( my_x, my_y ) = self.GetClientSize()
|
||||
|
||||
if ( my_x, my_y ) != self._client_bmp.GetSize():
|
||||
|
||||
self._client_bmp = wx.EmptyBitmap( my_x, my_y, 24 )
|
||||
|
||||
self._dirty = True
|
||||
|
||||
|
||||
dc = wx.BufferedPaintDC( self, self._client_bmp )
|
||||
|
||||
if self._dirty or self._last_view_start != self.GetViewStart():
|
||||
|
@ -2318,19 +2326,12 @@ class ListBox( wx.ScrolledWindow ):
|
|||
|
||||
ideal_virtual_size = ( my_x, max( self._text_y * len( self._ordered_strings ), my_y ) )
|
||||
|
||||
if ideal_virtual_size != self._last_virtual_size:
|
||||
if ideal_virtual_size != self.GetVirtualSize():
|
||||
|
||||
self.SetVirtualSize( ideal_virtual_size )
|
||||
|
||||
self._last_virtual_size = ideal_virtual_size
|
||||
|
||||
if self._client_bmp.GetSize() != ( my_x, my_y ):
|
||||
|
||||
self._client_bmp = wx.EmptyBitmap( my_x, my_y, 24 )
|
||||
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
|
||||
self._SetDirty()
|
||||
|
||||
|
||||
def GetClientData( self, s = None ):
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
import Crypto.PublicKey.RSA
|
||||
import HydrusConstants as HC
|
||||
import ClientDownloading
|
||||
import HydrusEncryption
|
||||
import HydrusExceptions
|
||||
import HydrusFileHandling
|
||||
import HydrusNATPunch
|
||||
|
@ -30,7 +29,6 @@ import traceback
|
|||
import urllib
|
||||
import wx
|
||||
import yaml
|
||||
import zipfile
|
||||
import HydrusData
|
||||
import ClientSearch
|
||||
import HydrusGlobals
|
||||
|
@ -1482,103 +1480,86 @@ class DialogInputLocalFiles( Dialog ):
|
|||
|
||||
if paths is None: paths = []
|
||||
|
||||
def InitialiseControls():
|
||||
|
||||
self._paths_list = ClientGUICommon.SaneListCtrl( self, 120, [ ( 'path', -1 ), ( 'guessed mime', 110 ), ( 'size', 60 ) ], delete_key_callback = self.RemovePaths )
|
||||
|
||||
self._gauge = ClientGUICommon.Gauge( self )
|
||||
|
||||
self._gauge_text = wx.StaticText( self, label = '' )
|
||||
|
||||
self._gauge_pause = wx.BitmapButton( self, bitmap = CC.GlobalBMPs.pause )
|
||||
self._gauge_pause.Bind( wx.EVT_BUTTON, self.EventGaugePause )
|
||||
self._gauge_pause.Disable()
|
||||
|
||||
self._gauge_cancel = wx.BitmapButton( self, bitmap = CC.GlobalBMPs.stop )
|
||||
self._gauge_cancel.Bind( wx.EVT_BUTTON, self.EventGaugeCancel )
|
||||
self._gauge_cancel.Disable()
|
||||
|
||||
self._add_files_button = wx.Button( self, label = 'Add Files' )
|
||||
self._add_files_button.Bind( wx.EVT_BUTTON, self.EventAddPaths )
|
||||
|
||||
self._add_folder_button = wx.Button( self, label = 'Add Folder' )
|
||||
self._add_folder_button.Bind( wx.EVT_BUTTON, self.EventAddFolder )
|
||||
|
||||
self._remove_files_button = wx.Button( self, label = 'Remove Files' )
|
||||
self._remove_files_button.Bind( wx.EVT_BUTTON, self.EventRemovePaths )
|
||||
|
||||
self._advanced_import_options = ClientGUICollapsible.CollapsibleOptionsImportFiles( self )
|
||||
|
||||
self._delete_after_success = wx.CheckBox( self, label = 'delete files after successful import' )
|
||||
|
||||
self._add_button = wx.Button( self, label = 'Import now' )
|
||||
self._add_button.Bind( wx.EVT_BUTTON, self.EventOK )
|
||||
self._add_button.SetForegroundColour( ( 0, 128, 0 ) )
|
||||
|
||||
self._tag_button = wx.Button( self, label = 'Add tags before importing' )
|
||||
self._tag_button.Bind( wx.EVT_BUTTON, self.EventTags )
|
||||
self._tag_button.SetForegroundColour( ( 0, 128, 0 ) )
|
||||
|
||||
self._cancel = wx.Button( self, id = wx.ID_CANCEL, label = 'Cancel' )
|
||||
self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel )
|
||||
self._cancel.SetForegroundColour( ( 128, 0, 0 ) )
|
||||
|
||||
|
||||
def PopulateControls():
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def ArrangeControls():
|
||||
|
||||
gauge_sizer = wx.BoxSizer( wx.HORIZONTAL )
|
||||
|
||||
gauge_sizer.AddF( self._gauge_text, CC.FLAGS_EXPAND_BOTH_WAYS )
|
||||
gauge_sizer.AddF( self._gauge, CC.FLAGS_EXPAND_BOTH_WAYS )
|
||||
gauge_sizer.AddF( self._gauge_pause, CC.FLAGS_MIXED )
|
||||
gauge_sizer.AddF( self._gauge_cancel, CC.FLAGS_MIXED )
|
||||
|
||||
file_buttons = wx.BoxSizer( wx.HORIZONTAL )
|
||||
|
||||
file_buttons.AddF( self._add_files_button, CC.FLAGS_MIXED )
|
||||
file_buttons.AddF( self._add_folder_button, CC.FLAGS_MIXED )
|
||||
file_buttons.AddF( self._remove_files_button, CC.FLAGS_MIXED )
|
||||
|
||||
buttons = wx.BoxSizer( wx.HORIZONTAL )
|
||||
|
||||
buttons.AddF( self._add_button, CC.FLAGS_MIXED )
|
||||
buttons.AddF( self._tag_button, CC.FLAGS_MIXED )
|
||||
buttons.AddF( self._cancel, CC.FLAGS_MIXED )
|
||||
|
||||
vbox = wx.BoxSizer( wx.VERTICAL )
|
||||
|
||||
vbox.AddF( self._paths_list, CC.FLAGS_EXPAND_BOTH_WAYS )
|
||||
vbox.AddF( gauge_sizer, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
|
||||
vbox.AddF( file_buttons, CC.FLAGS_BUTTON_SIZER )
|
||||
vbox.AddF( self._advanced_import_options, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
vbox.AddF( self._delete_after_success, CC.FLAGS_LONE_BUTTON )
|
||||
vbox.AddF( ( 0, 5 ), CC.FLAGS_NONE )
|
||||
vbox.AddF( buttons, CC.FLAGS_BUTTON_SIZER )
|
||||
|
||||
self.SetSizer( vbox )
|
||||
|
||||
( x, y ) = self.GetEffectiveMinSize()
|
||||
|
||||
if x < 780: x = 780
|
||||
if y < 480: y = 480
|
||||
|
||||
self.SetInitialSize( ( x, y ) )
|
||||
|
||||
|
||||
Dialog.__init__( self, parent, 'importing files' )
|
||||
|
||||
self.SetDropTarget( ClientGUICommon.FileDropTarget( self._AddPathsToList ) )
|
||||
|
||||
InitialiseControls()
|
||||
self._paths_list = ClientGUICommon.SaneListCtrl( self, 120, [ ( 'path', -1 ), ( 'guessed mime', 110 ), ( 'size', 60 ) ], delete_key_callback = self.RemovePaths )
|
||||
|
||||
PopulateControls()
|
||||
self._gauge = ClientGUICommon.Gauge( self )
|
||||
|
||||
ArrangeControls()
|
||||
self._gauge_text = wx.StaticText( self, label = '' )
|
||||
|
||||
self._gauge_pause = wx.BitmapButton( self, bitmap = CC.GlobalBMPs.pause )
|
||||
self._gauge_pause.Bind( wx.EVT_BUTTON, self.EventGaugePause )
|
||||
self._gauge_pause.Disable()
|
||||
|
||||
self._gauge_cancel = wx.BitmapButton( self, bitmap = CC.GlobalBMPs.stop )
|
||||
self._gauge_cancel.Bind( wx.EVT_BUTTON, self.EventGaugeCancel )
|
||||
self._gauge_cancel.Disable()
|
||||
|
||||
self._add_files_button = wx.Button( self, label = 'Add Files' )
|
||||
self._add_files_button.Bind( wx.EVT_BUTTON, self.EventAddPaths )
|
||||
|
||||
self._add_folder_button = wx.Button( self, label = 'Add Folder' )
|
||||
self._add_folder_button.Bind( wx.EVT_BUTTON, self.EventAddFolder )
|
||||
|
||||
self._remove_files_button = wx.Button( self, label = 'Remove Files' )
|
||||
self._remove_files_button.Bind( wx.EVT_BUTTON, self.EventRemovePaths )
|
||||
|
||||
self._import_file_options = ClientGUICollapsible.CollapsibleOptionsImportFiles( self )
|
||||
|
||||
self._delete_after_success = wx.CheckBox( self, label = 'delete files after successful import' )
|
||||
|
||||
self._add_button = wx.Button( self, label = 'Import now' )
|
||||
self._add_button.Bind( wx.EVT_BUTTON, self.EventOK )
|
||||
self._add_button.SetForegroundColour( ( 0, 128, 0 ) )
|
||||
|
||||
self._tag_button = wx.Button( self, label = 'Add tags before importing' )
|
||||
self._tag_button.Bind( wx.EVT_BUTTON, self.EventTags )
|
||||
self._tag_button.SetForegroundColour( ( 0, 128, 0 ) )
|
||||
|
||||
self._cancel = wx.Button( self, id = wx.ID_CANCEL, label = 'Cancel' )
|
||||
self._cancel.Bind( wx.EVT_BUTTON, self.EventCancel )
|
||||
self._cancel.SetForegroundColour( ( 128, 0, 0 ) )
|
||||
|
||||
gauge_sizer = wx.BoxSizer( wx.HORIZONTAL )
|
||||
|
||||
gauge_sizer.AddF( self._gauge_text, CC.FLAGS_EXPAND_BOTH_WAYS )
|
||||
gauge_sizer.AddF( self._gauge, CC.FLAGS_EXPAND_BOTH_WAYS )
|
||||
gauge_sizer.AddF( self._gauge_pause, CC.FLAGS_MIXED )
|
||||
gauge_sizer.AddF( self._gauge_cancel, CC.FLAGS_MIXED )
|
||||
|
||||
file_buttons = wx.BoxSizer( wx.HORIZONTAL )
|
||||
|
||||
file_buttons.AddF( self._add_files_button, CC.FLAGS_MIXED )
|
||||
file_buttons.AddF( self._add_folder_button, CC.FLAGS_MIXED )
|
||||
file_buttons.AddF( self._remove_files_button, CC.FLAGS_MIXED )
|
||||
|
||||
buttons = wx.BoxSizer( wx.HORIZONTAL )
|
||||
|
||||
buttons.AddF( self._add_button, CC.FLAGS_MIXED )
|
||||
buttons.AddF( self._tag_button, CC.FLAGS_MIXED )
|
||||
buttons.AddF( self._cancel, CC.FLAGS_MIXED )
|
||||
|
||||
vbox = wx.BoxSizer( wx.VERTICAL )
|
||||
|
||||
vbox.AddF( self._paths_list, CC.FLAGS_EXPAND_BOTH_WAYS )
|
||||
vbox.AddF( gauge_sizer, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
|
||||
vbox.AddF( file_buttons, CC.FLAGS_BUTTON_SIZER )
|
||||
vbox.AddF( self._import_file_options, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
vbox.AddF( self._delete_after_success, CC.FLAGS_LONE_BUTTON )
|
||||
vbox.AddF( ( 0, 5 ), CC.FLAGS_NONE )
|
||||
vbox.AddF( buttons, CC.FLAGS_BUTTON_SIZER )
|
||||
|
||||
self.SetSizer( vbox )
|
||||
|
||||
( x, y ) = self.GetEffectiveMinSize()
|
||||
|
||||
if x < 780: x = 780
|
||||
if y < 480: y = 480
|
||||
|
||||
self.SetInitialSize( ( x, y ) )
|
||||
|
||||
self._processing_queue = []
|
||||
self._currently_parsing = False
|
||||
|
@ -1599,8 +1580,6 @@ class DialogInputLocalFiles( Dialog ):
|
|||
self._ProcessQueue()
|
||||
|
||||
|
||||
def _GetPathsInfo( self ): return [ row[0] for row in self._paths_list.GetClientData() ]
|
||||
|
||||
def _ProcessQueue( self ):
|
||||
|
||||
if not self._currently_parsing:
|
||||
|
@ -1636,23 +1615,16 @@ class DialogInputLocalFiles( Dialog ):
|
|||
|
||||
def _TidyUp( self ): self._job_key.Cancel()
|
||||
|
||||
def AddParsedPath( self, path_type, mime, size, path_info ):
|
||||
def AddParsedPath( self, path, mime, size ):
|
||||
|
||||
pretty_mime = HC.mime_string_lookup[ mime ]
|
||||
pretty_size = HydrusData.ConvertIntToBytes( size )
|
||||
|
||||
if path_type == 'path': pretty_path = path_info
|
||||
elif path_type == 'zip':
|
||||
if path not in self._current_paths:
|
||||
|
||||
( zip_path, name ) = path_info
|
||||
self._current_paths.add( path )
|
||||
|
||||
pretty_path = zip_path + os.path.sep + name
|
||||
|
||||
|
||||
if ( path_type, path_info ) not in self._current_paths:
|
||||
|
||||
self._current_paths.add( ( path_type, path_info ) )
|
||||
|
||||
self._paths_list.Append( ( pretty_path, HC.mime_string_lookup[ mime ], pretty_size ), ( ( path_type, path_info ), mime, size ) )
|
||||
self._paths_list.Append( ( path, pretty_mime, pretty_size ), ( path, mime, size ) )
|
||||
|
||||
|
||||
|
||||
|
@ -1733,72 +1705,49 @@ class DialogInputLocalFiles( Dialog ):
|
|||
|
||||
self._TidyUp()
|
||||
|
||||
paths_info = self._GetPathsInfo()
|
||||
|
||||
if len( paths_info ) > 0:
|
||||
if len( self._current_paths ) > 0:
|
||||
|
||||
advanced_import_options = self._advanced_import_options.GetInfo()
|
||||
import_file_options = self._import_file_options.GetOptions()
|
||||
|
||||
paths_to_tags = {}
|
||||
|
||||
delete_after_success = self._delete_after_success.GetValue()
|
||||
|
||||
HydrusGlobals.pubsub.pub( 'new_hdd_import', paths_info, advanced_import_options, paths_to_tags, delete_after_success )
|
||||
|
||||
self.EndModal( wx.ID_OK )
|
||||
HydrusGlobals.pubsub.pub( 'new_hdd_import', self._current_paths, import_file_options, paths_to_tags, delete_after_success )
|
||||
|
||||
|
||||
self.EndModal( wx.ID_OK )
|
||||
|
||||
|
||||
def EventRemovePaths( self, event ): self.RemovePaths()
|
||||
|
||||
def EventTags( self, event ):
|
||||
|
||||
try:
|
||||
if len( self._current_paths ) > 0:
|
||||
|
||||
paths_info = self._GetPathsInfo()
|
||||
import_file_options = self._import_file_options.GetOptions()
|
||||
|
||||
if len( paths_info ) > 0:
|
||||
with DialogPathsToTags( self, self._current_paths ) as dlg:
|
||||
|
||||
advanced_import_options = self._advanced_import_options.GetInfo()
|
||||
|
||||
paths_to_send_to_dialog = []
|
||||
|
||||
for ( path_type, path_info ) in paths_info:
|
||||
if dlg.ShowModal() == wx.ID_OK:
|
||||
|
||||
if path_type == 'path': pretty_path = path_info
|
||||
elif path_type == 'zip':
|
||||
|
||||
( zip_path, name ) = path_info
|
||||
|
||||
pretty_path = zip_path + os.path.sep + name
|
||||
|
||||
paths_to_tags = dlg.GetInfo()
|
||||
|
||||
paths_to_send_to_dialog.append( pretty_path )
|
||||
delete_after_success = self._delete_after_success.GetValue()
|
||||
|
||||
|
||||
with DialogPathsToTags( self, paths_to_send_to_dialog ) as dlg:
|
||||
HydrusGlobals.pubsub.pub( 'new_hdd_import', self._current_paths, import_file_options, paths_to_tags, delete_after_success )
|
||||
|
||||
if dlg.ShowModal() == wx.ID_OK:
|
||||
|
||||
paths_to_tags = dlg.GetInfo()
|
||||
|
||||
delete_after_success = self._delete_after_success.GetValue()
|
||||
|
||||
HydrusGlobals.pubsub.pub( 'new_hdd_import', paths_info, advanced_import_options, paths_to_tags, delete_after_success )
|
||||
|
||||
self.EndModal( wx.ID_OK )
|
||||
|
||||
self.EndModal( wx.ID_OK )
|
||||
|
||||
|
||||
|
||||
except: wx.MessageBox( traceback.format_exc() )
|
||||
|
||||
|
||||
def RemovePaths( self ):
|
||||
|
||||
self._paths_list.RemoveAllSelected()
|
||||
|
||||
self._current_paths = set( self._GetPathsInfo() )
|
||||
self._current_paths = { row[0] for row in self._paths_list.GetClientData() }
|
||||
|
||||
|
||||
def SetGaugeInfo( self, gauge_range, gauge_value, text ):
|
||||
|
@ -1857,134 +1806,7 @@ class DialogInputLocalFiles( Dialog ):
|
|||
|
||||
num_good_files += 1
|
||||
|
||||
wx.CallAfter( self.AddParsedPath, 'path', mime, size, path )
|
||||
|
||||
elif mime in HC.ARCHIVES:
|
||||
|
||||
wx.CallAfter( self.SetGaugeInfo, num_file_paths, i, u'Found an archive; parsing\u2026' )
|
||||
|
||||
if mime == HC.APPLICATION_HYDRUS_ENCRYPTED_ZIP:
|
||||
|
||||
aes_key = None
|
||||
iv = None
|
||||
|
||||
if '.encrypted' in path:
|
||||
|
||||
try:
|
||||
|
||||
potential_key_path = path.replace( '.encrypted', '.key' )
|
||||
|
||||
if os.path.exists( potential_key_path ):
|
||||
|
||||
with open( potential_key_path, 'rb' ) as f: key_text = f.read()
|
||||
|
||||
( aes_key, iv ) = HydrusEncryption.AESTextToKey( key_text )
|
||||
|
||||
|
||||
except: HydrusData.ShowText( 'Tried to read a key, but did not understand it.' )
|
||||
|
||||
|
||||
job_key = HydrusData.JobKey()
|
||||
|
||||
def WXTHREADGetAESKey( key ):
|
||||
|
||||
while key is None:
|
||||
|
||||
with DialogTextEntry( wx.GetApp().GetTopWindow(), 'Please enter the key for ' + path + '.' ) as dlg:
|
||||
|
||||
result = dlg.ShowModal()
|
||||
|
||||
if result == wx.ID_OK:
|
||||
|
||||
try:
|
||||
|
||||
key_text = dlg.GetValue()
|
||||
|
||||
( key, iv ) = HydrusEncryption.AESTextToKey( key_text )
|
||||
|
||||
job_key.SetVariable( 'result', ( key, iv ) )
|
||||
|
||||
except: wx.MessageBox( 'Did not understand that key!' )
|
||||
|
||||
elif result == wx.ID_CANCEL: job_key.SetVariable( 'result', ( None, None ) )
|
||||
|
||||
|
||||
|
||||
|
||||
if aes_key is None:
|
||||
|
||||
wx.CallAfter( WXTHREADGetAESKey, aes_key )
|
||||
|
||||
while not job_key.HasVariable( 'result' ):
|
||||
|
||||
if job_key.IsCancelled(): return
|
||||
|
||||
time.sleep( 0.1 )
|
||||
|
||||
|
||||
( aes_key, iv ) = job_key.GetVariable( 'result' )
|
||||
|
||||
|
||||
if aes_key is not None:
|
||||
|
||||
path_to = HydrusEncryption.DecryptAESFile( aes_key, iv, path )
|
||||
|
||||
path = path_to
|
||||
mime = HC.APPLICATION_ZIP
|
||||
|
||||
|
||||
|
||||
if mime == HC.APPLICATION_ZIP:
|
||||
|
||||
try:
|
||||
|
||||
with zipfile.ZipFile( path, 'r' ) as z:
|
||||
|
||||
if z.testzip() is not None: raise Exception()
|
||||
|
||||
for name in z.namelist():
|
||||
|
||||
# zip is deflate, which means have to read the whole file to read any of the file, so:
|
||||
# the file pointer returned by open doesn't support seek, lol!
|
||||
# so, might as well open the whole damn file
|
||||
|
||||
|
||||
( os_file_handle, temp_path ) = HydrusFileHandling.GetTempPath()
|
||||
|
||||
try:
|
||||
|
||||
with open( temp_path, 'wb' ) as f: f.write( z.read( name ) )
|
||||
|
||||
name_mime = HydrusFileHandling.GetMime( temp_path )
|
||||
|
||||
finally:
|
||||
|
||||
HydrusFileHandling.CleanUpTempPath( os_file_handle, temp_path )
|
||||
|
||||
|
||||
if name_mime in HC.ALLOWED_MIMES:
|
||||
|
||||
size = z.getinfo( name ).file_size
|
||||
|
||||
if size > 0:
|
||||
|
||||
num_good_files += 1
|
||||
|
||||
wx.CallAfter( self.AddParsedPath, 'zip', name_mime, size, ( path, name ) )
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
except Exception as e:
|
||||
|
||||
num_odd_files += 1
|
||||
|
||||
HydrusData.ShowException( e )
|
||||
|
||||
continue
|
||||
|
||||
|
||||
wx.CallAfter( self.AddParsedPath, path, mime, size )
|
||||
|
||||
else:
|
||||
|
||||
|
@ -4693,17 +4515,6 @@ class DialogSetupExport( Dialog ):
|
|||
self._open_location = wx.Button( self._export_path_box, label = 'open this location' )
|
||||
self._open_location.Bind( wx.EVT_BUTTON, self.EventOpenLocation )
|
||||
|
||||
self._zip_box = ClientGUICommon.StaticBox( self, 'zip' )
|
||||
|
||||
self._export_to_zip = wx.CheckBox( self._zip_box, label = 'export to zip' )
|
||||
self._export_to_zip.Bind( wx.EVT_CHECKBOX, self.EventExportToZipCheckbox )
|
||||
|
||||
self._zip_name = wx.TextCtrl( self._zip_box )
|
||||
self._zip_name.Disable()
|
||||
|
||||
self._export_encrypted = wx.CheckBox( self._zip_box, label = 'encrypt zip' )
|
||||
self._export_encrypted.Disable()
|
||||
|
||||
self._filenames_box = ClientGUICommon.StaticBox( self, 'filenames' )
|
||||
|
||||
self._pattern = wx.TextCtrl( self._filenames_box )
|
||||
|
@ -4735,8 +4546,6 @@ class DialogSetupExport( Dialog ):
|
|||
|
||||
self._directory_picker.SetPath( export_path )
|
||||
|
||||
self._zip_name.SetValue( 'archive name.zip' )
|
||||
|
||||
self._pattern.SetValue( '{hash}' )
|
||||
|
||||
|
||||
|
@ -4762,20 +4571,11 @@ class DialogSetupExport( Dialog ):
|
|||
|
||||
self._filenames_box.AddF( hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
|
||||
|
||||
hbox = wx.BoxSizer( wx.HORIZONTAL )
|
||||
|
||||
hbox.AddF( self._export_to_zip, CC.FLAGS_MIXED )
|
||||
hbox.AddF( self._zip_name, CC.FLAGS_EXPAND_BOTH_WAYS )
|
||||
hbox.AddF( self._export_encrypted, CC.FLAGS_MIXED )
|
||||
|
||||
self._zip_box.AddF( hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
|
||||
|
||||
vbox = wx.BoxSizer( wx.VERTICAL )
|
||||
|
||||
vbox.AddF( top_hbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
|
||||
vbox.AddF( self._export_path_box, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
vbox.AddF( self._filenames_box, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
vbox.AddF( self._zip_box, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
vbox.AddF( self._export, CC.FLAGS_LONE_BUTTON )
|
||||
vbox.AddF( self._cancel, CC.FLAGS_LONE_BUTTON )
|
||||
|
||||
|
@ -4809,14 +4609,11 @@ class DialogSetupExport( Dialog ):
|
|||
|
||||
filename = ClientFiles.GenerateExportFilename( media, terms )
|
||||
|
||||
if self._export_to_zip.GetValue() == True: zip_path = self._zip_name.GetValue() + os.path.sep
|
||||
else: zip_path = ''
|
||||
|
||||
mime = media.GetMime()
|
||||
|
||||
ext = HC.mime_ext_lookup[ mime ]
|
||||
|
||||
return directory + os.path.sep + zip_path + filename + ext
|
||||
return directory + os.path.sep + filename + ext
|
||||
|
||||
|
||||
def _RecalcPaths( self ):
|
||||
|
@ -4862,73 +4659,25 @@ class DialogSetupExport( Dialog ):
|
|||
|
||||
self._RecalcPaths()
|
||||
|
||||
if self._export_to_zip.GetValue() == True:
|
||||
for ( ( ordering_index, media ), mime, path ) in self._paths.GetClientData():
|
||||
|
||||
directory = self._directory_picker.GetPath()
|
||||
|
||||
zip_path = directory + os.path.sep + self._zip_name.GetValue()
|
||||
|
||||
with zipfile.ZipFile( zip_path, mode = 'w', compression = zipfile.ZIP_DEFLATED ) as z:
|
||||
try:
|
||||
|
||||
for ( ( ordering_index, media ), mime, path ) in self._paths.GetClientData():
|
||||
|
||||
try:
|
||||
|
||||
hash = media.GetHash()
|
||||
|
||||
source_path = ClientFiles.GetFilePath( hash, mime )
|
||||
|
||||
( gumpf, filename ) = os.path.split( path )
|
||||
|
||||
z.write( source_path, filename )
|
||||
|
||||
except:
|
||||
|
||||
wx.MessageBox( 'Encountered a problem while attempting to export file with index ' + HydrusData.ToString( ordering_index + 1 ) + '.' + os.linesep * 2 + traceback.format_exc() )
|
||||
|
||||
break
|
||||
|
||||
|
||||
hash = media.GetHash()
|
||||
|
||||
|
||||
if self._export_encrypted.GetValue() == True: HydrusEncryption.EncryptAESFile( zip_path, preface = 'hydrus encrypted zip' )
|
||||
|
||||
else:
|
||||
|
||||
for ( ( ordering_index, media ), mime, path ) in self._paths.GetClientData():
|
||||
source_path = ClientFiles.GetFilePath( hash, mime )
|
||||
|
||||
try:
|
||||
|
||||
hash = media.GetHash()
|
||||
|
||||
source_path = ClientFiles.GetFilePath( hash, mime )
|
||||
|
||||
shutil.copy( source_path, path )
|
||||
shutil.copystat( source_path, path )
|
||||
try: os.chmod( path, stat.S_IWRITE | stat.S_IREAD )
|
||||
except: pass
|
||||
|
||||
except:
|
||||
|
||||
wx.MessageBox( 'Encountered a problem while attempting to export file with index ' + HydrusData.ToString( ordering_index + 1 ) + ':' + os.linesep * 2 + traceback.format_exc() )
|
||||
|
||||
break
|
||||
|
||||
shutil.copy( source_path, path )
|
||||
shutil.copystat( source_path, path )
|
||||
try: os.chmod( path, stat.S_IWRITE | stat.S_IREAD )
|
||||
except: pass
|
||||
|
||||
except:
|
||||
|
||||
wx.MessageBox( 'Encountered a problem while attempting to export file with index ' + HydrusData.ToString( ordering_index + 1 ) + ':' + os.linesep * 2 + traceback.format_exc() )
|
||||
|
||||
break
|
||||
|
||||
|
||||
|
||||
|
||||
def EventExportToZipCheckbox( self, event ):
|
||||
|
||||
if self._export_to_zip.GetValue() == True:
|
||||
|
||||
self._zip_name.Enable()
|
||||
self._export_encrypted.Enable()
|
||||
|
||||
else:
|
||||
|
||||
self._zip_name.Disable()
|
||||
self._export_encrypted.Disable()
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -13,7 +13,6 @@ import ClientRatings
|
|||
import collections
|
||||
import HydrusConstants as HC
|
||||
import HydrusData
|
||||
import HydrusEncryption
|
||||
import HydrusExceptions
|
||||
import HydrusFileHandling
|
||||
import HydrusGlobals
|
||||
|
@ -6087,7 +6086,7 @@ class DialogManageSubscriptions( ClientGUIDialogs.Dialog ):
|
|||
|
||||
self._advanced_tag_options = ClientGUICollapsible.CollapsibleOptionsTags( self )
|
||||
|
||||
self._advanced_import_options = ClientGUICollapsible.CollapsibleOptionsImportFiles( self )
|
||||
self._import_file_options = ClientGUICollapsible.CollapsibleOptionsImportFiles( self )
|
||||
|
||||
|
||||
def PopulateControls():
|
||||
|
@ -6132,7 +6131,7 @@ class DialogManageSubscriptions( ClientGUIDialogs.Dialog ):
|
|||
vbox.AddF( self._query_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
vbox.AddF( self._info_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
vbox.AddF( self._advanced_tag_options, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
vbox.AddF( self._advanced_import_options, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
vbox.AddF( self._import_file_options, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
|
||||
self.SetSizer( vbox )
|
||||
|
||||
|
@ -6153,7 +6152,7 @@ class DialogManageSubscriptions( ClientGUIDialogs.Dialog ):
|
|||
info[ 'get_tags_if_redundant' ] = False
|
||||
info[ 'initial_limit' ] = 500
|
||||
info[ 'advanced_tag_options' ] = {}
|
||||
info[ 'advanced_import_options' ] = ClientDefaults.GetDefaultAdvancedImportOptions()
|
||||
info[ 'advanced_import_options' ] = ClientDefaults.GetDefaultImportFileOptions()
|
||||
info[ 'last_checked' ] = None
|
||||
info[ 'url_cache' ] = set()
|
||||
info[ 'paused' ] = False
|
||||
|
@ -6262,7 +6261,7 @@ class DialogManageSubscriptions( ClientGUIDialogs.Dialog ):
|
|||
get_tags_if_redundant = info[ 'get_tags_if_redundant' ]
|
||||
initial_limit = info[ 'initial_limit' ]
|
||||
advanced_tag_options = info[ 'advanced_tag_options' ]
|
||||
advanced_import_options = info[ 'advanced_import_options' ]
|
||||
import_file_options = info[ 'advanced_import_options' ]
|
||||
last_checked = info[ 'last_checked' ]
|
||||
url_cache = info[ 'url_cache' ]
|
||||
paused = info[ 'paused' ]
|
||||
|
@ -6316,7 +6315,7 @@ class DialogManageSubscriptions( ClientGUIDialogs.Dialog ):
|
|||
|
||||
self._advanced_tag_options.SetInfo( advanced_tag_options )
|
||||
|
||||
self._advanced_import_options.SetInfo( advanced_import_options )
|
||||
self._import_file_options.SetInfo( import_file_options )
|
||||
|
||||
|
||||
def EventBooruSelected( self, event ): self._ConfigureAdvancedTagOptions()
|
||||
|
@ -6368,7 +6367,7 @@ class DialogManageSubscriptions( ClientGUIDialogs.Dialog ):
|
|||
|
||||
info[ 'advanced_tag_options' ] = self._advanced_tag_options.GetInfo()
|
||||
|
||||
info[ 'advanced_import_options' ] = self._advanced_import_options.GetInfo()
|
||||
info[ 'advanced_import_options' ] = self._import_file_options.GetInfo()
|
||||
|
||||
if self._reset_cache:
|
||||
|
||||
|
|
|
@ -15,6 +15,7 @@ import ClientGUICollapsible
|
|||
import ClientGUICommon
|
||||
import ClientGUIDialogs
|
||||
import ClientGUIMedia
|
||||
import ClientImporting
|
||||
import ClientMedia
|
||||
import json
|
||||
import os
|
||||
|
@ -99,14 +100,13 @@ def CreateManagementControllerImportURL():
|
|||
|
||||
return management_controller
|
||||
|
||||
def CreateManagementControllerImportHDD( paths_info, advanced_import_options, paths_to_tags, delete_after_success ):
|
||||
def CreateManagementControllerImportHDD( paths, import_file_options, paths_to_tags, delete_after_success ):
|
||||
|
||||
management_controller = CreateManagementController( MANAGEMENT_TYPE_IMPORT_HDD )
|
||||
|
||||
management_controller.SetVariable( 'paths_info', paths_info )
|
||||
management_controller.SetVariable( 'advanced_import_options', advanced_import_options )
|
||||
management_controller.SetVariable( 'paths_to_tags', paths_to_tags )
|
||||
management_controller.SetVariable( 'delete_after_success', delete_after_success )
|
||||
hdd_import = ClientImporting.HDDImport( paths = paths, import_file_options = import_file_options, paths_to_tags = paths_to_tags, delete_after_success = delete_after_success )
|
||||
|
||||
management_controller.SetVariable( 'hdd_import', hdd_import )
|
||||
|
||||
return management_controller
|
||||
|
||||
|
@ -462,7 +462,7 @@ class Comment( wx.Panel ):
|
|||
class ManagementController( HydrusSerialisable.SerialisableBase ):
|
||||
|
||||
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_MANAGEMENT_CONTROLLER
|
||||
SERIALISABLE_VERSION = 1
|
||||
SERIALISABLE_VERSION = 2
|
||||
|
||||
def __init__( self ):
|
||||
|
||||
|
@ -483,22 +483,6 @@ class ManagementController( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
serialisable_simples = dict( self._simples )
|
||||
|
||||
if 'paths_to_tags' in serialisable_simples:
|
||||
|
||||
paths_to_tags = serialisable_simples[ 'paths_to_tags' ]
|
||||
|
||||
serialisable_paths_to_tags = {}
|
||||
|
||||
for ( path, service_keys_to_tags ) in paths_to_tags.items():
|
||||
|
||||
serialisable_service_keys_to_tags = { service_key.encode( 'hex' ) : tags for ( service_key, tags ) in service_keys_to_tags.items() }
|
||||
|
||||
serialisable_paths_to_tags[ path ] = serialisable_service_keys_to_tags
|
||||
|
||||
|
||||
serialisable_simples[ 'paths_to_tags' ] = serialisable_paths_to_tags
|
||||
|
||||
|
||||
serialisable_serialisables = { name : HydrusSerialisable.GetSerialisableTuple( value ) for ( name, value ) in self._serialisables.items() }
|
||||
|
||||
return ( self._management_type, serialisable_keys, serialisable_simples, serialisable_serialisables )
|
||||
|
@ -512,25 +496,49 @@ class ManagementController( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
self._simples = dict( serialisable_simples )
|
||||
|
||||
if 'paths_to_tags' in self._simples:
|
||||
|
||||
serialisable_paths_to_tags = self._simples[ 'paths_to_tags' ]
|
||||
|
||||
paths_to_tags = {}
|
||||
|
||||
for ( path, serialisable_service_keys_to_tags ) in paths_to_tags.items():
|
||||
|
||||
service_keys_to_tags = { service_key.decode( 'hex' ) : tags for ( service_key, tags ) in serialisable_service_keys_to_tags.items() }
|
||||
|
||||
paths_to_tags[ path ] = service_keys_to_tags
|
||||
|
||||
|
||||
self._simples[ 'paths_to_tags' ] = paths_to_tags
|
||||
|
||||
|
||||
self._serialisables = { name : HydrusSerialisable.CreateFromSerialisableTuple( value ) for ( name, value ) in serialisables.items() }
|
||||
|
||||
|
||||
def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
|
||||
|
||||
if version == 1:
|
||||
|
||||
( management_type, serialisable_keys, serialisable_simples, serialisable_serialisables ) = old_serialisable_info
|
||||
|
||||
if management_type == MANAGEMENT_TYPE_IMPORT_HDD:
|
||||
|
||||
advanced_import_options = serialisable_simples[ 'advanced_import_options' ]
|
||||
paths_info = serialisable_simples[ 'paths_info' ]
|
||||
paths_to_tags = serialisable_simples[ 'paths_to_tags' ]
|
||||
delete_after_success = serialisable_simples[ 'delete_after_success' ]
|
||||
|
||||
paths = [ path_info for ( path_type, path_info ) in paths_info if path_type != 'zip' ]
|
||||
|
||||
automatic_archive = advanced_import_options[ 'automatic_archive' ]
|
||||
exclude_deleted = advanced_import_options[ 'exclude_deleted' ]
|
||||
min_size = advanced_import_options[ 'min_size' ]
|
||||
min_resolution = advanced_import_options[ 'min_resolution' ]
|
||||
|
||||
import_file_options = ClientData.ImportFileOptions( automatic_archive = automatic_archive, exclude_deleted = exclude_deleted, min_size = min_size, min_resolution = min_resolution )
|
||||
|
||||
paths_to_tags = { path : { service_key.decode( 'hex' ) : tags for ( service_key, tags ) in service_keys_to_tags } for ( path, service_keys_to_tags ) in paths_to_tags.items() }
|
||||
|
||||
hdd_import = ClientImporting.HDDImport( paths = paths, import_file_options = import_file_options, paths_to_tags = paths_to_tags, delete_after_success = delete_after_success )
|
||||
|
||||
serialisable_serialisables[ 'hdd_import' ] = HydrusSerialisable.GetSerialisableTuple( hdd_import )
|
||||
|
||||
del serialisable_serialisables[ 'advanced_import_options' ]
|
||||
del serialisable_serialisables[ 'paths_info' ]
|
||||
del serialisable_serialisables[ 'paths_to_tags' ]
|
||||
del serialisable_serialisables[ 'delete_after_success' ]
|
||||
|
||||
|
||||
new_serialisable_info = ( management_type, serialisable_keys, serialisable_simples, serialisable_serialisables )
|
||||
|
||||
return ( 2, new_serialisable_info )
|
||||
|
||||
|
||||
|
||||
def GetKey( self, name ):
|
||||
|
||||
return self._keys[ name ]
|
||||
|
@ -1711,13 +1719,13 @@ class ManagementPanelImports( ManagementPanelImport ):
|
|||
queue_hbox.AddF( self._pending_import_queues_listbox, CC.FLAGS_EXPAND_BOTH_WAYS )
|
||||
queue_hbox.AddF( queue_buttons_vbox, CC.FLAGS_MIXED )
|
||||
|
||||
self._advanced_import_options = ClientGUICollapsible.CollapsibleOptionsImportFiles( self._pending_import_queues_panel )
|
||||
self._import_file_options = ClientGUICollapsible.CollapsibleOptionsImportFiles( self._pending_import_queues_panel )
|
||||
|
||||
self._pending_import_queues_panel.AddF( queue_hbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
|
||||
self._pending_import_queues_panel.AddF( self._new_queue_input, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
self._pending_import_queues_panel.AddF( self._get_tags_if_redundant, CC.FLAGS_CENTER )
|
||||
self._pending_import_queues_panel.AddF( self._file_limit, CC.FLAGS_CENTER )
|
||||
self._pending_import_queues_panel.AddF( self._advanced_import_options, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
self._pending_import_queues_panel.AddF( self._import_file_options, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
|
||||
vbox.AddF( self._pending_import_queues_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
|
||||
|
||||
|
@ -1793,7 +1801,7 @@ class ManagementPanelImports( ManagementPanelImport ):
|
|||
|
||||
|
||||
|
||||
def GetAdvancedImportOptions( self ): return self._advanced_import_options.GetInfo()
|
||||
def GetImportFileOptions( self ): return self._import_file_options.GetInfo()
|
||||
|
||||
def EventCancelImportQueueBuilder( self, event ):
|
||||
|
||||
|
@ -1905,13 +1913,13 @@ class ManagementPanelImportsGallery( ManagementPanelImports ):
|
|||
|
||||
def factory( job_key, item ):
|
||||
|
||||
advanced_import_options = HydrusThreading.CallBlockingToWx( self.GetAdvancedImportOptions )
|
||||
import_file_options = HydrusThreading.CallBlockingToWx( self.GetImportFileOptions )
|
||||
|
||||
advanced_tag_options = HydrusThreading.CallBlockingToWx( self.GetAdvancedTagOptions )
|
||||
|
||||
gallery_parsers_factory = self._GetGalleryParsersFactory()
|
||||
|
||||
return ClientDownloading.ImportArgsGeneratorGallery( job_key, item, advanced_import_options, advanced_tag_options, gallery_parsers_factory )
|
||||
return ClientDownloading.ImportArgsGeneratorGallery( job_key, item, import_file_options, advanced_tag_options, gallery_parsers_factory )
|
||||
|
||||
|
||||
return factory
|
||||
|
@ -2116,9 +2124,9 @@ class ManagementPanelImportsURL( ManagementPanelImports ):
|
|||
|
||||
def factory( job_key, item ):
|
||||
|
||||
advanced_import_options = HydrusThreading.CallBlockingToWx( self.GetAdvancedImportOptions )
|
||||
import_file_options = HydrusThreading.CallBlockingToWx( self.GetImportFileOptions )
|
||||
|
||||
return ClientDownloading.ImportArgsGeneratorURLs( job_key, item, advanced_import_options )
|
||||
return ClientDownloading.ImportArgsGeneratorURLs( job_key, item, import_file_options )
|
||||
|
||||
|
||||
return factory
|
||||
|
@ -2146,36 +2154,125 @@ class ManagementPanelImportsURL( ManagementPanelImports ):
|
|||
|
||||
management_panel_types_to_classes[ MANAGEMENT_TYPE_IMPORT_URL ] = ManagementPanelImportsURL
|
||||
|
||||
class ManagementPanelImportHDD( ManagementPanelImport ):
|
||||
class ManagementPanelImportHDD( ManagementPanel ):
|
||||
|
||||
def __init__( self, parent, page, management_controller ):
|
||||
|
||||
self._paths_info = management_controller.GetVariable( 'paths_info' )
|
||||
self._advanced_import_options = management_controller.GetVariable( 'advanced_import_options' )
|
||||
self._paths_to_tags = management_controller.GetVariable( 'paths_to_tags' )
|
||||
self._delete_after_success = management_controller.GetVariable( 'delete_after_success' )
|
||||
ManagementPanel.__init__( self, parent, page, management_controller )
|
||||
|
||||
ManagementPanelImport.__init__( self, parent, page, management_controller )
|
||||
self._import_queue_panel = ClientGUICommon.StaticBox( self, 'import summary' )
|
||||
|
||||
self._import_controller.PendImportQueueJob( self._paths_info )
|
||||
self._overall_status = wx.StaticText( self._import_queue_panel )
|
||||
self._current_action = wx.StaticText( self._import_queue_panel )
|
||||
self._overall_gauge = ClientGUICommon.Gauge( self._import_queue_panel )
|
||||
|
||||
self._pause_button = wx.BitmapButton( self._import_queue_panel, bitmap = CC.GlobalBMPs.pause )
|
||||
self._pause_button.Bind( wx.EVT_BUTTON, self.EventPause )
|
||||
|
||||
#
|
||||
|
||||
vbox = wx.BoxSizer( wx.VERTICAL )
|
||||
|
||||
self._import_queue_panel.AddF( self._overall_status, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
self._import_queue_panel.AddF( self._current_action, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
self._import_queue_panel.AddF( self._overall_gauge, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
self._import_queue_panel.AddF( self._pause_button, CC.FLAGS_LONE_BUTTON )
|
||||
|
||||
vbox.AddF( self._import_queue_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
|
||||
self._MakeCurrentSelectionTagsBox( vbox )
|
||||
|
||||
self.SetSizer( vbox )
|
||||
|
||||
HydrusGlobals.pubsub.sub( self, 'UpdateStatus', 'update_status' )
|
||||
|
||||
self._hdd_import = self._controller.GetVariable( 'hdd_import' )
|
||||
|
||||
self._Update()
|
||||
|
||||
self._hdd_import.Start( self._page_key )
|
||||
|
||||
|
||||
def _GenerateImportArgsGeneratorFactory( self ):
|
||||
def _Update( self ):
|
||||
|
||||
def factory( job_key, item ):
|
||||
( ( overall_status, ( overall_value, overall_range ) ), paused ) = self._hdd_import.GetStatus()
|
||||
|
||||
self._overall_status.SetLabel( overall_status )
|
||||
|
||||
self._overall_gauge.SetRange( overall_range )
|
||||
self._overall_gauge.SetValue( overall_value )
|
||||
|
||||
if paused:
|
||||
|
||||
return ClientDownloading.ImportArgsGeneratorHDD( job_key, item, self._advanced_import_options, self._paths_to_tags, self._delete_after_success )
|
||||
current_action = 'paused at ' + HydrusData.ConvertValueRangeToPrettyString( overall_value, overall_range )
|
||||
|
||||
if self._pause_button.GetBitmap() != CC.GlobalBMPs.play:
|
||||
|
||||
self._pause_button.SetBitmap( CC.GlobalBMPs.play )
|
||||
|
||||
|
||||
else:
|
||||
|
||||
current_action = 'processing at ' + HydrusData.ConvertValueRangeToPrettyString( overall_value, overall_range )
|
||||
|
||||
if self._pause_button.GetBitmap() != CC.GlobalBMPs.pause:
|
||||
|
||||
self._pause_button.SetBitmap( CC.GlobalBMPs.pause )
|
||||
|
||||
|
||||
|
||||
return factory
|
||||
if overall_value < overall_range:
|
||||
|
||||
if not self._pause_button.IsShown():
|
||||
|
||||
self._pause_button.Show()
|
||||
self._current_action.Show()
|
||||
self._overall_gauge.Show()
|
||||
|
||||
self.Layout()
|
||||
|
||||
|
||||
else:
|
||||
|
||||
if self._pause_button.IsShown():
|
||||
|
||||
self._pause_button.Hide()
|
||||
self._current_action.Hide()
|
||||
self._overall_gauge.Hide()
|
||||
|
||||
self.Layout()
|
||||
|
||||
|
||||
|
||||
self._current_action.SetLabel( current_action )
|
||||
|
||||
|
||||
def _InitExtraVboxElements( self, vbox ):
|
||||
def EventPause( self, event ):
|
||||
|
||||
ManagementPanelImport._InitExtraVboxElements( self, vbox )
|
||||
self._hdd_import.PausePlay()
|
||||
|
||||
self._import_gauge.Hide()
|
||||
self._import_cancel_button.Hide()
|
||||
self._Update()
|
||||
|
||||
|
||||
def TestAbleToClose( self ):
|
||||
|
||||
( ( overall_status, ( overall_value, overall_range ) ), paused ) = self._hdd_import.GetStatus()
|
||||
|
||||
if overall_value < overall_range and not paused:
|
||||
|
||||
with ClientGUIDialogs.DialogYesNo( self, 'This page is still importing. Are you sure you want to close it?' ) as dlg:
|
||||
|
||||
if dlg.ShowModal() == wx.ID_NO: raise Exception()
|
||||
|
||||
|
||||
|
||||
|
||||
def UpdateStatus( self, page_key ):
|
||||
|
||||
if page_key == self._page_key:
|
||||
|
||||
self._Update()
|
||||
|
||||
|
||||
|
||||
management_panel_types_to_classes[ MANAGEMENT_TYPE_IMPORT_HDD ] = ManagementPanelImportHDD
|
||||
|
@ -2186,14 +2283,14 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
|
|||
|
||||
def factory( job_key, item ):
|
||||
|
||||
advanced_import_options = HydrusThreading.CallBlockingToWx( self.GetAdvancedImportOptions )
|
||||
import_file_options = HydrusThreading.CallBlockingToWx( self.GetImportFileOptions )
|
||||
advanced_tag_options = HydrusThreading.CallBlockingToWx( self.GetAdvancedTagOptions )
|
||||
|
||||
# fourchan_board should be on the job_key or whatever. it is stuck on initial queue generation
|
||||
# we should not be getting it from the management_panel
|
||||
# we should have access to this info from the job_key or w/e
|
||||
|
||||
return ClientDownloading.ImportArgsGeneratorThread( job_key, item, advanced_import_options, advanced_tag_options )
|
||||
return ClientDownloading.ImportArgsGeneratorThread( job_key, item, import_file_options, advanced_tag_options )
|
||||
|
||||
|
||||
return factory
|
||||
|
@ -2253,7 +2350,7 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
|
|||
button_box.AddF( self._thread_pause_button, CC.FLAGS_EXPAND_BOTH_WAYS )
|
||||
button_box.AddF( self._thread_manual_refresh_button, CC.FLAGS_EXPAND_BOTH_WAYS )
|
||||
|
||||
self._advanced_import_options = ClientGUICollapsible.CollapsibleOptionsImportFiles( self._thread_panel )
|
||||
self._import_file_options = ClientGUICollapsible.CollapsibleOptionsImportFiles( self._thread_panel )
|
||||
|
||||
self._advanced_tag_options = ClientGUICollapsible.CollapsibleOptionsTags( self._thread_panel, namespaces = [ 'filename' ] )
|
||||
|
||||
|
@ -2261,7 +2358,7 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
|
|||
self._thread_panel.AddF( self._thread_input, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
self._thread_panel.AddF( hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
|
||||
self._thread_panel.AddF( button_box, CC.FLAGS_BUTTON_SIZER )
|
||||
self._thread_panel.AddF( self._advanced_import_options, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
self._thread_panel.AddF( self._import_file_options, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
self._thread_panel.AddF( self._advanced_tag_options, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
|
||||
vbox.AddF( self._thread_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
|
||||
|
@ -2438,7 +2535,7 @@ class ManagementPanelImportThreadWatcher( ManagementPanelImport ):
|
|||
|
||||
def EventThreadVariable( self, event ): self._SetThreadVariables()
|
||||
|
||||
def GetAdvancedImportOptions( self ): return self._advanced_import_options.GetInfo()
|
||||
def GetImportFileOptions( self ): return self._import_file_options.GetInfo()
|
||||
|
||||
def GetAdvancedTagOptions( self ): return self._advanced_tag_options.GetInfo()
|
||||
|
||||
|
|
|
@ -583,7 +583,7 @@ class MediaPanel( ClientMedia.ListeningMediaList, wx.ScrolledWindow ):
|
|||
( media_to_deselect, media_to_select ) = ( self._selected_media, set( self._sorted_media ) - self._selected_media )
|
||||
|
||||
elif select_type == 'none': ( media_to_deselect, media_to_select ) = ( self._selected_media, [] )
|
||||
else:
|
||||
elif select_type in ( 'inbox', 'archive' ):
|
||||
|
||||
inbox_media = { m for m in self._sorted_media if m.HasInbox() }
|
||||
archive_media = { m for m in self._sorted_media if m not in inbox_media }
|
||||
|
@ -599,6 +599,22 @@ class MediaPanel( ClientMedia.ListeningMediaList, wx.ScrolledWindow ):
|
|||
media_to_select = [ m for m in archive_media if m not in self._selected_media ]
|
||||
|
||||
|
||||
elif select_type in ( 'local', 'trash' ):
|
||||
|
||||
local_media = { media for media in self._sorted_media if CC.LOCAL_FILE_SERVICE_KEY in media.GetLocationsManager().GetCurrent() }
|
||||
trash_media = { media for media in self._sorted_media if CC.TRASH_SERVICE_KEY in media.GetLocationsManager().GetCurrent() }
|
||||
|
||||
if select_type == 'local':
|
||||
|
||||
media_to_deselect = [ m for m in trash_media if m in self._selected_media ]
|
||||
media_to_select = [ m for m in local_media if m not in self._selected_media ]
|
||||
|
||||
elif select_type == 'trash':
|
||||
|
||||
media_to_deselect = [ m for m in local_media if m in self._selected_media ]
|
||||
media_to_select = [ m for m in trash_media if m not in self._selected_media ]
|
||||
|
||||
|
||||
|
||||
if self._focussed_media in media_to_deselect: self._SetFocussedMedia( None )
|
||||
|
||||
|
@ -1436,6 +1452,7 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
elif command == 'open_externally': self._OpenExternally()
|
||||
elif command == 'petition': self._PetitionFiles( data )
|
||||
elif command == 'remove': self._Remove()
|
||||
elif command == 'rescind_download': wx.GetApp().Write( 'content_updates', { CC.LOCAL_FILE_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_DATA_TYPE_FILES, HC.CONTENT_UPDATE_RESCIND_PENDING, self._GetSelectedHashes( discriminant = CC.DISCRIMINANT_DOWNLOADING ) ) ] } )
|
||||
elif command == 'rescind_petition': self._RescindPetitionFiles( data )
|
||||
elif command == 'rescind_upload': self._RescindUploadFiles( data )
|
||||
elif command == 'scroll_end': self._ScrollEnd( False )
|
||||
|
@ -1560,13 +1577,16 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
|
||||
if thumbnail is not None: self._HitMedia( thumbnail, event.CmdDown(), event.ShiftDown() )
|
||||
|
||||
all_locations_managers = [ media.GetLocationsManager() for media in self._selected_media ]
|
||||
all_locations_managers = [ media.GetLocationsManager() for media in self._sorted_media ]
|
||||
selected_locations_managers = [ media.GetLocationsManager() for media in self._selected_media ]
|
||||
|
||||
selection_has_local_file_service = True in ( CC.LOCAL_FILE_SERVICE_KEY in locations_manager.GetCurrent() for locations_manager in all_locations_managers )
|
||||
selection_has_trash = True in ( CC.TRASH_SERVICE_KEY in locations_manager.GetCurrent() for locations_manager in all_locations_managers )
|
||||
selection_has_local_file_service = True in ( CC.LOCAL_FILE_SERVICE_KEY in locations_manager.GetCurrent() for locations_manager in selected_locations_managers )
|
||||
selection_has_trash = True in ( CC.TRASH_SERVICE_KEY in locations_manager.GetCurrent() for locations_manager in selected_locations_managers )
|
||||
selection_has_inbox = True in ( media.HasInbox() for media in self._selected_media )
|
||||
selection_has_archive = True in ( media.HasArchive() for media in self._selected_media )
|
||||
|
||||
media_has_local_file_service = True in ( CC.LOCAL_FILE_SERVICE_KEY in locations_manager.GetCurrent() for locations_manager in all_locations_managers )
|
||||
media_has_trash = True in ( CC.TRASH_SERVICE_KEY in locations_manager.GetCurrent() for locations_manager in all_locations_managers )
|
||||
media_has_inbox = True in ( media.HasInbox() for media in self._sorted_media )
|
||||
media_has_archive = True in ( media.HasArchive() for media in self._sorted_media )
|
||||
|
||||
|
@ -1595,6 +1615,12 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
select_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'archive' ), 'archive' )
|
||||
|
||||
|
||||
if media_has_local_file_service and media_has_trash:
|
||||
|
||||
select_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'local' ), 'local files' )
|
||||
select_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'trash' ), 'trash' )
|
||||
|
||||
|
||||
select_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'none' ), 'none' )
|
||||
|
||||
menu.AppendMenu( CC.ID_NULL, 'select', select_menu )
|
||||
|
@ -1635,6 +1661,7 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
deleted_phrase = 'selected deleted from'
|
||||
|
||||
download_phrase = 'download all possible selected'
|
||||
rescind_download_phrase = 'cancel downloads for all possible selected'
|
||||
upload_phrase = 'upload all possible selected to'
|
||||
rescind_upload_phrase = 'rescind pending selected uploads to'
|
||||
petition_phrase = 'petition all possible selected for removal from'
|
||||
|
@ -1663,6 +1690,7 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
deleted_phrase = 'deleted from'
|
||||
|
||||
download_phrase = 'download'
|
||||
rescind_download_phrase = 'cancel download'
|
||||
upload_phrase = 'upload to'
|
||||
rescind_upload_phrase = 'rescind pending upload to'
|
||||
petition_phrase = 'petition for removal from'
|
||||
|
@ -1688,13 +1716,15 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
|
||||
def MassUnion( lists ): return { item for item in itertools.chain.from_iterable( lists ) }
|
||||
|
||||
all_current_file_service_keys = [ locations_manager.GetCurrentRemote() for locations_manager in all_locations_managers ]
|
||||
all_current_file_service_keys = [ locations_manager.GetCurrentRemote() for locations_manager in selected_locations_managers ]
|
||||
|
||||
current_file_service_keys = HydrusData.IntelligentMassIntersect( all_current_file_service_keys )
|
||||
|
||||
some_current_file_service_keys = MassUnion( all_current_file_service_keys ) - current_file_service_keys
|
||||
|
||||
all_pending_file_service_keys = [ locations_manager.GetPendingRemote() for locations_manager in all_locations_managers ]
|
||||
all_pending_file_service_keys = [ locations_manager.GetPendingRemote() for locations_manager in selected_locations_managers ]
|
||||
|
||||
some_downloading = True in ( CC.LOCAL_FILE_SERVICE_KEY in locations_manager.GetPending() for locations_manager in selected_locations_managers )
|
||||
|
||||
pending_file_service_keys = HydrusData.IntelligentMassIntersect( all_pending_file_service_keys )
|
||||
|
||||
|
@ -1702,7 +1732,7 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
|
||||
selection_uploaded_file_service_keys = some_pending_file_service_keys.union( pending_file_service_keys )
|
||||
|
||||
all_petitioned_file_service_keys = [ locations_manager.GetPetitionedRemote() for locations_manager in all_locations_managers ]
|
||||
all_petitioned_file_service_keys = [ locations_manager.GetPetitionedRemote() for locations_manager in selected_locations_managers ]
|
||||
|
||||
petitioned_file_service_keys = HydrusData.IntelligentMassIntersect( all_petitioned_file_service_keys )
|
||||
|
||||
|
@ -1710,7 +1740,7 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
|
||||
selection_petitioned_file_service_keys = some_petitioned_file_service_keys.union( petitioned_file_service_keys )
|
||||
|
||||
all_deleted_file_service_keys = [ locations_manager.GetDeletedRemote() for locations_manager in all_locations_managers ]
|
||||
all_deleted_file_service_keys = [ locations_manager.GetDeletedRemote() for locations_manager in selected_locations_managers ]
|
||||
|
||||
deleted_file_service_keys = HydrusData.IntelligentMassIntersect( all_deleted_file_service_keys )
|
||||
|
||||
|
@ -1724,7 +1754,7 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
|
||||
selection_petitionable_file_service_keys = set()
|
||||
|
||||
for locations_manager in all_locations_managers:
|
||||
for locations_manager in selected_locations_managers:
|
||||
|
||||
# we can upload (set pending) to a repo_id when we have permission, a file is local, not current, not pending, and either ( not deleted or admin )
|
||||
|
||||
|
@ -1742,7 +1772,7 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
|
||||
selection_deletable_file_service_keys = set()
|
||||
|
||||
for locations_manager in all_locations_managers:
|
||||
for locations_manager in selected_locations_managers:
|
||||
|
||||
# we can delete remote when we have permission and a file is current and it is not already petitioned
|
||||
|
||||
|
@ -1751,7 +1781,7 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
|
||||
selection_modifyable_file_service_keys = set()
|
||||
|
||||
for locations_manager in all_locations_managers:
|
||||
for locations_manager in selected_locations_managers:
|
||||
|
||||
# we can modify users when we have permission and the file is current or deleted
|
||||
|
||||
|
@ -1803,6 +1833,8 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
|
||||
if len( selection_downloadable_file_service_keys ) > 0: file_repo_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'download' ), download_phrase )
|
||||
|
||||
if some_downloading: file_repo_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'rescind_download' ), rescind_download_phrase )
|
||||
|
||||
if len( selection_uploadable_file_service_keys ) > 0: AddFileServiceKeysToMenu( file_repo_menu, selection_uploadable_file_service_keys, upload_phrase, 'upload' )
|
||||
|
||||
if len( selection_uploaded_file_service_keys ) > 0: AddFileServiceKeysToMenu( file_repo_menu, selection_uploaded_file_service_keys, rescind_upload_phrase, 'rescind_upload' )
|
||||
|
@ -1939,6 +1971,12 @@ class MediaPanelThumbnails( MediaPanel ):
|
|||
select_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'archive' ), 'archive' )
|
||||
|
||||
|
||||
if media_has_local_file_service and media_has_trash:
|
||||
|
||||
select_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'local' ), 'local files' )
|
||||
select_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'trash' ), 'trash' )
|
||||
|
||||
|
||||
select_menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetId( 'select', 'none' ), 'none' )
|
||||
|
||||
menu.AppendMenu( CC.ID_NULL, 'select', select_menu )
|
||||
|
|
|
@ -212,7 +212,7 @@ class OptionsPanelImportFiles( OptionsPanel ):
|
|||
|
||||
self.SetSizer( vbox )
|
||||
|
||||
self.SetInfo( ClientDefaults.GetDefaultAdvancedImportOptions() )
|
||||
self.SetInfo( ClientDefaults.GetDefaultImportFileOptions() )
|
||||
|
||||
|
||||
def GetInfo( self ):
|
||||
|
|
|
@ -44,14 +44,6 @@ class Page( wx.SplitterWindow ):
|
|||
|
||||
self._search_preview_split.Bind( wx.EVT_SPLITTER_DCLICK, self.EventPreviewUnsplit )
|
||||
|
||||
if self._management_controller.GetType() == ClientGUIManagement.MANAGEMENT_TYPE_IMPORT_HDD:
|
||||
|
||||
if len( initial_media_results ) > 0:
|
||||
|
||||
self._management_controller.SetVariable( 'paths_info', [] )
|
||||
|
||||
|
||||
|
||||
self._management_panel = ClientGUIManagement.CreateManagementPanel( self._search_preview_split, self, self._management_controller )
|
||||
|
||||
file_service_key = self._management_controller.GetKey( 'file_service' )
|
||||
|
|
|
@ -1,9 +1,15 @@
|
|||
import ClientConstants as CC
|
||||
import collections
|
||||
import HydrusConstants as HC
|
||||
import HydrusData
|
||||
import HydrusGlobals
|
||||
import HydrusSerialisable
|
||||
import HydrusThreading
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
import wx
|
||||
|
||||
class ImportController( HydrusSerialisable.SerialisableBase ):
|
||||
|
||||
|
@ -34,7 +40,7 @@ class ImportController( HydrusSerialisable.SerialisableBase ):
|
|||
self._import_seed_queues = []
|
||||
self._importer_status = ( '', 0, 1 )
|
||||
|
||||
self._search_seeds = SeedQueue()
|
||||
self._search_seeds = SeedCache()
|
||||
self._searcher_status = ( '', 0, 1 )
|
||||
|
||||
self._options = {}
|
||||
|
@ -63,7 +69,7 @@ class ImportController( HydrusSerialisable.SerialisableBase ):
|
|||
self._options = { name : HydrusSerialisable.CreateFromSerialisableTuple( serialisable_suboptions_tuple ) for ( name, serialisable_suboptions_tuple ) in serialisable_options_tuple.items() }
|
||||
|
||||
|
||||
def _ProcessImportSeed( self, seed, seed_info ):
|
||||
def _ProcessImportSeed( self, seed ):
|
||||
|
||||
raise NotImplementedError()
|
||||
|
||||
|
@ -100,9 +106,9 @@ class ImportController( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
if result is not None:
|
||||
|
||||
( seed, seed_info ) = result
|
||||
seed = result
|
||||
|
||||
self._ProcessImportSeed( import_seed, seed_info )
|
||||
self._ProcessImportSeed( seed )
|
||||
|
||||
|
||||
|
||||
|
@ -117,7 +123,6 @@ class ImportController( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
result = import_seed_queue.GetNextUnknownSeed()
|
||||
|
||||
|
||||
|
||||
if result is not None:
|
||||
|
||||
|
@ -176,109 +181,183 @@ class ImportController( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
|
||||
|
||||
class ImportControllerHDD( HydrusSerialisable.SerialisableBase ):
|
||||
class HDDImport( HydrusSerialisable.SerialisableBase ):
|
||||
|
||||
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_HDD_IMPORT
|
||||
SERIALISABLE_VERSION = 1
|
||||
|
||||
def __init__( self ):
|
||||
def __init__( self, paths = None, import_file_options = None, paths_to_tags = None, delete_after_success = None ):
|
||||
|
||||
HydrusSerialisable.SerialisableBase.__init__( self )
|
||||
|
||||
# this stuff is all moved to the search seed
|
||||
self._paths_info = None
|
||||
self._paths_to_tags = None
|
||||
self._delete_file_after_import = None
|
||||
self._import_file_options = None
|
||||
if paths is None:
|
||||
|
||||
self._paths_cache = None
|
||||
|
||||
else:
|
||||
|
||||
self._paths_cache = SeedCache()
|
||||
|
||||
for path in paths:
|
||||
|
||||
self._paths_cache.AddSeed( path )
|
||||
|
||||
|
||||
|
||||
self._import_file_options = import_file_options
|
||||
self._paths_to_tags = paths_to_tags
|
||||
self._delete_after_success = delete_after_success
|
||||
self._paused = False
|
||||
|
||||
self._overall_status = ( 'initialising', ( 0, 1 ) )
|
||||
|
||||
self._lock = threading.Lock()
|
||||
|
||||
|
||||
def _GetSerialisableInfo( self ):
|
||||
|
||||
serialisable_url_cache = HydrusSerialisable.GetSerialisableTuple( self._url_cache )
|
||||
serialisable_url_cache = HydrusSerialisable.GetSerialisableTuple( self._paths_cache )
|
||||
serialisable_options = HydrusSerialisable.GetSerialisableTuple( self._import_file_options )
|
||||
serialisable_paths_to_tags = { path : { service_key.encode( 'hex' ) : tags for ( service_key, tags ) in service_keys_to_tags.items() } for ( path, service_keys_to_tags ) in self._paths_to_tags.items() }
|
||||
|
||||
serialisable_options = { name : HydrusSerialisable.GetSerialisableTuple( options ) for ( name, options ) in self._options.items() }
|
||||
|
||||
return ( self._site_type, self._query_type, self._query, self._get_tags_if_redundant, serialisable_url_cache, serialisable_options )
|
||||
return ( serialisable_url_cache, serialisable_options, serialisable_paths_to_tags, self._delete_after_success, self._paused )
|
||||
|
||||
|
||||
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
|
||||
|
||||
( self._site_type, self._query_type, self._query, self._get_tags_if_redundant, serialisable_url_cache_tuple, serialisable_options_tuple ) = serialisable_info
|
||||
( serialisable_url_cache, serialisable_options, serialisable_paths_to_tags, self._delete_after_success, self._paused ) = serialisable_info
|
||||
|
||||
self._url_cache = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_url_cache_tuple )
|
||||
|
||||
self._options = { name : HydrusSerialisable.CreateFromSerialisableTuple( serialisable_suboptions_tuple ) for ( name, serialisable_suboptions_tuple ) in serialisable_options_tuple.items() }
|
||||
self._paths_cache = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_url_cache )
|
||||
self._import_file_options = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_options )
|
||||
self._paths_to_tags = { path : { service_key.decode( 'hex' ) : tags for ( service_key, tags ) in service_keys_to_tags.items() } for ( path, service_keys_to_tags ) in serialisable_paths_to_tags.items() }
|
||||
|
||||
|
||||
def GetImportStatus( self ):
|
||||
def _RegenerateStatus( self ):
|
||||
|
||||
self._overall_status = self._paths_cache.GetStatus()
|
||||
|
||||
|
||||
def _THREADWork( self, page_key ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
return self._import_status
|
||||
self._RegenerateStatus()
|
||||
|
||||
|
||||
HydrusGlobals.pubsub.pub( 'update_status', page_key )
|
||||
|
||||
while True:
|
||||
|
||||
if HydrusGlobals.shutdown:
|
||||
|
||||
return
|
||||
|
||||
|
||||
while self._paused:
|
||||
|
||||
if HydrusGlobals.shutdown:
|
||||
|
||||
return
|
||||
|
||||
|
||||
time.sleep( 0.1 )
|
||||
|
||||
|
||||
try:
|
||||
|
||||
with self._lock:
|
||||
|
||||
path = self._paths_cache.GetNextUnknownSeed()
|
||||
|
||||
if path is not None:
|
||||
|
||||
if path in self._paths_to_tags:
|
||||
|
||||
service_keys_to_tags = self._paths_to_tags[ path ]
|
||||
|
||||
else:
|
||||
|
||||
service_keys_to_tags = {}
|
||||
|
||||
|
||||
|
||||
|
||||
if path is not None:
|
||||
|
||||
try:
|
||||
|
||||
( status, media_result ) = wx.GetApp().WriteSynchronous( 'import_file', path, import_file_options = self._import_file_options, service_keys_to_tags = service_keys_to_tags, generate_media_result = True )
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._paths_cache.UpdateSeedStatus( path, status )
|
||||
|
||||
if status in ( CC.STATUS_SUCCESSFUL, CC.STATUS_REDUNDANT ):
|
||||
|
||||
HydrusGlobals.pubsub.pub( 'add_media_results', page_key, ( media_result, ) )
|
||||
|
||||
if self._delete_after_success:
|
||||
|
||||
try: os.remove( path )
|
||||
except: pass
|
||||
|
||||
|
||||
|
||||
|
||||
except Exception as e:
|
||||
|
||||
status = CC.STATUS_FAILED
|
||||
|
||||
note = HydrusData.ToString( e )
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._paths_cache.UpdateSeedStatus( path, status, note = note )
|
||||
|
||||
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._RegenerateStatus()
|
||||
|
||||
|
||||
HydrusGlobals.pubsub.pub( 'update_status', page_key )
|
||||
|
||||
else:
|
||||
|
||||
time.sleep( 1 )
|
||||
|
||||
|
||||
wx.GetApp().WaitUntilWXThreadIdle()
|
||||
|
||||
except Exception as e:
|
||||
|
||||
HydrusData.ShowException( e )
|
||||
|
||||
return
|
||||
|
||||
|
||||
|
||||
|
||||
def GetQueueStatus( self ):
|
||||
def GetStatus( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
gauge_value = self._current_position
|
||||
gauge_range = len( self._paths_info )
|
||||
|
||||
# return progress string
|
||||
# also return string for num_successful and so on
|
||||
|
||||
pass
|
||||
return ( self._overall_status, self._paused )
|
||||
|
||||
|
||||
|
||||
def GetTuple( self ):
|
||||
|
||||
return ( self._paths_info, self._paths_to_tags, self._delete_file_after_import, self._import_file_options )
|
||||
|
||||
|
||||
def MainLoop( self ):
|
||||
|
||||
# use the lock sparingly, remember
|
||||
# obey pause and hc.shutdown
|
||||
# maybe also an internal shutdown, on managementpanel cleanupbeforedestroy
|
||||
# update file_status_counts
|
||||
# increment current_position
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def Pause( self ):
|
||||
def PausePlay( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._paused = True
|
||||
self._paused = not self._paused
|
||||
|
||||
|
||||
|
||||
def Resume( self ):
|
||||
def Start( self, page_key ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
self._paused = False
|
||||
|
||||
|
||||
|
||||
def SetTuple( self, paths_info, paths_to_tags, delete_file_after_import, import_file_options ):
|
||||
|
||||
self._paths_info = paths_info
|
||||
self._paths_to_tags = paths_to_tags
|
||||
self._delete_file_after_import = delete_file_after_import
|
||||
self._import_file_options = import_file_options
|
||||
|
||||
|
||||
def Start( self ):
|
||||
|
||||
# init a daemon to work through the list
|
||||
|
||||
pass
|
||||
threading.Thread( target = self._THREADWork, args = ( page_key, ) ).start()
|
||||
|
||||
|
||||
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_HDD_IMPORT ] = HDDImport
|
||||
|
@ -333,13 +412,13 @@ class GalleryQuery( HydrusSerialisable.SerialisableBase ):
|
|||
self._query = query
|
||||
self._get_tags_if_redundant = get_tags_if_redundant
|
||||
self._file_limit = file_limit
|
||||
self._url_cache = URLCache()
|
||||
self._url_cache = SeedCache()
|
||||
self._options = options
|
||||
|
||||
|
||||
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_GALLERY_QUERY ] = GalleryQuery
|
||||
|
||||
class SubscriptionController( HydrusSerialisable.SerialisableBaseNamed ):
|
||||
class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
|
||||
|
||||
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_SUBSCRIPTION
|
||||
SERIALISABLE_VERSION = 1
|
||||
|
@ -375,9 +454,9 @@ class SubscriptionController( HydrusSerialisable.SerialisableBaseNamed ):
|
|||
|
||||
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_SUBSCRIPTION ] = Subscription
|
||||
|
||||
class SeedQueue( HydrusSerialisable.SerialisableBase ):
|
||||
class SeedCache( HydrusSerialisable.SerialisableBase ):
|
||||
|
||||
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_SEED_QUEUE
|
||||
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_SEED_CACHE
|
||||
SERIALISABLE_VERSION = 1
|
||||
|
||||
def __init__( self ):
|
||||
|
@ -390,6 +469,18 @@ class SeedQueue( HydrusSerialisable.SerialisableBase ):
|
|||
self._lock = threading.Lock()
|
||||
|
||||
|
||||
def _GetSeedTuple( self, seed ):
|
||||
|
||||
seed_info = self._seeds_to_info[ seed ]
|
||||
|
||||
status = seed_info[ 'status' ]
|
||||
added_timestamp = seed_info[ 'added_timestamp' ]
|
||||
last_modified_timestamp = seed_info[ 'last_modified_timestamp' ]
|
||||
note = seed_info[ 'note' ]
|
||||
|
||||
return ( seed, status, added_timestamp, last_modified_timestamp, note )
|
||||
|
||||
|
||||
def _GetSerialisableInfo( self ):
|
||||
|
||||
with self._lock:
|
||||
|
@ -420,7 +511,7 @@ class SeedQueue( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
|
||||
|
||||
def AddSeed( self, seed, additional_info = None ):
|
||||
def AddSeed( self, seed ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
|
@ -431,17 +522,15 @@ class SeedQueue( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
self._seeds_ordered.append( seed )
|
||||
|
||||
now = HydrusData.GetNow()
|
||||
|
||||
seed_info = {}
|
||||
|
||||
seed_info[ 'status' ] = CC.STATUS_UNKNOWN
|
||||
seed_info[ 'timestamp' ] = HydrusData.GetNow()
|
||||
seed_info[ 'added_timestamp' ] = now
|
||||
seed_info[ 'last_modified_timestamp' ] = now
|
||||
seed_info[ 'note' ] = ''
|
||||
|
||||
if additional_info is not None:
|
||||
|
||||
seed_info.update( additional_info )
|
||||
|
||||
|
||||
self._seeds_to_info[ seed ] = seed_info
|
||||
|
||||
|
||||
|
@ -493,7 +582,7 @@ class SeedQueue( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
if seed_info[ 'status' ] == CC.STATUS_UNKNOWN:
|
||||
|
||||
return ( seed, seed_info )
|
||||
return seed
|
||||
|
||||
|
||||
|
||||
|
@ -509,7 +598,7 @@ class SeedQueue( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
|
||||
|
||||
def GetSeedsDisplayInfo( self ):
|
||||
def GetSeedsWithInfo( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
|
@ -517,19 +606,56 @@ class SeedQueue( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
for seed in self._seeds_ordered:
|
||||
|
||||
seed_info = self._seeds_to_info[ seed ]
|
||||
seed_tuple = self._GetSeedTuple( seed )
|
||||
|
||||
timestamp = seed_info[ 'timestamp' ]
|
||||
status = seed_info[ 'status' ]
|
||||
note = seed_info[ 'note' ]
|
||||
|
||||
all_info.append( ( seed, status, timestamp, note ) )
|
||||
all_info.append( seed_tuple )
|
||||
|
||||
|
||||
return all_info
|
||||
|
||||
|
||||
|
||||
def GetStatus( self ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
statuses_to_counts = collections.Counter()
|
||||
|
||||
for seed_info in self._seeds_to_info.values():
|
||||
|
||||
statuses_to_counts[ seed_info[ 'status' ] ] += 1
|
||||
|
||||
|
||||
num_successful = statuses_to_counts[ CC.STATUS_SUCCESSFUL ]
|
||||
num_failed = statuses_to_counts[ CC.STATUS_FAILED ]
|
||||
num_deleted = statuses_to_counts[ CC.STATUS_DELETED ]
|
||||
num_redundant = statuses_to_counts[ CC.STATUS_REDUNDANT ]
|
||||
num_unknown = statuses_to_counts[ CC.STATUS_UNKNOWN ]
|
||||
|
||||
status_strings = []
|
||||
|
||||
if num_successful > 0: status_strings.append( HydrusData.ToString( num_successful ) + ' successful' )
|
||||
if num_failed > 0: status_strings.append( HydrusData.ToString( num_failed ) + ' failed' )
|
||||
if num_deleted > 0: status_strings.append( HydrusData.ToString( num_deleted ) + ' already deleted' )
|
||||
if num_redundant > 0: status_strings.append( HydrusData.ToString( num_redundant ) + ' already in db' )
|
||||
|
||||
status = ', '.join( status_strings )
|
||||
|
||||
total_processed = len( self._seeds_ordered ) - num_unknown
|
||||
total = len( self._seeds_ordered )
|
||||
|
||||
return ( status, ( total_processed, total ) )
|
||||
|
||||
|
||||
|
||||
def HasSeed( self, seed ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
return seed in self._seeds_to_info
|
||||
|
||||
|
||||
|
||||
def RemoveSeed( self, seed ):
|
||||
|
||||
with self._lock:
|
||||
|
@ -543,16 +669,16 @@ class SeedQueue( HydrusSerialisable.SerialisableBase ):
|
|||
|
||||
|
||||
|
||||
def SetSeedStatus( self, seed, status, note = '' ):
|
||||
def UpdateSeedStatus( self, seed, status, note = '' ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
seed_info = self._seeds_to_info[ seed ]
|
||||
|
||||
seed_info[ 'status' ] = status
|
||||
seed_info[ 'timestamp' ] = HydrusData.GetNow()
|
||||
seed_info[ 'last_modified_timestamp' ] = HydrusData.GetNow()
|
||||
seed_info[ 'note' ] = note
|
||||
|
||||
|
||||
|
||||
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_SEED_QUEUE ] = SeedQueue
|
||||
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_SEED_CACHE ] = SeedCache
|
||||
|
|
|
@ -202,11 +202,14 @@ class MediaList( object ):
|
|||
|
||||
if discriminant is not None:
|
||||
|
||||
inbox_failed = discriminant == CC.DISCRIMINANT_INBOX and not media.HasInbox()
|
||||
local_failed = discriminant == CC.DISCRIMINANT_LOCAL and not media.GetLocationsManager().HasLocal()
|
||||
not_local_failed = discriminant == CC.DISCRIMINANT_NOT_LOCAL and media.GetLocationsManager().HasLocal()
|
||||
locations_manager = media.GetLocationsManager()
|
||||
|
||||
if inbox_failed or local_failed or not_local_failed: continue
|
||||
inbox_failed = discriminant == CC.DISCRIMINANT_INBOX and not media.HasInbox()
|
||||
local_failed = discriminant == CC.DISCRIMINANT_LOCAL and not locations_manager.HasLocal()
|
||||
not_local_failed = discriminant == CC.DISCRIMINANT_NOT_LOCAL and locations_manager.HasLocal()
|
||||
downloading_failed = discriminant == CC.DISCRIMINANT_DOWNLOADING and CC.LOCAL_FILE_SERVICE_KEY not in locations_manager.GetPending()
|
||||
|
||||
if inbox_failed or local_failed or not_local_failed or downloading_failed: continue
|
||||
|
||||
|
||||
if unrated is not None:
|
||||
|
|
|
@ -49,7 +49,7 @@ options = {}
|
|||
# Misc
|
||||
|
||||
NETWORK_VERSION = 17
|
||||
SOFTWARE_VERSION = 165
|
||||
SOFTWARE_VERSION = 166
|
||||
|
||||
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
|
||||
|
||||
|
|
|
@ -10,6 +10,7 @@ class NoContentException( Exception ): pass
|
|||
class NotFoundException( Exception ): pass
|
||||
class NotModifiedException( Exception ): pass
|
||||
class PermissionException( Exception ): pass
|
||||
class ServerBusyException( Exception ): pass
|
||||
class SessionException( Exception ): pass
|
||||
class ShutdownException( Exception ): pass
|
||||
class SizeException( Exception ): pass
|
||||
|
|
|
@ -264,7 +264,11 @@ def LaunchDirectory( path ):
|
|||
|
||||
cmd.append( path )
|
||||
|
||||
subprocess.Popen( cmd, startupinfo = HydrusData.GetSubprocessStartupInfo() )
|
||||
process = subprocess.Popen( cmd, startupinfo = HydrusData.GetSubprocessStartupInfo() )
|
||||
|
||||
process.wait()
|
||||
|
||||
process.communicate()
|
||||
|
||||
|
||||
def LaunchFile( path ):
|
||||
|
@ -280,6 +284,10 @@ def LaunchFile( path ):
|
|||
|
||||
cmd.append( path )
|
||||
|
||||
subprocess.Popen( cmd, startupinfo = HydrusData.GetSubprocessStartupInfo() )
|
||||
process = subprocess.Popen( cmd, startupinfo = HydrusData.GetSubprocessStartupInfo() )
|
||||
|
||||
process.wait()
|
||||
|
||||
process.communicate()
|
||||
|
||||
|
|
@ -12,3 +12,4 @@ subs_changed = False
|
|||
currently_processing_updates = False
|
||||
|
||||
db_profile_mode = False
|
||||
server_busy = False
|
||||
|
|
|
@ -209,11 +209,11 @@ class HTTPConnectionManager( object ):
|
|||
threading.Thread( target = self.DAEMONMaintainConnections, name = 'Maintain Connections' ).start()
|
||||
|
||||
|
||||
def _DoRequest( self, method, location, path, query, request_headers, body, follow_redirects = True, report_hooks = None, temp_path = None, num_redirects_permitted = 4, long_timeout = False ):
|
||||
def _DoRequest( self, method, location, path, query, request_headers, body, follow_redirects = True, report_hooks = None, temp_path = None, num_redirects_permitted = 4 ):
|
||||
|
||||
if report_hooks is None: report_hooks = []
|
||||
|
||||
connection = self._GetConnection( location, long_timeout )
|
||||
connection = self._GetConnection( location )
|
||||
|
||||
try:
|
||||
|
||||
|
@ -236,7 +236,7 @@ class HTTPConnectionManager( object ):
|
|||
|
||||
if new_location is None: new_location = location
|
||||
|
||||
return self._DoRequest( new_method, new_location, new_path, new_query, request_headers, body, follow_redirects = follow_redirects, report_hooks = report_hooks, temp_path = temp_path, num_redirects_permitted = num_redirects_permitted - 1, long_timeout = long_timeout )
|
||||
return self._DoRequest( new_method, new_location, new_path, new_query, request_headers, body, follow_redirects = follow_redirects, report_hooks = report_hooks, temp_path = temp_path, num_redirects_permitted = num_redirects_permitted - 1 )
|
||||
|
||||
|
||||
except:
|
||||
|
@ -247,26 +247,22 @@ class HTTPConnectionManager( object ):
|
|||
|
||||
|
||||
|
||||
def _GetConnection( self, location, long_timeout = False ):
|
||||
def _GetConnection( self, location ):
|
||||
|
||||
with self._lock:
|
||||
|
||||
if long_timeout: return HTTPConnection( location, long_timeout )
|
||||
else:
|
||||
if location not in self._connections:
|
||||
|
||||
if location not in self._connections:
|
||||
|
||||
connection = HTTPConnection( location )
|
||||
|
||||
self._connections[ location ] = connection
|
||||
|
||||
connection = HTTPConnection( location )
|
||||
|
||||
return self._connections[ location ]
|
||||
self._connections[ location ] = connection
|
||||
|
||||
|
||||
return self._connections[ location ]
|
||||
|
||||
|
||||
|
||||
def Request( self, method, url, request_headers = None, body = '', return_everything = False, return_cookies = False, report_hooks = None, temp_path = None, long_timeout = False ):
|
||||
def Request( self, method, url, request_headers = None, body = '', return_everything = False, return_cookies = False, report_hooks = None, temp_path = None ):
|
||||
|
||||
if request_headers is None: request_headers = {}
|
||||
|
||||
|
@ -274,7 +270,7 @@ class HTTPConnectionManager( object ):
|
|||
|
||||
follow_redirects = not return_cookies
|
||||
|
||||
( response, size_of_response, response_headers, cookies ) = self._DoRequest( method, location, path, query, request_headers, body, follow_redirects = follow_redirects, report_hooks = report_hooks, temp_path = temp_path, long_timeout = long_timeout )
|
||||
( response, size_of_response, response_headers, cookies ) = self._DoRequest( method, location, path, query, request_headers, body, follow_redirects = follow_redirects, report_hooks = report_hooks, temp_path = temp_path )
|
||||
|
||||
if return_everything: return ( response, size_of_response, response_headers, cookies )
|
||||
elif return_cookies: return ( response, cookies )
|
||||
|
@ -317,12 +313,11 @@ class HTTPConnection( object ):
|
|||
|
||||
read_block_size = 64 * 1024
|
||||
|
||||
def __init__( self, location, long_timeout = False ):
|
||||
def __init__( self, location ):
|
||||
|
||||
( self._scheme, self._host, self._port ) = location
|
||||
|
||||
if long_timeout: self._timeout = 600
|
||||
else: self._timeout = 30
|
||||
self._timeout = 30
|
||||
|
||||
self.lock = threading.Lock()
|
||||
|
||||
|
@ -607,7 +602,28 @@ class HTTPConnection( object ):
|
|||
elif response.status == 404: raise HydrusExceptions.NotFoundException( parsed_response )
|
||||
elif response.status == 419: raise HydrusExceptions.SessionException( parsed_response )
|
||||
elif response.status == 426: raise HydrusExceptions.NetworkVersionException( parsed_response )
|
||||
elif response.status in ( 500, 501, 502, 503 ): raise Exception( parsed_response )
|
||||
elif response.status in ( 500, 501, 502, 503 ):
|
||||
|
||||
server_header = response.getheader( 'Server' )
|
||||
|
||||
if server_header is not None and 'hydrus' in server_header:
|
||||
|
||||
hydrus_service = True
|
||||
|
||||
else:
|
||||
|
||||
hydrus_service = False
|
||||
|
||||
|
||||
if response.status == 503 and hydrus_service:
|
||||
|
||||
raise HydrusExceptions.ServerBusyException( 'Server is busy, please try again later.' )
|
||||
|
||||
else:
|
||||
|
||||
raise Exception( parsed_response )
|
||||
|
||||
|
||||
else: raise Exception( parsed_response )
|
||||
|
||||
|
||||
|
|
|
@ -9,7 +9,7 @@ SERIALISABLE_TYPE_PERIODIC = 4
|
|||
SERIALISABLE_TYPE_GALLERY_QUERY = 5
|
||||
SERIALISABLE_TYPE_IMPORT_TAG_OPTIONS = 6
|
||||
SERIALISABLE_TYPE_IMPORT_FILE_OPTIONS = 7
|
||||
SERIALISABLE_TYPE_SEED_QUEUE = 8
|
||||
SERIALISABLE_TYPE_SEED_CACHE = 8
|
||||
SERIALISABLE_TYPE_HDD_IMPORT = 9
|
||||
SERIALISABLE_TYPE_SERVER_TO_CLIENT_CONTENT_UPDATE_PACKAGE = 10
|
||||
SERIALISABLE_TYPE_SERVER_TO_CLIENT_SERVICE_UPDATE_PACKAGE = 11
|
||||
|
@ -90,9 +90,9 @@ class SerialisableBase( object ):
|
|||
raise NotImplementedError()
|
||||
|
||||
|
||||
def _UpdateInfo( self, version, old_info ):
|
||||
def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
|
||||
|
||||
return old_info
|
||||
return old_serialisable_info
|
||||
|
||||
|
||||
def GetSerialisableInfo( self ):
|
||||
|
@ -109,9 +109,9 @@ class SerialisableBase( object ):
|
|||
|
||||
def InitialiseFromSerialisableInfo( self, version, serialisable_info ):
|
||||
|
||||
if version != self.SERIALISABLE_VERSION:
|
||||
while version < self.SERIALISABLE_VERSION:
|
||||
|
||||
serialisable_info = self._UpdateInfo( version, serialisable_info )
|
||||
( version, serialisable_info ) = self._UpdateSerialisableInfo( version, serialisable_info )
|
||||
|
||||
|
||||
self._InitialiseFromSerialisableInfo( serialisable_info )
|
||||
|
|
|
@ -298,6 +298,7 @@ class HydrusServiceAdmin( HydrusServiceRestricted ):
|
|||
|
||||
root = HydrusServiceRestricted._InitRoot( self )
|
||||
|
||||
root.putChild( 'busy', HydrusServerResources.HydrusResourceBusyCheck() )
|
||||
root.putChild( 'backup', HydrusServerResources.HydrusResourceCommandRestrictedBackup( self._service_key, self._service_type, REMOTE_DOMAIN ) )
|
||||
root.putChild( 'init', HydrusServerResources.HydrusResourceCommandInit( self._service_key, self._service_type, REMOTE_DOMAIN ) )
|
||||
root.putChild( 'services', HydrusServerResources.HydrusResourceCommandRestrictedServices( self._service_key, self._service_type, REMOTE_DOMAIN ) )
|
||||
|
|
|
@ -5,6 +5,7 @@ import HydrusConstants as HC
|
|||
import HydrusExceptions
|
||||
import HydrusFileHandling
|
||||
import HydrusImageHandling
|
||||
import HydrusThreading
|
||||
import os
|
||||
import ServerFiles
|
||||
import time
|
||||
|
@ -102,6 +103,23 @@ class HydrusDomain( object ):
|
|||
if self._local_only and client_ip != '127.0.0.1': raise HydrusExceptions.ForbiddenException( 'Only local access allowed!' )
|
||||
|
||||
|
||||
class HydrusResourceBusyCheck( Resource ):
|
||||
|
||||
def __init__( self ):
|
||||
|
||||
Resource.__init__( self )
|
||||
|
||||
self._server_version_string = HC.service_string_lookup[ HC.SERVER_ADMIN ] + '/' + str( HC.NETWORK_VERSION )
|
||||
|
||||
|
||||
def render_GET( self, request ):
|
||||
|
||||
request.setHeader( 'Server', self._server_version_string )
|
||||
|
||||
if HydrusGlobals.server_busy: return '1'
|
||||
else: return '0'
|
||||
|
||||
|
||||
class HydrusResourceWelcome( Resource ):
|
||||
|
||||
def __init__( self, service_key, service_type, message ):
|
||||
|
@ -115,7 +133,7 @@ class HydrusResourceWelcome( Resource ):
|
|||
|
||||
self._server_version_string = HC.service_string_lookup[ service_type ] + '/' + str( HC.NETWORK_VERSION )
|
||||
|
||||
|
||||
|
||||
def render_GET( self, request ):
|
||||
|
||||
request.setHeader( 'Server', self._server_version_string )
|
||||
|
@ -136,8 +154,18 @@ class HydrusResourceCommand( Resource ):
|
|||
self._server_version_string = HC.service_string_lookup[ service_type ] + '/' + str( HC.NETWORK_VERSION )
|
||||
|
||||
|
||||
def _checkServerBusy( self ):
|
||||
|
||||
if HydrusGlobals.server_busy:
|
||||
|
||||
raise HydrusExceptions.ServerBusyException( 'This server is busy, please try again later.' )
|
||||
|
||||
|
||||
|
||||
def _callbackCheckRestrictions( self, request ):
|
||||
|
||||
self._checkServerBusy()
|
||||
|
||||
self._checkUserAgent( request )
|
||||
|
||||
self._domain.CheckValid( request.getClientIP() )
|
||||
|
@ -423,6 +451,7 @@ class HydrusResourceCommand( Resource ):
|
|||
elif failure.type == HydrusExceptions.ForbiddenException: response_context = ResponseContext( 403, mime = default_mime, body = default_encoding( failure.value ) )
|
||||
elif failure.type == HydrusExceptions.NotFoundException: response_context = ResponseContext( 404, mime = default_mime, body = default_encoding( failure.value ) )
|
||||
elif failure.type == HydrusExceptions.NetworkVersionException: response_context = ResponseContext( 426, mime = default_mime, body = default_encoding( failure.value ) )
|
||||
elif failure.type == HydrusExceptions.ServerBusyException: response_context = ResponseContext( 503, mime = default_mime, body = default_encoding( failure.value ) )
|
||||
elif failure.type == HydrusExceptions.SessionException: response_context = ResponseContext( 419, mime = default_mime, body = default_encoding( failure.value ) )
|
||||
else:
|
||||
|
||||
|
@ -561,6 +590,8 @@ class HydrusResourceCommandBooru( HydrusResourceCommand ):
|
|||
|
||||
def _callbackCheckRestrictions( self, request ):
|
||||
|
||||
self._checkServerBusy()
|
||||
|
||||
self._checkUserAgent( request )
|
||||
|
||||
self._domain.CheckValid( request.getClientIP() )
|
||||
|
@ -853,6 +884,8 @@ class HydrusResourceCommandRestricted( HydrusResourceCommand ):
|
|||
|
||||
def _callbackCheckRestrictions( self, request ):
|
||||
|
||||
self._checkServerBusy()
|
||||
|
||||
self._checkUserAgent( request )
|
||||
|
||||
self._domain.CheckValid( request.getClientIP() )
|
||||
|
@ -1021,9 +1054,16 @@ class HydrusResourceCommandRestrictedBackup( HydrusResourceCommandRestricted ):
|
|||
|
||||
def _threadDoPOSTJob( self, request ):
|
||||
|
||||
#threading.Thread( target = HC.app.Write, args = ( 'backup', ), name = 'Backup Thread' ).start()
|
||||
def do_it():
|
||||
|
||||
HydrusGlobals.server_busy = True
|
||||
|
||||
wx.GetApp().WriteSynchronous( 'backup' )
|
||||
|
||||
HydrusGlobals.server_busy = False
|
||||
|
||||
|
||||
wx.GetApp().WriteSynchronous( 'backup' )
|
||||
HydrusThreading.CallToThread( do_it )
|
||||
|
||||
response_context = ResponseContext( 200 )
|
||||
|
||||
|
|
|
@ -18,7 +18,7 @@ class FakeHTTPConnectionManager():
|
|||
self._fake_responses = {}
|
||||
|
||||
|
||||
def Request( self, method, url, request_headers = None, body = '', return_everything = False, return_cookies = False, report_hooks = None, temp_path = None, long_timeout = False ):
|
||||
def Request( self, method, url, request_headers = None, body = '', return_everything = False, return_cookies = False, report_hooks = None, temp_path = None ):
|
||||
|
||||
if request_headers is None: request_headers = {}
|
||||
if report_hooks is None: report_hooks = []
|
||||
|
|
Loading…
Reference in New Issue