Version 337

Hydrus Network Developer 2019-01-23 16:19:16 -06:00
parent 836ae13e1f
commit 2d9b5719ff
30 changed files with 859 additions and 348 deletions

View File

@ -10,7 +10,9 @@ try:
from include import HydrusPy2To3
HydrusPy2To3.do_2to3_test()
import wx
HydrusPy2To3.do_2to3_test( wx_error_display_callable = wx.SafeShowMessage )
from include import HydrusExceptions
from include import HydrusConstants as HC
@ -86,7 +88,7 @@ try:
if not HG.twisted_is_broke:
threading.Thread( target = reactor.run, kwargs = { 'installSignalHandlers' : 0 } ).start()
threading.Thread( target = reactor.run, name = 'twisted', kwargs = { 'installSignalHandlers' : 0 } ).start()
controller = ClientController.Controller( db_dir, no_daemons, no_wal )

View File

@ -10,7 +10,9 @@ try:
from include import HydrusPy2To3
HydrusPy2To3.do_2to3_test()
import wx
HydrusPy2To3.do_2to3_test( wx_error_display_callable = wx.SafeShowMessage )
from include import HydrusExceptions
from include import HydrusConstants as HC
@ -86,7 +88,7 @@ try:
if not HG.twisted_is_broke:
threading.Thread( target = reactor.run, kwargs = { 'installSignalHandlers' : 0 } ).start()
threading.Thread( target = reactor.run, name = 'twisted', kwargs = { 'installSignalHandlers' : 0 } ).start()
controller = ClientController.Controller( db_dir, no_daemons, no_wal )

View File

@ -8,6 +8,31 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 337</h3></li>
<ul>
<li>fixed another couple of unicode encoding problems with the logging and profiling code</li>
<li>the logger now sticks a unicode BOM at the top of new log files to help text readers guess the utf-8 encoding</li>
<li>fixed musical webm import when the video stream has no stated duration but the audio stream does and typically stretches out a 'single frame' video</li>
<li>fixed some 'max size' download file size testing</li>
<li>the downloader easy-importer lain image is now clickable to launch a file selection dialog</li>
<li>if you are in advanced mode, the manual file export dialog now lets you export symlinks with a new checkbox. this is experimental, so if you are interested, give it a brief test and let me know how it works for you</li>
<li>duplicate content merging now applies to pending as well as current tags</li>
<li>the duplicate filter now counts pending tags when saying which file has more tags</li>
<li>advanced content updates now _copy_ both current and pending tags. the other actions now state what they do more clearly</li>
<li>stopped printing long server error text on 304 and 404, where actual response content is uninteresting</li>
<li>removed wx import that accidentally came into server space due to 2to3 check</li>
<li>improved the path-fixing code that helps environment construction when launching external programs from non-windows frozen builds</li>
<li>fixed a critical pubsub processing bug that kicked in at a certain stage of client shutdown. this should stop the post-shutdown-processing memory explosion certain users were seeing and should stop any ui jank in the last 0.2s of the program for everyone else</li>
<li>improved some other shutdown memory cleanup that was sometimes leading to double-log-printing of exit statements</li>
<li>did a full pass over the daemon scheduling code. it now reacts more responsively to various shutdown situations</li>
<li>reduced db disk cache aggression significantly and added more memory maintenance to the cache population process</li>
<li>fixed an issue where subscriptions were not promptly responding to shutdown events</li>
<li>fixed an issue where some delayed network jobs (e.g. while all network traffic is paused) were also not responding to shutdown events</li>
<li>added a 'pubsub report mode' debug mode for simpler pubsub review</li>
<li>the db is now less redundantly spammy on certain behind the scenes update notifications</li>
<li>wrote a first version of the client api manager and permissions handling objects</li>
<li>misc fixes, cleaned some shutdown code</li>
</ul>
<li><h3>version 336</h3></li>
<ul>
<li>fixed an issue where the numerical rating control was coercing all clicks to either the minimum or maximum allowable rating (e.g. 3/5 stars wasn't working)</li>

include/ClientAPI.py (new file, 261 lines added)
View File

@ -0,0 +1,261 @@
from . import ClientTags
from . import HydrusData
from . import HydrusExceptions
from . import HydrusGlobals as HG
from . import HydrusSerialisable
import os
import threading
CLIENT_API_PERMISSION_ADD_URLS = 0
CLIENT_API_PERMISSION_ADD_FILES = 1
CLIENT_API_PERMISSION_ADD_TAGS = 2
CLIENT_API_PERMISSION_SEARCH_FILES = 3
basic_permission_to_str_lookup = {}
basic_permission_to_str_lookup[ CLIENT_API_PERMISSION_ADD_URLS ] = 'add urls for processing'
basic_permission_to_str_lookup[ CLIENT_API_PERMISSION_ADD_FILES ] = 'import files'
basic_permission_to_str_lookup[ CLIENT_API_PERMISSION_ADD_TAGS ] = 'add tags to files'
basic_permission_to_str_lookup[ CLIENT_API_PERMISSION_SEARCH_FILES ] = 'search for files'
SEARCH_RESULTS_CACHE_TIMEOUT = 4 * 3600
class APIManager( HydrusSerialisable.SerialisableBase ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_CLIENT_API_MANAGER
SERIALISABLE_NAME = 'Client API Manager'
SERIALISABLE_VERSION = 1
def __init__( self ):
HydrusSerialisable.SerialisableBase.__init__( self )
self._dirty = False
self._access_keys_to_permissions = {}
self._lock = threading.Lock()
HG.client_controller.sub( self, 'MaintainMemory', 'memory_maintenance_pulse' )
def _GetSerialisableInfo( self ):
serialisable_permissions_objects = [ permissions_object.GetSerialisableTuple() for permissions_object in self._access_keys_to_permissions.values() ]
return serialisable_permissions_objects
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
serialisable_permissions_objects = serialisable_info
permissions_objects = [ HydrusSerialisable.CreateFromSerialisableTuple( serialisable_permissions_object ) for serialisable_permissions_object in serialisable_permissions_objects ]
self._access_keys_to_permissions = { permissions_object.GetAccessKey() : permissions_object for permissions_object in permissions_objects }
def GetPermissions( self, access_key ):
with self._lock:
if access_key not in self._access_keys_to_permissions:
raise HydrusExceptions.DataMissing( 'Did not find an entry for that access key!' )
return self._access_keys_to_permissions[ access_key ]
def IsDirty( self ):
with self._lock:
return self._dirty
def MaintainMemory( self ):
with self._lock:
for permissions_object in self._access_keys_to_permissions.values():
permissions_object.MaintainMemory()
def SetClean( self ):
with self._lock:
self._dirty = False
def SetPermissions( self, permissions_objects ):
with self._lock:
self._access_keys_to_permissions = { permissions_object.GetAccessKey() : permissions_object for permissions_object in permissions_objects }
self._dirty = True
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_CLIENT_API_MANAGER ] = APIManager
class APIPermissions( HydrusSerialisable.SerialisableBaseNamed ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_CLIENT_API_PERMISSIONS
SERIALISABLE_NAME = 'Client API Permissions'
SERIALISABLE_VERSION = 1
def __init__( self, name = 'new api permissions' ):
HydrusSerialisable.SerialisableBaseNamed.__init__( self, name )
self._access_key = os.urandom( 32 )
self._basic_permissions = set()
self._search_tag_filter = ClientTags.TagFilter()
self._last_search_results = None
self._search_results_timeout = 0
self._lock = threading.Lock()
def _GetSerialisableInfo( self ):
serialisable_access_key = self._access_key.hex()
serialisable_basic_permissions = list( self._basic_permissions )
serialisable_search_tag_filter = self._search_tag_filter.GetSerialisableTuple()
return ( serialisable_access_key, serialisable_basic_permissions, serialisable_search_tag_filter )
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
( serialisable_access_key, serialisable_basic_permissions, serialisable_search_tag_filter ) = serialisable_info
self._access_key = bytes.fromhex( serialisable_access_key )
self._basic_permissions = set( serialisable_basic_permissions )
self._search_tag_filter = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_search_tag_filter )
def CanSearchThis( self, tags ):
# this is very simple, but a simple tag filter works for our v1.0 purposes
# you say 'only allow "for my script" tag' and then any file tagged with that is one you have allowed, nice
# also, if you blacklist "my secrets", then len filtered_tags reduces
# this doesn't support tag negation or OR
with self._lock:
filtered_tags = self._search_tag_filter.Filter( tags )
return len( filtered_tags ) == len( tags )
def CheckPermissionToSeeFiles( self, hash_ids ):
with self._lock:
if self._search_tag_filter.AllowsEverything():
return
if self._last_search_results is None:
raise HydrusExceptions.PermissionException( 'It looks like those search results are no longer available--please run the search again!' )
num_files_asked_for = len( hash_ids )
num_files_allowed_to_see = len( self._last_search_results.intersection( hash_ids ) )
if num_files_allowed_to_see != num_files_asked_for:
error_text = 'You do not seem to have access to all those files! You asked to see {} files, but you were only authorised to see {} of them!'
error_text = error_text.format( HydrusData.ToHumanInt( num_files_asked_for ), HydrusData.ToHumanInt( num_files_allowed_to_see ) )
raise HydrusExceptions.PermissionException( error_text )
self._search_results_timeout = HydrusData.GetNow() + SEARCH_RESULTS_CACHE_TIMEOUT
def GetAdvancedPermissionsString( self ):
with self._lock:
p_strings = []
if self.HasPermission( CLIENT_API_PERMISSION_SEARCH_FILES ):
p_strings.append( 'Can search: ' + self._search_tag_filter.ToPermittedString() )
return ''.join( p_strings )
def GetBasicPermissions( self ):
with self._lock:
return self._basic_permissions
def GetTagFilter( self ):
with self._lock:
return self._search_tag_filter
def HasPermission( self, permission ):
with self._lock:
return permission in self._basic_permissions
def MaintainMemory( self ):
with self._lock:
if self._last_search_results is not None and HydrusData.TimeHasPassed( self._search_results_timeout ):
self._last_search_results = None
def SetLastSearchResults( self, hash_ids ):
with self._lock:
if self._search_tag_filter.AllowsEverything():
return
self._last_search_results = set( hash_ids )
self._search_results_timeout = HydrusData.GetNow() + SEARCH_RESULTS_CACHE_TIMEOUT
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_CLIENT_API_PERMISSIONS ] = APIPermissions
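
To make the access-key flow above concrete, here is a minimal standalone sketch of the same pattern--a random 32-byte key mapped to a permissions object behind a lock--using illustrative stand-in names rather than the real hydrus serialisation classes.

import os
import threading

CLIENT_API_PERMISSION_SEARCH_FILES = 3

class SketchPermissions( object ):
    
    def __init__( self, name, basic_permissions ):
        
        self._name = name
        self._access_key = os.urandom( 32 ) # same 32-byte random key as APIPermissions
        self._basic_permissions = set( basic_permissions )
        
    def GetAccessKey( self ):
        
        return self._access_key
        
    def HasPermission( self, permission ):
        
        return permission in self._basic_permissions

lock = threading.Lock()
access_keys_to_permissions = {}

permissions = SketchPermissions( 'my script', { CLIENT_API_PERMISSION_SEARCH_FILES } )

with lock:
    
    access_keys_to_permissions[ permissions.GetAccessKey() ] = permissions

# a caller would present the key as hex, matching the serialised form above
presented_key = bytes.fromhex( permissions.GetAccessKey().hex() )

with lock:
    
    if presented_key not in access_keys_to_permissions:
        
        raise Exception( 'Did not find an entry for that access key!' )
        
    print( access_keys_to_permissions[ presented_key ].HasPermission( CLIENT_API_PERMISSION_SEARCH_FILES ) ) # True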

View File

@ -11,6 +11,7 @@ if wx_first_num < 4:
raise Exception( wx_error )
from . import ClientAPI
from . import ClientCaches
from . import ClientData
from . import ClientDaemons
@ -107,7 +108,9 @@ class Controller( HydrusController.HydrusController ):
if self._splash is not None:
self._splash.DestroyLater()
wx.CallAfter( self._splash.Hide )
wx.CallAfter( self._splash.Destroy )
self._splash = None
@ -163,7 +166,7 @@ class Controller( HydrusController.HydrusController ):
job_key = ClientThreading.JobKey()
job_key = ClientThreading.JobKey( cancel_on_shutdown = False )
job_key.Begin()
@ -459,6 +462,7 @@ class Controller( HydrusController.HydrusController ):
else:
# if they said no, don't keep asking
self.Write( 'last_shutdown_work_time', HydrusData.GetNow() )
@ -495,11 +499,6 @@ class Controller( HydrusController.HydrusController ):
return self._app
def GetBandwidthManager( self ):
raise NotImplementedError()
def GetClipboardText( self ):
if wx.TheClipboard.Open():
@ -625,6 +624,22 @@ class Controller( HydrusController.HydrusController ):
self.pub( 'splash_set_status_subtext', 'network' )
self.parsing_cache = ClientCaches.ParsingCache()
'''
client_api_manager = self.Read( 'serialisable', HydrusSerialisable.SERIALISABLE_TYPE_CLIENT_API_MANAGER )
if client_api_manager is None:
client_api_manager = ClientAPI.APIManager()
client_api_manager._dirty = True
wx.MessageBox( 'Your client api manager was missing on boot! I have recreated a new empty one. Please check that your hard drive and client are ok and let the hydrus dev know the details if there is a mystery.' )
self.client_api_manager = client_api_manager
'''
self.client_api_manager = ClientAPI.APIManager()
bandwidth_manager = self.Read( 'serialisable', HydrusSerialisable.SERIALISABLE_TYPE_NETWORK_BANDWIDTH_MANAGER )
@ -912,23 +927,10 @@ class Controller( HydrusController.HydrusController ):
disk_cache_maintenance_mb = self.new_options.GetNoneableInteger( 'disk_cache_maintenance_mb' )
if disk_cache_maintenance_mb is not None:
if disk_cache_maintenance_mb is not None and not self._view_shutdown:
if self.CurrentlyVeryIdle():
cache_period = 3600
disk_cache_stop_time = HydrusData.GetNow() + 30
elif self.CurrentlyIdle():
cache_period = 1800
disk_cache_stop_time = HydrusData.GetNow() + 10
else:
cache_period = 240
disk_cache_stop_time = HydrusData.GetNow() + 2
cache_period = 3600
disk_cache_stop_time = HydrusData.GetNow() + 2
if HydrusData.TimeHasPassed( self._timestamps[ 'last_disk_cache_population' ] + cache_period ):
@ -1138,7 +1140,7 @@ class Controller( HydrusController.HydrusController ):
self._app.MainLoop()
HydrusData.Print( 'shutting down controller\u2026' )
HydrusData.DebugPrint( 'shutting down controller\u2026' )
def SaveDirtyObjects( self ):
@ -1151,7 +1153,14 @@ class Controller( HydrusController.HydrusController ):
self.WriteSynchronous( 'dirty_services', dirty_services )
'''
if self.client_api_manager.IsDirty():
self.WriteSynchronous( 'serialisable', self.client_api_manager )
self.client_api_manager.SetClean()
'''
if self.network_engine.bandwidth_manager.IsDirty():
self.WriteSynchronous( 'serialisable', self.network_engine.bandwidth_manager )
@ -1313,6 +1322,8 @@ class Controller( HydrusController.HydrusController ):
try:
gc.collect()
self.pub( 'splash_set_title_text', 'shutting down gui\u2026' )
self.ShutdownView()
@ -1322,7 +1333,6 @@ class Controller( HydrusController.HydrusController ):
self.ShutdownModel()
self.pub( 'splash_set_title_text', 'cleaning up\u2026' )
self.pub( 'splash_set_status_text', '' )
HydrusData.CleanRunningFile( self.db_dir, 'client' )

View File

@ -1,3 +1,4 @@
from . import ClientAPI
from . import ClientCaches
from . import ClientData
from . import ClientDefaults
@ -379,6 +380,8 @@ class DB( HydrusDB.HydrusDB ):
self._controller.pub( 'splash_set_status_text', 'analyzing ' + name )
job_key.SetVariable( 'popup_text_1', 'analyzing ' + name )
time.sleep( 0.25 )
started = HydrusData.GetNowPrecise()
self._AnalyzeTable( name )
@ -2971,6 +2974,10 @@ class DB( HydrusDB.HydrusDB ):
self._SetJSONDump( shortcuts )
#client_api_manager = ClientAPI.APIManager()
#self._SetJSONDump( client_api_manager )
bandwidth_manager = ClientNetworkingBandwidth.NetworkBandwidthManager()
ClientDefaults.SetDefaultBandwidthManagerRules( bandwidth_manager )
@ -7488,6 +7495,7 @@ class DB( HydrusDB.HydrusDB ):
try:
next_stop_time_presentation = 0
next_gc_collect = 0
paths = [ os.path.join( self._db_dir, filename ) for filename in list(self._db_filenames.values()) ]
@ -7503,13 +7511,13 @@ class DB( HydrusDB.HydrusDB ):
if HydrusData.TimeHasPassed( next_stop_time_presentation ):
HG.client_controller.pub( 'splash_set_status_subtext', 'cached ' + HydrusData.TimestampToPrettyTimeDelta( stop_time, just_now_string = 'ok', just_now_threshold = 1 ) )
if HydrusData.TimeHasPassed( stop_time ):
return False
HG.client_controller.pub( 'splash_set_status_subtext', 'cached ' + HydrusData.TimestampToPrettyTimeDelta( stop_time, just_now_string = 'ok', just_now_threshold = 1 ) )
next_stop_time_presentation = HydrusData.GetNow() + 1
@ -7526,11 +7534,22 @@ class DB( HydrusDB.HydrusDB ):
return False
if HydrusData.TimeHasPassed( next_gc_collect ):
gc.collect()
next_gc_collect = HydrusData.GetNow() + 1
time.sleep( 0.00001 )
finally:
gc.collect()
self._InitDBCursor()
@ -7962,25 +7981,25 @@ class DB( HydrusDB.HydrusDB ):
( tag, hashes, service_key_target ) = sub_row
source_table_name = current_mappings_table_name
source_table_names = [ current_mappings_table_name, pending_mappings_table_name ]
elif sub_action == 'delete':
( tag, hashes ) = sub_row
source_table_name = current_mappings_table_name
source_table_names = [ current_mappings_table_name ]
elif sub_action == 'delete_deleted':
( tag, hashes ) = sub_row
source_table_name = deleted_mappings_table_name
source_table_names = [ deleted_mappings_table_name ]
elif sub_action == 'delete_for_deleted_files':
( tag, hashes ) = sub_row
source_table_name = current_mappings_table_name + ' NATURAL JOIN deleted_files'
source_table_names = [ current_mappings_table_name + ' NATURAL JOIN deleted_files' ]
predicates.append( 'deleted_files.service_id = ' + str( self._combined_local_file_service_id ) )
@ -8021,28 +8040,33 @@ class DB( HydrusDB.HydrusDB ):
if do_namespace_join:
num_to_do = 0
for source_table_name in source_table_names:
source_table_name = source_table_name + ' NATURAL JOIN tags NATURAL JOIN namespaces'
if do_namespace_join:
source_table_name = source_table_name + ' NATURAL JOIN tags NATURAL JOIN namespaces'
if hashes is not None:
hash_ids = self._GetHashIds( hashes )
predicates.append( 'hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) )
if len( predicates ) == 0:
self._c.execute( 'INSERT INTO temp_operation ( tag_id, hash_id ) SELECT tag_id, hash_id FROM ' + source_table_name + ';' )
else:
self._c.execute( 'INSERT INTO temp_operation ( tag_id, hash_id ) SELECT tag_id, hash_id FROM ' + source_table_name + ' WHERE ' + ' AND '.join( predicates ) + ';' )
if hashes is not None:
hash_ids = self._GetHashIds( hashes )
predicates.append( 'hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) )
if len( predicates ) == 0:
self._c.execute( 'INSERT INTO temp_operation ( tag_id, hash_id ) SELECT tag_id, hash_id FROM ' + source_table_name + ';' )
else:
self._c.execute( 'INSERT INTO temp_operation ( tag_id, hash_id ) SELECT tag_id, hash_id FROM ' + source_table_name + ' WHERE ' + ' AND '.join( predicates ) + ';' )
num_to_do = self._GetRowCount()
num_to_do += self._GetRowCount()
i = 0
@ -8935,6 +8959,8 @@ class DB( HydrusDB.HydrusDB ):
finally:
HG.client_controller.pub( 'splash_set_status_text', 'committing' )
self._AnalyzeStaleBigTables()
job_key.SetVariable( 'popup_text_1', 'finished' )
@ -11535,6 +11561,14 @@ class DB( HydrusDB.HydrusDB ):
self._CreateIndex( 'file_viewing_stats', [ 'media_viewtime' ] )
'''
if version == bring in api manager:
client_api_manager = ClientAPI.APIManager()
self._SetJSONDump( client_api_manager )
'''
self._controller.pub( 'splash_set_title_text', 'updated db to v' + str( version + 1 ) )
self._c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )
@ -11714,7 +11748,7 @@ class DB( HydrusDB.HydrusDB ):
self._CacheCombinedFilesMappingsUpdate( tag_service_id, combined_files_counts )
#
#
post_existing_tag_ids = self._STS( self._c.execute( 'SELECT tag_id as t FROM temp_tag_ids WHERE EXISTS ( SELECT 1 FROM ' + current_mappings_table_name + ' WHERE tag_id = t );' ) )
post_existing_hash_ids = self._STS( self._c.execute( 'SELECT hash_id as h FROM temp_hash_ids WHERE EXISTS ( SELECT 1 FROM ' + current_mappings_table_name + ' WHERE hash_id = h );' ) )

View File

@ -174,32 +174,32 @@ class DuplicateActionOptions( HydrusSerialisable.SerialisableBase ):
add_content_action = HC.CONTENT_UPDATE_PEND
first_current_tags = first_media.GetTagsManager().GetCurrent( service_key )
second_current_tags = second_media.GetTagsManager().GetCurrent( service_key )
first_tags = first_media.GetTagsManager().GetCurrentAndPending( service_key )
second_tags = second_media.GetTagsManager().GetCurrentAndPending( service_key )
first_current_tags = tag_filter.Filter( first_current_tags )
second_current_tags = tag_filter.Filter( second_current_tags )
first_tags = tag_filter.Filter( first_tags )
second_tags = tag_filter.Filter( second_tags )
if action == HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE:
first_needs = second_current_tags.difference( first_current_tags )
second_needs = first_current_tags.difference( second_current_tags )
first_needs = second_tags.difference( first_tags )
second_needs = first_tags.difference( second_tags )
content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, add_content_action, ( tag, first_hashes ) ) for tag in first_needs ) )
content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, add_content_action, ( tag, second_hashes ) ) for tag in second_needs ) )
elif action == HC.CONTENT_MERGE_ACTION_COPY:
first_needs = second_current_tags.difference( first_current_tags )
first_needs = second_tags.difference( first_tags )
content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, add_content_action, ( tag, first_hashes ) ) for tag in first_needs ) )
elif service_type == HC.LOCAL_TAG and action == HC.CONTENT_MERGE_ACTION_MOVE:
first_needs = second_current_tags.difference( first_current_tags )
first_needs = second_tags.difference( first_tags )
content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, add_content_action, ( tag, first_hashes ) ) for tag in first_needs ) )
content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( tag, second_hashes ) ) for tag in second_current_tags ) )
content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( tag, second_hashes ) ) for tag in second_tags ) )
if len( content_updates ) > 0:
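
As a quick illustration of the change above--merges now operate on current plus pending tags--here is a minimal standalone sketch of the two-way merge using plain sets; the tag values are made up.

# current + pending tags on each file (illustrative values)
first_tags = { 'blue eyes', 'smile' }
second_tags = { 'blue eyes', 'long hair' }

# two-way merge: each side gets whatever the other has that it lacks
first_needs = second_tags.difference( first_tags )
second_needs = first_tags.difference( second_tags )

print( first_needs )  # {'long hair'}
print( second_needs ) # {'smile'}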

View File

@ -1068,7 +1068,7 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
to_print.sort( key = lambda pair: -pair[1] )
for ( k, v ) in list( count.items() ):
for ( k, v ) in to_print:
if v > 100:
@ -2038,6 +2038,7 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
ClientGUIMenus.AppendMenuCheckItem( self, profile_modes, 'db profile mode', 'Run detailed \'profiles\' on every database query and dump this information to the log (this is very useful for hydrus dev to have, if something is running slow for you!).', HG.db_profile_mode, self._SwitchBoolean, 'db_profile_mode' )
ClientGUIMenus.AppendMenuCheckItem( self, profile_modes, 'menu profile mode', 'Run detailed \'profiles\' on menu actions.', HG.menu_profile_mode, self._SwitchBoolean, 'menu_profile_mode' )
ClientGUIMenus.AppendMenuCheckItem( self, profile_modes, 'pubsub report mode', 'Report info about every pubsub processed.', HG.pubsub_report_mode, self._SwitchBoolean, 'pubsub_report_mode' )
ClientGUIMenus.AppendMenuCheckItem( self, profile_modes, 'pubsub profile mode', 'Run detailed \'profiles\' on every internal publisher/subscriber message and dump this information to the log. This can hammer your log with dozens of large dumps every second. Don\'t run it unless you know you need to.', HG.pubsub_profile_mode, self._SwitchBoolean, 'pubsub_profile_mode' )
ClientGUIMenus.AppendMenuCheckItem( self, profile_modes, 'ui timer profile mode', 'Run detailed \'profiles\' on every ui timer update. This will likely spam you!', HG.ui_timer_profile_mode, self._SwitchBoolean, 'ui_timer_profile_mode' )
@ -3526,6 +3527,10 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
HG.network_report_mode = not HG.network_report_mode
elif name == 'pubsub_report_mode':
HG.pubsub_report_mode = not HG.pubsub_report_mode
elif name == 'pubsub_profile_mode':
HG.pubsub_profile_mode = not HG.pubsub_profile_mode
@ -4893,7 +4898,7 @@ class FrameSplashStatus( object ):
def SetText( self, text, print_to_log = True ):
if print_to_log:
if print_to_log and len( text ) > 0:
HydrusData.Print( text )
@ -4921,7 +4926,7 @@ class FrameSplashStatus( object ):
if print_to_log:
HydrusData.Print( text )
HydrusData.DebugPrint( text )
with self._lock:

View File

@ -428,6 +428,9 @@ class ReviewExportFilesPanel( ClientGUIScrolledPanels.ReviewPanel ):
self._delete_files_after_export = wx.CheckBox( self, label = 'delete files from client after export?' )
self._delete_files_after_export.SetForegroundColour( wx.Colour( 127, 0, 0 ) )
self._export_symlinks = wx.CheckBox( self, label = 'EXPERIMENTAL: export symlinks' )
self._export_symlinks.SetForegroundColour( wx.Colour( 127, 0, 0 ) )
text = 'This will export all the files\' tags, newline separated, into .txts beside the files themselves.'
self._export_tag_txts = wx.CheckBox( self, label = 'export tags to .txt files?' )
@ -457,6 +460,11 @@ class ReviewExportFilesPanel( ClientGUIScrolledPanels.ReviewPanel ):
self._delete_files_after_export.SetValue( HG.client_controller.new_options.GetBoolean( 'delete_files_after_export' ) )
self._delete_files_after_export.Bind( wx.EVT_CHECKBOX, self.EventDeleteFilesChanged )
if not HG.client_controller.new_options.GetBoolean( 'advanced_mode' ):
self._export_symlinks.Hide()
#
top_hbox = wx.BoxSizer( wx.HORIZONTAL )
@ -485,6 +493,7 @@ class ReviewExportFilesPanel( ClientGUIScrolledPanels.ReviewPanel ):
vbox.Add( self._export_path_box, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.Add( self._filenames_box, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.Add( self._delete_files_after_export, CC.FLAGS_LONE_BUTTON )
vbox.Add( self._export_symlinks, CC.FLAGS_LONE_BUTTON )
vbox.Add( self._export_tag_txts, CC.FLAGS_LONE_BUTTON )
vbox.Add( self._export, CC.FLAGS_LONE_BUTTON )
@ -532,6 +541,7 @@ class ReviewExportFilesPanel( ClientGUIScrolledPanels.ReviewPanel ):
def _DoExport( self, quit_afterwards = False ):
delete_afterwards = self._delete_files_after_export.GetValue()
export_symlinks = self._export_symlinks.GetValue() and not delete_afterwards
if quit_afterwards:
@ -626,7 +636,7 @@ class ReviewExportFilesPanel( ClientGUIScrolledPanels.ReviewPanel ):
def do_it( neighbouring_txt_tag_service_keys, delete_afterwards, quit_afterwards ):
def do_it( neighbouring_txt_tag_service_keys, delete_afterwards, export_symlinks, quit_afterwards ):
for ( index, ( ordering_index, media ) ) in enumerate( to_do ):
@ -678,10 +688,17 @@ class ReviewExportFilesPanel( ClientGUIScrolledPanels.ReviewPanel ):
source_path = client_files_manager.GetFilePath( hash, mime, check_file_exists = False )
HydrusPaths.MirrorFile( source_path, path )
try: os.chmod( path, stat.S_IWRITE | stat.S_IREAD )
except: pass
if export_symlinks:
os.symlink( source_path, path )
else:
HydrusPaths.MirrorFile( source_path, path )
try: os.chmod( path, stat.S_IWRITE | stat.S_IREAD )
except: pass
except:
@ -716,7 +733,7 @@ class ReviewExportFilesPanel( ClientGUIScrolledPanels.ReviewPanel ):
wx.CallAfter( wx_done, quit_afterwards )
HG.client_controller.CallToThread( do_it, self._neighbouring_txt_tag_service_keys, delete_afterwards, quit_afterwards )
HG.client_controller.CallToThread( do_it, self._neighbouring_txt_tag_service_keys, delete_afterwards, export_symlinks, quit_afterwards )
def _GetPath( self, media ):
@ -793,7 +810,14 @@ class ReviewExportFilesPanel( ClientGUIScrolledPanels.ReviewPanel ):
def EventDeleteFilesChanged( self, event ):
HG.client_controller.new_options.SetBoolean( 'delete_files_after_export', self._delete_files_after_export.GetValue() )
value = self._delete_files_after_export.GetValue()
HG.client_controller.new_options.SetBoolean( 'delete_files_after_export', value )
if value:
self._export_symlinks.SetValue( False )
def EventExportTagTxtsChanged( self, event ):

View File

@ -102,25 +102,25 @@ class AdvancedContentUpdatePanel( ClientGUIScrolledPanels.ReviewPanel ):
if len( services ) > 0:
self._action_dropdown.Append( 'copy', self.COPY )
self._action_dropdown.Append( 'copy current and pending mappings', self.COPY )
if self._service_key == CC.LOCAL_TAG_SERVICE_KEY:
self._action_dropdown.Append( 'delete', self.DELETE )
self._action_dropdown.Append( 'clear deleted record', self.DELETE_DELETED )
self._action_dropdown.Append( 'delete from deleted files', self.DELETE_FOR_DELETED_FILES )
self._action_dropdown.Append( 'delete current mappings', self.DELETE )
self._action_dropdown.Append( 'clear deleted mappings record', self.DELETE_DELETED )
self._action_dropdown.Append( 'delete current mappings from deleted files', self.DELETE_FOR_DELETED_FILES )
self._action_dropdown.Select( 0 )
#
self._tag_type_dropdown.Append( 'all mappings', self.ALL_MAPPINGS )
self._tag_type_dropdown.Append( 'all namespaced mappings', self.NAMESPACED )
self._tag_type_dropdown.Append( 'all unnamespaced mappings', self.UNNAMESPACED )
self._tag_type_dropdown.Append( 'specific tag\'s mappings', self.SPECIFIC_MAPPINGS )
self._tag_type_dropdown.Append( 'specific namespace\'s mappings', self.SPECIFIC_NAMESPACE )
self._tag_type_dropdown.Append( 'all', self.ALL_MAPPINGS )
self._tag_type_dropdown.Append( 'all namespaced', self.NAMESPACED )
self._tag_type_dropdown.Append( 'all unnamespaced', self.UNNAMESPACED )
self._tag_type_dropdown.Append( 'specific tag', self.SPECIFIC_MAPPINGS )
self._tag_type_dropdown.Append( 'specific namespace', self.SPECIFIC_NAMESPACE )
self._tag_type_dropdown.Select( 0 )
@ -173,7 +173,7 @@ class AdvancedContentUpdatePanel( ClientGUIScrolledPanels.ReviewPanel ):
message = 'These advanced operations are powerful, so think before you click. They can lock up your client for a _long_ time, and are not undoable.'
message += os.linesep * 2
message += 'You may need to refresh your existing searches to see their effect.'
message += 'You may need to restart your client to see their effect.'
st = ClientGUICommon.BetterStaticText( self, message )
@ -1530,6 +1530,8 @@ class ReviewDownloaderImport( ClientGUIScrolledPanels.ReviewPanel ):
win = ClientGUICommon.BufferedWindowIcon( self, lain_bmp )
win.SetCursor( wx.Cursor( wx.CURSOR_HAND ) )
self._select_from_list = wx.CheckBox( self )
if HG.client_controller.new_options.GetBoolean( 'advanced_mode' ):
@ -1548,8 +1550,10 @@ class ReviewDownloaderImport( ClientGUIScrolledPanels.ReviewPanel ):
win.SetDropTarget( ClientDragDrop.FileDropTarget( self, filenames_callable = self.ImportFromDragDrop ) )
win.Bind( wx.EVT_LEFT_DOWN, self.EventLainClick )
def ImportFromDragDrop( self, paths ):
def _ImportPaths( self, paths ):
gugs = []
url_matches = []
@ -1881,6 +1885,24 @@ class ReviewDownloaderImport( ClientGUIScrolledPanels.ReviewPanel ):
wx.MessageBox( final_message )
def EventLainClick( self, event ):
with wx.FileDialog( self, 'Select the pngs to add.', style = wx.FD_OPEN | wx.FD_MULTIPLE ) as dlg:
if dlg.ShowModal() == wx.ID_OK:
paths = dlg.GetPaths()
self._ImportPaths( paths )
def ImportFromDragDrop( self, paths ):
self._ImportPaths( paths )
class ReviewHowBonedAmI( ClientGUIScrolledPanels.ReviewPanel ):
def __init__( self, parent, stats ):

View File

@ -689,11 +689,9 @@ class FileImportOptions( HydrusSerialisable.SerialisableBase ):
def CheckNetworkDownload( self, possible_mime, size, certain ):
def CheckNetworkDownload( self, possible_mime, num_bytes, is_complete_file_size ):
if certain:
# by certain, we really mean 'content-length said', hence the 'apparently'
if is_complete_file_size:
error_prefix = 'Download was apparently '
@ -704,22 +702,22 @@ class FileImportOptions( HydrusSerialisable.SerialisableBase ):
if possible_mime is not None:
if possible_mime == HC.IMAGE_GIF and self._max_gif_size is not None and size > self._max_gif_size:
if possible_mime == HC.IMAGE_GIF and self._max_gif_size is not None and num_bytes > self._max_gif_size:
raise HydrusExceptions.SizeException( error_prefix + HydrusData.ToHumanBytes( size ) + ' but the upper limit for gifs is ' + HydrusData.ToHumanBytes( self._max_gif_size ) + '.' )
raise HydrusExceptions.SizeException( error_prefix + HydrusData.ToHumanBytes( num_bytes ) + ' but the upper limit for gifs is ' + HydrusData.ToHumanBytes( self._max_gif_size ) + '.' )
if self._max_size is not None and size > self._max_size:
if self._max_size is not None and num_bytes > self._max_size:
raise HydrusExceptions.SizeException( error_prefix + HydrusData.ToHumanBytes( size ) + ' but the upper limit is ' + HydrusData.ToHumanBytes( self._max_size ) + '.' )
raise HydrusExceptions.SizeException( error_prefix + HydrusData.ToHumanBytes( num_bytes ) + ' but the upper limit is ' + HydrusData.ToHumanBytes( self._max_size ) + '.' )
if certain:
if is_complete_file_size:
if self._min_size is not None and size < self._min_size:
if self._min_size is not None and num_bytes < self._min_size:
raise HydrusExceptions.SizeException( error_prefix + HydrusData.ToHumanBytes( size ) + ' but the lower limit is ' + HydrusData.ToHumanBytes( self._min_size ) + '.' )
raise HydrusExceptions.SizeException( error_prefix + HydrusData.ToHumanBytes( num_bytes ) + ' but the lower limit is ' + HydrusData.ToHumanBytes( self._min_size ) + '.' )
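
A minimal standalone sketch of the corrected size gate, assuming hypothetical limits; the real checks live on FileImportOptions and raise SizeException.

def check_network_download( num_bytes, is_complete_file_size, min_size = None, max_size = None ):
    
    # 'is_complete_file_size' really means 'content-length said', hence the 'apparently'
    error_prefix = 'Download was apparently ' if is_complete_file_size else 'Download was at least '
    
    if max_size is not None and num_bytes > max_size:
        
        raise Exception( error_prefix + str( num_bytes ) + ' bytes but the upper limit is ' + str( max_size ) + ' bytes.' )
        
    # the lower bound only applies once the complete file size is known
    if is_complete_file_size and min_size is not None and num_bytes < min_size:
        
        raise Exception( error_prefix + str( num_bytes ) + ' bytes but the lower limit is ' + str( min_size ) + ' bytes.' )

check_network_download( 5 * 1024 * 1024, True, min_size = 1024, max_size = 100 * 1024 * 1024 ) # passes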

View File

@ -14,6 +14,7 @@ from . import HydrusExceptions
from . import HydrusGlobals as HG
from . import HydrusPaths
from . import HydrusSerialisable
from . import HydrusThreading
import os
import random
import time
@ -521,11 +522,12 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
p1 = HC.options[ 'pause_subs_sync' ]
p2 = HydrusThreading.IsThreadShuttingDown()
p3 = HG.view_shutdown
p4 = not self._QueryBandwidthIsOK( query )
p5 = not self._QueryFileLoginIsOK( query )
if p1 or p3 or p4 or p5:
if p1 or p2 or p3 or p4 or p5:
if p4 and this_query_has_done_work:
@ -794,8 +796,9 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
p1 = HC.options[ 'pause_subs_sync' ]
p2 = HG.view_shutdown
p3 = not self._QuerySyncLoginIsOK( query )
p4 = HydrusThreading.IsThreadShuttingDown()
if p1 or p2 or p3:
if p1 or p2 or p3 or p4:
if p3:
@ -1064,7 +1067,7 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
example_network_contexts = self._GetExampleNetworkContexts( query )
estimate = HG.client_controller.network_engine.bandwidth_manager.GetWaitingEstimate( example_network_contexts )
( estimate, bandwidth_network_context ) = HG.client_controller.network_engine.bandwidth_manager.GetWaitingEstimateAndContext( example_network_contexts )
return estimate
@ -1082,7 +1085,7 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
example_network_contexts = self._GetExampleNetworkContexts( query )
estimate = HG.client_controller.network_engine.bandwidth_manager.GetWaitingEstimate( example_network_contexts )
( estimate, bandwidth_network_context ) = HG.client_controller.network_engine.bandwidth_manager.GetWaitingEstimateAndContext( example_network_contexts )
estimates.append( estimate )

View File

@ -142,8 +142,8 @@ def GetDuplicateComparisonStatements( shown_media, comparison_media ):
# more tags
s_num_tags = len( shown_media.GetTagsManager().GetCurrent() )
c_num_tags = len( comparison_media.GetTagsManager().GetCurrent() )
s_num_tags = len( shown_media.GetTagsManager().GetCurrentAndPending() )
c_num_tags = len( comparison_media.GetTagsManager().GetCurrentAndPending() )
if s_num_tags > 0 and c_num_tags > 0:
@ -2496,6 +2496,11 @@ class TagsManagerSimple( object ):
return statuses_to_tags[ HC.CONTENT_STATUS_CURRENT ]
def GetCurrentAndPending( self, service_key = CC.COMBINED_TAG_SERVICE_KEY ):
return self.GetCurrent( service_key = service_key ).union( self.GetPending( service_key = service_key ) )
def GetDeleted( self, service_key = CC.COMBINED_TAG_SERVICE_KEY ):
if service_key == CC.COMBINED_TAG_SERVICE_KEY:

View File

@ -334,7 +334,7 @@ class NetworkBandwidthManager( HydrusSerialisable.SerialisableBase ):
def GetWaitingEstimate( self, network_contexts ):
def GetWaitingEstimateAndContext( self, network_contexts ):
with self._lock:
@ -346,16 +346,18 @@ class NetworkBandwidthManager( HydrusSerialisable.SerialisableBase ):
bandwidth_tracker = self._network_contexts_to_bandwidth_trackers[ network_context ]
estimates.append( bandwidth_rules.GetWaitingEstimate( bandwidth_tracker ) )
estimates.append( ( bandwidth_rules.GetWaitingEstimate( bandwidth_tracker ), network_context ) )
estimates.sort( key = lambda pair: -pair[0] ) # biggest first
if len( estimates ) == 0:
return 0
return ( 0, ClientNetworkingContexts.GLOBAL_NETWORK_CONTEXT )
else:
return max( estimates )
return estimates[0]
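
To show what the new return shape looks like, here is a minimal standalone sketch with made-up contexts as plain strings: the pairs are sorted biggest-wait-first and the top pair comes back, with a zero/global fallback when there are no estimates at all.

estimates = [ ( 5, 'web domain: example.com' ), ( 120, 'global' ), ( 0, 'downloader page' ) ]

estimates.sort( key = lambda pair: -pair[0] ) # biggest first

if len( estimates ) == 0:
    
    ( estimate, bandwidth_network_context ) = ( 0, 'global' )
    
else:
    
    ( estimate, bandwidth_network_context ) = estimates[0]

print( estimate, bandwidth_network_context ) # 120 global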

View File

@ -127,6 +127,18 @@ class NetworkContext( HydrusSerialisable.SerialisableBase ):
return summary
def ToHumanString( self ):
if self.IsEphemeral():
return CC.network_context_type_string_lookup[ self.context_type ] + ' instance'
else:
return self.ToString()
def ToString( self ):
if self.context_data is None:

View File

@ -7,6 +7,7 @@ from . import HydrusData
from . import HydrusExceptions
from . import HydrusGlobals as HG
from . import HydrusNetworking
from . import HydrusThreading
import os
import requests
import threading
@ -25,21 +26,12 @@ def ConvertStatusCodeAndDataIntoExceptionInfo( status_code, data, is_hydrus_serv
error_text = repr( data )
if len( error_text ) > 1024:
large_chunk = error_text[ : 512 * 1024 ]
smaller_chunk = large_chunk[:256]
HydrusData.DebugPrint( large_chunk )
error_text = 'The server\'s error text was too long to display. The first part follows, while a larger chunk has been written to the log.'
error_text += os.linesep
error_text += smaller_chunk
print_long_error_text = True
if status_code == 304:
print_long_error_text = False
eclass = HydrusExceptions.NotModifiedException
elif status_code == 401:
@ -52,6 +44,8 @@ def ConvertStatusCodeAndDataIntoExceptionInfo( status_code, data, is_hydrus_serv
elif status_code == 404:
print_long_error_text = False
eclass = HydrusExceptions.NotFoundException
elif status_code == 419:
@ -82,6 +76,19 @@ def ConvertStatusCodeAndDataIntoExceptionInfo( status_code, data, is_hydrus_serv
eclass = HydrusExceptions.NetworkException
if len( error_text ) > 1024 and print_long_error_text:
large_chunk = error_text[ : 512 * 1024 ]
smaller_chunk = large_chunk[:256]
HydrusData.DebugPrint( large_chunk )
error_text = 'The server\'s error text was too long to display. The first part follows, while a larger chunk has been written to the log.'
error_text += os.linesep
error_text += smaller_chunk
e = eclass( error_text )
return ( e, error_text )
@ -275,7 +282,7 @@ class NetworkJob( object ):
return True
if self.engine.controller.ModelIsShutdown():
if self.engine.controller.ModelIsShutdown() or HydrusThreading.IsThreadShuttingDown():
return True
@ -353,9 +360,9 @@ class NetworkJob( object ):
if self._file_import_options is not None:
certain = True
is_complete_file_size = True
self._file_import_options.CheckNetworkDownload( mime, self._num_bytes_to_read, certain )
self._file_import_options.CheckNetworkDownload( mime, self._num_bytes_to_read, is_complete_file_size )
else:
@ -386,9 +393,9 @@ class NetworkJob( object ):
if self._file_import_options is not None:
certain = False
is_complete_file_size = False
self._file_import_options.CheckNetworkDownload( mime, self._num_bytes_to_read, certain )
self._file_import_options.CheckNetworkDownload( mime, self._num_bytes_read, is_complete_file_size )
@ -478,7 +485,7 @@ class NetworkJob( object ):
else:
bandwidth_waiting_duration = self.engine.bandwidth_manager.GetWaitingEstimate( self._network_contexts )
( bandwidth_waiting_duration, bandwidth_network_context ) = self.engine.bandwidth_manager.GetWaitingEstimateAndContext( self._network_contexts )
will_override = self._bandwidth_manual_override_delayed_timestamp is not None
@ -491,24 +498,24 @@ class NetworkJob( object ):
override_coming_first = override_waiting_duration < bandwidth_waiting_duration
just_now_threshold = 2
if override_coming_first:
waiting_duration = override_waiting_duration
prefix = 'overriding bandwidth '
waiting_str = HydrusData.TimestampToPrettyTimeDelta( self._bandwidth_manual_override_delayed_timestamp, just_now_string = 'imminently', just_now_threshold = 2 )
waiting_str = 'overriding bandwidth ' + HydrusData.TimestampToPrettyTimeDelta( self._bandwidth_manual_override_delayed_timestamp, just_now_string = 'imminently', just_now_threshold = just_now_threshold )
else:
waiting_duration = bandwidth_waiting_duration
prefix = 'bandwidth free '
waiting_str = HydrusData.TimestampToPrettyTimeDelta( HydrusData.GetNow() + waiting_duration, just_now_string = 'imminently', just_now_threshold = 2 )
waiting_str = 'bandwidth free ' + HydrusData.TimestampToPrettyTimeDelta( HydrusData.GetNow() + waiting_duration, just_now_string = 'imminently', just_now_threshold = just_now_threshold )
self._status_text = prefix + waiting_str + '\u2026'
waiting_str += '\u2026 (' + bandwidth_network_context.ToHumanString() + ')'
self._status_text = waiting_str
if waiting_duration > 1200:
@ -1119,7 +1126,7 @@ class NetworkJob( object ):
with self._lock:
if self.engine.controller.ModelIsShutdown():
if self.engine.controller.ModelIsShutdown() or HydrusThreading.IsThreadShuttingDown():
raise HydrusExceptions.ShutdownException()

View File

@ -267,6 +267,22 @@ class TagFilter( HydrusSerialisable.SerialisableBase ):
def AllowsEverything( self ):
with self._lock:
for ( tag_slice, rule ) in self._tag_slices_to_rules.items():
if rule == CC.FILTER_BLACKLIST:
return False
return True
def Filter( self, tags ):
with self._lock:
@ -293,159 +309,168 @@ class TagFilter( HydrusSerialisable.SerialisableBase ):
def ToBlacklistString( self ):
blacklist = []
whitelist = []
for ( tag_slice, rule ) in list(self._tag_slices_to_rules.items()):
with self._lock:
if rule == CC.FILTER_BLACKLIST:
blacklist = []
whitelist = []
for ( tag_slice, rule ) in list(self._tag_slices_to_rules.items()):
blacklist.append( tag_slice )
elif rule == CC.FILTER_WHITELIST:
whitelist.append( tag_slice )
if rule == CC.FILTER_BLACKLIST:
blacklist.append( tag_slice )
elif rule == CC.FILTER_WHITELIST:
whitelist.append( tag_slice )
blacklist.sort()
whitelist.sort()
if len( blacklist ) == 0:
blacklist.sort()
whitelist.sort()
return 'no blacklist set'
else:
if set( blacklist ) == { '', ':' }:
if len( blacklist ) == 0:
text = 'blacklisting on any tags'
return 'no blacklist set'
else:
text = 'blacklisting on ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in blacklist ) )
if set( blacklist ) == { '', ':' }:
text = 'blacklisting on any tags'
else:
text = 'blacklisting on ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in blacklist ) )
if len( whitelist ) > 0:
if len( whitelist ) > 0:
text += ' except ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in whitelist ) )
text += ' except ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in whitelist ) )
return text
return text
def ToCensoredString( self ):
blacklist = []
whitelist = []
for ( tag_slice, rule ) in list(self._tag_slices_to_rules.items()):
with self._lock:
if rule == CC.FILTER_BLACKLIST:
blacklist = []
whitelist = []
for ( tag_slice, rule ) in list(self._tag_slices_to_rules.items()):
blacklist.append( tag_slice )
elif rule == CC.FILTER_WHITELIST:
whitelist.append( tag_slice )
if rule == CC.FILTER_BLACKLIST:
blacklist.append( tag_slice )
elif rule == CC.FILTER_WHITELIST:
whitelist.append( tag_slice )
blacklist.sort()
whitelist.sort()
if len( blacklist ) == 0:
blacklist.sort()
whitelist.sort()
return 'all tags allowed'
else:
if set( blacklist ) == { '', ':' }:
if len( blacklist ) == 0:
text = 'no tags allowed'
return 'all tags allowed'
else:
text = 'all but ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in blacklist ) ) + ' allowed'
if set( blacklist ) == { '', ':' }:
text = 'no tags allowed'
else:
text = 'all but ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in blacklist ) ) + ' allowed'
if len( whitelist ) > 0:
text += ' except ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in whitelist ) )
return text
if len( whitelist ) > 0:
def ToPermittedString( self ):
with self._lock:
blacklist = []
whitelist = []
for ( tag_slice, rule ) in list(self._tag_slices_to_rules.items()):
text += ' except ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in whitelist ) )
if rule == CC.FILTER_BLACKLIST:
blacklist.append( tag_slice )
elif rule == CC.FILTER_WHITELIST:
whitelist.append( tag_slice )
blacklist.sort()
whitelist.sort()
if len( blacklist ) == 0:
return 'all tags'
else:
if set( blacklist ) == { '', ':' }:
if len( whitelist ) == 0:
text = 'no tags'
else:
text = 'only ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in whitelist ) )
elif set( blacklist ) == { '' }:
text = 'all namespaced tags'
if len( whitelist ) > 0:
text += ' and ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in whitelist ) )
elif set( blacklist ) == { ':' }:
text = 'all unnamespaced tags'
if len( whitelist ) > 0:
text += ' and ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in whitelist ) )
else:
text = 'all tags except ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in blacklist ) )
if len( whitelist ) > 0:
text += ' (except ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in whitelist ) ) + ')'
return text
def ToPermittedString( self ):
blacklist = []
whitelist = []
for ( tag_slice, rule ) in list(self._tag_slices_to_rules.items()):
if rule == CC.FILTER_BLACKLIST:
blacklist.append( tag_slice )
elif rule == CC.FILTER_WHITELIST:
whitelist.append( tag_slice )
blacklist.sort()
whitelist.sort()
if len( blacklist ) == 0:
return 'all tags'
else:
if set( blacklist ) == { '', ':' }:
if len( whitelist ) == 0:
text = 'no tags'
else:
text = 'only ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in whitelist ) )
elif set( blacklist ) == { '' }:
text = 'all namespaced tags'
if len( whitelist ) > 0:
text += ' and ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in whitelist ) )
elif set( blacklist ) == { ':' }:
text = 'all unnamespaced tags'
if len( whitelist ) > 0:
text += ' and ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in whitelist ) )
else:
text = 'all tags except ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in blacklist ) )
if len( whitelist ) > 0:
text += ' (except ' + ', '.join( ( ConvertTagSliceToString( tag_slice ) for tag_slice in whitelist ) ) + ')'
return text
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_TAG_FILTER ] = TagFilter
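
A minimal standalone sketch of the blacklist/whitelist split these summary strings are built from; FILTER_BLACKLIST and FILTER_WHITELIST stand in for the real CC constants, and the rules are made up.

FILTER_BLACKLIST = 0
FILTER_WHITELIST = 1

# blacklisting '' (unnamespaced) and ':' (namespaced) blocks everything,
# so only the whitelisted slice survives
tag_slices_to_rules = { '' : FILTER_BLACKLIST, ':' : FILTER_BLACKLIST, 'for my script' : FILTER_WHITELIST }

blacklist = sorted( tag_slice for ( tag_slice, rule ) in tag_slices_to_rules.items() if rule == FILTER_BLACKLIST )
whitelist = sorted( tag_slice for ( tag_slice, rule ) in tag_slices_to_rules.items() if rule == FILTER_WHITELIST )

if set( blacklist ) == { '', ':' } and len( whitelist ) > 0:
    
    print( 'only ' + ', '.join( whitelist ) ) # only for my script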

View File

@ -67,7 +67,7 @@ options = {}
# Misc
NETWORK_VERSION = 18
SOFTWARE_VERSION = 336
SOFTWARE_VERSION = 337
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )

View File

@ -550,7 +550,7 @@ class HydrusController( object ):
profile_log_path = os.path.join( self.db_dir, profile_log_filename )
with open( profile_log_path, 'a' ) as f:
with open( profile_log_path, 'a', encoding = 'utf-8' ) as f:
prefix = time.strftime( '%Y/%m/%d %H:%M:%S: ' )
@ -602,11 +602,10 @@ class HydrusController( object ):
def ShutdownModel( self ):
self._model_shutdown = True
HG.model_shutdown = True
if self.db is not None:
self.db.Shutdown()
while not self.db.LoopIsFinished():
time.sleep( 0.1 )
@ -632,6 +631,9 @@ class HydrusController( object ):
HydrusPaths.DeletePath( self.temp_dir )
self._model_shutdown = True
HG.model_shutdown = True
def ShutdownView( self ):

View File

@ -735,6 +735,14 @@ class HydrusDB( object ):
def pub_after_job( self, topic, *args, **kwargs ):
if len( args ) == 0 and len( kwargs ) == 0:
if ( topic, args, kwargs ) in self._pubsubs:
return
self._pubsubs.append( ( topic, args, kwargs ) )
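
A minimal standalone sketch of the dedupe added above: a parameterless notification queued twice during one job collapses to a single entry. Names here are illustrative.

pubsubs = []

def pub_after_job( topic, *args, **kwargs ):
    
    if len( args ) == 0 and len( kwargs ) == 0:
        
        if ( topic, args, kwargs ) in pubsubs:
            
            return # this exact notification is already queued
        
    pubsubs.append( ( topic, args, kwargs ) )

pub_after_job( 'notify_new_pending' )
pub_after_job( 'notify_new_pending' )

print( len( pubsubs ) ) # 1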

View File

@ -573,19 +573,23 @@ def GetSubprocessEnv():
if ( HC.PLATFORM_LINUX or HC.PLATFORM_OSX ) and 'PATH' in env:
# fix for pyinstaller on os x, which drops this for some reason and hence breaks ffmpeg
# fix for pyinstaller, which drops this stuff for some reason and hence breaks ffmpeg
path = env[ 'PATH' ]
missing_path_location = '/usr/local/bin'
path_locations = set( path.split( ':' ) )
desired_path_locations = [ '/usr/bin', '/usr/local/bin' ]
if missing_path_location not in path:
for desired_path_location in desired_path_locations:
path = missing_path_location + ':' + path
env[ 'PATH' ] = path
changes_made = True
if desired_path_location not in path_locations:
path = desired_path_location + ':' + path
env[ 'PATH' ] = path
changes_made = True
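
A minimal standalone sketch of the PATH patch-up for frozen builds, using a hypothetical starting environment rather than a real subprocess env.

env = { 'PATH' : '/usr/sbin:/sbin' } # hypothetical starting point

path_locations = set( env[ 'PATH' ].split( ':' ) )
desired_path_locations = [ '/usr/bin', '/usr/local/bin' ]

for desired_path_location in desired_path_locations:
    
    if desired_path_location not in path_locations:
        
        env[ 'PATH' ] = desired_path_location + ':' + env[ 'PATH' ]

print( env[ 'PATH' ] ) # /usr/local/bin:/usr/bin:/usr/sbin:/sbin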

View File

@ -22,6 +22,7 @@ subscription_report_mode = False
hover_window_report_mode = False
menu_profile_mode = False
network_report_mode = False
pubsub_report_mode = False
pubsub_profile_mode = False
ui_timer_profile_mode = False
daemon_report_mode = False

View File

@ -58,7 +58,14 @@ class HydrusLogger( object ):
self._log_path = self._GetLogPath()
self._log_file = open( self._log_path, 'a' )
is_new_file = not os.path.exists( self._log_path )
self._log_file = open( self._log_path, 'a', encoding = 'utf-8' )
if is_new_file:
self._log_file.write( u'\uFEFF' ) # Byte Order Mark, BOM, to help reader software interpret this as utf-8
def _SwitchToANewLogFileIfDue( self ):
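
A minimal standalone sketch of the new-file BOM behaviour, with a hypothetical log path; the real logger works out its own path and rotation.

import os

log_path = 'client - 2019-01.log' # hypothetical

is_new_file = not os.path.exists( log_path )

log_file = open( log_path, 'a', encoding = 'utf-8' )

if is_new_file:
    
    log_file.write( '\ufeff' ) # Byte Order Mark so text readers guess utf-8

log_file.write( 'hello world' + os.linesep )
log_file.close()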

View File

@ -269,10 +269,7 @@ def FilterFreePaths( paths ):
for path in paths:
if HydrusThreading.IsThreadShuttingDown():
raise HydrusExceptions.ShutdownException()
HydrusThreading.CheckIfThreadShuttingDown()
if PathIsFree( path ):

View File

@ -100,13 +100,19 @@ class HydrusPubSub( object ):
try:
# do all this _outside_ the lock, lol
callables = self._GetCallables( topic )
# do this _outside_ the lock, lol
# don't want to report the showtext we just send here!
not_a_report = topic != 'message'
pubsub_profilable = topic != 'message'
if HG.pubsub_report_mode and not_a_report:
HydrusData.ShowText( ( topic, args, kwargs, callables ) )
if HG.pubsub_profile_mode and pubsub_profilable:
if HG.pubsub_profile_mode and not_a_report:
summary = 'Profiling ' + HydrusData.ToHumanInt( len( callables ) ) + ' x ' + topic

View File

@ -1,8 +1,7 @@
from . import HydrusConstants as HC
import os
import wx
def do_2to3_test():
def do_2to3_test( wx_error_display_callable = None ):
bad_filenames = [ 'python27.dll', 'lz4._version.so' ]
@ -14,7 +13,10 @@ def do_2to3_test():
message = 'It looks like you still have some Python 2 files in your install directory! Hydrus is now Python 3 and needs a clean install. Please check the v335 release post for more information! The program will now exit!'
wx.SafeShowMessage( 'Python 2/3 Error!', message )
if wx_error_display_callable is not None:
wx_error_display_callable( 'Python 2/3 Error!', message )
print( message )
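
A minimal standalone sketch of why the display callable is now injected: the client passes wx.SafeShowMessage, while the server passes nothing and so never has to import wx. The file scan itself is faked here.

def do_2to3_test( wx_error_display_callable = None ):
    
    bad_files_found = False # hypothetical stand-in for the python27.dll / lz4._version.so scan
    
    if bad_files_found:
        
        message = 'It looks like you still have some Python 2 files in your install directory!'
        
        if wx_error_display_callable is not None:
            
            wx_error_display_callable( 'Python 2/3 Error!', message )
            
        print( message )

do_2to3_test() # server-style call, no wx anywhere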

View File

@ -89,6 +89,8 @@ SERIALISABLE_TYPE_DOMAIN_METADATA_PACKAGE = 71
SERIALISABLE_TYPE_LOGIN_CREDENTIAL_DEFINITION = 72
SERIALISABLE_TYPE_LOGIN_SCRIPT_DOMAIN = 73
SERIALISABLE_TYPE_LOGIN_STEP = 74
SERIALISABLE_TYPE_CLIENT_API_MANAGER = 75
SERIALISABLE_TYPE_CLIENT_API_PERMISSIONS = 76
SERIALISABLE_TYPES_TO_OBJECT_TYPES = {}

View File

@ -15,6 +15,13 @@ NEXT_THREAD_CLEAROUT = 0
THREADS_TO_THREAD_INFO = {}
THREAD_INFO_LOCK = threading.Lock()
def CheckIfThreadShuttingDown():
if IsThreadShuttingDown():
raise HydrusExceptions.ShutdownException( 'Thread is shutting down!' )
def ClearOutDeadThreads():
with THREAD_INFO_LOCK:
@ -159,9 +166,9 @@ class DAEMONWorker( DAEMON ):
self.start()
def _CanStart( self, time_started_waiting ):
def _CanStart( self ):
return self._PreCallWaitIsDone( time_started_waiting ) and self._ControllerIsOKWithIt()
return self._ControllerIsOKWithIt()
def _ControllerIsOKWithIt( self ):
@ -169,12 +176,40 @@ class DAEMONWorker( DAEMON ):
return True
def _PreCallWaitIsDone( self, time_started_waiting ):
def _DoAWait( self, wait_time, event_can_wake = True ):
# just shave a bit off so things that don't have any wait won't somehow have to wait a single accidental cycle
time_to_start = ( time_started_waiting - 0.1 ) + self._pre_call_wait
time_to_start = HydrusData.GetNow() + wait_time
return HydrusData.TimeHasPassed( time_to_start )
while not HydrusData.TimeHasPassed( time_to_start ):
if event_can_wake:
event_was_set = self._event.wait( 1.0 )
if event_was_set:
self._event.clear()
return
else:
time.sleep( 1.0 )
CheckIfThreadShuttingDown()
def _WaitUntilCanStart( self ):
while not self._CanStart():
time.sleep( 1.0 )
CheckIfThreadShuttingDown()
def GetCurrentJobSummary( self ):
@ -184,53 +219,52 @@ class DAEMONWorker( DAEMON ):
def run( self ):
self._event.wait( self._init_wait )
while True:
try:
if IsThreadShuttingDown():
return
self._DoAWait( self._init_wait )
time_started_waiting = HydrusData.GetNow()
while not self._CanStart( time_started_waiting ):
while True:
time.sleep( 1 )
CheckIfThreadShuttingDown()
if IsThreadShuttingDown():
self._DoAWait( self._pre_call_wait, event_can_wake = False )
CheckIfThreadShuttingDown()
self._WaitUntilCanStart()
CheckIfThreadShuttingDown()
self._DoPreCall()
try:
self._callable( self._controller )
except HydrusExceptions.ShutdownException:
return
except Exception as e:
HydrusData.ShowText( 'Daemon ' + self._name + ' encountered an exception:' )
HydrusData.ShowException( e )
self._DoAWait( self._period )
self._DoPreCall()
except HydrusExceptions.ShutdownException:
try:
self._callable( self._controller )
except HydrusExceptions.ShutdownException:
return
except Exception as e:
HydrusData.ShowText( 'Daemon ' + self._name + ' encountered an exception:' )
HydrusData.ShowException( e )
if IsThreadShuttingDown(): return
self._event.wait( self._period )
self._event.clear()
return
def set( self, *args, **kwargs ): self._event.set()
def set( self, *args, **kwargs ):
self._event.set()
# Big stuff like DB maintenance that we don't want to run while other important stuff is going on, like user interaction or vidya on another process
class DAEMONBackgroundWorker( DAEMONWorker ):
@ -282,50 +316,56 @@ class THREADCallToThread( DAEMON ):
def run( self ):
while True:
try:
try:
while True:
while self._queue.empty():
if IsThreadShuttingDown():
return
CheckIfThreadShuttingDown()
self._event.wait( 1200 )
self._event.wait( 10.0 )
self._event.clear()
CheckIfThreadShuttingDown()
self._DoPreCall()
( callable, args, kwargs ) = self._queue.get()
try:
( callable, args, kwargs ) = self._queue.get()
self._callable = ( callable, args, kwargs )
callable( *args, **kwargs )
self._callable = None
del callable
except HydrusExceptions.ShutdownException:
return
except Exception as e:
HydrusData.Print( traceback.format_exc() )
HydrusData.ShowException( e )
finally:
self._currently_working = False
self._callable = ( callable, args, kwargs )
callable( *args, **kwargs )
self._callable = None
del callable
except HydrusExceptions.ShutdownException:
return
except Exception as e:
HydrusData.Print( traceback.format_exc() )
HydrusData.ShowException( e )
finally:
self._currently_working = False
time.sleep( 0.00001 )
time.sleep( 0.00001 )
except HydrusExceptions.ShutdownException:
return
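
A minimal standalone sketch of the wakeable wait pattern in the reworked daemon loop above, using a plain event and a hypothetical shutdown flag in place of the real thread bookkeeping.

import threading
import time

shutting_down = threading.Event() # hypothetical stand-in for IsThreadShuttingDown
wake_event = threading.Event()

def do_a_wait( wait_time, event_can_wake = True ):
    
    time_to_start = time.time() + wait_time
    
    while time.time() < time_to_start:
        
        if event_can_wake:
            
            if wake_event.wait( 1.0 ): # a set() wakes the daemon early
                
                wake_event.clear()
                
                return
            
        else:
            
            time.sleep( 1.0 )
            
        if shutting_down.is_set():
            
            raise SystemExit( 'Thread is shutting down!' )

do_a_wait( 0.0 ) # no wait due, returns immediately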

View File

@ -390,6 +390,11 @@ def ParseFFMPEGDuration( lines ):
duration = 3600 * hms[0] + 60 * hms[1] + hms[2]
if duration == 0:
return None
duration -= start_offset
return duration
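
A minimal standalone sketch of the duration fix--an ffmpeg line reporting 00:00:00.00 now yields None instead of a zero-length 'single frame' video; hms and start_offset are illustrative.

def parse_duration( hms, start_offset = 0.0 ):
    
    duration = 3600 * hms[0] + 60 * hms[1] + hms[2]
    
    if duration == 0:
        
        return None # let the audio stream's duration win instead
        
    return duration - start_offset

print( parse_duration( [ 0, 0, 0.0 ] ) ) # None
print( parse_duration( [ 0, 1, 30.5 ] ) ) # 90.5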

View File

@ -94,7 +94,7 @@ try:
HydrusData.Print( 'Initialising controller\u2026' )
threading.Thread( target = reactor.run, kwargs = { 'installSignalHandlers' : 0 } ).start()
threading.Thread( target = reactor.run, name = 'twisted', kwargs = { 'installSignalHandlers' : 0 } ).start()
controller = ServerController.Controller( db_dir, no_daemons, no_wal )