Version 190

Hydrus 2016-01-13 16:08:19 -06:00
parent 90784a000d
commit b3deeaac26
24 changed files with 789 additions and 688 deletions

View File

@ -50,8 +50,14 @@ try:
HydrusGlobals.view_shutdown = True
HydrusGlobals.model_shutdown = True
try: controller.pubimmediate( 'wake_daemons' )
except: pass
try:
controller.pubimmediate( 'wake_daemons' )
except:
HydrusData.Print( traceback.format_exc() )
reactor.callFromThread( reactor.stop )

View File

@ -8,6 +8,32 @@
<div class="content">
<h3>changelog</h3>
<ul>
<li><h3>version 190</h3></li>
<ul>
<li>fixed some hashing recalculation in collections that meant they reported no files in many cases</li>
<li>fixed some hashing recalculation when 'remove files' is called</li>
<li>improved the way the client db stores file information and service->file mappings</li>
<li>idle processing jobs will now explicitly wake up as soon as the client naturally switches from not idle to idle</li>
<li>the minimum allowed value for the 'max cpu %' control in the maintenance and processing options panel is now 5%</li>
<li>the maintenance and processing panel is rewritten and laid out a little more clearly</li>
<li>'busy' is now 'system busy' on the status bar</li>
<li>force idle and force unbusy are now merged into a new 'force idle' that sticks until you explicitly turn it off</li>
<li>busy and idle states should now update immediately after closing the manage options dialog</li>
<li>improved exit code event order to be less rude to the OS</li>
<li>improved exit code emergency event handling</li>
<li>fixed a typo that meant the 'do you want to run shutdown jobs?' dialog was not being skipped appropriately</li>
<li>file storage folder rebalancing will now occur on shutdown maintenance</li>
<li>the client now closes down more reliably if the db fails to boot</li>
<li>the client now closes down more reliably if the gui fails to boot</li>
<li>if a client vacuum fails, it'll now also raise the exact error sqlite gave</li>
<li>fixed ctrl+a on the autocomplete dropdown (I think the Insert/IME support change broke it)</li>
<li>the way the 'read' tag autocomplete control talks to other gui elements is improved</li>
<li>the tag autocompletes will now refresh their results lists on more internal variable changes</li>
<li>the query page management controller manages fewer redundant variables</li>
<li>updated sqlite for windows</li>
<li>the client and server dbs will attempt to change to TRUNCATE journal mode if WAL causes a read disk i/o error</li>
<li>misc code cleanup</li>
</ul>
<li><h3>version 189</h3></li>
<ul>
<li>split the big analyze db calls into individual table/index calls and moved them from update code to the normal maintenance routines</li>

View File

@ -374,7 +374,7 @@ class ClientFilesManager( object ):
def Rebalance( self, partial = True ):
def Rebalance( self, partial = True, stop_time = None ):
with self._lock:
@ -392,6 +392,7 @@ class ClientFilesManager( object ):
else:
self._controller.pub( 'splash_set_status_text', text )
HydrusData.ShowText( text )
@ -404,6 +405,11 @@ class ClientFilesManager( object ):
break
if stop_time is not None and HydrusData.TimeHasPassed( stop_time ):
return
rebalance_tuple = self._GetRebalanceTuple()
@ -421,6 +427,7 @@ class ClientFilesManager( object ):
else:
self._controller.pub( 'splash_set_status_text', text )
HydrusData.ShowText( text )
@ -442,6 +449,11 @@ class ClientFilesManager( object ):
break
if stop_time is not None and HydrusData.TimeHasPassed( stop_time ):
return
recover_tuple = self._GetRecoverTuple()
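
The new stop_time parameter lets shutdown maintenance cap how long rebalancing runs: each pass checks the deadline before pulling the next rebalance or recover tuple. A minimal sketch of that pattern, assuming small interruptible work units (the real method uses HydrusData.TimeHasPassed and also respects the shutdown flags):

import time

def do_capped_maintenance( work_units, stop_time = None ):
    
    # process small, interruptible jobs until we finish or the time budget runs out
    for do_one_job in work_units:
        
        if stop_time is not None and time.time() > stop_time:
            
            return # out of time, stop cleanly
            
        
        do_one_job()
    

# e.g. permit at most five minutes of shutdown work:
# do_capped_maintenance( jobs, stop_time = time.time() + 5 * 60 )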

View File

@ -45,6 +45,7 @@ class Controller( HydrusController.HydrusController ):
self._last_mouse_position = None
self._menu_open = False
self._previously_idle = False
def _InitDB( self ):
@ -275,49 +276,69 @@ class Controller( HydrusController.HydrusController ):
def CreateSplash( self ):
try:
self._splash = ClientGUI.FrameSplash( self )
except:
HydrusData.Print( 'There was an error trying to start the splash screen!' )
HydrusData.Print( traceback.format_exc() )
raise
def CurrentlyIdle( self ):
if HydrusGlobals.force_idle_mode:
return True
idle_normal = self._options[ 'idle_normal' ]
idle_period = self._options[ 'idle_period' ]
idle_mouse_period = self._options[ 'idle_mouse_period' ]
possibly_idle = False
definitely_not_idle = False
if idle_normal:
possibly_idle = True
currently_idle = True
if idle_period is not None:
if not HydrusData.TimeHasPassed( self._timestamps[ 'last_user_action' ] + idle_period ):
if idle_period is not None:
definitely_not_idle = True
if not HydrusData.TimeHasPassed( self._timestamps[ 'last_user_action' ] + idle_period ):
currently_idle = False
if idle_mouse_period is not None:
if not HydrusData.TimeHasPassed( self._timestamps[ 'last_mouse_action' ] + idle_mouse_period ):
if idle_mouse_period is not None:
definitely_not_idle = True
if not HydrusData.TimeHasPassed( self._timestamps[ 'last_mouse_action' ] + idle_mouse_period ):
currently_idle = False
if definitely_not_idle:
return False
elif possibly_idle:
return True
else:
return False
currently_idle = False
turning_idle = not self._previously_idle and currently_idle
self._previously_idle = currently_idle
if turning_idle:
self.pub( 'wake_daemons' )
return currently_idle
def DoHTTP( self, *args, **kwargs ): return self._http.Request( *args, **kwargs )
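
CurrentlyIdle now remembers its previous answer so it can detect the exact moment the client flips from active to idle and immediately wake the maintenance daemons, rather than leaving them to notice on their next poll; that is the 'explicitly wake up' changelog item. A stripped-down sketch of the edge detection only (the real method also honours HydrusGlobals.force_idle_mode and the user/mouse inactivity timers):

class IdleEdgeDetector( object ):
    
    def __init__( self, pub ):
        
        self._previously_idle = False
        self._pub = pub # e.g. controller.pub
        
    
    def Update( self, currently_idle ):
        
        # fire only on the not-idle -> idle transition, not on every poll
        turning_idle = not self._previously_idle and currently_idle
        
        self._previously_idle = currently_idle
        
        if turning_idle:
            
            self._pub( 'wake_daemons' )
            
        
        return currently_idle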
@ -325,6 +346,8 @@ class Controller( HydrusController.HydrusController ):
stop_time = HydrusData.GetNow() + ( self._options[ 'idle_shutdown_max_minutes' ] * 60 )
self._client_files_manager.Rebalance( partial = False, stop_time = stop_time )
self.MaintainDB()
if not self._options[ 'pause_repo_sync' ]:
@ -354,51 +377,53 @@ class Controller( HydrusController.HydrusController ):
try:
self._gui.TestAbleToClose()
self.CreateSplash()
except HydrusExceptions.PermissionException:
idle_shutdown_action = self._options[ 'idle_shutdown' ]
return
if idle_shutdown_action in ( CC.IDLE_ON_SHUTDOWN, CC.IDLE_ON_SHUTDOWN_ASK_FIRST ):
if self.ThereIsIdleShutdownWorkDue():
if idle_shutdown_action == CC.IDLE_ON_SHUTDOWN_ASK_FIRST:
text = 'Is now a good time for the client to do up to ' + HydrusData.ConvertIntToPrettyString( self._options[ 'idle_shutdown_max_minutes' ] ) + ' minutes\' maintenance work?'
with ClientGUIDialogs.DialogYesNo( self._splash, text, title = 'Maintenance is due' ) as dlg_yn:
if dlg_yn.ShowModal() == wx.ID_YES:
HydrusGlobals.do_idle_shutdown_work = True
else:
HydrusGlobals.do_idle_shutdown_work = True
try:
exit_thread = threading.Thread( target = self.THREADExitEverything, name = 'Application Exit Thread' )
self._splash = ClientGUI.FrameSplash( self )
exit_thread.start()
except Exception as e:
except:
HydrusData.Print( 'There was an error trying to start the splash screen!' )
self.pub( 'splash_destroy' )
HydrusData.Print( traceback.format_exc() )
HydrusData.DebugPrint( traceback.format_exc() )
HydrusGlobals.emergency_exit = True
self.Exit()
exit_thread = threading.Thread( target = self.THREADExitEverything, name = 'Application Exit Thread' )
exit_thread.start()
def ForceIdle( self ):
if 'last_user_action' in self._timestamps:
del self._timestamps[ 'last_user_action' ]
if 'last_mouse_action' in self._timestamps:
del self._timestamps[ 'last_mouse_action' ]
self._last_mouse_position = None
self.pub( 'wake_daemons' )
self.pub( 'refresh_status' )
def ForceUnbusy( self ):
self._system_busy = False
HydrusGlobals.force_idle_mode = not HydrusGlobals.force_idle_mode
self.pub( 'wake_daemons' )
self.pub( 'refresh_status' )
@ -835,18 +860,7 @@ class Controller( HydrusController.HydrusController ):
HydrusData.Print( 'booting controller...' )
try:
self._splash = ClientGUI.FrameSplash( self )
except:
HydrusData.Print( 'There was an error trying to start the splash screen!' )
HydrusData.Print( traceback.format_exc() )
raise
self.CreateSplash()
boot_thread = threading.Thread( target = self.THREADBootEverything, name = 'Application Boot Thread' )
@ -859,63 +873,19 @@ class Controller( HydrusController.HydrusController ):
def ShutdownView( self ):
if HydrusGlobals.emergency_exit:
self._gui.Shutdown()
HydrusController.HydrusController.ShutdownView( self )
else:
self.CallBlockingToWx( self._gui.Shutdown )
if not HydrusGlobals.emergency_exit:
self.pub( 'splash_set_status_text', 'waiting for daemons to exit' )
self._ShutdownDaemons()
idle_shutdown_action = self._options[ 'idle_shutdown' ]
if idle_shutdown_action in ( CC.IDLE_ON_SHUTDOWN, CC.IDLE_ON_SHUTDOWN_ASK_FIRST ):
if HydrusGlobals.do_idle_shutdown_work:
self.pub( 'splash_set_status_text', 'running maintenance' )
self.ResetIdleTimer()
do_it = True
if CC.IDLE_ON_SHUTDOWN_ASK_FIRST:
if self.ThereIsIdleShutdownWorkDue():
def wx_code():
text = 'Is now a good time for the client to do up to ' + HydrusData.ConvertIntToPrettyString( self._options[ 'idle_shutdown_max_minutes' ] ) + ' minutes\' maintenance work?'
with ClientGUIDialogs.DialogYesNo( self._splash, text, title = 'Maintenance is due' ) as dlg_yn:
if dlg_yn.ShowModal() == wx.ID_YES:
return True
else:
return False
do_it = self.CallBlockingToWx( wx_code )
if do_it:
self.DoIdleShutdownWork()
self.DoIdleShutdownWork()
HydrusController.HydrusController.ShutdownView( self )
HydrusController.HydrusController.ShutdownView( self )
def StartFileQuery( self, query_key, search_context ):
@ -924,7 +894,12 @@ class Controller( HydrusController.HydrusController ):
def SystemBusy( self ):
if HydrusGlobals.force_idle_mode:
return False
max_cpu = self._options[ 'idle_cpu_max' ]
if max_cpu is None:
@ -1038,16 +1013,24 @@ class Controller( HydrusController.HydrusController ):
HydrusData.Print( e )
except:
HydrusGlobals.emergency_exit = True
self.Exit()
except Exception as e:
text = 'A serious error occurred while trying to start the program. Its traceback will be shown next. It should have also been written to client.log.'
traceback.print_exc()
HydrusData.DebugPrint( text )
wx.CallAfter( wx.MessageBox, text )
traceback.print_exc()
wx.CallAfter( wx.MessageBox, traceback.format_exc() )
wx.CallAfter( wx.MessageBox, text )
HydrusGlobals.emergency_exit = True
self.Exit()
finally:
@ -1073,7 +1056,7 @@ class Controller( HydrusController.HydrusController ):
traceback.print_exc()
text = 'A serious error occurred while trying to exit the program. Its traceback will be shown next. It should have also been written to client.log. You may need to quit the program from task manager.'
text = 'A serious error occurred while trying to exit the program. Its traceback may be shown next. It should have also been written to client.log. You may need to quit the program from task manager.'
HydrusData.DebugPrint( text )

View File

@ -997,71 +997,75 @@ class DB( HydrusDB.HydrusDB ):
READ_WRITE_ACTIONS = [ 'service_info', 'system_predicates' ]
WRITE_SPECIAL_ACTIONS = [ 'vacuum' ]
def _AddFilesInfo( self, rows, overwrite = False ):
if overwrite:
insert_phrase = 'REPLACE INTO'
else:
insert_phrase = 'INSERT OR IGNORE INTO'
# hash_id, size, mime, width, height, duration, num_frames, num_words
self._c.executemany( insert_phrase + ' files_info VALUES ( ?, ?, ?, ?, ?, ?, ?, ? );', rows )
def _AddFiles( self, service_id, rows ):
successful_hash_ids = set()
hash_ids = { row[0] for row in rows }
num_deleted = 0
delta_size = 0
num_thumbnails = 0
num_inbox = 0
existing_hash_ids = { hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files WHERE service_id = ? AND hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ';', ( service_id, ) ) }
for ( hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) in rows:
result = self._c.execute( 'SELECT 1 FROM files_info WHERE service_id = ? AND hash_id = ?;', ( service_id, hash_id ) ).fetchone()
if result is None:
self._c.execute( 'INSERT OR IGNORE INTO files_info VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ? );', ( service_id, hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) )
delta_size += size
if mime in HC.MIMES_WITH_THUMBNAILS:
num_thumbnails += 1
successful_hash_ids.add( hash_id )
valid_hash_ids = hash_ids.difference( existing_hash_ids )
if len( successful_hash_ids ) > 0:
if len( valid_hash_ids ) > 0:
splayed_successful_hash_ids = HydrusData.SplayListForDB( successful_hash_ids )
self._c.executemany( 'INSERT OR IGNORE INTO current_files VALUES ( ?, ?, ? );', ( ( service_id, hash_id, timestamp ) for ( hash_id, timestamp ) in rows if hash_id in valid_hash_ids ) )
num_deleted = len( self._c.execute( 'SELECT 1 FROM deleted_files WHERE service_id = ? AND hash_id IN ' + splayed_successful_hash_ids + ';', ( service_id, ) ).fetchall() )
splayed_valid_hash_ids = HydrusData.SplayListForDB( valid_hash_ids )
self._c.execute( 'DELETE FROM deleted_files WHERE service_id = ? AND hash_id IN ' + splayed_successful_hash_ids + ';', ( service_id, ) )
self._c.execute( 'DELETE FROM deleted_files WHERE service_id = ? AND hash_id IN ' + splayed_valid_hash_ids + ';', ( service_id, ) )
num_deleted = self._GetRowCount()
info = self._c.execute( 'SELECT size, mime FROM files_info WHERE hash_id IN ' + splayed_valid_hash_ids + ';' ).fetchall()
num_files = len( valid_hash_ids )
delta_size = sum( ( size for ( size, mime ) in info ) )
num_thumbnails = sum( ( 1 for ( size, mime ) in info if mime in HC.MIMES_WITH_THUMBNAILS ) )
num_inbox = len( valid_hash_ids.intersection( self._inbox_hash_ids ) )
service_info_updates = []
service_info_updates.append( ( -num_deleted, service_id, HC.SERVICE_INFO_NUM_DELETED_FILES ) )
service_info_updates.append( ( delta_size, service_id, HC.SERVICE_INFO_TOTAL_SIZE ) )
service_info_updates.append( ( len( successful_hash_ids ), service_id, HC.SERVICE_INFO_NUM_FILES ) )
service_info_updates.append( ( num_files, service_id, HC.SERVICE_INFO_NUM_FILES ) )
service_info_updates.append( ( num_thumbnails, service_id, HC.SERVICE_INFO_NUM_THUMBNAILS ) )
service_info_updates.append( ( len( successful_hash_ids.intersection( self._inbox_hash_ids ) ), service_id, HC.SERVICE_INFO_NUM_INBOX ) )
service_info_updates.append( ( num_inbox, service_id, HC.SERVICE_INFO_NUM_INBOX ) )
self._c.executemany( 'UPDATE service_info SET info = info + ? WHERE service_id = ? AND info_type = ?;', service_info_updates )
self._c.execute( 'DELETE FROM file_transfers WHERE service_id = ? AND hash_id IN ' + HydrusData.SplayListForDB( successful_hash_ids ) + ';', ( service_id, ) )
self._c.execute( 'DELETE FROM file_transfers WHERE service_id = ? AND hash_id IN ' + splayed_valid_hash_ids + ';', ( service_id, ) )
if num_thumbnails > 0:
self._c.execute( 'DELETE FROM service_info WHERE service_id = ? AND info_type = ?;', ( service_id, HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) )
self._UpdateAutocompleteTagCacheFromFiles( service_id, successful_hash_ids, 1 )
self._UpdateAutocompleteTagCacheFromFiles( service_id, valid_hash_ids, 1 )
if service_id == self._local_file_service_id:
self._DeleteFiles( self._trash_service_id, successful_hash_ids, files_being_undeleted = True )
self._DeleteFiles( self._trash_service_id, valid_hash_ids, files_being_undeleted = True )
if service_id == self._trash_service_id:
now = HydrusData.GetNow()
self._c.executemany( 'INSERT OR IGNORE INTO file_trash ( hash_id, timestamp ) VALUES ( ?, ? );', ( ( hash_id, now ) for hash_id in successful_hash_ids ) )
self._c.executemany( 'INSERT OR IGNORE INTO file_trash ( hash_id, timestamp ) VALUES ( ?, ? );', ( ( hash_id, now ) for hash_id in valid_hash_ids ) )
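
This hunk is the heart of the schema change described in the changelog: per-hash metadata now lives once in files_info, keyed on hash_id alone, while the service->file mapping and its import timestamp move to the new current_files table. A self-contained sketch of the new shape, using an in-memory sqlite database and made-up ids:

import sqlite3

db = sqlite3.connect( ':memory:' )
c = db.cursor()

# one metadata row per hash, shared by every service that holds the file
c.execute( 'CREATE TABLE files_info ( hash_id INTEGER PRIMARY KEY, size INTEGER, mime INTEGER, width INTEGER, height INTEGER, duration INTEGER, num_frames INTEGER, num_words INTEGER );' )

# one row per ( service, file ) pairing, carrying the per-service import timestamp
c.execute( 'CREATE TABLE current_files ( service_id INTEGER, hash_id INTEGER, timestamp INTEGER, PRIMARY KEY ( service_id, hash_id ) );' )

# adding a file is now an idempotent metadata insert plus a mapping insert
c.execute( 'INSERT OR IGNORE INTO files_info VALUES ( ?, ?, ?, ?, ?, ?, ?, ? );', ( 1, 123456, 1, 1920, 1080, None, None, None ) )
c.execute( 'INSERT OR IGNORE INTO current_files VALUES ( ?, ?, ? );', ( 2, 1, 1452700000 ) )

# per-service listings join the mapping back onto the shared metadata
print( c.execute( 'SELECT hash_id, size FROM current_files NATURAL JOIN files_info WHERE service_id = ?;', ( 2, ) ).fetchall() )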
@ -1218,7 +1222,7 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'DELETE FROM file_inbox WHERE hash_id IN ' + splayed_hash_ids + ';' )
updates = self._c.execute( 'SELECT service_id, COUNT( * ) FROM files_info WHERE hash_id IN ' + splayed_hash_ids + ' GROUP BY service_id;' ).fetchall()
updates = self._c.execute( 'SELECT service_id, COUNT( * ) FROM current_files WHERE hash_id IN ' + splayed_hash_ids + ' GROUP BY service_id;' ).fetchall()
self._c.executemany( 'UPDATE service_info SET info = info - ? WHERE service_id = ? AND info_type = ?;', [ ( count, service_id, HC.SERVICE_INFO_NUM_INBOX ) for ( service_id, count ) in updates ] )
@ -1335,7 +1339,7 @@ class DB( HydrusDB.HydrusDB ):
self._controller.pub( 'message', job_key )
info = self._c.execute( 'SELECT hash_id, mime FROM files_info WHERE service_id IN ( ?, ? );', ( self._local_file_service_id, self._trash_service_id ) ).fetchall()
info = self._c.execute( 'SELECT hash_id, mime FROM current_files, files_info USING ( hash_id ) WHERE service_id IN ( ?, ? );', ( self._local_file_service_id, self._trash_service_id ) ).fetchall()
missing_count = 0
deletee_hash_ids = []
@ -1486,7 +1490,6 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'PRAGMA auto_vacuum = 0;' ) # none
self._c.execute( 'PRAGMA journal_mode = WAL;' )
if HC.PLATFORM_WINDOWS:
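
On the journal mode: the changelog notes that the client and server dbs will now attempt to change to TRUNCATE journal mode if WAL causes a read disk i/o error. That handling lives in HydrusDB rather than in this hunk; the following is only a hedged sketch of the idea, with the 'disk I/O error' string match assumed:

import sqlite3

def read_with_journal_fallback( c, query ):
    
    try:
        
        return c.execute( query ).fetchall()
        
    except sqlite3.OperationalError as e:
        
        if 'disk I/O error' in str( e ):
            
            # WAL needs its -wal/-shm sidecar files to work; some network or unusual
            # filesystems cannot manage it, so drop to the simpler TRUNCATE journal
            c.execute( 'PRAGMA journal_mode = TRUNCATE;' )
            
            return c.execute( query ).fetchall()
            
        
        raise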
@ -1517,6 +1520,9 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'CREATE VIRTUAL TABLE conversation_subjects USING fts4( subject );' )
self._c.execute( 'CREATE TABLE current_files ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, hash_id ) );' )
self._c.execute( 'CREATE INDEX current_files_timestamp ON current_files ( timestamp );' )
self._c.execute( 'CREATE TABLE deleted_files ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, PRIMARY KEY( service_id, hash_id ) );' )
self._c.execute( 'CREATE TABLE existing_tags ( namespace_id INTEGER, tag_id INTEGER, PRIMARY KEY( namespace_id, tag_id ) );' )
@ -1524,8 +1530,13 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'CREATE TABLE file_inbox ( hash_id INTEGER PRIMARY KEY );' )
self._c.execute( 'CREATE TABLE files_info ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, size INTEGER, mime INTEGER, timestamp INTEGER, width INTEGER, height INTEGER, duration INTEGER, num_frames INTEGER, num_words INTEGER, PRIMARY KEY( service_id, hash_id ) );' )
self._c.execute( 'CREATE INDEX files_info_hash_id ON files_info ( hash_id );' )
self._c.execute( 'CREATE TABLE files_info ( hash_id INTEGER PRIMARY KEY, size INTEGER, mime INTEGER, width INTEGER, height INTEGER, duration INTEGER, num_frames INTEGER, num_words INTEGER );' )
self._c.execute( 'CREATE INDEX files_info_size ON files_info ( size );' )
self._c.execute( 'CREATE INDEX files_info_mime ON files_info ( mime );' )
self._c.execute( 'CREATE INDEX files_info_width ON files_info ( width );' )
self._c.execute( 'CREATE INDEX files_info_height ON files_info ( height );' )
self._c.execute( 'CREATE INDEX files_info_duration ON files_info ( duration );' )
self._c.execute( 'CREATE INDEX files_info_num_frames ON files_info ( num_frames );' )
self._c.execute( 'CREATE TABLE file_transfers ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, PRIMARY KEY( service_id, hash_id ) );' )
self._c.execute( 'CREATE INDEX file_transfers_hash_id ON file_transfers ( hash_id );' )
@ -1533,7 +1544,7 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'CREATE TABLE file_trash ( hash_id INTEGER PRIMARY KEY, timestamp INTEGER );' )
self._c.execute( 'CREATE INDEX file_trash_timestamp ON file_trash ( timestamp );' )
self._c.execute( 'CREATE TABLE file_petitions ( service_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, hash_id, reason_id ), FOREIGN KEY( service_id, hash_id ) REFERENCES files_info ON DELETE CASCADE );' )
self._c.execute( 'CREATE TABLE file_petitions ( service_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, hash_id, reason_id ) );' )
self._c.execute( 'CREATE INDEX file_petitions_hash_id_index ON file_petitions ( hash_id );' )
self._c.execute( 'CREATE TABLE hashes ( hash_id INTEGER PRIMARY KEY, hash BLOB_BYTES );' )
@ -1686,22 +1697,24 @@ class DB( HydrusDB.HydrusDB ):
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
rows = self._c.execute( 'SELECT hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM files_info WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ).fetchall()
rows = self._c.execute( 'SELECT hash_id, timestamp FROM current_files WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) ).fetchall()
hash_ids = { row[ 0 ] for row in rows }
valid_hash_ids = { row[ 0 ] for row in rows }
if len( hash_ids ) > 0:
if len( valid_hash_ids ) > 0:
total_size = sum( [ row[ 1 ] for row in rows ] )
num_files = len( rows )
num_thumbnails = len( [ 1 for row in rows if row[ 2 ] in HC.MIMES_WITH_THUMBNAILS ] )
num_inbox = len( hash_ids.intersection( self._inbox_hash_ids ) )
splayed_valid_hash_ids = HydrusData.SplayListForDB( valid_hash_ids )
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
info = self._c.execute( 'SELECT size, mime FROM files_info WHERE hash_id IN ' + splayed_hash_ids + ';' ).fetchall()
num_files = len( valid_hash_ids )
delta_size = sum( ( size for ( size, mime ) in info ) )
num_thumbnails = sum( ( 1 for ( size, mime ) in info if mime in HC.MIMES_WITH_THUMBNAILS ) )
num_inbox = len( valid_hash_ids.intersection( self._inbox_hash_ids ) )
service_info_updates = []
service_info_updates.append( ( -total_size, service_id, HC.SERVICE_INFO_TOTAL_SIZE ) )
service_info_updates.append( ( -delta_size, service_id, HC.SERVICE_INFO_TOTAL_SIZE ) )
service_info_updates.append( ( -num_files, service_id, HC.SERVICE_INFO_NUM_FILES ) )
service_info_updates.append( ( -num_thumbnails, service_id, HC.SERVICE_INFO_NUM_THUMBNAILS ) )
service_info_updates.append( ( -num_inbox, service_id, HC.SERVICE_INFO_NUM_INBOX ) )
@ -1717,9 +1730,12 @@ class DB( HydrusDB.HydrusDB ):
self._c.executemany( 'UPDATE service_info SET info = info + ? WHERE service_id = ? AND info_type = ?;', service_info_updates )
self._c.execute( 'DELETE FROM service_info WHERE service_id = ? AND info_type = ' + str( HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) + ';', ( service_id, ) )
if num_thumbnails > 0:
self._c.execute( 'DELETE FROM service_info WHERE service_id = ? AND info_type = ' + str( HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) + ';', ( service_id, ) )
self._c.execute( 'DELETE FROM files_info WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) )
self._c.execute( 'DELETE FROM current_files WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) )
self._c.execute( 'DELETE FROM file_petitions WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) )
self._UpdateAutocompleteTagCacheFromFiles( service_id, hash_ids, -1 )
@ -1823,11 +1839,7 @@ class DB( HydrusDB.HydrusDB ):
# perceptual_hashes and thumbs
job_key.SetVariable( 'popup_text_1', prefix + 'deleting internal orphan information' )
self._c.execute( 'DELETE FROM perceptual_hashes WHERE hash_id NOT IN ( SELECT hash_id FROM files_info );' )
# thumbs
job_key.SetVariable( 'popup_text_1', prefix + 'gathering thumbnail information' )
@ -1837,7 +1849,7 @@ class DB( HydrusDB.HydrusDB ):
hash_id = self._GetHashId( hash )
result = self._c.execute( 'SELECT 1 FROM files_info WHERE hash_id = ?;', ( hash_id, ) ).fetchone()
result = self._c.execute( 'SELECT 1 FROM current_files WHERE hash_id = ?;', ( hash_id, ) ).fetchone()
if result is None:
@ -1955,7 +1967,7 @@ class DB( HydrusDB.HydrusDB ):
useful_thumbnail_hash_ids = { hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM files_info WHERE service_id != ? AND hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ';', ( self._trash_service_id, ) ) }
useful_thumbnail_hash_ids = { hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files WHERE service_id != ? AND hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ';', ( self._trash_service_id, ) ) }
deletable_thumbnail_hash_ids = hash_ids.difference( useful_thumbnail_hash_ids )
@ -2256,9 +2268,9 @@ class DB( HydrusDB.HydrusDB ):
else:
table_phrase = 'mappings, files_info USING ( hash_id ) '
table_phrase = 'mappings, current_files USING ( hash_id ) '
predicates.append( 'files_info.service_id = ' + str( file_service_id ) )
predicates.append( 'current_files.service_id = ' + str( file_service_id ) )
predicates_phrase = 'WHERE ' + ' AND '.join( predicates ) + ' AND '
@ -2423,7 +2435,7 @@ class DB( HydrusDB.HydrusDB ):
if service_type == HC.FILE_REPOSITORY:
( num_local, ) = self._c.execute( 'SELECT COUNT( * ) FROM files_info AS remote_files_info, files_info USING ( hash_id ) WHERE remote_files_info.service_id = ? AND files_info.service_id = ?;', ( service_id, self._local_file_service_id ) ).fetchone()
( num_local, ) = self._c.execute( 'SELECT COUNT( * ) FROM current_files AS remote_current_files, current_files USING ( hash_id ) WHERE remote_current_files.service_id = ? AND current_files.service_id = ?;', ( service_id, self._local_file_service_id ) ).fetchone()
num_not_local = num_everything - num_local
@ -2542,11 +2554,11 @@ class DB( HydrusDB.HydrusDB ):
else:
table_phrase = 'mappings, files_info USING ( hash_id )'
table_phrase = 'mappings, current_files USING ( hash_id )'
file_service_id = self._GetServiceId( file_service_key )
predicates.append( 'files_info.service_id = ' + str( file_service_id ) )
predicates.append( 'current_files.service_id = ' + str( file_service_id ) )
if tag_service_key != CC.COMBINED_TAG_SERVICE_KEY:
@ -2719,7 +2731,7 @@ class DB( HydrusDB.HydrusDB ):
sql_predicates.insert( 0, 'service_id = ' + str( file_service_id ) )
query_hash_ids = { id for ( id, ) in self._c.execute( 'SELECT hash_id FROM files_info WHERE ' + ' AND '.join( sql_predicates ) + ';' ) }
query_hash_ids = { id for ( id, ) in self._c.execute( 'SELECT hash_id FROM current_files, files_info USING ( hash_id ) WHERE ' + ' AND '.join( sql_predicates ) + ';' ) }
else:
@ -2729,7 +2741,7 @@ class DB( HydrusDB.HydrusDB ):
else:
query_hash_ids = { id for ( id, ) in self._c.execute( 'SELECT DISTINCT hash_id FROM mappings UNION SELECT hash_id FROM files_info;' ) }
query_hash_ids = { id for ( id, ) in self._c.execute( 'SELECT DISTINCT hash_id FROM mappings UNION SELECT hash_id FROM current_files;' ) }
if len( sql_predicates ) > 0:
@ -2811,7 +2823,7 @@ class DB( HydrusDB.HydrusDB ):
service_id = self._GetServiceId( service_key )
query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( service_id, ) ) ] )
query_hash_ids.intersection_update( [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files WHERE service_id = ?;', ( service_id, ) ) ] )
for service_key in file_services_to_include_pending:
@ -2825,7 +2837,7 @@ class DB( HydrusDB.HydrusDB ):
service_id = self._GetServiceId( service_key )
query_hash_ids.difference_update( [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( service_id, ) ) ] )
query_hash_ids.difference_update( [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files WHERE service_id = ?;', ( service_id, ) ) ] )
for service_key in file_services_to_exclude_pending:
@ -2861,21 +2873,36 @@ class DB( HydrusDB.HydrusDB ):
if file_service_type == HC.LOCAL_FILE:
if must_not_be_local: query_hash_ids = set()
if must_not_be_local:
query_hash_ids = set()
else:
local_hash_ids = [ id for ( id, ) in self._c.execute( 'SELECT hash_id FROM files_info WHERE service_id = ?;', ( self._local_file_service_id, ) ) ]
local_hash_ids = [ id for ( id, ) in self._c.execute( 'SELECT hash_id FROM current_files WHERE service_id = ?;', ( self._local_file_service_id, ) ) ]
if must_be_local: query_hash_ids.intersection_update( local_hash_ids )
else: query_hash_ids.difference_update( local_hash_ids )
if must_be_local:
query_hash_ids.intersection_update( local_hash_ids )
else:
query_hash_ids.difference_update( local_hash_ids )
if must_be_inbox or must_be_archive:
if must_be_inbox: query_hash_ids.intersection_update( self._inbox_hash_ids )
elif must_be_archive: query_hash_ids.difference_update( self._inbox_hash_ids )
if must_be_inbox:
query_hash_ids.intersection_update( self._inbox_hash_ids )
elif must_be_archive:
query_hash_ids.difference_update( self._inbox_hash_ids )
#
@ -2997,11 +3024,11 @@ class DB( HydrusDB.HydrusDB ):
else:
table_phrase = 'mappings, files_info USING ( hash_id )'
table_phrase = 'mappings, current_files USING ( hash_id )'
file_service_id = self._GetServiceId( file_service_key )
predicates.append( 'files_info.service_id = ' + str( file_service_id ) )
predicates.append( 'current_files.service_id = ' + str( file_service_id ) )
if tag_service_key != CC.COMBINED_TAG_SERVICE_KEY:
@ -3051,11 +3078,11 @@ class DB( HydrusDB.HydrusDB ):
else:
table_phrase = 'mappings, files_info USING ( hash_id )'
table_phrase = 'mappings, current_files USING ( hash_id )'
file_service_id = self._GetServiceId( file_service_key )
predicates.append( 'files_info.service_id = ' + str( file_service_id ) )
predicates.append( 'current_files.service_id = ' + str( file_service_id ) )
if tag_service_key != CC.COMBINED_TAG_SERVICE_KEY:
@ -3130,7 +3157,7 @@ class DB( HydrusDB.HydrusDB ):
return ( CC.STATUS_DELETED, None )
result = self._c.execute( 'SELECT 1 FROM files_info WHERE service_id = ? AND hash_id = ?;', ( self._local_file_service_id, hash_id ) ).fetchone()
result = self._c.execute( 'SELECT 1 FROM current_files WHERE service_id = ? AND hash_id = ?;', ( self._local_file_service_id, hash_id ) ).fetchone()
if result is not None:
@ -3228,45 +3255,38 @@ class DB( HydrusDB.HydrusDB ):
service_id = self._GetServiceId( service_key )
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
# get first detailed results
if service_key == CC.COMBINED_FILE_SERVICE_KEY:
hash_ids_to_info = { hash_id : ( size, mime, width, height, duration, num_frames, num_words ) for ( hash_id, size, mime, width, height, duration, num_frames, num_words ) in self._c.execute( 'SELECT * FROM files_info WHERE hash_id IN ' + splayed_hash_ids + ';' ) }
# this is the bit to improve later--where we'll fold individual file service timestamps into the locations manager
# but for now, we'll merge, preferring the local timestamp
hash_ids_to_timestamps = { hash_id : timestamp for ( hash_id, timestamp ) in self._c.execute( 'SELECT hash_id, timestamp FROM current_files WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( self._local_file_service_id, ) ) }
if len( hash_ids_to_timestamps ) < len( hash_ids ):
all_services_results = self._c.execute( 'SELECT hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM files_info WHERE hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ';' ).fetchall()
missing_hash_ids = { hash_id for hash_id in hash_ids if hash_id not in hash_ids_to_timestamps }
hash_ids_i_have_info_for = set()
hash_ids_to_timestamps.update( { hash_id : timestamp for ( hash_id, timestamp ) in self._c.execute( 'SELECT hash_id, timestamp FROM current_files WHERE hash_id IN ' + HydrusData.SplayListForDB( missing_hash_ids ) + ';' ) } )
results = []
for result in all_services_results:
hash_id = result[0]
if hash_id not in hash_ids_i_have_info_for:
hash_ids_i_have_info_for.add( hash_id )
results.append( result )
results.extend( [ ( hash_id, None, HC.APPLICATION_UNKNOWN, None, None, None, None, None, None ) for hash_id in hash_ids if hash_id not in hash_ids_i_have_info_for ] )
else: results = self._c.execute( 'SELECT hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM files_info WHERE service_id = ? AND hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ';', ( service_id, ) ).fetchall()
# get tagged results
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
hash_ids_to_hashes = self._GetHashIdsToHashes( hash_ids )
hash_ids_to_tags = HydrusData.BuildKeyToListDict( [ ( hash_id, ( service_id, ( status, HydrusTags.CombineTag( namespace, tag ) ) ) ) for ( hash_id, service_id, namespace, tag, status ) in self._c.execute( 'SELECT hash_id, service_id, namespace, tag, status FROM namespaces, ( tags, mappings USING ( tag_id ) ) USING ( namespace_id ) WHERE hash_id IN ' + splayed_hash_ids + ';' ) ] )
hash_ids_to_petitioned_tags = HydrusData.BuildKeyToListDict( [ ( hash_id, ( service_id, ( HC.PETITIONED, HydrusTags.CombineTag( namespace, tag ) ) ) ) for ( hash_id, service_id, namespace, tag ) in self._c.execute( 'SELECT hash_id, service_id, namespace, tag FROM namespaces, ( tags, mapping_petitions USING ( tag_id ) ) USING ( namespace_id ) WHERE hash_id IN ' + splayed_hash_ids + ';' ) ] )
for ( hash_id, tag_data ) in hash_ids_to_petitioned_tags.items(): hash_ids_to_tags[ hash_id ].extend( tag_data )
for ( hash_id, tag_data ) in hash_ids_to_petitioned_tags.items():
hash_ids_to_tags[ hash_id ].extend( tag_data )
hash_ids_to_current_file_service_ids = HydrusData.BuildKeyToListDict( self._c.execute( 'SELECT hash_id, service_id FROM files_info WHERE hash_id IN ' + splayed_hash_ids + ';' ) )
hash_ids_to_current_file_service_ids = HydrusData.BuildKeyToListDict( self._c.execute( 'SELECT hash_id, service_id FROM current_files WHERE hash_id IN ' + splayed_hash_ids + ';' ) )
hash_ids_to_deleted_file_service_ids = HydrusData.BuildKeyToListDict( self._c.execute( 'SELECT hash_id, service_id FROM deleted_files WHERE hash_id IN ' + splayed_hash_ids + ';' ) )
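
As the comment above says, per-service timestamps are not yet folded into the locations manager, so the combined-service case merges them, preferring the local file service's import time and falling back to whatever other service has one. A small sketch of that merge, with illustrative names (the real code builds the dicts from the two current_files queries shown above):

def merge_timestamps( hash_ids, local_rows, other_rows ):
    
    # rows are ( hash_id, timestamp ) pairs; local timestamps win where both exist
    hash_ids_to_timestamps = dict( other_rows )
    hash_ids_to_timestamps.update( dict( local_rows ) )
    
    # hashes known to no file service get None, matching the 'unknown' media result
    return { hash_id : hash_ids_to_timestamps.get( hash_id, None ) for hash_id in hash_ids }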
@ -3276,15 +3296,13 @@ class DB( HydrusDB.HydrusDB ):
hash_ids_to_local_ratings = HydrusData.BuildKeyToListDict( [ ( hash_id, ( service_id, rating ) ) for ( service_id, hash_id, rating ) in self._c.execute( 'SELECT service_id, hash_id, rating FROM local_ratings WHERE hash_id IN ' + splayed_hash_ids + ';' ) ] )
# do current and pending remote ratings here
service_ids_to_service_keys = { service_id : service_key for ( service_id, service_key ) in self._c.execute( 'SELECT service_id, service_key FROM services;' ) }
# build it
media_results = []
for ( hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) in results:
for hash_id in hash_ids:
hash = hash_ids_to_hashes[ hash_id ]
@ -3323,6 +3341,24 @@ class DB( HydrusDB.HydrusDB ):
#
if hash_id in hash_ids_to_info:
( size, mime, width, height, duration, num_frames, num_words ) = hash_ids_to_info[ hash_id ]
else:
( size, mime, width, height, duration, num_frames, num_words ) = ( None, HC.APPLICATION_UNKNOWN, None, None, None, None, None )
if hash_id in hash_ids_to_timestamps:
timestamp = hash_ids_to_timestamps[ hash_id ]
else:
timestamp = None
media_results.append( ClientMedia.MediaResult( ( hash, inbox, size, mime, timestamp, width, height, duration, num_frames, num_words, tags_manager, file_service_keys_cdpp, local_ratings, remote_ratings ) ) )
@ -3758,15 +3794,15 @@ class DB( HydrusDB.HydrusDB ):
if info_type in ( HC.SERVICE_INFO_NUM_PENDING_FILES, HC.SERVICE_INFO_NUM_PETITIONED_FILES ): save_it = False
if info_type == HC.SERVICE_INFO_NUM_FILES: result = self._c.execute( 'SELECT COUNT( * ) FROM files_info WHERE service_id = ?;', ( service_id, ) ).fetchone()
elif info_type == HC.SERVICE_INFO_TOTAL_SIZE: result = self._c.execute( 'SELECT SUM( size ) FROM files_info WHERE service_id = ?;', ( service_id, ) ).fetchone()
if info_type == HC.SERVICE_INFO_NUM_FILES: result = self._c.execute( 'SELECT COUNT( * ) FROM current_files WHERE service_id = ?;', ( service_id, ) ).fetchone()
elif info_type == HC.SERVICE_INFO_TOTAL_SIZE: result = self._c.execute( 'SELECT SUM( size ) FROM current_files, files_info USING ( hash_id ) WHERE service_id = ?;', ( service_id, ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_DELETED_FILES: result = self._c.execute( 'SELECT COUNT( * ) FROM deleted_files WHERE service_id = ?;', ( service_id, ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_PENDING_FILES: result = self._c.execute( 'SELECT COUNT( * ) FROM file_transfers WHERE service_id = ?;', ( service_id, ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_PETITIONED_FILES: result = self._c.execute( 'SELECT COUNT( * ) FROM file_petitions where service_id = ?;', ( service_id, ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_THUMBNAILS: result = self._c.execute( 'SELECT COUNT( * ) FROM files_info WHERE service_id = ? AND mime IN ' + HydrusData.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ';', ( service_id, ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_THUMBNAILS: result = self._c.execute( 'SELECT COUNT( * ) FROM current_files, files_info USING ( hash_id ) WHERE service_id = ? AND mime IN ' + HydrusData.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ';', ( service_id, ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL:
hash_ids = [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM files_info WHERE mime IN ' + HydrusData.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND service_id = ?;', ( service_id, ) ) ]
hash_ids = [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files, files_info USING ( hash_id ) WHERE mime IN ' + HydrusData.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND service_id = ?;', ( service_id, ) ) ]
thumbnails_i_should_have = self._GetHashes( hash_ids )
@ -3784,7 +3820,7 @@ class DB( HydrusDB.HydrusDB ):
result = ( num_local, )
elif info_type == HC.SERVICE_INFO_NUM_INBOX: result = self._c.execute( 'SELECT COUNT( * ) FROM file_inbox, files_info USING ( hash_id ) WHERE service_id = ?;', ( service_id, ) ).fetchone()
elif info_type == HC.SERVICE_INFO_NUM_INBOX: result = self._c.execute( 'SELECT COUNT( * ) FROM file_inbox, current_files USING ( hash_id ) WHERE service_id = ?;', ( service_id, ) ).fetchone()
elif service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ):
@ -3998,7 +4034,7 @@ class DB( HydrusDB.HydrusDB ):
service_id = self._GetServiceId( service_key )
hash_ids = [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM files_info WHERE mime IN ' + HydrusData.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND service_id = ?;', ( service_id, ) ) ]
hash_ids = [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files, files_info USING ( hash_id ) WHERE mime IN ' + HydrusData.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND service_id = ?;', ( service_id, ) ) ]
hashes = set( self._GetHashes( hash_ids ) )
@ -4177,7 +4213,9 @@ class DB( HydrusDB.HydrusDB ):
self._AddThumbnails( [ ( hash, thumbnail ) ] )
self._AddFiles( self._local_file_service_id, [ ( hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) ] )
self._AddFilesInfo( [ ( hash_id, size, mime, width, height, duration, num_frames, num_words ) ], overwrite = True )
self._AddFiles( self._local_file_service_id, [ ( hash_id, timestamp ) ] )
content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ADD, ( hash, size, mime, timestamp, width, height, duration, num_frames, num_words ) )
@ -4231,7 +4269,7 @@ class DB( HydrusDB.HydrusDB ):
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
updates = self._c.execute( 'SELECT service_id, COUNT( * ) FROM files_info WHERE hash_id IN ' + splayed_hash_ids + ' GROUP BY service_id;' ).fetchall()
updates = self._c.execute( 'SELECT service_id, COUNT( * ) FROM current_files WHERE hash_id IN ' + splayed_hash_ids + ' GROUP BY service_id;' ).fetchall()
self._c.executemany( 'UPDATE service_info SET info = info + ? WHERE service_id = ? AND info_type = ?;', [ ( count, service_id, HC.SERVICE_INFO_NUM_INBOX ) for ( service_id, count ) in updates ] )
@ -4286,7 +4324,10 @@ class DB( HydrusDB.HydrusDB ):
def _ManageDBError( self, job, e ):
if type( e ) == MemoryError: HydrusData.ShowText( 'The client is running out of memory! Restart it ASAP!' )
if isinstance( e, MemoryError ):
HydrusData.ShowText( 'The client is running out of memory! Restart it ASAP!' )
( etype, value, tb ) = sys.exc_info()
@ -4441,7 +4482,9 @@ class DB( HydrusDB.HydrusDB ):
hash_id = self._GetHashId( hash )
self._AddFiles( service_id, [ ( hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words ) ] )
self._AddFilesInfo( [ ( hash_id, size, mime, width, height, duration, num_frames, num_words ) ] )
self._AddFiles( service_id, [ ( hash_id, timestamp ) ] )
elif action == HC.CONTENT_UPDATE_PEND:
@ -5347,7 +5390,7 @@ class DB( HydrusDB.HydrusDB ):
splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
rows = self._c.execute( 'SELECT hash_id, size, mime, timestamp, width, height, duration, num_frames, num_words FROM files_info WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( self._trash_service_id, ) ).fetchall()
rows = self._c.execute( 'SELECT hash_id, timestamp FROM current_files WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( self._trash_service_id, ) ).fetchall()
if len( rows ) > 0:
@ -5374,32 +5417,6 @@ class DB( HydrusDB.HydrusDB ):
self._controller.pub( 'splash_set_title_text', 'updating db to v' + str( version + 1 ) )
if version == 136:
result = self._c.execute( 'SELECT tag_id FROM tags WHERE tag = ?;', ( '', ) ).fetchone()
if result is not None:
( tag_id, ) = result
self._c.execute( 'DELETE FROM mappings WHERE tag_id = ?;', ( tag_id, ) )
self._c.execute( 'DELETE FROM mapping_petitions WHERE tag_id = ?;', ( tag_id, ) )
self._c.execute( 'DELETE FROM autocomplete_tags_cache WHERE tag_id = ?;', ( tag_id, ) )
self._c.execute( 'DELETE FROM existing_tags WHERE tag_id = ?;', ( tag_id, ) )
self._DeleteServiceInfo()
if version == 139:
self._combined_tag_service_id = self._GetServiceId( CC.COMBINED_TAG_SERVICE_KEY )
self._local_file_service_id = self._GetServiceId( CC.LOCAL_FILE_SERVICE_KEY )
self._c.execute( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id != ?;', ( self._combined_tag_service_id, ) )
self._c.execute( 'DELETE FROM autocomplete_tags_cache WHERE file_service_id != ?;', ( self._local_file_service_id, ) )
self._c.execute( 'DELETE FROM autocomplete_tags_cache WHERE current_count < ?;', ( 5, ) )
if version == 140:
self._combined_tag_service_id = self._GetServiceId( CC.COMBINED_TAG_SERVICE_KEY )
@ -5486,7 +5503,7 @@ class DB( HydrusDB.HydrusDB ):
for ( modifier, key, service_key, data ) in actions:
if type( service_key ) == ClientData.ClientServiceIdentifier: service_key = service_key.GetServiceKey()
if isinstance( service_key, ClientData.ClientServiceIdentifier ): service_key = service_key.GetServiceKey()
action = ( service_key, data )
@ -6089,7 +6106,7 @@ class DB( HydrusDB.HydrusDB ):
gallery_identifier = ClientDownloading.GalleryIdentifier( HC.SITE_TYPE_DEFAULT )
elif type( name ) == int:
elif isinstance( name, int ):
site_type = name
@ -6238,6 +6255,44 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'CREATE TABLE analyze_timestamps ( name TEXT, timestamp INTEGER );' )
if version == 189:
self._controller.pub( 'splash_set_status_text', 'updating file tables' )
#
self._c.execute( 'DROP INDEX file_petitions_hash_id_index;' )
self._c.execute( 'ALTER TABLE file_petitions RENAME TO file_petitions_old;' )
self._c.execute( 'CREATE TABLE file_petitions ( service_id INTEGER, hash_id INTEGER, reason_id INTEGER, PRIMARY KEY( service_id, hash_id, reason_id ) );' )
self._c.execute( 'CREATE INDEX file_petitions_hash_id_index ON file_petitions ( hash_id );' )
self._c.execute( 'INSERT INTO file_petitions SELECT * FROM file_petitions_old;' )
self._c.execute( 'DROP TABLE file_petitions_old;' )
#
self._c.execute( 'ALTER TABLE files_info RENAME TO files_info_old;' )
self._c.execute( 'CREATE TABLE current_files ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, timestamp INTEGER, PRIMARY KEY( service_id, hash_id ) );' )
self._c.execute( 'CREATE INDEX current_files_timestamp ON current_files ( timestamp );' )
self._c.execute( 'CREATE TABLE files_info ( hash_id INTEGER PRIMARY KEY, size INTEGER, mime INTEGER, width INTEGER, height INTEGER, duration INTEGER, num_frames INTEGER, num_words INTEGER );' )
self._c.execute( 'CREATE INDEX files_info_size ON files_info ( size );' )
self._c.execute( 'CREATE INDEX files_info_mime ON files_info ( mime );' )
self._c.execute( 'CREATE INDEX files_info_width ON files_info ( width );' )
self._c.execute( 'CREATE INDEX files_info_height ON files_info ( height );' )
self._c.execute( 'CREATE INDEX files_info_duration ON files_info ( duration );' )
self._c.execute( 'CREATE INDEX files_info_num_frames ON files_info ( num_frames );' )
self._c.execute( 'INSERT INTO current_files SELECT service_id, hash_id, timestamp FROM files_info_old;' )
self._c.execute( 'INSERT OR IGNORE INTO files_info SELECT hash_id, size, mime, width, height, duration, num_frames, num_words FROM files_info_old;' )
self._c.execute( 'DROP TABLE files_info_old;' )
self._controller.pub( 'splash_set_title_text', 'updating db to v' + str( version + 1 ) )
self._c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )
@ -6455,10 +6510,8 @@ class DB( HydrusDB.HydrusDB ):
for ( namespace_id, tag_id, hash_ids ) in pending_mappings_ids:
num_raw_adds = InsertMappings( namespace_id, tag_id, hash_ids, HC.PENDING )
num_deleted_petitions = DeletePetitions( namespace_id, tag_id, hash_ids )
change_in_num_pending_mappings += num_raw_adds
change_in_num_petitioned_mappings -= num_deleted_petitions
for ( namespace_id, tag_id, hash_ids ) in pending_rescinded_mappings_ids:
@ -6769,7 +6822,9 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'VACUUM' )
except sqlite3.OperationalError:
except sqlite3.OperationalError as e:
HydrusData.ShowException( e )
HC.options[ 'maintenance_vacuum_period' ] = None
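
Per the changelog, a failed vacuum now surfaces the exact error sqlite gave instead of failing silently, and automatic vacuuming is switched off so it does not fail again on the next maintenance cycle. A minimal sketch of that handler shape, with show_exception standing in for HydrusData.ShowException:

import sqlite3

def maintenance_vacuum( c, show_exception, options ):
    
    try:
        
        c.execute( 'VACUUM;' )
        
    except sqlite3.OperationalError as e:
        
        show_exception( e ) # tell the user exactly what sqlite said
        
        options[ 'maintenance_vacuum_period' ] = None # stop trying until re-enabled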

View File

@ -236,7 +236,7 @@ def DAEMONMaintainTrash( controller ):
def DAEMONRebalanceClientFiles( controller ):
if controller.CurrentlyIdle() and not controller.SystemBusy():
if controller.CurrentlyIdle():
controller.GetClientFilesManager().Rebalance()

View File

@ -60,7 +60,7 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
self._statusbar = self.CreateStatusBar()
self._statusbar.SetFieldsCount( 4 )
self._statusbar.SetStatusWidths( [ -1, 25, 25, 50 ] )
self._statusbar.SetStatusWidths( [ -1, 25, 90, 50 ] )
self._focus_holder = wx.Window( self, size = ( 0, 0 ) )
@ -595,45 +595,6 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
wx.CallAfter( gc.collect )
def _Exit( self, restart = False ):
if not HydrusGlobals.emergency_exit:
if HC.options[ 'confirm_client_exit' ]:
if restart:
text = 'Are you sure you want to restart the client? (Will auto-yes in 15 seconds)'
else:
text = 'Are you sure you want to exit the client? (Will auto-yes in 15 seconds)'
with ClientGUIDialogs.DialogYesNo( self, text ) as dlg:
call_later = wx.CallLater( 15000, dlg.EndModal, wx.ID_YES )
if dlg.ShowModal() == wx.ID_NO:
call_later.Stop()
return
call_later.Stop()
if restart:
HydrusGlobals.restart = True
self._controller.Exit()
def _FetchIP( self, service_key ):
with ClientGUIDialogs.DialogTextEntry( self, 'Enter the file\'s hash.' ) as dlg:
@ -1083,14 +1044,15 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
db_profile_mode_id = ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'db_profile_mode' )
pubsub_profile_mode_id = ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'pubsub_profile_mode' )
force_idle_mode_id = ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'force_idle_mode' )
debug = wx.Menu()
debug.AppendCheckItem( db_profile_mode_id, p( '&DB Profile Mode' ) )
debug.Check( db_profile_mode_id, HydrusGlobals.db_profile_mode )
debug.AppendCheckItem( pubsub_profile_mode_id, p( '&PubSub Profile Mode' ) )
debug.Check( pubsub_profile_mode_id, HydrusGlobals.pubsub_profile_mode )
debug.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'force_idle' ), p( 'Force Idle Mode' ) )
debug.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'force_unbusy' ), p( 'Force Unbusy Mode' ) )
debug.AppendCheckItem( force_idle_mode_id, p( '&Force Idle Mode' ) )
debug.Check( force_idle_mode_id, HydrusGlobals.force_idle_mode )
debug.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'debug_garbage' ), p( 'Garbage' ) )
debug.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'clear_caches' ), p( '&Clear Preview/Fullscreen Caches' ) )
debug.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'delete_service_info' ), p( '&Clear DB Service Info Cache' ), p( 'Delete all cached service info, in case it has become desynchronised.' ) )
@ -1253,6 +1215,9 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
with ClientGUIDialogsManage.DialogManageOptions( self ) as dlg: dlg.ShowModal()
self._controller.pub( 'wake_daemons' )
self._controller.pub( 'refresh_status' )
def _ManagePixivAccount( self ):
@ -1514,7 +1479,7 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
if self._controller.SystemBusy():
busy_status = 'busy'
busy_status = 'system busy'
else:
@ -2104,16 +2069,17 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
def EventExit( self, event ):
if event.CanVeto():
event.Veto()
else:
if not event.CanVeto():
HydrusGlobals.emergency_exit = True
self._Exit()
result = self.Exit()
if not result:
event.Veto()
def EventFocus( self, event ):
@ -2160,9 +2126,9 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
count[ type( o ) ] += 1
if type( o ) == types.InstanceType: class_count[ o.__class__.__name__ ] += 1
elif type( o ) == types.BuiltinFunctionType: class_count[ o.__name__ ] += 1
elif type( o ) == types.BuiltinMethodType: class_count[ o.__name__ ] += 1
if isinstance( o, types.InstanceType ): class_count[ o.__class__.__name__ ] += 1
elif isinstance( o, types.BuiltinFunctionType ): class_count[ o.__name__ ] += 1
elif isinstance( o, types.BuiltinMethodType ): class_count[ o.__name__ ] += 1
HydrusData.Print( 'gc:' )
@ -2189,16 +2155,12 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
elif command == 'delete_orphans': self._DeleteOrphans()
elif command == 'delete_pending': self._DeletePending( data )
elif command == 'delete_service_info': self._DeleteServiceInfo()
elif command == 'exit': self._Exit()
elif command == 'exit': self.Exit()
elif command == 'fetch_ip': self._FetchIP( data )
elif command == 'force_idle':
elif command == 'force_idle_mode':
self._controller.ForceIdle()
elif command == 'force_unbusy':
self._controller.ForceUnbusy()
elif command == '8chan_board': webbrowser.open( 'https://8ch.net/hydrus/index.html' )
elif command == 'file_integrity': self._CheckFileIntegrity()
elif command == 'help': webbrowser.open( 'file://' + HC.HELP_DIR + '/index.html' )
@ -2262,7 +2224,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
if page is not None: page.RefreshQuery()
elif command == 'regenerate_thumbnails': self._RegenerateThumbnails()
elif command == 'restart': self._Exit( restart = True )
elif command == 'restart': self.Exit( restart = True )
elif command == 'restore_database': self._controller.RestoreDatabase()
elif command == 'review_services': self._ReviewServices()
elif command == 'save_gui_session': self._SaveGUISession()
@ -2333,6 +2295,87 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
event.Skip( True )
def Exit( self, restart = False ):
if not HydrusGlobals.emergency_exit:
if HC.options[ 'confirm_client_exit' ]:
if restart:
text = 'Are you sure you want to restart the client? (Will auto-yes in 15 seconds)'
else:
text = 'Are you sure you want to exit the client? (Will auto-yes in 15 seconds)'
with ClientGUIDialogs.DialogYesNo( self, text ) as dlg:
call_later = wx.CallLater( 15000, dlg.EndModal, wx.ID_YES )
if dlg.ShowModal() == wx.ID_NO:
call_later.Stop()
return False
call_later.Stop()
try:
for page in [ self._notebook.GetPage( i ) for i in range( self._notebook.GetPageCount() ) ]:
page.TestAbleToClose()
except HydrusExceptions.PermissionException:
return False
if restart:
HydrusGlobals.restart = True
self._SaveGUISession( 'last session' )
self._message_manager.CleanBeforeDestroy()
self._message_manager.Hide()
for page in [ self._notebook.GetPage( i ) for i in range( self._notebook.GetPageCount() ) ]: page.CleanBeforeDestroy()
page = self._notebook.GetCurrentPage()
if page is not None:
( HC.options[ 'hpos' ], HC.options[ 'vpos' ] ) = page.GetSashPositions()
self._controller.WriteSynchronous( 'save_options', HC.options )
self.Hide()
if HydrusGlobals.emergency_exit:
self._controller.Exit()
else:
wx.CallAfter( self._controller.Exit )
self.Destroy()
return True
def GetCurrentPage( self ):
return self._notebook.GetCurrentPage()
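
The reworked Exit shows the friendlier ordering the changelog mentions: confirm and save while the gui is still alive, Hide() so the window disappears promptly, hand the real shutdown to the controller (deferred with wx.CallAfter in the normal case), and only then Destroy(). A compressed sketch of that ordering; ConfirmAndSaveSession is an illustrative stand-in for the confirmation dialog and session save above:

import wx

def exit_frame( frame, controller, emergency = False ):
    
    if not emergency and not frame.ConfirmAndSaveSession():
        
        return False # user cancelled, or a page refused to close
        
    
    frame.Hide() # drop off screen immediately so the OS and user see a prompt response
    
    if emergency:
        
        controller.Exit() # synchronously; we may not get another event loop pass
        
    else:
        
        wx.CallAfter( controller.Exit ) # let pending wx events drain first
        
    
    frame.Destroy()
    
    return True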
@ -2513,65 +2556,11 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p
FrameSeedCache( self._controller, seed_cache )
def Shutdown( self ):
if HydrusGlobals.emergency_exit:
self._SaveGUISession( 'last session' )
self._message_manager.CleanBeforeDestroy()
for page in [ self._notebook.GetPage( i ) for i in range( self._notebook.GetPageCount() ) ]: page.CleanBeforeDestroy()
page = self._notebook.GetCurrentPage()
if page is not None:
( HC.options[ 'hpos' ], HC.options[ 'vpos' ] ) = page.GetSashPositions()
self._controller.Write( 'save_options', HC.options )
self.Destroy()
else:
self._SaveGUISession( 'last session' )
try:
self._message_manager.Hide()
self._message_manager.CleanBeforeDestroy()
except: pass
self.Hide()
for page in [ self._notebook.GetPage( i ) for i in range( self._notebook.GetPageCount() ) ]: page.CleanBeforeDestroy()
page = self._notebook.GetCurrentPage()
if page is not None:
( HC.options[ 'hpos' ], HC.options[ 'vpos' ] ) = page.GetSashPositions()
self._controller.Write( 'save_options', HC.options )
wx.CallAfter( self.Destroy )
def SyncToTagArchive( self, hta, adding, namespaces, service_key ):
self._controller.CallToThread( self._THREADSyncToTagArchive, hta, adding, namespaces, service_key )
def TestAbleToClose( self ):
for page in [ self._notebook.GetPage( i ) for i in range( self._notebook.GetPageCount() ) ]: page.TestAbleToClose()
'''
class FrameComposeMessage( ClientGUICommon.Frame ):

View File

@ -3324,7 +3324,7 @@ class MediaContainer( wx.Window ):
def Pause( self ):
if type( self._media_window ) == Animation:
if isinstance( self._media_window, Animation ):
self._media_window.Pause()

View File

@ -155,7 +155,7 @@ class AutoCompleteDropdown( wx.Panel ):
# There's a big bug in wx where FRAME_FLOAT_ON_PARENT Frames don't get passed their mouse events if their parent is a Dialog jej
# I think it is something to do with the initialisation order; if the frame is init'ed before the ShowModal call, but whatever.
if issubclass( type( tlp ), wx.Dialog ) or HC.options[ 'always_embed_autocompletes' ]: self._float_mode = False
if isinstance( tlp, wx.Dialog ) or HC.options[ 'always_embed_autocompletes' ]: self._float_mode = False
else: self._float_mode = True
self._text_ctrl = wx.TextCtrl( self, style=wx.TE_PROCESS_ENTER )
@ -241,12 +241,15 @@ class AutoCompleteDropdown( wx.Panel ):
parent = parent.GetParent()
if issubclass( type( parent ), wx.ScrolledWindow ):
if isinstance( parent, wx.ScrolledWindow ):
parent.Bind( wx.EVT_SCROLLWIN, self.EventMove )
except: break
except:
break
@ -436,7 +439,11 @@ class AutoCompleteDropdown( wx.Panel ):
elif self._intercept_key_events:
if event.KeyCode in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER ) and self._ShouldTakeResponsibilityForEnter():
if event.KeyCode in ( ord( 'A' ), ord( 'a' ) ) and event.CmdDown():
event.Skip()
elif event.KeyCode in ( wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER ) and self._ShouldTakeResponsibilityForEnter():
self._TakeResponsibilityForEnter()
@ -563,6 +570,14 @@ class AutoCompleteDropdown( wx.Panel ):
def RefreshList( self ):
self._cache_text = ''
self._current_namespace = ''
self._UpdateList()
def TIMEREventDropdownHide( self, event ):
try:
@ -638,7 +653,7 @@ class AutoCompleteDropdownTags( AutoCompleteDropdown ):
self.Bind( wx.EVT_MENU, self.EventMenu )
def _ChangeFileRepository( self, file_service_key ):
def _ChangeFileService( self, file_service_key ):
self._file_service_key = file_service_key
@ -648,8 +663,10 @@ class AutoCompleteDropdownTags( AutoCompleteDropdown ):
self._file_repo_button.SetLabel( name )
wx.CallAfter( self.RefreshList )
def _ChangeTagRepository( self, tag_service_key ):
def _ChangeTagService( self, tag_service_key ):
self._tag_service_key = tag_service_key
@ -659,6 +676,11 @@ class AutoCompleteDropdownTags( AutoCompleteDropdown ):
self._tag_repo_button.SetLabel( name )
self._cache_text = ''
self._current_namespace = ''
wx.CallAfter( self.RefreshList )
def _InitDropDownList( self ): return ListBoxTagsAutocompleteDropdown( self._dropdown_window, self.BroadcastChoices, min_height = self._list_height )
@ -689,7 +711,7 @@ class AutoCompleteDropdownTags( AutoCompleteDropdown ):
menu = wx.Menu()
for service in services: menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'change_file_repository', service.GetServiceKey() ), service.GetName() )
for service in services: menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'change_file_service', service.GetServiceKey() ), service.GetName() )
HydrusGlobals.client_controller.PopupMenu( self._file_repo_button, menu )
@ -702,8 +724,14 @@ class AutoCompleteDropdownTags( AutoCompleteDropdown ):
( command, data ) = action
if command == 'change_file_repository': self._ChangeFileRepository( data )
elif command == 'change_tag_repository': self._ChangeTagRepository( data )
if command == 'change_file_service':
self._ChangeFileService( data )
elif command == 'change_tag_service':
self._ChangeTagService( data )
else:
event.Skip()
@ -729,26 +757,28 @@ class AutoCompleteDropdownTags( AutoCompleteDropdown ):
menu = wx.Menu()
for service in services: menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'change_tag_repository', service.GetServiceKey() ), service.GetName() )
for service in services: menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetTemporaryId( 'change_tag_service', service.GetServiceKey() ), service.GetName() )
HydrusGlobals.client_controller.PopupMenu( self._tag_repo_button, menu )
class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
def __init__( self, parent, page_key, file_service_key, tag_service_key, media_callable = None, include_current = True, include_pending = True, synchronised = True, include_unusual_predicate_types = True ):
def __init__( self, parent, page_key, file_search_context, media_callable = None, synchronised = True, include_unusual_predicate_types = True ):
file_service_key = file_search_context.GetFileServiceKey()
tag_service_key = file_search_context.GetTagServiceKey()
AutoCompleteDropdownTags.__init__( self, parent, file_service_key, tag_service_key )
self._media_callable = media_callable
self._page_key = page_key
self._include_current = include_current
self._include_pending = include_pending
self._file_search_context = file_search_context
self._include_current_tags = OnOffButton( self._dropdown_window, self._page_key, 'notify_include_current', on_label = 'include current tags', off_label = 'exclude current tags', start_on = self._include_current )
self._include_current_tags = OnOffButton( self._dropdown_window, self._page_key, 'notify_include_current', on_label = 'include current tags', off_label = 'exclude current tags', start_on = file_search_context.IncludeCurrentTags() )
self._include_current_tags.SetToolTipString( 'select whether to include current tags in the search' )
self._include_pending_tags = OnOffButton( self._dropdown_window, self._page_key, 'notify_include_pending', on_label = 'include pending tags', off_label = 'exclude pending tags', start_on = self._include_pending )
self._include_pending_tags = OnOffButton( self._dropdown_window, self._page_key, 'notify_include_pending', on_label = 'include pending tags', off_label = 'exclude pending tags', start_on = file_search_context.IncludePendingTags() )
self._include_pending_tags.SetToolTipString( 'select whether to include pending tags in the search' )
self._synchronised = OnOffButton( self._dropdown_window, self._page_key, 'notify_search_immediately', on_label = 'searching immediately', off_label = 'waiting -- tag counts may be inaccurate', start_on = synchronised )
@ -807,18 +837,26 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
self._BroadcastChoices( { entry_predicate } )
def _ChangeFileRepository( self, file_service_key ):
def _ChangeFileService( self, file_service_key ):
AutoCompleteDropdownTags._ChangeFileRepository( self, file_service_key )
AutoCompleteDropdownTags._ChangeFileService( self, file_service_key )
HydrusGlobals.client_controller.pub( 'change_file_repository', self._page_key, self._file_service_key )
self._file_search_context.SetFileServiceKey( file_service_key )
HydrusGlobals.client_controller.pub( 'change_file_service', self._page_key, file_service_key )
HydrusGlobals.client_controller.pub( 'refresh_query', self._page_key )
def _ChangeTagRepository( self, tag_service_key ):
def _ChangeTagService( self, tag_service_key ):
AutoCompleteDropdownTags._ChangeTagRepository( self, tag_service_key )
AutoCompleteDropdownTags._ChangeTagService( self, tag_service_key )
HydrusGlobals.client_controller.pub( 'change_tag_repository', self._page_key, self._tag_service_key )
self._file_search_context.SetTagServiceKey( tag_service_key )
HydrusGlobals.client_controller.pub( 'change_tag_service', self._page_key, tag_service_key )
HydrusGlobals.client_controller.pub( 'refresh_query', self._page_key )
def _ParseSearchText( self ):
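
For orientation, a toy sketch of the pubsub handshake the renamed methods ride on: the dropdown publishes 'change_file_service' with its page key, and only the panel whose page key matches reacts. The controller class below is a simplified stand-in for the real hydrus controller, not its actual implementation:

import collections

class ToyController( object ):
    
    def __init__( self ):
        
        self._topics_to_methods = collections.defaultdict( list )
        
    
    def sub( self, obj, method_name, topic ):
        
        self._topics_to_methods[ topic ].append( getattr( obj, method_name ) )
        
    
    def pub( self, topic, *args ):
        
        for method in self._topics_to_methods[ topic ]:
            
            method( *args )
        
    

class ToyPanel( object ):
    
    def __init__( self, controller, page_key ):
        
        self._page_key = page_key
        
        controller.sub( self, 'ChangeFileServicePubsub', 'change_file_service' )
        
    
    def ChangeFileServicePubsub( self, page_key, service_key ):
        
        if page_key == self._page_key:
            
            print( 'this page now searches the service ' + service_key )
        
    

controller = ToyController()

panel = ToyPanel( controller, 'page one' )

controller.pub( 'change_file_service', 'page one', 'local files' ) # only the matching page reacts
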
@ -923,9 +961,12 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
if fetch_from_db:
include_current = self._file_search_context.IncludeCurrentTags()
include_pending = self._file_search_context.IncludePendingTags()
if len( search_text ) < num_autocomplete_chars:
predicates = HydrusGlobals.client_controller.Read( 'autocomplete_predicates', file_service_key = self._file_service_key, tag_service_key = self._tag_service_key, tag = search_text, include_current = self._include_current, include_pending = self._include_pending, add_namespaceless = True )
predicates = HydrusGlobals.client_controller.Read( 'autocomplete_predicates', file_service_key = self._file_service_key, tag_service_key = self._tag_service_key, tag = search_text, include_current = include_current, include_pending = include_pending, add_namespaceless = True )
predicates = siblings_manager.CollapsePredicates( predicates )
@ -937,7 +978,7 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
self._cache_text = search_text
self._cached_results = HydrusGlobals.client_controller.Read( 'autocomplete_predicates', file_service_key = self._file_service_key, tag_service_key = self._tag_service_key, half_complete_tag = search_text, include_current = self._include_current, include_pending = self._include_pending, add_namespaceless = True )
self._cached_results = HydrusGlobals.client_controller.Read( 'autocomplete_predicates', file_service_key = self._file_service_key, tag_service_key = self._tag_service_key, half_complete_tag = search_text, include_current = include_current, include_pending = include_pending, add_namespaceless = True )
self._cached_results = siblings_manager.CollapsePredicates( self._cached_results )
@ -962,22 +1003,36 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
else: tags_managers.append( m.GetTagsManager() )
lists_of_current_tags = [ list( tags_manager.GetCurrent( self._tag_service_key ) ) for tags_manager in tags_managers ]
lists_of_pending_tags = [ list( tags_manager.GetPending( self._tag_service_key ) ) for tags_manager in tags_managers ]
current_tags_flat_iterable = itertools.chain.from_iterable( lists_of_current_tags )
pending_tags_flat_iterable = itertools.chain.from_iterable( lists_of_pending_tags )
current_tags_flat = [ tag for tag in current_tags_flat_iterable if ClientSearch.SearchEntryMatchesTag( search_text, tag ) ]
pending_tags_flat = [ tag for tag in pending_tags_flat_iterable if ClientSearch.SearchEntryMatchesTag( search_text, tag ) ]
current_tags_to_count = collections.Counter( current_tags_flat )
pending_tags_to_count = collections.Counter( pending_tags_flat )
tags_to_do = set()
if self._include_current: tags_to_do.update( current_tags_to_count.keys() )
if self._include_pending: tags_to_do.update( pending_tags_to_count.keys() )
current_tags_to_count = collections.Counter()
pending_tags_to_count = collections.Counter()
if self._file_search_context.IncludeCurrentTags():
lists_of_current_tags = [ list( tags_manager.GetCurrent( self._tag_service_key ) ) for tags_manager in tags_managers ]
current_tags_flat_iterable = itertools.chain.from_iterable( lists_of_current_tags )
current_tags_flat = ( tag for tag in current_tags_flat_iterable if ClientSearch.SearchEntryMatchesTag( search_text, tag ) )
current_tags_to_count.update( current_tags_flat )
tags_to_do.update( current_tags_to_count.keys() )
if self._file_search_context.IncludePendingTags():
lists_of_pending_tags = [ list( tags_manager.GetPending( self._tag_service_key ) ) for tags_manager in tags_managers ]
pending_tags_flat_iterable = itertools.chain.from_iterable( lists_of_pending_tags )
pending_tags_flat = ( tag for tag in pending_tags_flat_iterable if ClientSearch.SearchEntryMatchesTag( search_text, tag ) )
pending_tags_to_count.update( pending_tags_flat )
tags_to_do.update( pending_tags_to_count.keys() )
predicates = [ ClientSearch.Predicate( HC.PREDICATE_TYPE_TAG, tag, inclusive = inclusive, counts = { HC.CURRENT : current_tags_to_count[ tag ], HC.PENDING : pending_tags_to_count[ tag ] } ) for tag in tags_to_do ]
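
A self-contained sketch of the new counting flow above. The tags manager and matcher interfaces are simplified stand-ins (no service key argument), but the shape is the same: only build a Counter for the statuses the search context actually includes.

import collections
import itertools

def CountMatchingTags( tags_managers, search_text, include_current, include_pending, matches ):
    
    current_tags_to_count = collections.Counter()
    pending_tags_to_count = collections.Counter()
    
    if include_current:
        
        current_tags_flat_iterable = itertools.chain.from_iterable( tm.GetCurrent() for tm in tags_managers )
        
        current_tags_to_count.update( tag for tag in current_tags_flat_iterable if matches( search_text, tag ) )
        
    
    if include_pending:
        
        pending_tags_flat_iterable = itertools.chain.from_iterable( tm.GetPending() for tm in tags_managers )
        
        pending_tags_to_count.update( tag for tag in pending_tags_flat_iterable if matches( search_text, tag ) )
        
    
    tags_to_do = set( current_tags_to_count ) | set( pending_tags_to_count )
    
    return ( tags_to_do, current_tags_to_count, pending_tags_to_count )
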
@ -1049,23 +1104,31 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
def GetFileSearchContext( self ):
return ClientSearch.FileSearchContext( file_service_key = self._file_service_key, tag_service_key = self._tag_service_key, include_current_tags = self._include_current, include_pending_tags = self._include_pending )
return self._file_search_context
def IncludeCurrent( self, page_key, value ):
if page_key == self._page_key: self._include_current = value
if page_key == self._page_key:
self._file_search_context.SetIncludeCurrentTags( value )
self._cache_text = ''
self._current_namespace = ''
wx.CallAfter( self.RefreshList )
HydrusGlobals.client_controller.pub( 'refresh_query', self._page_key )
def IncludePending( self, page_key, value ):
if page_key == self._page_key: self._include_pending = value
if page_key == self._page_key:
self._file_search_context.SetIncludePendingTags( value )
self._cache_text = ''
self._current_namespace = ''
wx.CallAfter( self.RefreshList )
HydrusGlobals.client_controller.pub( 'refresh_query', self._page_key )
def SetSynchronisedWait( self, page_key ):
@ -1974,7 +2037,7 @@ class ListBook( wx.Panel ):
def AddPage( self, name, page, select = False ):
if type( page ) != tuple:
if not isinstance( page, tuple ):
page.Hide()
@ -2278,7 +2341,7 @@ class ListBox( wx.ScrolledWindow ):
for term in self._selected_terms:
if type( term ) == ClientSearch.Predicate:
if isinstance( term, ClientSearch.Predicate ):
predicate_type = term.GetType()
@ -2846,7 +2909,7 @@ class ListBoxTags( ListBox ):
( term, ) = self._selected_terms
if type( term ) == ClientSearch.Predicate:
if isinstance( term, ClientSearch.Predicate ):
if term.GetType() == HC.PREDICATE_TYPE_TAG:
@ -3824,7 +3887,7 @@ class ListBoxTagsSelection( ListBoxTags ):
self._TextsHaveChanged()
def ChangeTagRepository( self, service_key ):
def ChangeTagService( self, service_key ):
self._tag_service_key = service_key
@ -3964,7 +4027,7 @@ class ListBoxTagsSelectionManagementPanel( ListBoxTagsSelection ):
HydrusGlobals.client_controller.sub( self, 'IncrementTagsByMediaPubsub', 'increment_tags_selection' )
HydrusGlobals.client_controller.sub( self, 'SetTagsByMediaPubsub', 'new_tags_selection' )
HydrusGlobals.client_controller.sub( self, 'ChangeTagRepositoryPubsub', 'change_tag_repository' )
HydrusGlobals.client_controller.sub( self, 'ChangeTagServicePubsub', 'change_tag_service' )
def _Activate( self ):
@ -3999,9 +4062,9 @@ class ListBoxTagsSelectionManagementPanel( ListBoxTagsSelection ):
def ChangeTagRepositoryPubsub( self, page_key, service_key ):
def ChangeTagServicePubsub( self, page_key, service_key ):
if page_key == self._page_key: self.ChangeTagRepository( service_key )
if page_key == self._page_key: self.ChangeTagService( service_key )
def IncrementTagsByMediaPubsub( self, page_key, media ):
@ -5973,7 +6036,7 @@ class StaticBoxSorterForListBoxTags( StaticBox ):
self.AddF( self._sorter, CC.FLAGS_EXPAND_PERPENDICULAR )
def ChangeTagRepository( self, service_key ): self._tags_box.ChangeTagRepository( service_key )
def ChangeTagService( self, service_key ): self._tags_box.ChangeTagService( service_key )
def EventSort( self, event ):

View File

@ -221,14 +221,23 @@ class Dialog( wx.Dialog ):
if parent is not None and position == 'topleft':
if issubclass( type( parent ), wx.TopLevelWindow ): parent_tlp = parent
else: parent_tlp = parent.GetTopLevelParent()
if isinstance( parent, wx.TopLevelWindow ):
parent_tlp = parent
else:
parent_tlp = parent.GetTopLevelParent()
( pos_x, pos_y ) = parent_tlp.GetPositionTuple()
pos = ( pos_x + 50, pos_y + 100 )
else: pos = wx.DefaultPosition
else:
pos = wx.DefaultPosition
if not HC.PLATFORM_LINUX and parent is not None:
@ -4480,7 +4489,7 @@ class DialogShortcuts( Dialog ):
else:
if type( service_key ) == ClientData.ClientServiceIdentifier: service_key = service_key.GetServiceKey()
if isinstance( service_key, ClientData.ClientServiceIdentifier ): service_key = service_key.GetServiceKey()
try:

View File

@ -543,7 +543,7 @@ class DialogManageBoorus( ClientGUIDialogs.Dialog ):
thing = yaml.safe_load( file )
if type( thing ) == ClientData.Booru:
if isinstance( thing, ClientData.Booru ):
booru = thing
@ -1650,12 +1650,7 @@ class DialogManageExportFoldersEdit( ClientGUIDialogs.Dialog ):
self._predicates_box = ClientGUICommon.ListBoxTagsPredicates( self._query_box, self._page_key, predicates )
file_service_key = file_search_context.GetFileServiceKey()
tag_service_key = file_search_context.GetTagServiceKey()
include_current = file_search_context.IncludeCurrentTags()
include_pending = file_search_context.IncludePendingTags()
self._searchbox = ClientGUICommon.AutoCompleteDropdownTagsRead( self._query_box, self._page_key, file_service_key = file_service_key, tag_service_key = tag_service_key, include_current = include_current, include_pending = include_pending )
self._searchbox = ClientGUICommon.AutoCompleteDropdownTagsRead( self._query_box, self._page_key, file_search_context )
#
@ -1945,7 +1940,7 @@ class DialogManageImageboards( ClientGUIDialogs.Dialog ):
thing = yaml.safe_load( file )
if type( thing ) == dict:
if isinstance( thing, dict ):
( name, imageboards ) = thing.items()[0]
@ -1960,10 +1955,10 @@ class DialogManageImageboards( ClientGUIDialogs.Dialog ):
for imageboard in imageboards:
if type( imageboard ) == ClientData.Imageboard: page.UpdateImageboard( imageboard )
if isinstance( imageboard, ClientData.Imageboard ): page.UpdateImageboard( imageboard )
elif type( thing ) == ClientData.Imageboard:
elif isinstance( thing, ClientData.Imageboard ):
imageboard = thing
@ -3612,18 +3607,25 @@ class DialogManageOptions( ClientGUIDialogs.Dialog ):
self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) )
self._idle_panel = ClientGUICommon.StaticBox( self, 'when to run high cpu jobs' )
self._jobs_panel = ClientGUICommon.StaticBox( self, 'when to run high cpu jobs' )
self._maintenance_panel = ClientGUICommon.StaticBox( self, 'maintenance period' )
self._processing_panel = ClientGUICommon.StaticBox( self, 'processing' )
self._idle_panel = ClientGUICommon.StaticBox( self._jobs_panel, 'idle' )
self._shutdown_panel = ClientGUICommon.StaticBox( self._jobs_panel, 'shutdown' )
#
self._idle_normal = wx.CheckBox( self._idle_panel )
self._idle_normal.Bind( wx.EVT_CHECKBOX, self.EventIdleNormal )
self._idle_period = ClientGUICommon.NoneableSpinCtrl( self._idle_panel, '', min = 1, max = 1000, multiplier = 60, unit = 'minutes', none_phrase = 'ignore normal browsing' )
self._idle_mouse_period = ClientGUICommon.NoneableSpinCtrl( self._idle_panel, '', min = 1, max = 1000, multiplier = 60, unit = 'minutes', none_phrase = 'ignore mouse movements' )
self._idle_cpu_max = ClientGUICommon.NoneableSpinCtrl( self._idle_panel, '', min = 0, max = 99, unit = '%', none_phrase = 'ignore cpu usage' )
self._idle_cpu_max = ClientGUICommon.NoneableSpinCtrl( self._idle_panel, '', min = 5, max = 99, unit = '%', none_phrase = 'ignore cpu usage' )
self._idle_shutdown = ClientGUICommon.BetterChoice( self._idle_panel )
#
self._idle_shutdown = ClientGUICommon.BetterChoice( self._shutdown_panel )
for idle_id in ( CC.IDLE_NOT_ON_SHUTDOWN, CC.IDLE_ON_SHUTDOWN, CC.IDLE_ON_SHUTDOWN_ASK_FIRST ):
@ -3632,10 +3634,14 @@ class DialogManageOptions( ClientGUIDialogs.Dialog ):
self._idle_shutdown.Bind( wx.EVT_CHOICE, self.EventIdleShutdown )
self._idle_shutdown_max_minutes = wx.SpinCtrl( self._idle_panel, min = 1, max = 1440 )
self._idle_shutdown_max_minutes = wx.SpinCtrl( self._shutdown_panel, min = 1, max = 1440 )
#
self._maintenance_vacuum_period = ClientGUICommon.NoneableSpinCtrl( self._maintenance_panel, '', min = 1, max = 365, multiplier = 86400, none_phrase = 'do not automatically vacuum' )
#
self._processing_phase = wx.SpinCtrl( self._processing_panel, min = 0, max = 100000 )
self._processing_phase.SetToolTipString( 'how long this client will delay processing updates after they are due. useful if you have multiple clients and do not want them to process at the same time' )
@ -3645,6 +3651,7 @@ class DialogManageOptions( ClientGUIDialogs.Dialog ):
self._idle_period.SetValue( HC.options[ 'idle_period' ] )
self._idle_mouse_period.SetValue( HC.options[ 'idle_mouse_period' ] )
self._idle_cpu_max.SetValue( HC.options[ 'idle_cpu_max' ] )
self._idle_shutdown.SelectClientData( HC.options[ 'idle_shutdown' ] )
self._idle_shutdown_max_minutes.SetValue( HC.options[ 'idle_shutdown_max_minutes' ] )
@ -3658,34 +3665,51 @@ class DialogManageOptions( ClientGUIDialogs.Dialog ):
gridbox.AddGrowableCol( 1, 1 )
gridbox.AddF( wx.StaticText( self._idle_panel, label = 'Run jobs when the client is not busy?: ' ), CC.FLAGS_MIXED )
gridbox.AddF( wx.StaticText( self._idle_panel, label = 'Run maintenance jobs when the client is idle and the system is not otherwise busy?: ' ), CC.FLAGS_MIXED )
gridbox.AddF( self._idle_normal, CC.FLAGS_MIXED )
gridbox.AddF( wx.StaticText( self._idle_panel, label = 'Assume the client is busy if general browsing activity has occured in the past: ' ), CC.FLAGS_MIXED )
gridbox.AddF( wx.StaticText( self._idle_panel, label = 'Assume the client is idle if no general browsing activity has occured in the past: ' ), CC.FLAGS_MIXED )
gridbox.AddF( self._idle_period, CC.FLAGS_MIXED )
gridbox.AddF( wx.StaticText( self._idle_panel, label = 'Assume the client is busy if the mouse has been moved in the past: ' ), CC.FLAGS_MIXED )
gridbox.AddF( wx.StaticText( self._idle_panel, label = 'Assume the client is idle if the mouse has not been moved in the past: ' ), CC.FLAGS_MIXED )
gridbox.AddF( self._idle_mouse_period, CC.FLAGS_MIXED )
gridbox.AddF( wx.StaticText( self._idle_panel, label = 'Assume the client is busy if any CPU core has recent average usage above: ' ), CC.FLAGS_MIXED )
gridbox.AddF( wx.StaticText( self._idle_panel, label = 'Assume the system is busy if any CPU core has recent average usage above: ' ), CC.FLAGS_MIXED )
gridbox.AddF( self._idle_cpu_max, CC.FLAGS_MIXED )
gridbox.AddF( wx.StaticText( self._idle_panel, label = 'Run jobs on shutdown?: ' ), CC.FLAGS_MIXED )
self._idle_panel.AddF( gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )
#
gridbox = wx.FlexGridSizer( 0, 2 )
gridbox.AddGrowableCol( 1, 1 )
gridbox.AddF( wx.StaticText( self._shutdown_panel, label = 'Run jobs on shutdown?: ' ), CC.FLAGS_MIXED )
gridbox.AddF( self._idle_shutdown, CC.FLAGS_MIXED )
gridbox.AddF( wx.StaticText( self._idle_panel, label = 'Max number of minutes to run shutdown jobs: ' ), CC.FLAGS_MIXED )
gridbox.AddF( wx.StaticText( self._shutdown_panel, label = 'Max number of minutes to run shutdown jobs: ' ), CC.FLAGS_MIXED )
gridbox.AddF( self._idle_shutdown_max_minutes, CC.FLAGS_MIXED )
self._shutdown_panel.AddF( gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )
#
text = 'CPU-heavy jobs like maintenance routines and repository synchronisation processing will stutter or lock up your gui, so they do not normally run when you are searching for and looking at files.'
text += os.linesep * 2
text += 'You can set these jobs to run only when the client is not busy, or only during shutdown, or neither, or both.'
text += 'You can set them to run only when the client is idle, or only during shutdown, or neither, or both.'
text += os.linesep * 2
text += 'If the client switches from idle to not idle, it will try to abandon any jobs it is half way through.'
text += os.linesep * 2
text += 'If the client believes the system is busy, it will not start jobs.'
st = wx.StaticText( self._idle_panel, label = text )
st = wx.StaticText( self._jobs_panel, label = text )
st.Wrap( 500 )
st.Wrap( 550 )
self._idle_panel.AddF( st, CC.FLAGS_EXPAND_PERPENDICULAR )
self._idle_panel.AddF( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
self._jobs_panel.AddF( st, CC.FLAGS_EXPAND_PERPENDICULAR )
self._jobs_panel.AddF( self._idle_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
self._jobs_panel.AddF( self._shutdown_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
#
@ -3713,7 +3737,7 @@ class DialogManageOptions( ClientGUIDialogs.Dialog ):
vbox = wx.BoxSizer( wx.VERTICAL )
vbox.AddF( self._idle_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._jobs_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._maintenance_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.AddF( self._processing_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
@ -9219,7 +9243,7 @@ class DialogManageTags( ClientGUIDialogs.Dialog ):
self._remove_tags = wx.Button( self, label = text )
self._remove_tags.Bind( wx.EVT_BUTTON, self.EventRemoveTags )
self._tags_box.ChangeTagRepository( self._tag_service_key )
self._tags_box.ChangeTagService( self._tag_service_key )
self.SetMedia( media )

View File

@ -128,10 +128,7 @@ def CreateManagementControllerQuery( file_service_key, file_search_context, sear
management_controller.SetVariable( 'file_search_context', file_search_context )
management_controller.SetVariable( 'search_enabled', search_enabled )
management_controller.SetVariable( 'synchronised', True )
management_controller.SetVariable( 'include_current', True )
management_controller.SetVariable( 'include_pending', True )
return management_controller
@ -2544,14 +2541,9 @@ class ManagementPanelQuery( ManagementPanel ):
self._current_predicates_box = ClientGUICommon.ListBoxTagsPredicates( self._search_panel, self._page_key, initial_predicates )
file_service_key = self._management_controller.GetKey( 'file_service' )
tag_service_key = self._management_controller.GetKey( 'tag_service' )
include_current = self._management_controller.GetVariable( 'include_current' )
include_pending = self._management_controller.GetVariable( 'include_pending' )
synchronised = self._management_controller.GetVariable( 'synchronised' )
self._searchbox = ClientGUICommon.AutoCompleteDropdownTagsRead( self._search_panel, self._page_key, file_service_key, tag_service_key, self._page.GetMedia, include_current = include_current, include_pending = include_pending, synchronised = synchronised )
self._searchbox = ClientGUICommon.AutoCompleteDropdownTagsRead( self._search_panel, self._page_key, file_search_context, media_callable = self._page.GetMedia, synchronised = synchronised )
self._search_panel.AddF( self._current_predicates_box, CC.FLAGS_EXPAND_PERPENDICULAR )
self._search_panel.AddF( self._searchbox, CC.FLAGS_EXPAND_PERPENDICULAR )
@ -2567,16 +2559,16 @@ class ManagementPanelQuery( ManagementPanel ):
self.SetSizer( vbox )
if len( initial_predicates ) > 0 and not file_search_context.IsComplete(): wx.CallAfter( self._DoQuery )
if len( initial_predicates ) > 0 and not file_search_context.IsComplete():
wx.CallAfter( self._DoQuery )
self._controller.sub( self, 'AddMediaResultsFromQuery', 'add_media_results_from_query' )
self._controller.sub( self, 'ChangeFileRepositoryPubsub', 'change_file_repository' )
self._controller.sub( self, 'ChangeTagRepositoryPubsub', 'change_tag_repository' )
self._controller.sub( self, 'IncludeCurrent', 'notify_include_current' )
self._controller.sub( self, 'IncludePending', 'notify_include_pending' )
self._controller.sub( self, 'SearchImmediately', 'notify_search_immediately' )
self._controller.sub( self, 'ShowQuery', 'file_query_done' )
self._controller.sub( self, 'RefreshQuery', 'refresh_query' )
self._controller.sub( self, 'ChangeFileServicePubsub', 'change_file_service' )
def _DoQuery( self ):
@ -2591,21 +2583,19 @@ class ManagementPanelQuery( ManagementPanel ):
try:
file_service_key = self._management_controller.GetKey( 'file_service' )
tag_service_key = self._management_controller.GetKey( 'tag_service' )
include_current = self._management_controller.GetVariable( 'include_current' )
include_pending = self._management_controller.GetVariable( 'include_pending' )
file_search_context = self._searchbox.GetFileSearchContext()
current_predicates = self._current_predicates_box.GetPredicates()
search_context = ClientSearch.FileSearchContext( file_service_key, tag_service_key, include_current, include_pending, current_predicates )
file_search_context.SetPredicates( current_predicates )
self._management_controller.SetVariable( 'file_search_context', search_context )
self._management_controller.SetVariable( 'file_search_context', file_search_context )
file_service_key = file_search_context.GetFileServiceKey()
if len( current_predicates ) > 0:
self._controller.StartFileQuery( self._query_key, search_context )
self._controller.StartFileQuery( self._query_key, file_search_context )
panel = ClientGUIMedia.MediaPanelLoading( self._page, self._page_key, file_service_key )
@ -2643,24 +2633,12 @@ class ManagementPanelQuery( ManagementPanel ):
if query_key == self._query_key: self._controller.pub( 'add_media_results', self._page_key, media_results, append = False )
def ChangeFileRepositoryPubsub( self, page_key, service_key ):
def ChangeFileServicePubsub( self, page_key, service_key ):
if page_key == self._page_key:
self._management_controller.SetKey( 'file_service', service_key )
self._DoQuery()
def ChangeTagRepositoryPubsub( self, page_key, service_key ):
if page_key == self._page_key:
self._management_controller.SetKey( 'tag_service', service_key )
self._DoQuery()
def CleanBeforeDestroy( self ):
@ -2672,33 +2650,22 @@ class ManagementPanelQuery( ManagementPanel ):
def GetPredicates( self ):
if hasattr( self, '_current_predicates_box' ): return self._current_predicates_box.GetPredicates()
else: return []
def IncludeCurrent( self, page_key, value ):
if page_key == self._page_key:
if self._search_enabled:
self._management_controller.SetVariable( 'include_current', value )
return self._current_predicates_box.GetPredicates()
self._DoQuery()
else:
def IncludePending( self, page_key, value ):
if page_key == self._page_key:
self._management_controller.SetVariable( 'include_pending', value )
self._DoQuery()
return []
def RefreshQuery( self, page_key ):
if page_key == self._page_key: self._DoQuery()
if page_key == self._page_key:
self._DoQuery()
def SearchImmediately( self, page_key, value ):

View File

@ -300,7 +300,7 @@ class PanelPredicateSystemMime( PanelPredicateSystem ):
mimes = system_predicates[ 'mime' ]
if type( mimes ) == int:
if isinstance( mimes, int ):
mimes = ( mimes, )

View File

@ -314,26 +314,50 @@ class MediaList( object ):
self._hashes = set()
for media in self._collected_media: self._hashes.update( media.GetHashes() )
for media in self._singleton_media: self._hashes.add( media.GetHash() )
for media in self._collected_media:
self._hashes.update( media.GetHashes() )
for media in self._singleton_media:
self._hashes.add( media.GetHash() )
def _RemoveMedia( self, singleton_media, collected_media ):
if type( singleton_media ) != set: singleton_media = set( singleton_media )
if type( collected_media ) != set: collected_media = set( collected_media )
if not isinstance( singleton_media, set ):
singleton_media = set( singleton_media )
if not isinstance( collected_media, set ):
collected_media = set( collected_media )
self._singleton_media.difference_update( singleton_media )
self._collected_media.difference_update( collected_media )
keys_to_remove = [ key for ( key, media ) in self._collect_map_singletons if media in singleton_media ]
for key in keys_to_remove: del self._collect_map_singletons[ key ]
for key in keys_to_remove:
del self._collect_map_singletons[ key ]
keys_to_remove = [ key for ( key, media ) in self._collect_map_collected if media in collected_media ]
for key in keys_to_remove: del self._collect_map_collected[ key ]
for key in keys_to_remove:
del self._collect_map_collected[ key ]
self._sorted_media.remove_items( singleton_media.union( collected_media ) )
self._RecalcHashes()
def AddMedia( self, new_media, append = True ):
@ -570,8 +594,6 @@ class MediaList( object ):
self._RemoveMedia( affected_singleton_media, affected_collected_media )
self._RecalcHashes()
@ -604,8 +626,6 @@ class MediaList( object ):
self._RemoveMedia( self._singleton_media, self._collected_media )
self._RecalcHashes()
else:
for media in self._collected_media: media.ResetService( service_key )
@ -741,6 +761,8 @@ class MediaCollection( MediaList, Media ):
def _RecalcInternals( self ):
self._RecalcHashes()
self._archive = True in ( media.HasArchive() for media in self._sorted_media )
self._inbox = True in ( media.HasInbox() for media in self._sorted_media )

View File

@ -292,6 +292,21 @@ class FileSearchContext( HydrusSerialisable.SerialisableBase ):
def IsComplete( self ): return self._search_complete
def SetComplete( self ): self._search_complete = True
def SetFileServiceKey( self, file_service_key ):
self._file_service_key = file_service_key
def SetIncludeCurrentTags( self, value ):
self._include_current_tags = value
def SetIncludePendingTags( self, value ):
self._include_pending_tags = value
def SetPredicates( self, predicates ):
self._predicates = predicates
@ -299,6 +314,11 @@ class FileSearchContext( HydrusSerialisable.SerialisableBase ):
self._InitialiseTemporaryVariables()
def SetTagServiceKey( self, tag_service_key ):
self._tag_service_key = tag_service_key
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_FILE_SEARCH_CONTEXT ] = FileSearchContext
class FileSystemPredicates( object ):
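
A hedged usage sketch for the new setters: the autocomplete now holds one FileSearchContext and mutates it in place rather than rebuilding it from loose variables. The constructor keywords below come from the old GetFileSearchContext call; the service keys are placeholders, and the import assumes the client's include directory is on sys.path:

import ClientSearch

example_file_service_key = 'example file service key' # placeholder; real keys are raw byte strings
example_tag_service_key = 'example tag service key'

search_context = ClientSearch.FileSearchContext( file_service_key = example_file_service_key, tag_service_key = example_tag_service_key, include_current_tags = True, include_pending_tags = True )

# later, when the user picks a different service or flips an include button:

search_context.SetTagServiceKey( 'another tag service key' )
search_context.SetIncludePendingTags( False )
search_context.SetPredicates( [] )
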
@ -576,7 +596,7 @@ class Predicate( HydrusSerialisable.SerialisableBase ):
if counts is None: counts = {}
if type( value ) == list:
if isinstance( value, list ):
value = tuple( value )
@ -668,7 +688,7 @@ class Predicate( HydrusSerialisable.SerialisableBase ):
self._value = serialisable_value
if type( self._value ) == list:
if isinstance( self._value, list ):
self._value = tuple( self._value )
@ -735,7 +755,10 @@ class Predicate( HydrusSerialisable.SerialisableBase ):
def GetType( self ): return self._predicate_type
def GetType( self ):
return self._predicate_type
def GetUnicode( self, with_count = True ):

View File

@ -53,7 +53,7 @@ options = {}
# Misc
NETWORK_VERSION = 17
SOFTWARE_VERSION = 189
SOFTWARE_VERSION = 190
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )

View File

@ -25,6 +25,8 @@ class HydrusController( object ):
self._model_shutdown = False
self._view_shutdown = False
self._db = None
self._pubsub = HydrusPubSub.HydrusPubSub( self, self.pubsub_binding_errors_to_ignore )
self._currently_doing_pubsub = False
@ -185,7 +187,10 @@ class HydrusController( object ):
self._model_shutdown = True
HydrusGlobals.model_shutdown = True
while not self._db.LoopIsFinished(): time.sleep( 0.1 )
if self._db is not None:
while not self._db.LoopIsFinished(): time.sleep( 0.1 )
def ShutdownView( self ):
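
A small sketch of what the guard above protects against: _db now starts as None, so a shutdown that happens before the db ever boots must not poll it. Everything except LoopIsFinished and the 0.1s poll is a stand-in:

import time

class ToyController( object ):
    
    def __init__( self ):
        
        self._db = None # only assigned once the db boots successfully
        
    
    def ShutdownModel( self ):
        
        if self._db is not None:
            
            while not self._db.LoopIsFinished():
                
                time.sleep( 0.1 )
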

View File

@ -127,7 +127,10 @@ class HydrusDB( object ):
create_db = False
if not os.path.exists( self._db_path ): create_db = True
if not os.path.exists( self._db_path ):
create_db = True
self._InitDBCursor()
@ -155,9 +158,19 @@ class HydrusDB( object ):
self._c.execute( 'DROP TABLE IF EXISTS ratings_aggregates;' )
self._c.execute( 'PRAGMA cache_size = -50000;' )
self._c.execute( 'PRAGMA foreign_keys = ON;' )
self._c.execute( 'PRAGMA synchronous = 1;' )
self._c.execute( 'PRAGMA journal_mode = WAL;' )
self._c.execute( 'PRAGMA synchronous = 1;' )
try:
self._c.execute( 'SELECT * FROM sqlite_master;' ).fetchone()
except sqlite3.OperationalError:
self._c.execute( 'PRAGMA journal_mode = TRUNCATE;' )
self._c.execute( 'SELECT * FROM sqlite_master;' ).fetchone()
def _ManageDBError( self, job, e ):
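
The same fallback as a standalone sqlite3 sketch: set WAL, do a cheap read, and if that read raises OperationalError (as it can for WAL on some network or otherwise unusual filesystems), drop to TRUNCATE journalling. The filename is a placeholder:

import sqlite3

db = sqlite3.connect( 'client.db', isolation_level = None )

c = db.cursor()

c.execute( 'PRAGMA journal_mode = WAL;' )
c.execute( 'PRAGMA synchronous = 1;' )

try:
    
    c.execute( 'SELECT * FROM sqlite_master;' ).fetchone()
    
except sqlite3.OperationalError:
    
    # WAL failed with a read disk i/o style error, so fall back to a journal mode that works everywhere
    c.execute( 'PRAGMA journal_mode = TRUNCATE;' )
    
    c.execute( 'SELECT * FROM sqlite_master;' ).fetchone()
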

View File

@ -938,11 +938,11 @@ def TimeUntil( timestamp ):
def ToByteString( text_producing_object ):
if type( text_producing_object ) == unicode:
if isinstance( text_producing_object, unicode ):
return text_producing_object.encode( 'utf-8' )
elif type( text_producing_object ) == str:
elif isinstance( text_producing_object, str ):
return text_producing_object
@ -960,7 +960,7 @@ def ToByteString( text_producing_object ):
def ToUnicode( text_producing_object ):
if type( text_producing_object ) in ( str, unicode, bs4.element.NavigableString ):
if isinstance( text_producing_object, ( str, unicode, bs4.element.NavigableString ) ):
text = text_producing_object
@ -976,7 +976,7 @@ def ToUnicode( text_producing_object ):
if type( text ) != unicode:
if not isinstance( text, unicode ):
try:
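
The two hunks above are the same isinstance treatment applied to the Python 2 string coercers. A cut-down sketch, with the bs4 NavigableString case and the non-string fallbacks simplified away (the str() fallback here is an assumption, not the real decode logic):

def ToByteString( text_producing_object ):
    
    if isinstance( text_producing_object, unicode ):
        
        return text_producing_object.encode( 'utf-8' )
        
    elif isinstance( text_producing_object, str ):
        
        return text_producing_object
        
    else:
        
        return str( text_producing_object )
        
    

def ToUnicode( text_producing_object ):
    
    if isinstance( text_producing_object, ( str, unicode ) ):
        
        text = text_producing_object
        
    else:
        
        text = str( text_producing_object )
        
    
    if not isinstance( text, unicode ):
        
        text = text.decode( 'utf-8' )
        
    
    return text
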
@ -1626,22 +1626,46 @@ class ContentUpdate( object ):
hashes = set( ( hash, ) )
elif self._action in ( HC.CONTENT_UPDATE_ARCHIVE, HC.CONTENT_UPDATE_DELETE, HC.CONTENT_UPDATE_UNDELETE, HC.CONTENT_UPDATE_INBOX, HC.CONTENT_UPDATE_PEND, HC.CONTENT_UPDATE_RESCIND_PEND, HC.CONTENT_UPDATE_RESCIND_PETITION ): hashes = self._row
elif self._action == HC.CONTENT_UPDATE_PETITION: ( hashes, reason ) = self._row
elif self._action in ( HC.CONTENT_UPDATE_ARCHIVE, HC.CONTENT_UPDATE_DELETE, HC.CONTENT_UPDATE_UNDELETE, HC.CONTENT_UPDATE_INBOX, HC.CONTENT_UPDATE_PEND, HC.CONTENT_UPDATE_RESCIND_PEND, HC.CONTENT_UPDATE_RESCIND_PETITION ):
hashes = self._row
elif self._action == HC.CONTENT_UPDATE_PETITION:
( hashes, reason ) = self._row
elif self._data_type == HC.CONTENT_TYPE_MAPPINGS:
if self._action == HC.CONTENT_UPDATE_ADVANCED: hashes = set()
elif self._action == HC.CONTENT_UPDATE_PETITION: ( tag, hashes, reason ) = self._row
else: ( tag, hashes ) = self._row
if self._action == HC.CONTENT_UPDATE_ADVANCED:
hashes = set()
elif self._action == HC.CONTENT_UPDATE_PETITION:
( tag, hashes, reason ) = self._row
else:
( tag, hashes ) = self._row
elif self._data_type in ( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_TYPE_TAG_SIBLINGS ):
hashes = set()
elif self._data_type in ( HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_TYPE_TAG_SIBLINGS ): hashes = set()
elif self._data_type == HC.CONTENT_TYPE_RATINGS:
if self._action == HC.CONTENT_UPDATE_ADD: ( rating, hashes ) = self._row
if self._action == HC.CONTENT_UPDATE_ADD:
( rating, hashes ) = self._row
if type( hashes ) != set: hashes = set( hashes )
if not isinstance( hashes, set ):
hashes = set( hashes )
return hashes

View File

@ -232,10 +232,10 @@ def GetMime( path ):
if hsaudio_object.valid:
if type( hsaudio_object.original ) == hsaudiotag.mpeg.Mpeg: return HC.AUDIO_MP3
elif type( hsaudio_object.original ) == hsaudiotag.flac.FLAC: return HC.AUDIO_FLAC
elif type( hsaudio_object.original ) == hsaudiotag.ogg.Vorbis: return HC.AUDIO_OGG
elif type( hsaudio_object.original ) == hsaudiotag.wma.WMADecoder: return HC.AUDIO_WMA
if isinstance( hsaudio_object.original, hsaudiotag.mpeg.Mpeg ): return HC.AUDIO_MP3
elif isinstance( hsaudio_object.original, hsaudiotag.flac.FLAC ): return HC.AUDIO_FLAC
elif isinstance( hsaudio_object.original, hsaudiotag.ogg.Vorbis ): return HC.AUDIO_OGG
elif isinstance( hsaudio_object.original, hsaudiotag.wma.WMADecoder ): return HC.AUDIO_WMA
return HC.APPLICATION_UNKNOWN
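
A dispatch-table variant of the same logic, taking the already-parsed hsaudiotag object as input; the mime strings stand in for hydrus's HC.AUDIO_* constants and are not the real values:

import hsaudiotag.flac
import hsaudiotag.mpeg
import hsaudiotag.ogg
import hsaudiotag.wma

AUDIO_TYPES_TO_MIMES = (
    ( hsaudiotag.mpeg.Mpeg, 'audio/mp3' ),
    ( hsaudiotag.flac.FLAC, 'audio/flac' ),
    ( hsaudiotag.ogg.Vorbis, 'audio/ogg' ),
    ( hsaudiotag.wma.WMADecoder, 'audio/x-ms-wma' )
)

def GetAudioMime( hsaudio_object ):
    
    if hsaudio_object.valid:
        
        for ( audio_type, mime ) in AUDIO_TYPES_TO_MIMES:
            
            if isinstance( hsaudio_object.original, audio_type ):
                
                return mime
                
            
        
    
    return 'application/octet-stream'
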

View File

@ -10,8 +10,10 @@ is_db_updated = False
db_profile_mode = False
pubsub_profile_mode = False
force_idle_mode = False
server_busy = False
do_idle_shutdown_work = False
shutdown_complete = False
restart = False
emergency_exit = False

View File

@ -57,7 +57,7 @@ class HydrusPubSub( object ):
except Exception as e:
if type( e ) not in self._binding_errors_to_ignore:
if not isinstance( e, self._binding_errors_to_ignore ):
raise

View File

@ -604,7 +604,6 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'BEGIN IMMEDIATE' )
self._c.execute( 'PRAGMA auto_vacuum = 0;' ) # none
self._c.execute( 'PRAGMA journal_mode = WAL;' )
now = HydrusData.GetNow()
@ -2302,131 +2301,6 @@ class DB( HydrusDB.HydrusDB ):
def _UpdateDB( self, version ):
if version == 131:
accounts_info = self._c.execute( 'SELECT * FROM accounts;' ).fetchall()
account_map_info = self._c.execute( 'SELECT * FROM account_map;' ).fetchall()
account_types_info = self._c.execute( 'SELECT * FROM account_types;' ).fetchall()
account_type_map_info = self._c.execute( 'SELECT * FROM account_type_map;' ).fetchall()
accounts_dict = { account_id : ( account_key, hashed_access_key ) for ( account_id, account_key, hashed_access_key ) in accounts_info }
account_types_dict = { account_type_id : ( title, account_type ) for ( account_type_id, title, account_type ) in account_types_info }
#
self._c.execute( 'DROP TABLE accounts;' )
self._c.execute( 'DROP TABLE account_map;' )
self._c.execute( 'DROP TABLE account_types;' )
self._c.execute( 'DROP TABLE account_type_map;' )
self._c.execute( 'CREATE TABLE accounts( account_id INTEGER PRIMARY KEY, service_id INTEGER REFERENCES services ON DELETE CASCADE, account_key BLOB_BYTES, hashed_access_key BLOB_BYTES, account_type_id INTEGER, created INTEGER, expires INTEGER, used_bytes INTEGER, used_requests INTEGER );' )
self._c.execute( 'CREATE UNIQUE INDEX accounts_account_key_index ON accounts ( account_key );' )
self._c.execute( 'CREATE UNIQUE INDEX accounts_hashed_access_key_index ON accounts ( hashed_access_key );' )
self._c.execute( 'CREATE UNIQUE INDEX accounts_service_id_account_id_index ON accounts ( service_id, account_id );' )
self._c.execute( 'CREATE INDEX accounts_service_id_account_type_id_index ON accounts ( service_id, account_type_id );' )
self._c.execute( 'CREATE TABLE account_types ( account_type_id INTEGER PRIMARY KEY, service_id INTEGER REFERENCES services ON DELETE CASCADE, title TEXT, account_type TEXT_YAML );' )
self._c.execute( 'CREATE UNIQUE INDEX account_types_service_id_account_type_id_index ON account_types ( service_id, account_type_id );' )
#
account_log_text = ''
existing_account_ids = set()
existing_account_type_ids = set()
next_account_id = max( accounts_dict.keys() ) + 1
next_account_type_id = max( account_types_dict.keys() ) + 1
account_tables = [ 'bans', 'contacts', 'file_petitions', 'mapping_petitions', 'mappings', 'messages', 'message_statuses', 'messaging_sessions', 'sessions', 'tag_parents', 'tag_siblings' ]
account_type_tables = [ 'accounts', 'registration_keys' ]
service_dict = { service_id : options[ 'port' ] for ( service_id, options ) in self._c.execute( 'SELECT service_id, options FROM services;' ) }
# have to do accounts first because account_types may update it!
for ( service_id, account_id, account_type_id, created, expires, used_bytes, used_requests ) in account_map_info:
( account_key, hashed_access_key ) = accounts_dict[ account_id ]
if account_id in existing_account_ids:
account_key = HydrusData.GenerateKey()
access_key = HydrusData.GenerateKey()
account_log_text += 'The account at port ' + str( service_dict[ service_id ] ) + ' now uses access key: ' + access_key.encode( 'hex' ) + os.linesep
hashed_access_key = hashlib.sha256( access_key ).digest()
new_account_id = next_account_id
next_account_id += 1
for table_name in account_tables:
self._c.execute( 'UPDATE ' + table_name + ' SET account_id = ? WHERE service_id = ? AND account_id = ?;', ( new_account_id, service_id, account_id ) )
self._c.execute( 'UPDATE bans SET admin_account_id = ? WHERE service_id = ? AND admin_account_id = ?;', ( new_account_id, service_id, account_id ) )
account_id = new_account_id
existing_account_ids.add( account_id )
self._c.execute( 'INSERT INTO accounts ( account_id, service_id, account_key, hashed_access_key, account_type_id, created, expires, used_bytes, used_requests ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ? );', ( account_id, service_id, sqlite3.Binary( account_key ), sqlite3.Binary( hashed_access_key ), account_type_id, created, expires, used_bytes, used_requests ) )
if len( account_log_text ) > 0:
with open( os.path.join( HC.BASE_DIR, 'update to v132 new access keys.txt' ), 'wb' ) as f:
f.write( HydrusData.ToByteString( account_log_text ) )
for ( service_id, account_type_id ) in account_type_map_info:
( title, account_type ) = account_types_dict[ account_type_id ]
if account_type_id in existing_account_type_ids:
new_account_type_id = next_account_type_id
next_account_type_id += 1
for table_name in account_type_tables:
self._c.execute( 'UPDATE ' + table_name + ' SET account_type_id = ? WHERE service_id = ? AND account_type_id = ?;', ( new_account_type_id, service_id, account_type_id ) )
account_type_id = new_account_type_id
existing_account_type_ids.add( account_type_id )
self._c.execute( 'INSERT INTO account_types ( account_type_id, service_id, title, account_type ) VALUES ( ?, ?, ?, ? );', ( account_type_id, service_id, title, account_type ) )
if version == 132:
dirs = ( HC.SERVER_FILES_DIR, HC.SERVER_THUMBNAILS_DIR )
for dir in dirs:
for prefix in HydrusData.IterateHexPrefixes():
new_dir = os.path.join( dir, prefix )
if not os.path.exists( new_dir ):
os.makedirs( new_dir )
if version == 155:
results = self._c.execute( 'SELECT service_id, account_type_id, account_type FROM account_types;' ).fetchall()
@ -2477,7 +2351,7 @@ class DB( HydrusDB.HydrusDB ):
( dump_type, dump_version, dump ) = json.loads( inefficient_string )
if type( dump ) not in ( unicode, str ):
if not isinstance( dump, ( unicode, str ) ):
continue

View File

@ -37,6 +37,7 @@ class TestClientDB( unittest.TestCase ):
c = db.cursor()
c.execute( 'DELETE FROM current_files;' )
c.execute( 'DELETE FROM files_info;' )
c.execute( 'DELETE FROM mappings;' )
@ -299,7 +300,10 @@ class TestClientDB( unittest.TestCase ):
path = os.path.join( HC.STATIC_DIR, 'hydrus.png' )
self._write( 'import_file', path )
( written_result, written_hash ) = self._write( 'import_file', path )
self.assertEqual( written_result, CC.STATUS_SUCCESSFUL )
self.assertEqual( written_hash, hash )
time.sleep( 1 )