diff --git a/help/changelog.html b/help/changelog.html
index b2bc3a9c..0f3d6e31 100755
--- a/help/changelog.html
+++ b/help/changelog.html
@@ -8,12 +8,37 @@
changelog
+ version 197
+
+ - on client boot, autocomplete caches for specific file/tag service cross-references are now initialised and populated. progress is shown on the splash window
+ - on client boot, surplus autocomplete caches are deleted
+ - on service add, new autocomplete caches are created
+ - on file add/delete, autocomplete caches are updated
+ - on mappings pend/add/rescind pend/delete, autocomplete caches are updated
+ - the new autocomplete caches are consulted for all non-'all known tags' queries
+ - the old autocomplete cache no longer stores counts for specific file services, and the remaining associated maintenance calls are deleted
+ - databases now start their own mainloops
+ - databases now wait for their mainloops to finish prepping any large caches before they return to the controller
+ - the client database waits for autocomplete caches to finish before it finishes its own mainloop
+ - the padding around flash and the animation bar is included more accurately in some media zoom calculations, which should eliminate some general zoom jankiness and accidental 100% flash zoom coincidences that filled up the whole canvas
+ - fixed some clientside server boot error spam when local server or booru had no port set
+ - account refreshes that fail due to a network error will spam less to the log
+ - fixed .txt unicode tag parsing from import folders, which was not decoding at the correct step
+ - administrator immediate repository syncs now sync thumbnail downloads if needed
+ - service thumbnail sync will no longer superfluously check the presence of thumbnails whose files are local
+ - if a tag entered into the manage tags dialog has a sibling that already exists for all files, then a new 'ignore, as the sibling already exists' choice will appear
+ - fixed an overcounting bug in 'selection tags' when importing and adding tags at the same time
+ - fixed a typo in repository sync status text that was overcounting total number of updates by one
+ - fixed youtube downloader, which broke with the new library on my new dev machine
+ - the way that tags and predicates are filtered against a tag autocomplete text entry is now much faster
+ - bumped up the default content update chunk threshold from 100 rows to 5,000, which seems to be speeding up processing significantly, with a cost to recovery latency--see how it works for you
+
version 196
- fixed the 8chan thread watcher for boards that host content on media.8ch.net
- improved the thread watcher url check logic so it won't lag with the new fix
- cleaned up the ac generation code a little
- - 'all known tags' ac counts are now summed from all the known tag services rather than calculated directly (a <= indictor for when these cases overlap will be forthcoming). this speeds up file add/delete, service reset, a/c fetch time, and general tag processing, and reduces the size of the db
+ - 'all known tags' ac counts are now summed from all the known tag services rather than calculated directly (a <= indicator for when these cases overlap will be forthcoming). this speeds up file add/delete, service reset, a/c fetch time, and general tag processing, and reduces the size of the db
- ac generation code now deals with 'is the entry text an exact match or not?' better
- ac generation code will now no longer produce non-exact-match siblings on an exact match search
- ac generation code will no longer save half complete search text into the db as new tags
diff --git a/include/ClientCaches.py b/include/ClientCaches.py
index b02b815f..ed166107 100644
--- a/include/ClientCaches.py
+++ b/include/ClientCaches.py
@@ -1612,9 +1612,11 @@ class TagSiblingsManager( object ):
else:
- key_based_matching_values = { self._siblings[ key ] for key in self._siblings.keys() if ClientSearch.SearchEntryMatchesTag( search_text, key, search_siblings = False ) }
+ matching_keys = ClientSearch.FilterTagsBySearchEntry( search_text, self._siblings.keys(), search_siblings = False )
- value_based_matching_values = { value for value in self._siblings.values() if ClientSearch.SearchEntryMatchesTag( search_text, value, search_siblings = False ) }
+ key_based_matching_values = { self._siblings[ key ] for key in matching_keys }
+
+ value_based_matching_values = ClientSearch.FilterTagsBySearchEntry( search_text, self._siblings.values(), search_siblings = False )
matching_values = key_based_matching_values.union( value_based_matching_values )
diff --git a/include/ClientController.py b/include/ClientController.py
index 78d3d1ae..e1b88eca 100755
--- a/include/ClientController.py
+++ b/include/ClientController.py
@@ -760,7 +760,13 @@ class Controller( HydrusController.HydrusController ):
- if self._booru_service is None and port is not None: StartServer()
+ if self._booru_service is None:
+
+ if port is not None:
+
+ StartServer()
+
+
else:
deferred = defer.maybeDeferred( self._booru_service.stopListening )
@@ -823,9 +829,12 @@ class Controller( HydrusController.HydrusController ):
- if self._local_service is None and port is not None:
+ if self._local_service is None:
- StartServer()
+ if port is not None:
+
+ StartServer()
+
else:
diff --git a/include/ClientDB.py b/include/ClientDB.py
index 85750237..63cfcd02 100755
--- a/include/ClientDB.py
+++ b/include/ClientDB.py
@@ -1056,8 +1056,6 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'DELETE FROM service_info WHERE service_id = ? AND info_type = ?;', ( service_id, HC.SERVICE_INFO_NUM_THUMBNAILS_LOCAL ) )
- self._UpdateAutocompleteTagCacheFromFiles( service_id, valid_hash_ids, 1 )
-
if service_id == self._local_file_service_id:
self._DeleteFiles( self._trash_service_id, valid_hash_ids, files_being_undeleted = True )
@@ -1070,6 +1068,37 @@ class DB( HydrusDB.HydrusDB ):
self._c.executemany( 'INSERT OR IGNORE INTO file_trash ( hash_id, timestamp ) VALUES ( ?, ? );', ( ( hash_id, now ) for hash_id in valid_hash_ids ) )
+ tag_service_ids = self._GetServiceIds( HC.TAG_SERVICES )
+
+ for tag_service_id in tag_service_ids:
+
+ ac_cache = self._ac_caches[ ( service_id, tag_service_id ) ]
+
+ ac_cache.SimpleWrite( 'add_files', valid_hash_ids )
+
+ current_mapping_ids_raw = self._c.execute( 'SELECT namespace_id, tag_id, hash_id FROM mappings WHERE service_id = ? AND status = ? AND hash_id IN ' + splayed_valid_hash_ids + ';', ( tag_service_id, HC.CURRENT ) ).fetchall()
+
+ if len( current_mapping_ids_raw ) > 0:
+
+ current_mapping_ids_dict = HydrusData.BuildKeyToSetDict( [ ( ( namespace_id, tag_id ), hash_id ) for ( namespace_id, tag_id, hash_id ) in current_mapping_ids_raw ] )
+
+ current_mapping_ids = [ ( namespace_id, tag_id, hash_ids ) for ( ( namespace_id, tag_id ), hash_ids ) in current_mapping_ids_dict.items() ]
+
+ ac_cache.SimpleWrite( 'add_mappings', current_mapping_ids )
+
+
+ pending_mapping_ids_raw = self._c.execute( 'SELECT namespace_id, tag_id, hash_id FROM mappings WHERE service_id = ? AND status = ? AND hash_id IN ' + splayed_valid_hash_ids + ';', ( tag_service_id, HC.PENDING ) ).fetchall()
+
+ if len( pending_mapping_ids_raw ) > 0:
+
+ pending_mapping_ids_dict = HydrusData.BuildKeyToSetDict( [ ( ( namespace_id, tag_id ), hash_id ) for ( namespace_id, tag_id, hash_id ) in pending_mapping_ids_raw ] )
+
+ pending_mapping_ids = [ ( namespace_id, tag_id, hash_ids ) for ( ( namespace_id, tag_id ), hash_ids ) in pending_mapping_ids_dict.items() ]
+
+ ac_cache.SimpleWrite( 'pend_mappings', pending_mapping_ids )
+
+
+
def _AddHydrusSession( self, service_key, session_key, expires ):
@@ -1131,9 +1160,20 @@ class DB( HydrusDB.HydrusDB ):
os.makedirs( update_dir )
- if 'first_timestamp' not in info: info[ 'first_timestamp' ] = None
- if 'next_download_timestamp' not in info: info[ 'next_download_timestamp' ] = 0
- if 'next_processing_timestamp' not in info: info[ 'next_processing_timestamp' ] = 0
+ if 'first_timestamp' not in info:
+
+ info[ 'first_timestamp' ] = None
+
+
+ if 'next_download_timestamp' not in info:
+
+ info[ 'next_download_timestamp' ] = 0
+
+
+ if 'next_processing_timestamp' not in info:
+
+ info[ 'next_processing_timestamp' ] = 0
+
info[ 'paused' ] = False
@@ -1149,6 +1189,33 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'INSERT INTO services ( service_key, service_type, name, info ) VALUES ( ?, ?, ?, ? );', ( sqlite3.Binary( service_key ), service_type, name, info ) )
+ service_id = self._c.lastrowid
+
+ ac_cache_pairs_to_create = []
+
+ if service_type in HC.TAG_SERVICES:
+
+ file_service_ids = self._GetServiceIds( ( HC.LOCAL_FILE, HC.FILE_REPOSITORY ) )
+
+ ac_cache_pairs_to_create.extend( [ ( file_service_id, service_id ) for file_service_id in file_service_ids ] )
+
+
+ if service_type in ( HC.LOCAL_FILE, HC.FILE_REPOSITORY ):
+
+ tag_service_ids = self._GetServiceIds( HC.TAG_SERVICES )
+
+ ac_cache_pairs_to_create.extend( [ ( service_id, tag_service_id ) for tag_service_id in tag_service_ids ] )
+
+
+ for ( file_service_id, tag_service_id ) in ac_cache_pairs_to_create:
+
+ db_filename = 'ac_cache_' + str( file_service_id ) + '_' + str( tag_service_id ) + '.db'
+
+ db_path = os.path.join( HC.CLIENT_CACHE_DIR, db_filename )
+
+ self._GenerateACCache( db_path, file_service_id, tag_service_id )
+
+
def _AddThumbnails( self, thumbnails ):
@@ -1156,7 +1223,10 @@ class DB( HydrusDB.HydrusDB ):
thumbnail_path = ClientFiles.GetExpectedThumbnailPath( hash, True )
- with open( thumbnail_path, 'wb' ) as f: f.write( thumbnail )
+ with open( thumbnail_path, 'wb' ) as f:
+
+ f.write( thumbnail )
+
phash = ClientImageHandling.GeneratePerceptualHash( thumbnail_path )
@@ -1445,6 +1515,18 @@ class DB( HydrusDB.HydrusDB ):
self._tag_archives = {}
+ for ac_cache in self._ac_caches.values():
+
+ ac_cache.Shutdown()
+
+ while not ( ac_cache.LoopIsFinished() or HydrusGlobals.model_shutdown ):
+
+ time.sleep( 0.1 )
+
+
+
+ self._ac_caches = {}
+
def _CopyFiles( self, hashes ):
@@ -1680,6 +1762,8 @@ class DB( HydrusDB.HydrusDB ):
init_service_info.append( ( CC.COMBINED_TAG_SERVICE_KEY, HC.COMBINED_TAG, CC.COMBINED_TAG_SERVICE_KEY ) )
init_service_info.append( ( CC.LOCAL_BOORU_SERVICE_KEY, HC.LOCAL_BOORU, CC.LOCAL_BOORU_SERVICE_KEY ) )
+ self._ac_caches = {}
+
for ( service_key, service_type, name ) in init_service_info:
info = {}
@@ -1687,6 +1771,18 @@ class DB( HydrusDB.HydrusDB ):
self._AddService( service_key, service_type, name, info )
+ for ac_cache in self._ac_caches.values():
+
+ ac_cache.Shutdown()
+
+ while not ( ac_cache.LoopIsFinished() or HydrusGlobals.model_shutdown ):
+
+ time.sleep( 0.1 )
+
+
+
+ del self._ac_caches
+
self._c.executemany( 'INSERT INTO yaml_dumps VALUES ( ?, ?, ? );', ( ( YAML_DUMP_ID_REMOTE_BOORU, name, booru ) for ( name, booru ) in ClientDefaults.GetDefaultBoorus().items() ) )
self._c.executemany( 'INSERT INTO yaml_dumps VALUES ( ?, ?, ? );', ( ( YAML_DUMP_ID_IMAGEBOARD, name, imageboards ) for ( name, imageboards ) in ClientDefaults.GetDefaultImageboards() ) )
@@ -1747,8 +1843,6 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'DELETE FROM current_files WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) )
self._c.execute( 'DELETE FROM file_petitions WHERE service_id = ? AND hash_id IN ' + splayed_hash_ids + ';', ( service_id, ) )
- self._UpdateAutocompleteTagCacheFromFiles( service_id, hash_ids, -1 )
-
if service_id == self._local_file_service_id:
self._AddFiles( self._trash_service_id, rows )
@@ -1766,6 +1860,13 @@ class DB( HydrusDB.HydrusDB ):
+ ac_caches = self._GetACCaches( file_service_id = service_id )
+
+ for ac_cache in ac_caches:
+
+ ac_cache.SimpleWrite( 'delete_files', valid_hash_ids )
+
+
self.pub_after_commit( 'notify_new_pending' )
@@ -2123,21 +2224,82 @@ class DB( HydrusDB.HydrusDB ):
job_key.Finish()
- def _GenerateACCache( self, file_service_id, tag_service_id ):
+ def _GenerateACCache( self, db_path, file_service_id, tag_service_id ):
- db_filename = 'ac_cache_' + str( file_service_id ) + '_' + str( tag_service_id ) + '.db'
+ populate_it = False
- db_path = os.path.join( HC.CLIENT_CACHE_DIR, db_filename )
+ if not os.path.exists( db_path ):
+
+ populate_it = True
+
- # create the db
- # begin?
- # for every hash_id in the domain, add it
- # for every current mapping in the domain, add it
- # for every pending mapping in the domain, add it
- # commit?
+ ac_cache = ClientDBACCache.DB( self._controller, db_path, no_wal = self._no_wal )
+
+ if populate_it:
+
+ prefix = 'generating ac cache ' + HydrusData.ToUnicode( ( file_service_id, tag_service_id ) ) + ': '
+
+ self._controller.pub( 'splash_set_status_text', prefix + 'updating files' )
+
+ hash_ids = [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files WHERE service_id = ?;', ( file_service_id, ) ) ]
+
+ ac_cache.SimpleWriteSynchronous( 'add_files', hash_ids )
+
+ #
+
+ unique_mapping_ids = self._c.execute( 'SELECT namespace_id, tag_id FROM existing_tags;' ).fetchall()
+
+ num_done = 0
+ total_to_do = len( unique_mapping_ids )
+
+ for sub_unique_mapping_ids in HydrusData.SplitListIntoChunks( unique_mapping_ids, 5000 ):
+
+ self._controller.pub( 'splash_set_status_text', prefix + 'updating mappings ' + HydrusData.ConvertValueRangeToPrettyString( num_done, total_to_do ) )
+
+ current_mappings_ids = [ ( namespace_id, tag_id, [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM mappings, current_files USING ( hash_id ) WHERE current_files.service_id = ? AND mappings.service_id = ? AND namespace_id = ? AND tag_id = ? AND status = ?;', ( file_service_id, tag_service_id, namespace_id, tag_id, HC.CURRENT ) ) ] ) for ( namespace_id, tag_id ) in sub_unique_mapping_ids ]
+
+ if len( current_mappings_ids ) > 0:
+
+ ac_cache.SimpleWriteSynchronous( 'add_mappings', current_mappings_ids )
+
+
+ pending_mappings_ids = [ ( namespace_id, tag_id, [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM mappings, current_files USING ( hash_id ) WHERE current_files.service_id = ? AND mappings.service_id = ? AND namespace_id = ? AND tag_id = ? AND status = ?;', ( file_service_id, tag_service_id, namespace_id, tag_id, HC.PENDING ) ) ] ) for ( namespace_id, tag_id ) in sub_unique_mapping_ids ]
+
+ if len( pending_mappings_ids ) > 0:
+
+ ac_cache.SimpleWriteSynchronous( 'pend_mappings', pending_mappings_ids )
+
+
+ num_done += len( sub_unique_mapping_ids )
+
+
+
+ self._ac_caches[ ( file_service_id, tag_service_id ) ] = ac_cache
- def _GetAutocompleteCounts( self, tag_service_id, file_service_id, mapping_ids, there_was_a_namespace, add_namespaceless ):
+ def _GetACCaches( self, file_service_id = None, tag_service_id = None ):
+
+ result = []
+
+ for ( potential_file_service_id, potential_tag_service_id ) in self._ac_caches.keys():
+
+ if file_service_id is not None and file_service_id == potential_file_service_id:
+
+ result.append( self._ac_caches[ ( potential_file_service_id, potential_tag_service_id ) ] )
+
+
+ if tag_service_id is not None and tag_service_id == potential_tag_service_id:
+
+ result.append( self._ac_caches[ ( potential_file_service_id, potential_tag_service_id ) ] )
+
+
+
+ return result
+
+
+ def _GetAutocompleteCounts( self, tag_service_id, file_service_id, namespace_id_tag_ids, there_was_a_namespace, add_namespaceless ):
+
+ namespace_ids_to_tag_ids = HydrusData.BuildKeyToListDict( namespace_id_tag_ids )
if tag_service_id == self._combined_tag_service_id:
@@ -2150,57 +2312,69 @@ class DB( HydrusDB.HydrusDB ):
cache_results = []
- for search_tag_service_id in search_tag_service_ids:
+ if file_service_id == self._combined_file_service_id:
- sub_cache_results = []
-
- for ( namespace_id, tag_ids ) in HydrusData.BuildKeyToListDict( mapping_ids ).items():
+ for search_tag_service_id in search_tag_service_ids:
- sub_cache_results.extend( self._c.execute( 'SELECT namespace_id, tag_id, current_count, pending_count FROM autocomplete_tags_cache WHERE tag_service_id = ? AND file_service_id = ? AND namespace_id = ? AND tag_id IN ' + HydrusData.SplayListForDB( tag_ids ) + ';', ( search_tag_service_id, file_service_id, namespace_id ) ).fetchall() )
+ sub_cache_results = []
-
- mapping_ids_hit = { ( namespace_id, tag_id ) for ( namespace_id, tag_id, current_count, pending_count ) in sub_cache_results }
-
- mapping_ids_missed = mapping_ids.difference( mapping_ids_hit )
-
- zero = lambda: 0
-
- predicates = [ 'status = ?', 'namespace_id = ?']
-
- count_phrase = 'SELECT tag_id, COUNT( * ) FROM '
-
- predicates.append( 'mappings.service_id = ' + str( search_tag_service_id ) )
-
- if file_service_id == self._combined_file_service_id:
-
- table_phrase = 'mappings '
-
- else:
-
- table_phrase = 'mappings, current_files USING ( hash_id ) '
-
- predicates.append( 'current_files.service_id = ' + str( file_service_id ) )
-
-
- predicates_phrase = 'WHERE ' + ' AND '.join( predicates ) + ' AND '
-
- for ( namespace_id, tag_ids ) in HydrusData.BuildKeyToListDict( mapping_ids_missed ).items():
-
- current_counts = collections.defaultdict( zero )
- pending_counts = collections.defaultdict( zero )
-
- for sub_tag_ids in HydrusData.SplitListIntoChunks( tag_ids, 50 ):
+ for ( namespace_id, tag_ids ) in namespace_ids_to_tag_ids.items():
- current_counts.update( { tag_id : count for ( tag_id, count ) in self._c.execute( count_phrase + table_phrase + predicates_phrase + 'tag_id IN ' + HydrusData.SplayListForDB( sub_tag_ids ) + ' GROUP BY tag_id;', ( HC.CURRENT, namespace_id ) ) } )
- pending_counts.update( { tag_id : count for ( tag_id, count ) in self._c.execute( count_phrase + table_phrase + predicates_phrase + 'tag_id IN ' + HydrusData.SplayListForDB( sub_tag_ids ) + ' GROUP BY tag_id;', ( HC.PENDING, namespace_id ) ) } )
+ sub_cache_results.extend( self._c.execute( 'SELECT namespace_id, tag_id, current_count, pending_count FROM autocomplete_tags_cache WHERE tag_service_id = ? AND file_service_id = ? AND namespace_id = ? AND tag_id IN ' + HydrusData.SplayListForDB( tag_ids ) + ';', ( search_tag_service_id, file_service_id, namespace_id ) ).fetchall() )
- self._c.executemany( 'INSERT OR IGNORE INTO autocomplete_tags_cache ( file_service_id, tag_service_id, namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ?, ?, ? );', ( ( file_service_id, search_tag_service_id, namespace_id, tag_id, current_counts[ tag_id ], pending_counts[ tag_id ] ) for tag_id in tag_ids ) )
+ namespace_id_tag_ids_hit = { ( namespace_id, tag_id ) for ( namespace_id, tag_id, current_count, pending_count ) in sub_cache_results }
- sub_cache_results.extend( ( ( namespace_id, tag_id, current_counts[ tag_id ], pending_counts[ tag_id ] ) for tag_id in tag_ids ) )
+ namespace_id_tag_ids_missed = namespace_id_tag_ids.difference( namespace_id_tag_ids_hit )
+
+ zero = lambda: 0
+
+ predicates = [ 'status = ?', 'namespace_id = ?']
+
+ count_phrase = 'SELECT tag_id, COUNT( * ) FROM '
+
+ predicates.append( 'mappings.service_id = ' + str( search_tag_service_id ) )
+
+ if file_service_id == self._combined_file_service_id:
+
+ table_phrase = 'mappings '
+
+ else:
+
+ table_phrase = 'mappings, current_files USING ( hash_id ) '
+
+ predicates.append( 'current_files.service_id = ' + str( file_service_id ) )
+
+
+ predicates_phrase = 'WHERE ' + ' AND '.join( predicates ) + ' AND '
+
+ for ( namespace_id, tag_ids ) in HydrusData.BuildKeyToListDict( namespace_id_tag_ids_missed ).items():
+
+ current_counts = collections.defaultdict( zero )
+ pending_counts = collections.defaultdict( zero )
+
+ for sub_tag_ids in HydrusData.SplitListIntoChunks( tag_ids, 50 ):
+
+ current_counts.update( { tag_id : count for ( tag_id, count ) in self._c.execute( count_phrase + table_phrase + predicates_phrase + 'tag_id IN ' + HydrusData.SplayListForDB( sub_tag_ids ) + ' GROUP BY tag_id;', ( HC.CURRENT, namespace_id ) ) } )
+ pending_counts.update( { tag_id : count for ( tag_id, count ) in self._c.execute( count_phrase + table_phrase + predicates_phrase + 'tag_id IN ' + HydrusData.SplayListForDB( sub_tag_ids ) + ' GROUP BY tag_id;', ( HC.PENDING, namespace_id ) ) } )
+
+
+ self._c.executemany( 'INSERT OR IGNORE INTO autocomplete_tags_cache ( file_service_id, tag_service_id, namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ?, ?, ? );', ( ( file_service_id, search_tag_service_id, namespace_id, tag_id, current_counts[ tag_id ], pending_counts[ tag_id ] ) for tag_id in tag_ids ) )
+
+ sub_cache_results.extend( ( ( namespace_id, tag_id, current_counts[ tag_id ], pending_counts[ tag_id ] ) for tag_id in tag_ids ) )
+
+
+ cache_results.extend( sub_cache_results )
- cache_results.extend( sub_cache_results )
+ else:
+
+ for search_tag_service_id in search_tag_service_ids:
+
+ ac_cache = self._ac_caches[ ( file_service_id, search_tag_service_id ) ]
+
+ cache_results.extend( ac_cache.SimpleRead( 'ac_counts', namespace_ids_to_tag_ids ) )
+
#
@@ -2253,7 +2427,7 @@ class DB( HydrusDB.HydrusDB ):
return ( current_ids_to_count, pending_ids_to_count )
- def _GetAutocompleteMappingIds( self, search_text, exact_match ):
+ def _GetAutocompleteNamespaceIdTagIds( self, search_text, exact_match ):
if exact_match:
@@ -2365,9 +2539,9 @@ class DB( HydrusDB.HydrusDB ):
- mapping_ids = { mapping_id for mapping_id in self._c.execute( 'SELECT namespace_id, tag_id FROM existing_tags WHERE ' + predicates_phrase + ';' ) }
+ namespace_id_tag_ids = { namespace_id_tag_id for namespace_id_tag_id in self._c.execute( 'SELECT namespace_id, tag_id FROM existing_tags WHERE ' + predicates_phrase + ';' ) }
- # now fetch siblings, add to mapping_ids set
+ # now fetch siblings, add to namespace_id_tag_ids set
siblings_manager = self._controller.GetManager( 'tag_siblings' )
@@ -2378,22 +2552,22 @@ class DB( HydrusDB.HydrusDB ):
try: ( namespace_id, tag_id ) = self._GetNamespaceIdTagId( sibling_tag )
except HydrusExceptions.SizeException: continue
- mapping_ids.add( ( namespace_id, tag_id ) )
+ namespace_id_tag_ids.add( ( namespace_id, tag_id ) )
- return mapping_ids
+ return namespace_id_tag_ids
def _GetAutocompletePredicates( self, tag_service_key = CC.COMBINED_TAG_SERVICE_KEY, file_service_key = CC.COMBINED_FILE_SERVICE_KEY, search_text = '', exact_match = False, include_current = True, include_pending = True, add_namespaceless = False ):
- mapping_ids = self._GetAutocompleteMappingIds( search_text, exact_match )
+ namespace_id_tag_ids = self._GetAutocompleteNamespaceIdTagIds( search_text, exact_match )
tag_service_id = self._GetServiceId( tag_service_key )
file_service_id = self._GetServiceId( file_service_key )
there_was_a_namespace = ':' in search_text
- ( current_ids_to_count, pending_ids_to_count ) = self._GetAutocompleteCounts( tag_service_id, file_service_id, mapping_ids, there_was_a_namespace, add_namespaceless )
+ ( current_ids_to_count, pending_ids_to_count ) = self._GetAutocompleteCounts( tag_service_id, file_service_id, namespace_id_tag_ids, there_was_a_namespace, add_namespaceless )
#
@@ -2472,7 +2646,7 @@ class DB( HydrusDB.HydrusDB ):
predicates = []
if service_type in ( HC.COMBINED_FILE, HC.COMBINED_TAG ): predicates.extend( [ ClientSearch.Predicate( predicate_type, None ) for predicate_type in [ HC.PREDICATE_TYPE_SYSTEM_EVERYTHING, HC.PREDICATE_TYPE_SYSTEM_UNTAGGED, HC.PREDICATE_TYPE_SYSTEM_NUM_TAGS, HC.PREDICATE_TYPE_SYSTEM_LIMIT, HC.PREDICATE_TYPE_SYSTEM_HASH ] ] )
- elif service_type in ( HC.TAG_REPOSITORY, HC.LOCAL_TAG ):
+ elif service_type in HC.TAG_SERVICES:
service_info = self._GetServiceInfoSpecific( service_id, service_type, { HC.SERVICE_INFO_NUM_FILES } )
@@ -3774,6 +3948,19 @@ class DB( HydrusDB.HydrusDB ):
return reason_id
+ def _GetRemoteThumbnailHashesIShouldHave( self, service_key ):
+
+ service_id = self._GetServiceId( service_key )
+
+ hash_ids = { hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files, files_info USING ( hash_id ) WHERE mime IN ' + HydrusData.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND service_id = ?;', ( service_id, ) ) }
+
+ hash_ids.difference_update( ( hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files WHERE service_id = ?;', ( self._local_file_service_id, ) ) ) )
+
+ hashes = set( self._GetHashes( hash_ids ) )
+
+ return hashes
+
+
def _GetService( self, service_id ):
if service_id in self._service_cache: service = self._service_cache[ service_id ]
@@ -3917,7 +4104,7 @@ class DB( HydrusDB.HydrusDB ):
info_types_missed = info_types.difference( info_types_hit )
- if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ):
+ if service_type in HC.TAG_SERVICES:
common_tag_info_types = { HC.SERVICE_INFO_NUM_FILES, HC.SERVICE_INFO_NUM_NAMESPACES, HC.SERVICE_INFO_NUM_TAGS }
@@ -3973,7 +4160,7 @@ class DB( HydrusDB.HydrusDB ):
elif info_type == HC.SERVICE_INFO_NUM_INBOX: result = self._c.execute( 'SELECT COUNT( * ) FROM file_inbox, current_files USING ( hash_id ) WHERE service_id = ?;', ( service_id, ) ).fetchone()
- elif service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ):
+ elif service_type in HC.TAG_SERVICES:
if info_type in ( HC.SERVICE_INFO_NUM_PENDING_TAG_SIBLINGS, HC.SERVICE_INFO_NUM_PETITIONED_TAG_SIBLINGS, HC.SERVICE_INFO_NUM_PENDING_TAG_PARENTS, HC.SERVICE_INFO_NUM_PETITIONED_TAG_PARENTS ): save_it = False
@@ -4181,17 +4368,6 @@ class DB( HydrusDB.HydrusDB ):
return thumbnail
- def _GetThumbnailHashesIShouldHave( self, service_key ):
-
- service_id = self._GetServiceId( service_key )
-
- hash_ids = [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM current_files, files_info USING ( hash_id ) WHERE mime IN ' + HydrusData.SplayListForDB( HC.MIMES_WITH_THUMBNAILS ) + ' AND service_id = ?;', ( service_id, ) ) ]
-
- hashes = set( self._GetHashes( hash_ids ) )
-
- return hashes
-
-
def _GetURLStatus( self, url ):
result = self._c.execute( 'SELECT hash_id FROM urls WHERE url = ?;', ( url, ) ).fetchone()
@@ -4458,6 +4634,44 @@ class DB( HydrusDB.HydrusDB ):
+ def _InitACCaches( self ):
+
+ self._ac_caches = {}
+
+ if not os.path.exists( HC.CLIENT_CACHE_DIR ): os.makedirs( HC.CLIENT_CACHE_DIR )
+
+ deletee_filenames = set( ( filename for filename in os.listdir( HC.CLIENT_CACHE_DIR ) if filename.endswith( '.db' ) ) )
+
+ file_service_ids = self._GetServiceIds( ( HC.LOCAL_FILE, HC.FILE_REPOSITORY ) )
+ tag_service_ids = self._GetServiceIds( HC.TAG_SERVICES )
+
+ for ( file_service_id, tag_service_id ) in itertools.product( file_service_ids, tag_service_ids ):
+
+ db_filename = 'ac_cache_' + str( file_service_id ) + '_' + str( tag_service_id ) + '.db'
+
+ deletee_filenames.discard( db_filename )
+
+ db_path = os.path.join( HC.CLIENT_CACHE_DIR, db_filename )
+
+ self._GenerateACCache( db_path, file_service_id, tag_service_id )
+
+
+ for filename in deletee_filenames:
+
+ path = os.path.join( HC.CLIENT_CACHE_DIR, filename )
+
+ try:
+
+ ClientData.DeletePath( path )
+
+ except Exception as e:
+
+ HydrusData.Print( 'I tried to delete the superfluous file ' + path + ', but encountered this error:' )
+ HydrusData.ShowExceptionDefault( e )
+
+
+
+
def _InitCaches( self ):
self._local_file_service_id = self._GetServiceId( CC.LOCAL_FILE_SERVICE_KEY )
@@ -4475,6 +4689,8 @@ class DB( HydrusDB.HydrusDB ):
self._InitArchives()
+ self._InitACCaches()
+
def _ManageDBError( self, job, e ):
@@ -4729,7 +4945,7 @@ class DB( HydrusDB.HydrusDB ):
- elif service_type in ( HC.TAG_REPOSITORY, HC.LOCAL_TAG ):
+ elif service_type in HC.TAG_SERVICES:
if data_type == HC.CONTENT_TYPE_MAPPINGS:
@@ -5275,7 +5491,7 @@ class DB( HydrusDB.HydrusDB ):
elif action == 'tag_censorship': result = self._GetTagCensorship( *args, **kwargs )
elif action == 'tag_parents': result = self._GetTagParents( *args, **kwargs )
elif action == 'tag_siblings': result = self._GetTagSiblings( *args, **kwargs )
- elif action == 'thumbnail_hashes_i_should_have': result = self._GetThumbnailHashesIShouldHave( *args, **kwargs )
+ elif action == 'remote_thumbnail_hashes_i_should_have': result = self._GetRemoteThumbnailHashesIShouldHave( *args, **kwargs )
elif action == 'url_status': result = self._GetURLStatus( *args, **kwargs )
elif action == 'web_sessions': result = self._GetWebSessions( *args, **kwargs )
else: raise Exception( 'db received an unknown read command: ' + action )
@@ -5337,7 +5553,10 @@ class DB( HydrusDB.HydrusDB ):
self._c.execute( 'DELETE FROM services WHERE service_id = ?;', ( service_id, ) )
- if service_id in self._service_cache: del self._service_cache[ service_id ]
+ if service_id in self._service_cache:
+
+ del self._service_cache[ service_id ]
+
if service_type in HC.REPOSITORIES:
@@ -5632,17 +5851,6 @@ class DB( HydrusDB.HydrusDB ):
- def _UpdateAutocompleteTagCacheFromFiles( self, file_service_id, hash_ids, direction ):
-
- splayed_hash_ids = HydrusData.SplayListForDB( hash_ids )
-
- current_tags = self._c.execute( 'SELECT service_id, namespace_id, tag_id, COUNT( * ) FROM mappings WHERE hash_id IN ' + splayed_hash_ids + ' AND status = ? GROUP BY service_id, namespace_id, tag_id;', ( HC.CURRENT, ) ).fetchall()
- pending_tags = self._c.execute( 'SELECT service_id, namespace_id, tag_id, COUNT( * ) FROM mappings WHERE hash_id IN ' + splayed_hash_ids + ' AND status = ? GROUP BY service_id, namespace_id, tag_id;', ( HC.PENDING, ) ).fetchall()
-
- self._c.executemany( 'UPDATE autocomplete_tags_cache SET current_count = current_count + ? WHERE file_service_id = ? AND tag_service_id = ? AND namespace_id = ? AND tag_id = ?;', [ ( count * direction, file_service_id, tag_service_id, namespace_id, tag_id ) for ( tag_service_id, namespace_id, tag_id, count ) in current_tags ] )
- self._c.executemany( 'UPDATE autocomplete_tags_cache SET pending_count = pending_count + ? WHERE file_service_id = ? AND tag_service_id = ? AND namespace_id = ? AND tag_id = ?;', [ ( count * direction, file_service_id, tag_service_id, namespace_id, tag_id ) for ( tag_service_id, namespace_id, tag_id, count ) in pending_tags ] )
-
-
def _UpdateDB( self, version ):
self._controller.pub( 'splash_set_title_text', 'updating db to v' + str( version + 1 ) )
@@ -6595,6 +6803,15 @@ class DB( HydrusDB.HydrusDB ):
+ if version == 196:
+
+ self._controller.pub( 'splash_set_status_text', 'clearing out more surplus autocomplete entries' )
+
+ combined_file_service_id = self._GetServiceId( CC.COMBINED_FILE_SERVICE_KEY )
+
+ self._c.execute( 'DELETE FROM autocomplete_tags_cache WHERE file_service_id != ?;', ( combined_file_service_id, ) )
+
+
self._controller.pub( 'splash_set_title_text', 'updating db to v' + str( version + 1 ) )
self._c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )
@@ -6665,11 +6882,13 @@ class DB( HydrusDB.HydrusDB ):
if petitioned_mappings_ids is None: petitioned_mappings_ids = []
if petitioned_rescinded_mappings_ids is None: petitioned_rescinded_mappings_ids = []
+ ac_caches = self._GetACCaches( tag_service_id = tag_service_id )
+
# this method grew into a monster that merged deleted, pending and current according to a heirarchy of services
# this cost a lot of CPU time and was extremely difficult to maintain
# now it attempts a simpler union, not letting delete overwrite a current or pending
- other_service_ids = [ service_id for service_id in self._GetServiceIds( ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ) ) if service_id != tag_service_id ]
+ other_service_ids = [ service_id for service_id in self._GetServiceIds( HC.TAG_SERVICES ) if service_id != tag_service_id ]
splayed_other_service_ids = HydrusData.SplayListForDB( other_service_ids )
@@ -6769,7 +6988,7 @@ class DB( HydrusDB.HydrusDB ):
for ( namespace_id, tag_ids ) in all_changed_namespace_ids_to_tag_ids.items():
- self._c.executemany( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id = ? AND namespace_id = ? AND tag_id = ?;', ( ( tag_service_id, namespace_id, tag_id ) for tag_id in tag_ids ) )
+ self._c.executemany( 'DELETE FROM autocomplete_tags_cache WHERE file_service_id = ? AND tag_service_id = ? AND namespace_id = ? AND tag_id = ?;', ( ( self._combined_file_service_id, tag_service_id, namespace_id, tag_id ) for tag_id in tag_ids ) )
namespace_ids_to_search_for = namespace_ids_being_added.union( namespace_ids_being_removed )
@@ -6788,40 +7007,72 @@ class DB( HydrusDB.HydrusDB ):
change_in_num_tags += num_tags_added
change_in_num_files += num_files_added
- for ( namespace_id, tag_id, hash_ids ) in mappings_ids:
+ if len( mappings_ids ) > 0:
- ( num_deleted_deleted, num_deleted_made_current ) = ChangeMappingStatus( namespace_id, tag_id, hash_ids, HC.DELETED, HC.CURRENT )
- ( num_pending_deleted, num_pending_made_current ) = ChangeMappingStatus( namespace_id, tag_id, hash_ids, HC.PENDING, HC.CURRENT )
- num_raw_adds = InsertMappings( namespace_id, tag_id, hash_ids, HC.CURRENT )
+ for ( namespace_id, tag_id, hash_ids ) in mappings_ids:
+
+ ( num_deleted_deleted, num_deleted_made_current ) = ChangeMappingStatus( namespace_id, tag_id, hash_ids, HC.DELETED, HC.CURRENT )
+ ( num_pending_deleted, num_pending_made_current ) = ChangeMappingStatus( namespace_id, tag_id, hash_ids, HC.PENDING, HC.CURRENT )
+ num_raw_adds = InsertMappings( namespace_id, tag_id, hash_ids, HC.CURRENT )
+
+ change_in_num_mappings += num_deleted_made_current + num_pending_made_current + num_raw_adds
+ change_in_num_deleted_mappings -= num_deleted_deleted
+ change_in_num_pending_mappings -= num_pending_deleted
+
- change_in_num_mappings += num_deleted_made_current + num_pending_made_current + num_raw_adds
- change_in_num_deleted_mappings -= num_deleted_deleted
- change_in_num_pending_mappings -= num_pending_deleted
+ for ac_cache in ac_caches:
+
+ ac_cache.SimpleWrite( 'add_mappings', mappings_ids )
+
- for ( namespace_id, tag_id, hash_ids ) in deleted_mappings_ids:
+ if len( deleted_mappings_ids ) > 0:
- ( num_current_deleted, num_current_made_deleted ) = ChangeMappingStatus( namespace_id, tag_id, hash_ids, HC.CURRENT, HC.DELETED )
- num_raw_adds = InsertMappings( namespace_id, tag_id, hash_ids, HC.DELETED )
- num_deleted_petitions = DeletePetitions( namespace_id, tag_id, hash_ids )
+ for ( namespace_id, tag_id, hash_ids ) in deleted_mappings_ids:
+
+ ( num_current_deleted, num_current_made_deleted ) = ChangeMappingStatus( namespace_id, tag_id, hash_ids, HC.CURRENT, HC.DELETED )
+ num_raw_adds = InsertMappings( namespace_id, tag_id, hash_ids, HC.DELETED )
+ num_deleted_petitions = DeletePetitions( namespace_id, tag_id, hash_ids )
+
+ change_in_num_mappings -= num_current_deleted
+ change_in_num_deleted_mappings += num_current_made_deleted + num_raw_adds
+ change_in_num_petitioned_mappings -= num_deleted_petitions
+
- change_in_num_mappings -= num_current_deleted
- change_in_num_deleted_mappings += num_current_made_deleted + num_raw_adds
- change_in_num_petitioned_mappings -= num_deleted_petitions
+ for ac_cache in ac_caches:
+
+ ac_cache.SimpleWrite( 'delete_mappings', deleted_mappings_ids )
+
- for ( namespace_id, tag_id, hash_ids ) in pending_mappings_ids:
+ if len( pending_mappings_ids ) > 0:
- num_raw_adds = InsertMappings( namespace_id, tag_id, hash_ids, HC.PENDING )
+ for ( namespace_id, tag_id, hash_ids ) in pending_mappings_ids:
+
+ num_raw_adds = InsertMappings( namespace_id, tag_id, hash_ids, HC.PENDING )
+
+ change_in_num_pending_mappings += num_raw_adds
+
- change_in_num_pending_mappings += num_raw_adds
+ for ac_cache in ac_caches:
+
+ ac_cache.SimpleWrite( 'pend_mappings', pending_mappings_ids )
+
- for ( namespace_id, tag_id, hash_ids ) in pending_rescinded_mappings_ids:
+ if len( pending_rescinded_mappings_ids ) > 0:
- num_pending_rescinded = DeletePending( namespace_id, tag_id, hash_ids )
+ for ( namespace_id, tag_id, hash_ids ) in pending_rescinded_mappings_ids:
+
+ num_pending_rescinded = DeletePending( namespace_id, tag_id, hash_ids )
+
+ change_in_num_pending_mappings -= num_pending_rescinded
+
- change_in_num_pending_mappings -= num_pending_rescinded
+ for ac_cache in ac_caches:
+
+ ac_cache.SimpleWrite( 'rescind_pending_mappings', pending_rescinded_mappings_ids )
+
post_existing_namespace_ids = { namespace_id for namespace_id in namespace_ids_to_search_for if self._c.execute( 'SELECT 1 WHERE EXISTS ( SELECT namespace_id FROM mappings WHERE namespace_id = ? AND service_id = ? AND status IN ( ?, ? ) );', ( namespace_id, tag_service_id, HC.CURRENT, HC.PENDING ) ).fetchone() is not None }
diff --git a/include/ClientDBACCache.py b/include/ClientDBACCache.py
index 1d6be2f7..d7a162a5 100644
--- a/include/ClientDBACCache.py
+++ b/include/ClientDBACCache.py
@@ -55,23 +55,38 @@ class DB( HydrusDB.HydrusDB ):
self._c.executemany( 'INSERT OR IGNORE INTO current_files ( hash_id ) VALUES ( ? );', ( ( hash_id, ) for hash_id in hash_ids ) )
- def _AddMappings( self, namespace_id, tag_id, hash_ids ):
+ def _AddMappings( self, mappings_ids ):
- hash_ids = self._FilterFiles( hash_ids )
-
- if len( hash_ids ) > 0:
+ for ( namespace_id, tag_id, hash_ids ) in mappings_ids:
- self._RescindPendingMappings( tag_id, namespace_id, hash_ids )
+ hash_ids = self._FilterFiles( hash_ids )
- self._c.executemany( 'INSERT OR IGNORE INTO current_mappings ( hash_id, namespace_id, tag_id ) VALUES ( ?, ?, ? );', ( ( hash_id, namespace_id, tag_id ) for hash_id in hash_ids ) )
-
- num_new = self._GetRowCount()
-
- if num_new > 0:
+ if len( hash_ids ) > 0:
- self._c.execute( 'INSERT OR IGNORE INTO ac_cache ( namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ? );', ( namespace_id, tag_id, 0, 0 ) )
+                # inlined copy of _RescindPendingMappings so the hash_ids are not run through _FilterFiles a second time
+ self._c.execute( 'DELETE FROM pending_mappings WHERE hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ' AND namespace_id = ? AND tag_id = ?;', ( namespace_id, tag_id ) )
- self._c.execute( 'UPDATE ac_cache SET current_count = current_count + ? WHERE namespace_id = ? AND tag_id = ?;', ( num_new, namespace_id, tag_id ) )
+ num_deleted = self._GetRowCount()
+
+ if num_deleted > 0:
+
+ self._c.execute( 'UPDATE ac_cache SET pending_count = pending_count - ? WHERE namespace_id = ? AND tag_id = ?;', ( num_deleted, namespace_id, tag_id ) )
+
+ self._c.execute( 'DELETE FROM ac_cache WHERE namespace_id = ? AND tag_id = ? AND current_count = ? AND pending_count = ?;', ( namespace_id, tag_id, 0, 0 ) )
+
+
+ #
+
+ self._c.executemany( 'INSERT OR IGNORE INTO current_mappings ( hash_id, namespace_id, tag_id ) VALUES ( ?, ?, ? );', ( ( hash_id, namespace_id, tag_id ) for hash_id in hash_ids ) )
+
+ num_new = self._GetRowCount()
+
+ if num_new > 0:
+
+ self._c.execute( 'INSERT OR IGNORE INTO ac_cache ( namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ? );', ( namespace_id, tag_id, 0, 0 ) )
+
+ self._c.execute( 'UPDATE ac_cache SET current_count = current_count + ? WHERE namespace_id = ? AND tag_id = ?;', ( num_new, namespace_id, tag_id ) )
+
@@ -149,48 +164,47 @@ class DB( HydrusDB.HydrusDB ):
for hash_id in hash_ids:
- pending_mappings_ids = self._c.execute( 'SELECT namespace_id, tag_id FROM pending_mappings WHERE hash_id = ?;', ( hash_id, ) ).fetchall()
+ hash_id_set = { hash_id }
- for ( namespace_id, tag_id ) in pending_mappings_ids:
-
- self._RescindPendingMappings( namespace_id, tag_id, { hash_id } )
-
+ pending_mappings_ids = [ ( namespace_id, tag_id, hash_id_set ) for ( namespace_id, tag_id ) in self._c.execute( 'SELECT namespace_id, tag_id FROM pending_mappings WHERE hash_id = ?;', ( hash_id, ) ) ]
- current_mappings_ids = self._c.execute( 'SELECT namespace_id, tag_id FROM current_mappings WHERE hash_id = ?;', ( hash_id, ) ).fetchall()
+ self._RescindPendingMappings( pending_mappings_ids )
- for ( namespace_id, tag_id ) in current_mappings_ids:
-
- self._DeleteMappings( namespace_id, tag_id, { hash_id } )
-
+ current_mappings_ids = [ ( namespace_id, tag_id, hash_id_set ) for ( namespace_id, tag_id ) in self._c.execute( 'SELECT namespace_id, tag_id FROM current_mappings WHERE hash_id = ?;', ( hash_id, ) ) ]
+
+ self._DeleteMappings( current_mappings_ids )
self._c.execute( 'DELETE FROM current_files WHERE hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ';' )
- def _DeleteMappings( self, namespace_id, tag_id, hash_ids ):
+ def _DeleteMappings( self, mappings_ids ):
- hash_ids = self._FilterFiles( hash_ids )
-
- if len( hash_ids ) > 0:
+ for ( namespace_id, tag_id, hash_ids ) in mappings_ids:
- self._c.execute( 'DELETE FROM current_mappings WHERE hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ' AND namespace_id = ? AND tag_id = ?;' )
+ hash_ids = self._FilterFiles( hash_ids )
- num_deleted = self._GetRowCount()
-
- if num_deleted > 0:
+ if len( hash_ids ) > 0:
- self._c.execute( 'UPDATE ac_cache SET current_count = current_count - ? WHERE namespace_id = ? AND tag_id = ?;', ( num_deleted, namespace_id, tag_id ) )
+ self._c.execute( 'DELETE FROM current_mappings WHERE hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ' AND namespace_id = ? AND tag_id = ?;', ( namespace_id, tag_id ) )
- self._c.execute( 'DELETE FROM ac_cache WHERE namespace_id = ? AND tag_id = ? AND current_count = ? AND pending_count = ?;', ( namespace_id, tag_id, 0, 0 ) )
+ num_deleted = self._GetRowCount()
+
+ if num_deleted > 0:
+
+ self._c.execute( 'UPDATE ac_cache SET current_count = current_count - ? WHERE namespace_id = ? AND tag_id = ?;', ( num_deleted, namespace_id, tag_id ) )
+
+ self._c.execute( 'DELETE FROM ac_cache WHERE namespace_id = ? AND tag_id = ? AND current_count = ? AND pending_count = ?;', ( namespace_id, tag_id, 0, 0 ) )
+
- def _GetAutocompleteCounts( self, mapping_ids ):
+ def _GetAutocompleteCounts( self, namespace_ids_to_tag_ids ):
results = []
- for ( namespace_id, tag_ids ) in HydrusData.BuildKeyToListDict( mapping_ids ).items():
+ for ( namespace_id, tag_ids ) in namespace_ids_to_tag_ids.items():
results.extend( ( ( namespace_id, tag_id, current_count, pending_count ) for ( tag_id, current_count, pending_count ) in self._c.execute( 'SELECT tag_id, current_count, pending_count FROM ac_cache WHERE namespace_id = ? AND tag_id IN ' + HydrusData.SplayListForDB( tag_ids ) + ';', ( namespace_id, ) ) ) )
@@ -217,40 +231,55 @@ class DB( HydrusDB.HydrusDB ):
- def _PendMappings( self, namespace_id, tag_id, hash_ids ):
+ def _ManageDBError( self, job, e ):
- hash_ids = self._FilterFiles( hash_ids )
+ ( exception_type, value, tb ) = sys.exc_info()
- if len( hash_ids ) > 0:
+ new_e = type( e )( os.linesep.join( traceback.format_exception( exception_type, value, tb ) ) )
+
+ job.PutResult( new_e )
+
+
+ def _PendMappings( self, mappings_ids ):
+
+ for ( namespace_id, tag_id, hash_ids ) in mappings_ids:
- self._c.executemany( 'INSERT OR IGNORE INTO pending_mappings ( hash_id, namespace_id, tag_id ) VALUES ( ?, ?, ? );', ( ( hash_id, namespace_id, tag_id ) for hash_id in hash_ids ) )
+ hash_ids = self._FilterFiles( hash_ids )
- num_new = self._GetRowCount()
-
- if num_new > 0:
+ if len( hash_ids ) > 0:
- self._c.execute( 'INSERT OR IGNORE INTO ac_cache ( namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ? );', ( namespace_id, tag_id, 0, 0 ) )
+ self._c.executemany( 'INSERT OR IGNORE INTO pending_mappings ( hash_id, namespace_id, tag_id ) VALUES ( ?, ?, ? );', ( ( hash_id, namespace_id, tag_id ) for hash_id in hash_ids ) )
- self._c.execute( 'UPDATE ac_cache SET pending_count = pending_count + ? WHERE namespace_id = ? AND tag_id = ?;', ( num_new, namespace_id, tag_id ) )
+ num_new = self._GetRowCount()
+
+ if num_new > 0:
+
+ self._c.execute( 'INSERT OR IGNORE INTO ac_cache ( namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ? );', ( namespace_id, tag_id, 0, 0 ) )
+
+ self._c.execute( 'UPDATE ac_cache SET pending_count = pending_count + ? WHERE namespace_id = ? AND tag_id = ?;', ( num_new, namespace_id, tag_id ) )
+
- def _RescindPendingMappings( self, namespace_id, tag_id, hash_ids ):
+ def _RescindPendingMappings( self, mappings_ids ):
- hash_ids = self._FilterFiles( hash_ids )
-
- if len( hash_ids ) > 0:
+ for ( namespace_id, tag_id, hash_ids ) in mappings_ids:
- self._c.execute( 'DELETE FROM pending_mappings WHERE hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ' AND namespace_id = ? AND tag_id = ?;' )
+ hash_ids = self._FilterFiles( hash_ids )
- num_deleted = self._GetRowCount()
-
- if num_deleted > 0:
+ if len( hash_ids ) > 0:
- self._c.execute( 'UPDATE ac_cache SET pending_count = pending_count - ? WHERE namespace_id = ? AND tag_id = ?;', ( num_deleted, namespace_id, tag_id ) )
+ self._c.execute( 'DELETE FROM pending_mappings WHERE hash_id IN ' + HydrusData.SplayListForDB( hash_ids ) + ' AND namespace_id = ? AND tag_id = ?;', ( namespace_id, tag_id ) )
- self._c.execute( 'DELETE FROM ac_cache WHERE namespace_id = ? AND tag_id = ? AND current_count = ? AND pending_count = ?;', ( namespace_id, tag_id, 0, 0 ) )
+ num_deleted = self._GetRowCount()
+
+ if num_deleted > 0:
+
+ self._c.execute( 'UPDATE ac_cache SET pending_count = pending_count - ? WHERE namespace_id = ? AND tag_id = ?;', ( num_deleted, namespace_id, tag_id ) )
+
+ self._c.execute( 'DELETE FROM ac_cache WHERE namespace_id = ? AND tag_id = ? AND current_count = ? AND pending_count = ?;', ( namespace_id, tag_id, 0, 0 ) )
+
@@ -305,8 +334,8 @@ class DB( HydrusDB.HydrusDB ):
elif action == 'add_mappings': result = self._AddMappings( *args, **kwargs )
elif action == 'analyze': result = self._Analyze( *args, **kwargs )
elif action == 'delete_files': result = self._DeleteFiles( *args, **kwargs )
- elif action == 'delete_mappings':result = self._DeleteMappings( *args, **kwargs )
- elif action == 'pend_mappings':result = self._PendMappings( *args, **kwargs )
+ elif action == 'delete_mappings': result = self._DeleteMappings( *args, **kwargs )
+ elif action == 'pend_mappings': result = self._PendMappings( *args, **kwargs )
elif action == 'rescind_pending_mappings': result = self._RescindPendingMappings( *args, **kwargs )
elif action == 'vacuum': result = self._Vacuum( *args, **kwargs )
else: raise Exception( 'db received an unknown write command: ' + action )
diff --git a/include/ClientDaemons.py b/include/ClientDaemons.py
index c94557c5..1cf78244 100644
--- a/include/ClientDaemons.py
+++ b/include/ClientDaemons.py
@@ -287,7 +287,13 @@ def DAEMONSynchroniseAccounts( controller ):
do_notify = True
- except Exception as e:
+ except HydrusExceptions.NetworkException as e:
+
+ HydrusData.Print( 'Failed to refresh account for ' + service.GetName() + ':' )
+
+ HydrusData.Print( e )
+
+ except Exception:
HydrusData.Print( 'Failed to refresh account for ' + service.GetName() + ':' )
diff --git a/include/ClientData.py b/include/ClientData.py
index 55b2f113..3312382c 100644
--- a/include/ClientData.py
+++ b/include/ClientData.py
@@ -1439,7 +1439,7 @@ class ServiceRepository( ServiceRestricted ):
else:
- gauge_range = ( ( HydrusData.GetNow() - self._info[ 'first_timestamp' ] ) / HC.UPDATE_DURATION ) + 1
+ gauge_range = ( HydrusData.GetNow() - self._info[ 'first_timestamp' ] ) / HC.UPDATE_DURATION
gauge_value = ( ( self._info[ 'next_download_timestamp' ] - self._info[ 'first_timestamp' ] ) / HC.UPDATE_DURATION ) + 1
update_index_string = 'update ' + HydrusData.ConvertValueRangeToPrettyString( gauge_value, gauge_range ) + ': '
@@ -1553,7 +1553,7 @@ class ServiceRepository( ServiceRestricted ):
break
- gauge_range = ( ( HydrusData.GetNow() - self._info[ 'first_timestamp' ] ) / HC.UPDATE_DURATION ) + 1
+ gauge_range = ( ( HydrusData.GetNow() - self._info[ 'first_timestamp' ] ) / HC.UPDATE_DURATION )
gauge_value = ( ( self._info[ 'next_processing_timestamp' ] - self._info[ 'first_timestamp' ] ) / HC.UPDATE_DURATION ) + 1
@@ -1720,78 +1720,7 @@ class ServiceRepository( ServiceRestricted ):
job_key.DeleteVariable( 'popup_text_2' )
job_key.DeleteVariable( 'popup_gauge_2' )
- if self._service_type == HC.FILE_REPOSITORY and self.CanDownload():
-
- HydrusGlobals.client_controller.pub( 'splash_set_status_text', 'reviewing thumbnails' )
- job_key.SetVariable( 'popup_text_1', 'reviewing existing thumbnails' )
-
- job_key.SetVariable( 'popup_text_1', 'reviewing service thumbnails' )
-
- thumbnail_hashes_i_should_have = HydrusGlobals.client_controller.Read( 'thumbnail_hashes_i_should_have', self._service_key )
-
- thumbnail_hashes_i_need = set()
-
- for hash in thumbnail_hashes_i_should_have:
-
- path = ClientFiles.GetExpectedThumbnailPath( hash )
-
- if not os.path.exists( path ):
-
- thumbnail_hashes_i_need.add( hash )
-
-
-
- if len( thumbnail_hashes_i_need ) > 0:
-
- def SaveThumbnails( batch_of_thumbnails ):
-
- job_key.SetVariable( 'popup_text_1', 'saving thumbnails to database' )
-
- HydrusGlobals.client_controller.WriteSynchronous( 'thumbnails', batch_of_thumbnails )
-
- HydrusGlobals.client_controller.pub( 'add_thumbnail_count', self._service_key, len( batch_of_thumbnails ) )
-
-
- thumbnails = []
-
- for ( i, hash ) in enumerate( thumbnail_hashes_i_need ):
-
- if options[ 'pause_repo_sync' ]:
-
- break
-
-
- ( i_paused, should_quit ) = job_key.WaitIfNeeded()
-
- if should_quit:
-
- break
-
-
- job_key.SetVariable( 'popup_text_1', 'downloading thumbnail ' + HydrusData.ConvertValueRangeToPrettyString( i, len( thumbnail_hashes_i_need ) ) )
- job_key.SetVariable( 'popup_gauge_1', ( i, len( thumbnail_hashes_i_need ) ) )
-
- request_args = { 'hash' : hash.encode( 'hex' ) }
-
- thumbnail = self.Request( HC.GET, 'thumbnail', request_args = request_args )
-
- thumbnails.append( ( hash, thumbnail ) )
-
- if i % 50 == 0:
-
- SaveThumbnails( thumbnails )
-
- thumbnails = []
-
-
- HydrusGlobals.client_controller.WaitUntilPubSubsEmpty()
-
-
- if len( thumbnails ) > 0: SaveThumbnails( thumbnails )
-
- job_key.DeleteVariable( 'popup_gauge_1' )
-
-
+ self.SyncThumbnails( job_key )
HydrusGlobals.client_controller.pub( 'splash_set_status_text', '' )
@@ -1824,6 +1753,86 @@ class ServiceRepository( ServiceRestricted ):
+ def SyncThumbnails( self, job_key ):
+
+ if self._service_type == HC.FILE_REPOSITORY and self.CanDownload():
+
+ options = HydrusGlobals.client_controller.GetOptions()
+
+ HydrusGlobals.client_controller.pub( 'splash_set_status_text', 'reviewing service thumbnails' )
+
+ job_key.SetVariable( 'popup_text_1', 'reviewing service thumbnails' )
+
+ remote_thumbnail_hashes_i_should_have = HydrusGlobals.client_controller.Read( 'remote_thumbnail_hashes_i_should_have', self._service_key )
+
+ thumbnail_hashes_i_need = set()
+
+ for hash in remote_thumbnail_hashes_i_should_have:
+
+ path = ClientFiles.GetExpectedThumbnailPath( hash )
+
+ if not os.path.exists( path ):
+
+ thumbnail_hashes_i_need.add( hash )
+
+
+
+ if len( thumbnail_hashes_i_need ) > 0:
+
+ def SaveThumbnails( batch_of_thumbnails ):
+
+ job_key.SetVariable( 'popup_text_1', 'saving thumbnails to database' )
+
+ HydrusGlobals.client_controller.WriteSynchronous( 'thumbnails', batch_of_thumbnails )
+
+ HydrusGlobals.client_controller.pub( 'add_thumbnail_count', self._service_key, len( batch_of_thumbnails ) )
+
+
+ thumbnails = []
+
+ for ( i, hash ) in enumerate( thumbnail_hashes_i_need ):
+
+ if options[ 'pause_repo_sync' ]:
+
+ break
+
+
+ ( i_paused, should_quit ) = job_key.WaitIfNeeded()
+
+ if should_quit:
+
+ break
+
+
+ job_key.SetVariable( 'popup_text_1', 'downloading thumbnail ' + HydrusData.ConvertValueRangeToPrettyString( i, len( thumbnail_hashes_i_need ) ) )
+ job_key.SetVariable( 'popup_gauge_1', ( i, len( thumbnail_hashes_i_need ) ) )
+
+ request_args = { 'hash' : hash.encode( 'hex' ) }
+
+ thumbnail = self.Request( HC.GET, 'thumbnail', request_args = request_args )
+
+ thumbnails.append( ( hash, thumbnail ) )
+
+ if i % 50 == 0:
+
+ SaveThumbnails( thumbnails )
+
+ thumbnails = []
+
+
+ HydrusGlobals.client_controller.WaitUntilPubSubsEmpty()
+
+
+ if len( thumbnails ) > 0:
+
+ SaveThumbnails( thumbnails )
+
+
+ job_key.DeleteVariable( 'popup_gauge_1' )
+
+
+
+
class ServiceIPFS( ServiceRemote ):
def GetDaemonVersion( self ):
diff --git a/include/ClientDownloading.py b/include/ClientDownloading.py
index c8c2a00f..7c64523a 100644
--- a/include/ClientDownloading.py
+++ b/include/ClientDownloading.py
@@ -204,7 +204,7 @@ def GetSoup( html ):
def GetYoutubeFormats( youtube_url ):
- try: p = pafy.Pafy( youtube_url )
+ try: p = pafy.new( youtube_url )
except Exception as e:
raise Exception( 'Could not fetch video info from youtube!' + os.linesep + HydrusData.ToUnicode( e ) )
diff --git a/include/ClientGUI.py b/include/ClientGUI.py
index a446b8a5..0417ab85 100755
--- a/include/ClientGUI.py
+++ b/include/ClientGUI.py
@@ -1142,7 +1142,7 @@ class FrameGUI( ClientGUICommon.FrameThatResizes ):
paths = [ HydrusData.ToUnicode( path ) for path in dlg.GetPaths() ]
- services = self._controller.GetServicesManager().GetServices( ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ) )
+ services = self._controller.GetServicesManager().GetServices( HC.TAG_SERVICES )
service_keys = [ service.GetServiceKey() for service in services ]
@@ -2872,7 +2872,7 @@ class FrameReviewServices( ClientGUICommon.Frame ):
self._thumbnails_text = wx.StaticText( self._info_panel, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE )
- elif service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ):
+ elif service_type in HC.TAG_SERVICES:
self._tags_text = wx.StaticText( self._info_panel, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE )
@@ -2948,7 +2948,7 @@ class FrameReviewServices( ClientGUICommon.Frame ):
self._booru_delete.Bind( wx.EVT_BUTTON, self.EventBooruDelete )
- if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ):
+ if service_type in HC.TAG_SERVICES:
self._service_wide_update = wx.Button( self, label = 'perform a service-wide operation' )
self._service_wide_update.Bind( wx.EVT_BUTTON, self.EventServiceWideUpdate )
@@ -3004,7 +3004,7 @@ class FrameReviewServices( ClientGUICommon.Frame ):
self._info_panel.AddF( self._thumbnails_text, CC.FLAGS_EXPAND_PERPENDICULAR )
- elif service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ):
+ elif service_type in HC.TAG_SERVICES:
self._info_panel.AddF( self._tags_text, CC.FLAGS_EXPAND_PERPENDICULAR )
@@ -3074,7 +3074,7 @@ class FrameReviewServices( ClientGUICommon.Frame ):
repo_buttons_hbox.AddF( self._delete_local_deleted, CC.FLAGS_MIXED )
- if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ):
+ if service_type in HC.TAG_SERVICES:
repo_buttons_hbox.AddF( self._service_wide_update, CC.FLAGS_MIXED )
@@ -3281,7 +3281,7 @@ class FrameReviewServices( ClientGUICommon.Frame ):
self._DisplayNumThumbs()
- elif service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ):
+ elif service_type in HC.TAG_SERVICES:
num_files = service_info[ HC.SERVICE_INFO_NUM_FILES ]
num_namespaces = service_info[ HC.SERVICE_INFO_NUM_NAMESPACES ]
@@ -3483,7 +3483,7 @@ class FrameReviewServices( ClientGUICommon.Frame ):
def EventImmediateSync( self, event ):
def do_it():
-
+
job_key = ClientThreading.JobKey( pausable = True, cancellable = True )
job_key.SetVariable( 'popup_title', self._service.GetName() + ': immediate sync' )
@@ -3534,9 +3534,12 @@ class FrameReviewServices( ClientGUICommon.Frame ):
c_u_p_total_weight_processed += weight
- job_key.SetVariable( 'popup_text_1', 'done! ' + HydrusData.ConvertIntToPrettyString( c_u_p_num_rows ) + ' rows added.' )
job_key.DeleteVariable( 'popup_gauge_1' )
+ self._service.SyncThumbnails( job_key )
+
+ job_key.SetVariable( 'popup_text_1', 'done! ' + HydrusData.ConvertIntToPrettyString( c_u_p_num_rows ) + ' rows added.' )
+
job_key.Finish()
diff --git a/include/ClientGUICanvas.py b/include/ClientGUICanvas.py
index a78b0cb2..4c832449 100755
--- a/include/ClientGUICanvas.py
+++ b/include/ClientGUICanvas.py
@@ -42,13 +42,12 @@ ANIMATED_SCANBAR_CARET_WIDTH = 10
OPEN_EXTERNALLY_BUTTON_SIZE = ( 200, 45 )
-def CalculateCanvasZoom( media, ( canvas_width, canvas_height ) ):
+def CalculateCanvasMediaSize( media, ( canvas_width, canvas_height ) ):
- ( media_width, media_height ) = media.GetResolution()
-
- if media_width == 0 or media_height == 0: return 1.0
-
- if ShouldHaveAnimationBar( media ): canvas_height -= ANIMATED_SCANBAR_HEIGHT
+ if ShouldHaveAnimationBar( media ):
+
+ canvas_height -= ANIMATED_SCANBAR_HEIGHT
+
if media.GetMime() == HC.APPLICATION_FLASH:
@@ -56,6 +55,19 @@ def CalculateCanvasZoom( media, ( canvas_width, canvas_height ) ):
canvas_width -= 10
+ return ( canvas_width, canvas_height )
+
+def CalculateCanvasFitZoom( media, ( canvas_width, canvas_height ) ):
+
+ ( media_width, media_height ) = media.GetResolution()
+
+ if media_width == 0 or media_height == 0:
+
+ return 1.0
+
+
+ ( canvas_width, canvas_height ) = CalculateCanvasMediaSize( media, ( canvas_width, canvas_height ) )
+
width_zoom = canvas_width / float( media_width )
height_zoom = canvas_height / float( media_height )
@@ -883,20 +895,29 @@ class Canvas( object ):
def _RecalcZoom( self ):
- if self._current_display_media is None: self._current_zoom = 1.0
+ if self._current_display_media is None:
+
+ self._current_zoom = 1.0
+
else:
( my_width, my_height ) = self.GetClientSize()
( media_width, media_height ) = self._current_display_media.GetResolution()
- self._canvas_zoom = CalculateCanvasZoom( self._current_display_media, ( my_width, my_height ) )
+ ( canvas_media_width, canvas_media_height ) = CalculateCanvasMediaSize( self._current_display_media, ( my_width, my_height ) )
- media_needs_to_be_scaled_down = media_width > my_width or media_height > my_height
- media_needs_to_be_scaled_up = media_width < my_width and media_height < my_height and HC.options[ 'fit_to_canvas' ]
+ media_needs_to_be_scaled_down = media_width > canvas_media_width or media_height > canvas_media_height
+ media_needs_to_be_scaled_up = media_width < canvas_media_width and media_height < canvas_media_height and HC.options[ 'fit_to_canvas' ]
- if media_needs_to_be_scaled_down or media_needs_to_be_scaled_up: self._current_zoom = self._canvas_zoom
- else: self._current_zoom = 1.0
+ if media_needs_to_be_scaled_down or media_needs_to_be_scaled_up:
+
+ self._current_zoom = CalculateCanvasFitZoom( self._current_display_media, ( my_width, my_height ) )
+
+ else:
+
+ self._current_zoom = 1.0
+
HydrusGlobals.client_controller.pub( 'canvas_new_zoom', self._canvas_key, self._current_zoom )
@@ -1795,12 +1816,23 @@ class CanvasFullscreenMediaList( ClientMedia.ListeningMediaList, CanvasWithDetai
( media_width, media_height ) = media.GetResolution()
- if media_width > my_width or media_height > my_height: zoom = CalculateCanvasZoom( media, ( my_width, my_height ) )
- else: zoom = 1.0
+ ( canvas_media_width, canvas_media_height ) = CalculateCanvasMediaSize( media, ( my_width, my_height ) )
+
+ if media_width > canvas_media_width or media_height > canvas_media_height:
+
+ zoom = CalculateCanvasFitZoom( media, ( my_width, my_height ) )
+
+ else:
+
+ zoom = 1.0
+
resolution_to_request = ( int( round( zoom * media_width ) ), int( round( zoom * media_height ) ) )
- if not self._image_cache.HasImage( hash, resolution_to_request ): wx.CallLater( delay, self._image_cache.GetImage, media, resolution_to_request )
+ if not self._image_cache.HasImage( hash, resolution_to_request ):
+
+ wx.CallLater( delay, self._image_cache.GetImage, media, resolution_to_request )
+
@@ -2813,7 +2845,7 @@ class CanvasFullscreenMediaListCustomFilter( CanvasFullscreenMediaListNavigable
hashes = ( self._current_display_media.GetHash(), )
- if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ):
+ if service_type in HC.TAG_SERVICES:
tag = data
diff --git a/include/ClientGUICommon.py b/include/ClientGUICommon.py
index 4912a1f5..3c17cfbb 100755
--- a/include/ClientGUICommon.py
+++ b/include/ClientGUICommon.py
@@ -1029,7 +1029,7 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
current_tags_flat_iterable = itertools.chain.from_iterable( lists_of_current_tags )
- current_tags_flat = ( tag for tag in current_tags_flat_iterable if ClientSearch.SearchEntryMatchesTag( search_text, tag ) )
+ current_tags_flat = ClientSearch.FilterTagsBySearchEntry( search_text, current_tags_flat_iterable )
current_tags_to_count.update( current_tags_flat )
@@ -1042,7 +1042,7 @@ class AutoCompleteDropdownTagsRead( AutoCompleteDropdownTags ):
pending_tags_flat_iterable = itertools.chain.from_iterable( lists_of_pending_tags )
- pending_tags_flat = ( tag for tag in pending_tags_flat_iterable if ClientSearch.SearchEntryMatchesTag( search_text, tag ) )
+ pending_tags_flat = ClientSearch.FilterTagsBySearchEntry( search_text, pending_tags_flat_iterable )
pending_tags_to_count.update( pending_tags_flat )
@@ -3927,6 +3927,9 @@ class ListBoxTagsSelection( ListBoxTags ):
def IncrementTagsByMedia( self, media ):
+ media = set( media )
+ media = media.difference( self._last_media )
+
( current_tags_to_count, deleted_tags_to_count, pending_tags_to_count, petitioned_tags_to_count ) = ClientData.GetMediasTagCount( media, tag_service_key = self._tag_service_key, collapse_siblings = self._collapse_siblings )
self._current_tags_to_count.update( current_tags_to_count )
diff --git a/include/ClientGUIDialogs.py b/include/ClientGUIDialogs.py
index be93d6d2..8c350e17 100755
--- a/include/ClientGUIDialogs.py
+++ b/include/ClientGUIDialogs.py
@@ -348,7 +348,7 @@ class DialogAdvancedContentUpdate( Dialog ):
#
- services = [ service for service in HydrusGlobals.client_controller.GetServicesManager().GetServices( ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ) ) if service.GetServiceKey() != self._service_key ]
+ services = [ service for service in HydrusGlobals.client_controller.GetServicesManager().GetServices( HC.TAG_SERVICES ) if service.GetServiceKey() != self._service_key ]
for service in services:
@@ -849,7 +849,7 @@ class DialogInputCustomFilterAction( Dialog ):
service_type = service.GetServiceType()
- if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ): choice = self._tag_service_keys
+ if service_type in HC.TAG_SERVICES: choice = self._tag_service_keys
elif service_type == HC.LOCAL_RATING_LIKE: choice = self._ratings_like_service_keys
elif service_type == HC.LOCAL_RATING_NUMERICAL: choice = self._ratings_numerical_service_keys
@@ -869,7 +869,7 @@ class DialogInputCustomFilterAction( Dialog ):
service_name = self._service.GetName()
service_type = self._service.GetServiceType()
- if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ):
+ if service_type in HC.TAG_SERVICES:
self._tag_service_keys.SetStringSelection( service_name )
@@ -4450,83 +4450,70 @@ class DialogShortcuts( Dialog ):
def __init__( self, parent, shortcuts ):
- def InitialiseControls():
-
- self._shortcuts = ClientGUICommon.SaneListCtrl( self, 120, [ ( 'modifier', 150 ), ( 'key', 150 ), ( 'service', -1 ), ( 'action', 250 ) ], delete_key_callback = self.RemoveShortcuts )
-
- self._add = wx.Button( self, label = 'add' )
- self._add.Bind( wx.EVT_BUTTON, self.EventAdd )
- self._add.SetForegroundColour( ( 0, 128, 0 ) )
-
- self._edit = wx.Button( self, label = 'edit' )
- self._edit.Bind( wx.EVT_BUTTON, self.EventEdit )
-
- self._remove = wx.Button( self, label = 'remove' )
- self._remove.Bind( wx.EVT_BUTTON, self.EventRemove )
- self._remove.SetForegroundColour( ( 128, 0, 0 ) )
-
-
- def PopulateControls():
-
- for ( ( modifier, key ), action ) in self._original_shortcuts.IterateKeyboardShortcuts():
-
- ( pretty_modifier, pretty_key ) = ClientData.ConvertShortcutToPrettyShortcut( modifier, key )
-
- ( service_key, data ) = action
-
- if service_key is None:
-
- pretty_service_key = ''
-
- else:
-
- if isinstance( service_key, ClientData.ClientServiceIdentifier ): service_key = service_key.GetServiceKey()
-
- try:
-
- service = HydrusGlobals.client_controller.GetServicesManager().GetService( service_key )
-
- pretty_service_key = service.GetName()
-
- except HydrusExceptions.DataMissing:
-
- pretty_service_key = 'service not found'
-
-
-
- pretty_data = data
-
- self._shortcuts.Append( ( pretty_modifier, pretty_key, pretty_service_key, pretty_data ), ( modifier, key, service_key, data ) )
-
-
- self._SortListCtrl()
-
-
- def ArrangeControls():
-
- action_buttons = wx.BoxSizer( wx.HORIZONTAL )
-
- action_buttons.AddF( self._add, CC.FLAGS_MIXED )
- action_buttons.AddF( self._edit, CC.FLAGS_MIXED )
- action_buttons.AddF( self._remove, CC.FLAGS_MIXED )
-
- vbox = wx.BoxSizer( wx.VERTICAL )
-
- vbox.AddF( self._shortcuts, CC.FLAGS_EXPAND_BOTH_WAYS )
- vbox.AddF( action_buttons, CC.FLAGS_BUTTON_SIZER )
-
- self.SetSizer( vbox )
-
-
wx.Panel.__init__( self, parent )
self._original_shortcuts = shortcuts
- InitialiseControls()
+ self._shortcuts = ClientGUICommon.SaneListCtrl( self, 120, [ ( 'modifier', 150 ), ( 'key', 150 ), ( 'service', -1 ), ( 'action', 250 ) ], delete_key_callback = self.RemoveShortcuts )
- PopulateControls()
+ self._add = wx.Button( self, label = 'add' )
+ self._add.Bind( wx.EVT_BUTTON, self.EventAdd )
+ self._add.SetForegroundColour( ( 0, 128, 0 ) )
- ArrangeControls()
+ self._edit = wx.Button( self, label = 'edit' )
+ self._edit.Bind( wx.EVT_BUTTON, self.EventEdit )
+
+ self._remove = wx.Button( self, label = 'remove' )
+ self._remove.Bind( wx.EVT_BUTTON, self.EventRemove )
+ self._remove.SetForegroundColour( ( 128, 0, 0 ) )
+
+ #
+
+ for ( ( modifier, key ), action ) in self._original_shortcuts.IterateKeyboardShortcuts():
+
+ ( pretty_modifier, pretty_key ) = ClientData.ConvertShortcutToPrettyShortcut( modifier, key )
+
+ ( service_key, data ) = action
+
+ if service_key is None:
+
+ pretty_service_key = ''
+
+ else:
+
+ try:
+
+ service = HydrusGlobals.client_controller.GetServicesManager().GetService( service_key )
+
+ pretty_service_key = service.GetName()
+
+ except HydrusExceptions.DataMissing:
+
+ pretty_service_key = 'service not found'
+
+
+
+ pretty_data = data
+
+ self._shortcuts.Append( ( pretty_modifier, pretty_key, pretty_service_key, pretty_data ), ( modifier, key, service_key, data ) )
+
+
+ self._SortListCtrl()
+
+ #
+
+ action_buttons = wx.BoxSizer( wx.HORIZONTAL )
+
+ action_buttons.AddF( self._add, CC.FLAGS_MIXED )
+ action_buttons.AddF( self._edit, CC.FLAGS_MIXED )
+ action_buttons.AddF( self._remove, CC.FLAGS_MIXED )
+
+ vbox = wx.BoxSizer( wx.VERTICAL )
+
+ vbox.AddF( self._shortcuts, CC.FLAGS_EXPAND_BOTH_WAYS )
+ vbox.AddF( action_buttons, CC.FLAGS_BUTTON_SIZER )
+
+ self.SetSizer( vbox )
def _SortListCtrl( self ): self._shortcuts.SortListItems( 3 )
diff --git a/include/ClientGUIDialogsManage.py b/include/ClientGUIDialogsManage.py
index 49cf2fec..1aa5a79c 100644
--- a/include/ClientGUIDialogsManage.py
+++ b/include/ClientGUIDialogsManage.py
@@ -2996,7 +2996,7 @@ class DialogManageImportFoldersEdit( ClientGUIDialogs.Dialog ):
services_manager = HydrusGlobals.client_controller.GetServicesManager()
- tag_services = services_manager.GetServices( ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ) )
+ tag_services = services_manager.GetServices( HC.TAG_SERVICES )
names_to_service_keys = { service.GetName() : service.GetServiceKey() for service in tag_services }
@@ -4963,7 +4963,7 @@ class DialogManageOptions( ClientGUIDialogs.Dialog ):
elif HC.options[ 'default_tag_sort' ] == CC.SORT_BY_INCIDENCE_DESC: self._default_tag_sort.Select( 2 )
elif HC.options[ 'default_tag_sort' ] == CC.SORT_BY_INCIDENCE_ASC: self._default_tag_sort.Select( 3 )
- services = HydrusGlobals.client_controller.GetServicesManager().GetServices( ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ) )
+ services = HydrusGlobals.client_controller.GetServicesManager().GetServices( HC.TAG_SERVICES )
for service in services: self._default_tag_repository.Append( service.GetName(), service.GetServiceKey() )
@@ -6976,7 +6976,7 @@ class DialogManageServices( ClientGUIDialogs.Dialog ):
( service_key, service_type, name, info ) = self._original_info
- message = 'This will completely reset ' + name + ', deleting all downloaded and processed information from the database. It may take several minutes to finish the operation, during which time the gui will likely freeze.' + os.linesep * 2 + 'Once the service is reset, the client will slowly redownload and reprocess everything in the background.' + os.linesep * 2 + 'If you do not understand what this button does, you definitely want to click no!'
+ message = 'This will completely reset ' + name + ', deleting all downloaded and processed information from the database. It may take several minutes to finish the operation, during which time the gui will likely freeze.' + os.linesep * 2 + 'Once the service is reset, the client will eventually redownload and reprocess everything all over again.' + os.linesep * 2 + 'If you do not understand what this button does, you definitely want to click no!'
with ClientGUIDialogs.DialogYesNo( self, message ) as dlg:
@@ -6993,7 +6993,7 @@ class DialogManageServices( ClientGUIDialogs.Dialog ):
( service_key, service_type, name, info ) = self._original_info
- message = 'This will remove all the processed information for ' + name + ' from the database. It may take several minutes to finish the operation, during which time the gui will likely freeze.' + os.linesep * 2 + 'Once the service is reset, the client will slowly reprocess everything in the background.' + os.linesep * 2 + 'If you do not understand what this button does, you probably want to click no!'
+ message = 'This will remove all the processed information for ' + name + ' from the database. It may take several minutes to finish the operation, during which time the gui will likely freeze.' + os.linesep * 2 + 'Once the service is reset, the client will eventually reprocess everything all over again.' + os.linesep * 2 + 'If you do not understand what this button does, you probably want to click no!'
with ClientGUIDialogs.DialogYesNo( self, message ) as dlg:
@@ -9009,96 +9009,6 @@ class DialogManageTags( ClientGUIDialogs.Dialog ):
def __init__( self, parent, file_service_key, media, canvas_key = None ):
- def InitialiseControls():
-
- if canvas_key is not None:
-
- self._next = wx.Button( self, label = '->' )
- self._next.Bind( wx.EVT_BUTTON, self.EventNext )
-
- self._delete = wx.Button( self, label = 'delete' )
- self._delete.Bind( wx.EVT_BUTTON, self.EventDelete )
-
- self._previous = wx.Button( self, label = '<-' )
- self._previous.Bind( wx.EVT_BUTTON, self.EventPrevious )
-
-
- self._tag_repositories = ClientGUICommon.ListBook( self )
- self._tag_repositories.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGED, self.EventServiceChanged )
-
- self._apply = wx.Button( self, id = wx.ID_OK, label = 'apply' )
- self._apply.Bind( wx.EVT_BUTTON, self.EventOK )
- self._apply.SetForegroundColour( ( 0, 128, 0 ) )
-
- self._cancel = wx.Button( self, id = wx.ID_CANCEL, label = 'cancel' )
- self._cancel.SetForegroundColour( ( 128, 0, 0 ) )
-
-
- def PopulateControls():
-
- services = HydrusGlobals.client_controller.GetServicesManager().GetServices( ( HC.TAG_REPOSITORY, HC.LOCAL_TAG ) )
-
- name_to_select = None
-
- for service in services:
-
- service_key = service.GetServiceKey()
- service_type = service.GetServiceType()
- name = service.GetName()
-
- page = self._Panel( self._tag_repositories, self._file_service_key, service.GetServiceKey(), media )
-
- self._tag_repositories.AddPage( name, page )
-
- if service_key == HC.options[ 'default_tag_repository' ]: name_to_select = name
-
-
- if name_to_select is not None: self._tag_repositories.Select( name_to_select )
-
-
- def ArrangeControls():
-
- buttonbox = wx.BoxSizer( wx.HORIZONTAL )
-
- buttonbox.AddF( self._apply, CC.FLAGS_MIXED )
- buttonbox.AddF( self._cancel, CC.FLAGS_MIXED )
-
- vbox = wx.BoxSizer( wx.VERTICAL )
-
- if canvas_key is not None:
-
- hbox = wx.BoxSizer( wx.HORIZONTAL )
-
- hbox.AddF( self._previous, CC.FLAGS_MIXED )
- hbox.AddF( ( 20, 20 ), CC.FLAGS_EXPAND_BOTH_WAYS )
- hbox.AddF( self._delete, CC.FLAGS_MIXED )
- hbox.AddF( ( 20, 20 ), CC.FLAGS_EXPAND_BOTH_WAYS )
- hbox.AddF( self._next, CC.FLAGS_MIXED )
-
- vbox.AddF( hbox, CC.FLAGS_EXPAND_PERPENDICULAR )
-
-
- vbox.AddF( self._tag_repositories, CC.FLAGS_EXPAND_BOTH_WAYS )
- vbox.AddF( buttonbox, CC.FLAGS_BUTTON_SIZER )
-
- self.SetSizer( vbox )
-
- ( remember, size ) = HC.options[ 'tag_dialog_size' ]
-
- if remember and size is not None:
-
- self.SetInitialSize( size )
-
- else:
-
- ( x, y ) = self.GetEffectiveMinSize()
-
- ( parent_window_width, parent_window_height ) = parent.GetTopLevelParent().GetSize()
-
- self.SetInitialSize( ( x + 200, max( 500, parent_window_height - 200 ) ) )
-
-
-
self._file_service_key = file_service_key
self._hashes = set()
@@ -9124,17 +9034,99 @@ class DialogManageTags( ClientGUIDialogs.Dialog ):
ClientGUIDialogs.Dialog.__init__( self, parent, 'manage tags for ' + HydrusData.ConvertIntToPrettyString( len( self._hashes ) ) + ' files', position = my_position )
- InitialiseControls()
+ if canvas_key is not None:
+
+ self._next = wx.Button( self, label = '->' )
+ self._next.Bind( wx.EVT_BUTTON, self.EventNext )
+
+ self._delete = wx.Button( self, label = 'delete' )
+ self._delete.Bind( wx.EVT_BUTTON, self.EventDelete )
+
+ self._previous = wx.Button( self, label = '<-' )
+ self._previous.Bind( wx.EVT_BUTTON, self.EventPrevious )
+
- PopulateControls()
+ self._tag_repositories = ClientGUICommon.ListBook( self )
+ self._tag_repositories.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGED, self.EventServiceChanged )
- ArrangeControls()
+ self._apply = wx.Button( self, id = wx.ID_OK, label = 'apply' )
+ self._apply.Bind( wx.EVT_BUTTON, self.EventOK )
+ self._apply.SetForegroundColour( ( 0, 128, 0 ) )
+
+ self._cancel = wx.Button( self, id = wx.ID_CANCEL, label = 'cancel' )
+ self._cancel.SetForegroundColour( ( 128, 0, 0 ) )
+
+ #
+
+ services = HydrusGlobals.client_controller.GetServicesManager().GetServices( HC.TAG_SERVICES )
+
+ name_to_select = None
+
+ for service in services:
+
+ service_key = service.GetServiceKey()
+ service_type = service.GetServiceType()
+ name = service.GetName()
+
+ page = self._Panel( self._tag_repositories, self._file_service_key, service.GetServiceKey(), media )
+
+ self._tag_repositories.AddPage( name, page )
+
+ if service_key == HC.options[ 'default_tag_repository' ]: name_to_select = name
+
+
+ if name_to_select is not None: self._tag_repositories.Select( name_to_select )
+
+ #
+
+ buttonbox = wx.BoxSizer( wx.HORIZONTAL )
+
+ buttonbox.AddF( self._apply, CC.FLAGS_MIXED )
+ buttonbox.AddF( self._cancel, CC.FLAGS_MIXED )
+
+ vbox = wx.BoxSizer( wx.VERTICAL )
+
+ if canvas_key is not None:
+
+ hbox = wx.BoxSizer( wx.HORIZONTAL )
+
+ hbox.AddF( self._previous, CC.FLAGS_MIXED )
+ hbox.AddF( ( 20, 20 ), CC.FLAGS_EXPAND_BOTH_WAYS )
+ hbox.AddF( self._delete, CC.FLAGS_MIXED )
+ hbox.AddF( ( 20, 20 ), CC.FLAGS_EXPAND_BOTH_WAYS )
+ hbox.AddF( self._next, CC.FLAGS_MIXED )
+
+ vbox.AddF( hbox, CC.FLAGS_EXPAND_PERPENDICULAR )
+
+
+ vbox.AddF( self._tag_repositories, CC.FLAGS_EXPAND_BOTH_WAYS )
+ vbox.AddF( buttonbox, CC.FLAGS_BUTTON_SIZER )
+
+ self.SetSizer( vbox )
+
+ ( remember, size ) = HC.options[ 'tag_dialog_size' ]
+
+ if remember and size is not None:
+
+ self.SetInitialSize( size )
+
+ else:
+
+ ( x, y ) = self.GetEffectiveMinSize()
+
+ ( parent_window_width, parent_window_height ) = parent.GetTopLevelParent().GetSize()
+
+ self.SetInitialSize( ( x + 200, max( 500, parent_window_height - 200 ) ) )
+
self.Bind( wx.EVT_MENU, self.EventMenu )
self.RefreshAcceleratorTable()
- if self._canvas_key is not None: HydrusGlobals.client_controller.sub( self, 'CanvasHasNewMedia', 'canvas_new_display_media' )
+ if self._canvas_key is not None:
+
+ HydrusGlobals.client_controller.sub( self, 'CanvasHasNewMedia', 'canvas_new_display_media' )
+
def _ClearPanels( self ):
@@ -9436,9 +9428,16 @@ class DialogManageTags( ClientGUIDialogs.Dialog ):
choices.append( ( 'add ' + tag + ' to ' + HydrusData.ConvertIntToPrettyString( num_files - num_current ) + ' files', ( HC.CONTENT_UPDATE_ADD, tag ) ) )
- if sibling_tag is not None and num_sibling_current < num_files:
+ if sibling_tag is not None:
- choices.append( ( 'add ' + sibling_tag + ' (preferred sibling) to ' + HydrusData.ConvertIntToPrettyString( num_files - num_sibling_current ) + ' files', ( HC.CONTENT_UPDATE_ADD, sibling_tag ) ) )
+ if num_sibling_current < num_files:
+
+ choices.append( ( 'add ' + sibling_tag + ' (preferred sibling) to ' + HydrusData.ConvertIntToPrettyString( num_files - num_sibling_current ) + ' files', ( HC.CONTENT_UPDATE_ADD, sibling_tag ) ) )
+
+ else:
+
+ choices.append( ( 'ignore, as ' + sibling_tag + ' (preferred sibling) already exists for all', None ) )
+
potential_num_sibling_count += 1
@@ -9469,8 +9468,12 @@ class DialogManageTags( ClientGUIDialogs.Dialog ):
choices.append( ( 'pend ' + sibling_tag + ' (preferred sibling) to ' + HydrusData.ConvertIntToPrettyString( num_files - ( num_sibling_current + num_sibling_pending ) ) + ' files', ( HC.CONTENT_UPDATE_PEND, sibling_tag ) ) )
- potential_num_sibling_count += 1
+ else:
+ choices.append( ( 'ignore, as ' + sibling_tag + ' (preferred sibling) already exists for all', None ) )
+
+
+ potential_num_sibling_count += 1
@@ -9592,6 +9595,11 @@ class DialogManageTags( ClientGUIDialogs.Dialog ):
+ if choice is None:
+
+ continue
+
+
( choice_action, choice_tag ) = choice
if choice_action == HC.CONTENT_UPDATE_ADD: media_to_affect = ( m for m in self._media if choice_tag not in m.GetTagsManager().GetCurrent( self._tag_service_key ) )
diff --git a/include/ClientGUIOptionsPanels.py b/include/ClientGUIOptionsPanels.py
index 0f51d0ff..a3dcad5a 100644
--- a/include/ClientGUIOptionsPanels.py
+++ b/include/ClientGUIOptionsPanels.py
@@ -459,7 +459,7 @@ class OptionsPanelTags( OptionsPanel ):
self._vbox.Clear( True )
- services = HydrusGlobals.client_controller.GetServicesManager().GetServices( ( HC.TAG_REPOSITORY, HC.LOCAL_TAG ), randomised = False )
+ services = HydrusGlobals.client_controller.GetServicesManager().GetServices( HC.TAG_SERVICES, randomised = False )
button_id = 1
diff --git a/include/ClientImporting.py b/include/ClientImporting.py
index 47414779..84cb7cbb 100644
--- a/include/ClientImporting.py
+++ b/include/ClientImporting.py
@@ -1111,10 +1111,10 @@ class ImportFolder( HydrusSerialisable.SerialisableBaseNamed ):
with open( txt_path, 'rb' ) as f:
- raw_data = f.read()
+ txt_tags_string = f.read()
- tags = raw_data.split( os.linesep )
+            tags = [ HydrusData.ToUnicode( tag ) for tag in txt_tags_string.split( os.linesep ) ]
service_keys_to_tags = { service_key : tags for service_key in self._txt_parse_tag_service_keys }
diff --git a/include/ClientMedia.py b/include/ClientMedia.py
index 2868d380..5d60a4b0 100644
--- a/include/ClientMedia.py
+++ b/include/ClientMedia.py
@@ -1244,7 +1244,7 @@ class MediaResult( object ):
service_type = service.GetServiceType()
- if service_type in ( HC.LOCAL_TAG, HC.TAG_REPOSITORY ): tags_manager.ProcessContentUpdate( service_key, content_update )
+ if service_type in HC.TAG_SERVICES: tags_manager.ProcessContentUpdate( service_key, content_update )
elif service_type in ( HC.FILE_REPOSITORY, HC.LOCAL_FILE, HC.IPFS ):
if service_type == HC.LOCAL_FILE:
diff --git a/include/ClientNetworking.py b/include/ClientNetworking.py
index 65b3e4b1..974c7c74 100644
--- a/include/ClientNetworking.py
+++ b/include/ClientNetworking.py
@@ -568,7 +568,7 @@ class HTTPConnection( object ):
text += os.linesep * 2
text += HydrusData.ToUnicode( e )
- raise Exception( text )
+ raise HydrusExceptions.NetworkException( text )
diff --git a/include/ClientSearch.py b/include/ClientSearch.py
index 2b742b38..46006543 100644
--- a/include/ClientSearch.py
+++ b/include/ClientSearch.py
@@ -8,14 +8,25 @@ import HydrusTags
import re
import wx
-def SearchEntryMatchesPredicate( search_entry, predicate ):
+def FilterPredicatesBySearchEntry( search_entry, predicates ):
- ( predicate_type, value, inclusive ) = predicate.GetInfo()
+ tags_to_predicates = {}
- if predicate_type == HC.PREDICATE_TYPE_TAG: return SearchEntryMatchesTag( search_entry, value, search_siblings = True )
- else: return False
-
-def SearchEntryMatchesTag( search_entry, tag, search_siblings = True ):
+ for predicate in predicates:
+
+ ( predicate_type, value, inclusive ) = predicate.GetInfo()
+
+ if predicate_type == HC.PREDICATE_TYPE_TAG:
+
+ tags_to_predicates[ value ] = predicate
+
+
+
+ matching_tags = FilterTagsBySearchEntry( search_entry, tags_to_predicates.keys() )
+
+ return [ tags_to_predicates[ tag ] for tag in matching_tags ]
+
+def FilterTagsBySearchEntry( search_entry, tags, search_siblings = True ):
def compile_re( s ):
@@ -36,45 +47,67 @@ def SearchEntryMatchesTag( search_entry, tag, search_siblings = True ):
namespace_re_predicate = compile_re( namespace_entry )
- else: search_namespace = False
+ else:
+
+ search_namespace = False
+
if '*' not in search_entry: search_entry += '*'
re_predicate = compile_re( search_entry )
- if search_siblings:
-
- sibling_manager = HydrusGlobals.client_controller.GetManager( 'tag_siblings' )
-
- tags = sibling_manager.GetAllSiblings( tag )
-
- else: tags = [ tag ]
+ sibling_manager = HydrusGlobals.client_controller.GetManager( 'tag_siblings' )
+
+ result = []
for tag in tags:
- if ':' in tag:
+ if search_siblings:
- ( n, t ) = tag.split( ':', 1 )
-
- if search_namespace and re.search( namespace_re_predicate, n ) is None: continue
-
- comparee = t
+ possible_tags = sibling_manager.GetAllSiblings( tag )
else:
- if search_namespace: continue
-
- comparee = tag
+ possible_tags = [ tag ]
- if re.search( re_predicate, comparee ) is not None: return True
+ for possible_tag in possible_tags:
+
+ if ':' in possible_tag:
+
+ ( n, t ) = possible_tag.split( ':', 1 )
+
+ if search_namespace and re.search( namespace_re_predicate, n ) is None:
+
+ continue
+
+
+ comparee = t
+
+ else:
+
+ if search_namespace:
+
+ continue
+
+
+                comparee = possible_tag
+
+
+ if re.search( re_predicate, comparee ) is not None:
+
+ result.append( tag )
+
+ break
+
+
- return False
-
+ return result
+
def FilterPredicates( search_entry, predicates, service_key = None, expand_parents = False ):
- matches = [ predicate for predicate in predicates if SearchEntryMatchesPredicate( search_entry, predicate ) ]
+ matches = FilterPredicatesBySearchEntry( search_entry, predicates )
if service_key is not None and expand_parents:
diff --git a/include/HydrusConstants.py b/include/HydrusConstants.py
index fd6131e6..02c26264 100755
--- a/include/HydrusConstants.py
+++ b/include/HydrusConstants.py
@@ -54,7 +54,7 @@ options = {}
# Misc
NETWORK_VERSION = 17
-SOFTWARE_VERSION = 196
+SOFTWARE_VERSION = 197
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
diff --git a/include/HydrusController.py b/include/HydrusController.py
index 398ff3a3..b2589dff 100644
--- a/include/HydrusController.py
+++ b/include/HydrusController.py
@@ -183,8 +183,6 @@ class HydrusController( object ):
self._db = self._InitDB()
- threading.Thread( target = self._db.MainLoop, name = 'Database Main Loop' ).start()
-
def InitView( self ):
diff --git a/include/HydrusDB.py b/include/HydrusDB.py
index c86e03f7..4e5463ab 100644
--- a/include/HydrusDB.py
+++ b/include/HydrusDB.py
@@ -11,6 +11,7 @@ import Queue
import random
import sqlite3
import sys
+import threading
import traceback
import time
@@ -38,6 +39,8 @@ class HydrusDB( object ):
self._local_shutdown = False
self._loop_finished = False
+ self._ready_to_serve_requests = False
+ self._could_not_initialise = False
self._jobs = Queue.PriorityQueue()
self._pubsubs = []
@@ -89,6 +92,18 @@ class HydrusDB( object ):
self._CloseDBCursor()
+ threading.Thread( target = self.MainLoop, name = 'Database Main Loop' ).start()
+
+        while not self._ready_to_serve_requests:
+            
+            time.sleep( 0.1 )
+            
+        
+        if self._could_not_initialise:
+            
+            raise Exception( 'Could not initialise the db! Error written to the log!' )
+            
+        
def _CleanUpCaches( self ):
@@ -125,7 +140,7 @@ class HydrusDB( object ):
def _InitCaches( self ):
- raise NotImplementedError()
+ pass
def _InitDB( self ):
@@ -292,9 +307,22 @@ class HydrusDB( object ):
def MainLoop( self ):
- self._InitDBCursor() # have to reinitialise because the thread id has changed
+ try:
+
+ self._InitDBCursor() # have to reinitialise because the thread id has changed
+
+ self._InitCaches()
+
+ except Exception as e:
+
+ HydrusData.Print( traceback.format_exc() )
+
+ HydrusData.ShowExceptionDefault( e )
+
+ self._could_not_initialise = True
+
- self._InitCaches()
+ self._ready_to_serve_requests = True
error_count = 0
@@ -367,11 +395,31 @@ class HydrusDB( object ):
self._jobs.put( ( priority + 1, job ) ) # +1 so all writes of equal priority can clear out first
- if synchronous: return job.GetResult()
+ return job.GetResult()
+
+
+ def ReadyToServeRequests( self ):
+
+ return self._ready_to_serve_requests
def Shutdown( self ): self._local_shutdown = True
+ def SimpleRead( self, action, *args, **kwargs ):
+
+ return self.Read( action, HC.HIGH_PRIORITY, *args, **kwargs )
+
+
+ def SimpleWrite( self, action, *args, **kwargs ):
+
+ return self.Write( action, HC.HIGH_PRIORITY, False, *args, **kwargs )
+
+
+ def SimpleWriteSynchronous( self, action, *args, **kwargs ):
+
+ return self.Write( action, HC.LOW_PRIORITY, True, *args, **kwargs )
+
+
def Write( self, action, priority, synchronous, *args, **kwargs ):
if action in self.WRITE_SPECIAL_ACTIONS: job_type = 'write_special'
diff --git a/include/HydrusData.py b/include/HydrusData.py
index c02c36b3..82e741f5 100644
--- a/include/HydrusData.py
+++ b/include/HydrusData.py
@@ -1938,7 +1938,7 @@ class ServerToClientContentUpdatePackage( HydrusSerialisable.SerialisableBase ):
return num
- def IterateContentUpdateChunks( self, chunk_weight = 100):
+ def IterateContentUpdateChunks( self, chunk_weight = 5000 ):
data_types = [ HC.CONTENT_TYPE_FILES, HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_TYPE_TAG_SIBLINGS, HC.CONTENT_TYPE_TAG_PARENTS ]
actions = [ HC.CONTENT_UPDATE_ADD, HC.CONTENT_UPDATE_DELETE ]
diff --git a/include/TestDB.py b/include/TestDB.py
index 07c45ba9..456cadc0 100644
--- a/include/TestDB.py
+++ b/include/TestDB.py
@@ -52,15 +52,16 @@ class TestClientDB( unittest.TestCase ):
self._db = ClientDB.DB( HydrusGlobals.test_controller, db_path )
- threading.Thread( target = self._db.MainLoop, name = 'Database Main Loop' ).start()
-
@classmethod
def tearDownClass( self ):
self._db.Shutdown()
- while not self._db.LoopIsFinished(): time.sleep( 0.1 )
+ while not self._db.LoopIsFinished():
+
+ time.sleep( 0.1 )
+
del self._db
@@ -1210,15 +1211,18 @@ class TestServerDB( unittest.TestCase ):
self._db = ServerDB.DB( HydrusGlobals.test_controller, db_path )
- threading.Thread( target = self._db.MainLoop, name = 'Database Main Loop' ).start()
-
@classmethod
def tearDownClass( self ):
self._db.Shutdown()
- while not self._db.LoopIsFinished(): time.sleep( 0.1 )
+ while not self._db.LoopIsFinished():
+
+ time.sleep( 0.1 )
+
+
+ del self._db
def _test_account_creation( self ):
diff --git a/test.py b/test.py
index c9da1015..d7f422c2 100644
--- a/test.py
+++ b/test.py
@@ -60,7 +60,7 @@ class Controller( object ):
HC.SERVER_THUMBNAILS_DIR = os.path.join( HC.DB_DIR, 'server_thumbnails' )
HC.SERVER_UPDATES_DIR = os.path.join( HC.DB_DIR, 'server_updates' )
- os.makedirs( HC.CLIENT_FILES_DIR ) # for the client files manager which I manually create in a bit
+ os.makedirs( HC.CLIENT_FILES_DIR )
HydrusGlobals.controller = self
HydrusGlobals.client_controller = self